file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
main.rs | #![deny(bare_trait_objects)]
#![allow(dead_code)]
use lucet_module::{
FunctionSpec, Module, ModuleData, SerializedModule, TableElement, TrapManifest, TrapSite,
VersionInfo,
};
use byteorder::{LittleEndian, ReadBytesExt};
use colored::Colorize;
use object::{Object, ObjectSection, ObjectSymbol, SymbolKind, SymbolScope};
use std::env;
use std::fs::File;
use std::io::Cursor;
use std::io::Read;
use std::mem;
#[derive(Debug)]
struct ArtifactSummary<'a> {
buffer: &'a Vec<u8>,
obj: &'a object::File<'a>,
symbols: StandardSymbols<'a>,
data_segments: Option<DataSegments>,
serialized_module: Option<SerializedModule>,
exported_functions: Vec<&'a str>,
imported_symbols: Vec<&'a str>,
}
#[derive(Debug)]
struct | <'a> {
lucet_module: Option<object::read::Symbol<'a, 'a>>,
}
#[derive(Debug)]
struct DataSegments {
segments: Vec<DataSegment>,
}
#[derive(Debug)]
struct DataSegment {
offset: u32,
len: u32,
data: Vec<u8>,
}
impl<'a> ArtifactSummary<'a> {
fn new(buffer: &'a Vec<u8>, obj: &'a object::File<'_>) -> Self {
Self {
buffer,
obj,
symbols: StandardSymbols { lucet_module: None },
data_segments: None,
serialized_module: None,
exported_functions: Vec::new(),
imported_symbols: Vec::new(),
}
}
fn read_memory(&self, addr: u64, size: u64) -> Option<&'a [u8]> {
// `addr` is really more of an offset from the start of the segment.
for section in self.obj.sections() {
let bytes = section.data_range(addr, size).ok().flatten();
if bytes.is_some() {
return bytes;
}
}
None
}
fn gather(&mut self) {
for sym in self.obj.symbols() {
match sym.name() {
Ok(ref name) if name == &"lucet_module" => self.symbols.lucet_module = Some(sym),
Ok(ref name) if name == &"" => continue,
Err(_) => continue,
_ => {
if sym.kind() == SymbolKind::Text && sym.scope() == SymbolScope::Dynamic {
self.exported_functions.push(sym.name().unwrap());
} else if sym.scope() == SymbolScope::Unknown {
self.imported_symbols.push(sym.name().unwrap());
}
}
}
}
self.serialized_module = self.symbols.lucet_module.as_ref().map(|module_sym| {
let buffer = self
.read_memory(
module_sym.address(),
mem::size_of::<SerializedModule>() as u64,
)
.unwrap();
let mut rdr = Cursor::new(buffer);
let version = VersionInfo::read_from(&mut rdr).unwrap();
SerializedModule {
version,
module_data_ptr: rdr.read_u64::<LittleEndian>().unwrap(),
module_data_len: rdr.read_u64::<LittleEndian>().unwrap(),
tables_ptr: rdr.read_u64::<LittleEndian>().unwrap(),
tables_len: rdr.read_u64::<LittleEndian>().unwrap(),
function_manifest_ptr: rdr.read_u64::<LittleEndian>().unwrap(),
function_manifest_len: rdr.read_u64::<LittleEndian>().unwrap(),
}
});
}
fn get_symbol_name_for_addr(&self, addr: u64) -> Option<&str> {
self.obj.symbol_map().get(addr).map(|sym| sym.name())
}
}
fn main() {
let path = env::args().nth(1).unwrap();
let mut fd = File::open(path).expect("open");
let mut buffer = Vec::new();
fd.read_to_end(&mut buffer).expect("read");
let object = object::File::parse(&buffer).expect("parse");
let mut summary = ArtifactSummary::new(&buffer, &object);
summary.gather();
print_summary(summary);
}
/// Parse a trap manifest for function `f`, if it has one.
///
/// `parse_trap_manifest` may very understandably be confusing. Why not use `f.traps()`? In
/// `lucet-objdump` the module has been accessed by reading the file and following structures as
/// they exist at rest. This means pointers are not relocated, so slices that would be valid when
/// loaded through the platform's loader currently have pointers that are not valid for memory
/// access.
///
/// In particular, trap pointers are correct with respect to 0 being the start of the file (or,
/// buffer, after reading), which means we can (and must) rebuild a correct slice from the buffer.
fn parse_trap_manifest<'a>(
summary: &'a ArtifactSummary<'a>,
f: &FunctionSpec,
) -> Option<TrapManifest<'a>> {
if let Some(faulty_trap_manifest) = f.traps() {
let trap_ptr = faulty_trap_manifest.traps.as_ptr();
let traps_count = faulty_trap_manifest.traps.len();
let traps_byte_count = traps_count * std::mem::size_of::<TrapSite>();
if let Some(traps_byte_slice) =
summary.read_memory(trap_ptr as u64, traps_byte_count as u64)
{
let real_trap_ptr = traps_byte_slice.as_ptr() as *const TrapSite;
Some(TrapManifest {
traps: unsafe { std::slice::from_raw_parts(real_trap_ptr, traps_count) },
})
} else {
println!(
"Failed to read trap bytes for function {:?}, at {:p}",
f, trap_ptr
);
None
}
} else {
None
}
}
fn load_module<'b, 'a: 'b>(
summary: &'a ArtifactSummary<'a>,
serialized_module: &SerializedModule,
tables: &'b [&[TableElement]],
) -> Module<'b> {
let module_data_bytes = summary
.read_memory(
serialized_module.module_data_ptr,
serialized_module.module_data_len,
)
.unwrap();
let module_data =
ModuleData::deserialize(module_data_bytes).expect("ModuleData can be deserialized");
let function_manifest_bytes = summary
.read_memory(
serialized_module.function_manifest_ptr,
serialized_module.function_manifest_len,
)
.unwrap();
let function_manifest = unsafe {
std::slice::from_raw_parts(
function_manifest_bytes.as_ptr() as *const FunctionSpec,
serialized_module.function_manifest_len as usize,
)
};
Module {
version: serialized_module.version.clone(),
module_data,
tables,
function_manifest,
}
}
fn summarize_module<'a, 'b: 'a>(summary: &'a ArtifactSummary<'a>, module: &Module<'b>) {
let module_data = &module.module_data;
let tables = module.tables;
let function_manifest = module.function_manifest;
println!(" Heap Specification:");
if let Some(heap_spec) = module_data.heap_spec() {
println!(" {:9}: {} bytes", "Reserved", heap_spec.reserved_size);
println!(" {:9}: {} bytes", "Guard", heap_spec.guard_size);
println!(" {:9}: {} bytes", "Initial", heap_spec.initial_size);
if let Some(max_size) = heap_spec.max_size {
println!(" {:9}: {} bytes", "Maximum", max_size);
} else {
println!(" {:9}: None", "Maximum");
}
} else {
println!(" {}", "MISSING".red().bold());
}
println!();
println!(" Sparse Page Data:");
if let Some(sparse_page_data) = module_data.sparse_data() {
println!(" {:6}: {}", "Count", sparse_page_data.pages().len());
let mut allempty = true;
let mut anyempty = false;
for (i, page) in sparse_page_data.pages().iter().enumerate() {
match page {
Some(page) => {
allempty = false;
println!(
" Page[{}]: {:p}, size: {}",
i,
page.as_ptr(),
if page.len() != 4096 {
format!(
"{} (page size, expected 4096)",
format!("{}", page.len()).bold().red()
)
.red()
} else {
format!("{}", page.len()).green()
}
);
}
None => {
anyempty = true;
}
};
}
if allempty && !sparse_page_data.pages().is_empty() {
println!(" (all pages empty)");
} else if anyempty {
println!(" (empty pages omitted)");
}
} else {
println!(" {}", "MISSING!".red().bold());
}
println!();
println!("Tables:");
if tables.is_empty() {
println!(" No tables.");
} else {
for (i, table) in tables.iter().enumerate() {
println!(" Table {}: {:?}", i, table);
}
}
println!();
println!("Signatures:");
for (i, s) in module_data.signatures().iter().enumerate() {
println!(" Signature {}: {}", i, s);
}
println!();
println!("Functions:");
if function_manifest.len() != module_data.function_info().len() {
println!(
" {} function manifest and function info have diverging function counts",
"lucetc bug:".red().bold()
);
println!(
" function_manifest length : {}",
function_manifest.len()
);
println!(
" module data function count : {}",
module_data.function_info().len()
);
println!(" Will attempt to display information about functions anyway, but trap/code information may be misaligned with symbols and signatures.");
}
for (i, f) in function_manifest.iter().enumerate() {
let header_name = summary.get_symbol_name_for_addr(f.ptr().as_usize() as u64);
if i >= module_data.function_info().len() {
// This is one form of the above-mentioned bug case
// Half the function information is missing, so just report the issue and continue.
println!(
" Function {} {}",
i,
"is missing the module data part of its declaration".red()
);
match header_name {
Some(name) => {
println!(" ELF header name: {}", name);
}
None => {
println!(" No corresponding ELF symbol.");
}
};
break;
}
let colorize_name = |x: Option<&str>| match x {
Some(name) => name.green(),
None => "None".red().bold(),
};
let fn_meta = &module_data.function_info()[i];
println!(" Function {} (name: {}):", i, colorize_name(fn_meta.name));
if fn_meta.name != header_name {
println!(
" Name {} with name declared in ELF headers: {}",
"DISAGREES".red().bold(),
colorize_name(header_name)
);
}
println!(
" Signature (index {}): {}",
fn_meta.signature.as_u32() as usize,
module_data.signatures()[fn_meta.signature.as_u32() as usize]
);
println!(" Start: {:#010x}", f.ptr().as_usize());
println!(" Code length: {} bytes", f.code_len());
if let Some(trap_manifest) = parse_trap_manifest(&summary, f) {
let trap_count = trap_manifest.traps.len();
println!(" Trap information:");
if trap_count > 0 {
println!(
" {} {} ...",
trap_manifest.traps.len(),
if trap_count == 1 { "trap" } else { "traps" },
);
for trap in trap_manifest.traps {
println!(" $+{:#06x}: {:?}", trap.offset, trap.code);
}
} else {
println!(" No traps for this function");
}
}
}
println!();
println!("Globals:");
if !module_data.globals_spec().is_empty() {
for global_spec in module_data.globals_spec().iter() {
println!(" {:?}", global_spec.global());
for name in global_spec.export_names() {
println!(" Exported as: {}", name);
}
}
} else {
println!(" None");
}
println!();
println!("Exported Functions/Symbols:");
let mut exported_symbols = summary.exported_functions.clone();
for export in module_data.export_functions() {
match module_data.function_info()[export.fn_idx.as_u32() as usize].name {
Some(name) => {
println!(" Internal name: {}", name);
// The "internal name" is probably the first exported name for this function.
// Remove it from the exported_symbols list to not double-count
if let Some(idx) = exported_symbols.iter().position(|x| *x == name) {
exported_symbols.remove(idx);
}
}
None => {
println!(" No internal name");
}
}
// Export names do not have the guest_func_ prefix that symbol names get, and as such do
// not need to be removed from `exported_symbols` (which is built entirely from
// ELF-declared exports, with namespaced names)
println!(" Exported as: {}", export.names.join(", "));
}
if !exported_symbols.is_empty() {
println!();
println!(" Other exported symbols (from ELF headers):");
for export in exported_symbols {
println!(" {}", export);
}
}
println!();
println!("Imported Functions/Symbols:");
let mut imported_symbols = summary.imported_symbols.clone();
for import in module_data.import_functions() {
match module_data.function_info()[import.fn_idx.as_u32() as usize].name {
Some(name) => {
println!(" Internal name: {}", name);
}
None => {
println!(" No internal name");
}
}
println!(" Imported as: {}/{}", import.module, import.name);
// Remove from the imported_symbols list to not double-count imported functions
if let Some(idx) = imported_symbols.iter().position(|x| x == &import.name) {
imported_symbols.remove(idx);
}
}
if !imported_symbols.is_empty() {
println!();
println!(" Other imported symbols (from ELF headers):");
for import in &imported_symbols {
println!(" {}", import);
}
}
}
fn print_summary(summary: ArtifactSummary<'_>) {
println!("Required Symbols:");
println!(
" {:30}: {}",
"lucet_module",
exists_to_str(&summary.symbols.lucet_module)
);
if let Some(ref serialized_module) = summary.serialized_module {
println!("Native module components:");
println!(
" {:30}: {}",
"module_data_ptr",
ptr_to_str(serialized_module.module_data_ptr)
);
println!(
" {:30}: {}",
"module_data_len", serialized_module.module_data_len
);
println!(
" {:30}: {}",
"tables_ptr",
ptr_to_str(serialized_module.tables_ptr)
);
println!(" {:30}: {}", "tables_len", serialized_module.tables_len);
println!(
" {:30}: {}",
"function_manifest_ptr",
ptr_to_str(serialized_module.function_manifest_ptr)
);
println!(
" {:30}: {}",
"function_manifest_len", serialized_module.function_manifest_len
);
let tables_bytes = summary
.read_memory(
serialized_module.tables_ptr,
serialized_module.tables_len * mem::size_of::<&[TableElement]>() as u64,
)
.unwrap();
let tables = unsafe {
std::slice::from_raw_parts(
tables_bytes.as_ptr() as *const &[TableElement],
serialized_module.tables_len as usize,
)
};
let mut reconstructed_tables = Vec::new();
// same situation as trap tables - these slices are valid as if the module was
// dlopen'd, but we just read it as a flat file. So read through the ELF view and use
// pointers to that for the real slices.
for table in tables {
let table_bytes = summary
.read_memory(
table.as_ptr() as usize as u64,
(table.len() * mem::size_of::<TableElement>()) as u64,
)
.unwrap();
reconstructed_tables.push(unsafe {
std::slice::from_raw_parts(
table_bytes.as_ptr() as *const TableElement,
table.len() as usize,
)
});
}
let module = load_module(&summary, serialized_module, &reconstructed_tables);
println!("\nModule:");
summarize_module(&summary, &module);
} else {
println!("The symbol `lucet_module` is {}, so lucet-objdump cannot look at most of the interesting parts.", "MISSING".red().bold());
}
println!();
println!("Data Segments:");
if let Some(data_segments) = summary.data_segments {
println!(" {:6}: {}", "Count", data_segments.segments.len());
for segment in &data_segments.segments {
println!(
" {:7}: {:6} {:6}: {:6}",
"Offset", segment.offset, "Length", segment.len,
);
}
} else {
println!(" {}", "MISSING!".red().bold());
}
}
fn ptr_to_str(p: u64) -> colored::ColoredString {
if p != 0 {
format!("exists; address: {:#x}", p).green()
} else {
"MISSING!".red().bold()
}
}
fn exists_to_str<T>(p: &Option<T>) -> colored::ColoredString {
match p {
Some(_) => "exists".green(),
None => "MISSING!".red().bold(),
}
}
| StandardSymbols | identifier_name |
get.go | package routes
import (
"fmt"
"io"
"log"
"net/http"
"strconv"
"strings"
"time"
"github.com/boatilus/peppercorn/cookie"
"github.com/boatilus/peppercorn/db"
"github.com/boatilus/peppercorn/paths"
"github.com/boatilus/peppercorn/posts"
"github.com/boatilus/peppercorn/pwreset"
"github.com/boatilus/peppercorn/session"
"github.com/boatilus/peppercorn/templates"
"github.com/boatilus/peppercorn/users"
"github.com/boatilus/peppercorn/utility"
"github.com/pressly/chi"
"github.com/spf13/viper"
)
// IndexGetHandler is called for the `/` (index) route and directs the user either to the first
// page, or to the last page the user viewed.
func IndexGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
if len(u.LastViewed) == 0 {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
n, err := posts.GetOffset(u.LastViewed)
if err != nil {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
pn := utility.ComputePage(n, u.PPP)
uri := fmt.Sprintf("/page/%d#%s", pn, u.LastViewed)
http.Redirect(w, req, uri, http.StatusSeeOther)
}
func | (w http.ResponseWriter, req *http.Request) {
templates.SignIn.Execute(w, nil)
}
func SignOutGetHandler(w http.ResponseWriter, req *http.Request) {
// Destroy session
c, err := req.Cookie(session.GetKey())
if err != nil {
}
sid, err := cookie.Decode(c)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
log.Printf("Destroying session for SID \"%s\"", sid)
// Setting the Max-Age attribute to -1 effectively destroys the cookie, but we'll also null the
// content if the client decides to ignore Max-Age
c.MaxAge = -1
c.Value = ""
http.SetCookie(w, c)
if err = session.Destroy(sid); err != nil {
log.Printf("Error in deleting session with SID \"%s\"", sid)
}
http.Redirect(w, req, paths.Get.SignIn, http.StatusTemporaryRedirect)
}
// Of the format: /page/{num}
func PageGetHandler(w http.ResponseWriter, req *http.Request) {
var data struct {
CurrentUser *users.User
PostCount db.CountType
Posts []posts.Zip
PageNum db.CountType
TotalPages db.CountType
}
data.CurrentUser = users.FromContext(req.Context())
if data.CurrentUser == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
var err error
// TODO: We can run these following two queries in parallel.
data.PostCount, err = posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
data.TotalPages = utility.ComputePage(data.PostCount, data.CurrentUser.PPP)
num := chi.URLParam(req, "num")
if num == "latest" {
data.PageNum = data.TotalPages
} else {
pageNum, err := strconv.ParseInt(num, 10, 32)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
data.PageNum = db.CountType(pageNum)
}
// To get the first post to load for this page, we must take into account the user's
// posts-per-page setting.
begin := ((data.PageNum * data.CurrentUser.PPP) - data.CurrentUser.PPP) + 1
ps, err := posts.GetRange(begin, data.CurrentUser.PPP)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(data.CurrentUser.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for _, p := range ps {
u := users.Users[p.Author]
zip := posts.Zip{
ID: p.ID,
AuthorID: p.Author,
Content: p.Content,
Time: p.Time,
Avatar: u.Avatar,
AuthorName: u.Name,
Title: u.Title,
Count: begin,
PrettyTime: utility.FormatTime(p.Time.In(loc), now),
}
data.Posts = append(data.Posts, zip)
begin++
}
// Now that we've successfully gathered the data needed to render, we want to mark the most
// recent post the user's seen. For now, we'll do this even if it's far back in time, but ideally,
// we should only do so if it's newer than what the `LastViewed` property currently reflects.
numPosts := len(data.Posts)
last := data.Posts[numPosts-1]
if data.CurrentUser.LastViewed != last.ID {
data.CurrentUser.LastViewed = last.ID
if err := users.Update(data.CurrentUser); err != nil {
// This is a non-essential task, so simply log the error.
log.Printf("Could not update property LastViewed [%s] on user %q [%s]: %s", last.ID, data.CurrentUser.ID, data.CurrentUser.Name, err.Error())
}
}
templates.Index.Execute(w, data)
}
// SingleHandler is called for GET requests for the `/post/{num}` route and renders a single post
// by its computed post number.
func SingleGetHandler(w http.ResponseWriter, req *http.Request) {
num := chi.URLParam(req, "num")
n, err := strconv.ParseInt(num, 10, 32)
if err != nil {
msg := fmt.Sprintf("Bad request for route '/post/%v'. Expected '%v' to be a positive integer", num, num)
http.Error(w, msg, http.StatusBadRequest)
return
}
p, err := posts.GetOne(db.CountType(n))
if err != nil {
http.NotFound(w, req)
return
}
io.WriteString(w, p.Content)
}
// SingleRemoveGetHandler is called for GET requests for the `/post/{num}/delete` route and removes
// a single post, if the user is authorized to do so.
func SingleRemoveGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
// BUG: Due to some weirdness in Chi, the param here is "num", but we're actually getting supplied
// a post ID. We can't change the route param name due to this.
// See: https://github.com/pressly/chi/issues/78
id := chi.URLParam(req, "num")
if len(id) == 0 {
http.Error(w, "routes: ID cannot be empty", http.StatusBadRequest)
return
}
p, err := posts.GetByID(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if p.Author != u.ID {
msg := fmt.Sprintf("routes: user %q cannot delete post of user %q", u.ID, p.Author)
http.Error(w, msg, http.StatusUnauthorized)
return
}
if err := posts.Deactivate(id); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, "/page/latest", http.StatusSeeOther)
}
func CountGetHandler(w http.ResponseWriter, _ *http.Request) {
n, err := posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
io.WriteString(w, strconv.Itoa(int(n)))
}
// MeGetHandler is the handler
func MeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
ss, err := session.GetByUser(u.ID)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Reduce the session data retrieved into something more easily-consumable.
type sessionData struct {
Device string
IP string
Timestamp string
}
var sessions []sessionData
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(u.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for i := range ss {
data := utility.ParseUserAgent(ss[i].UserAgent)
var ip string
// Running locally, an IP is displayed like "[::1]:57305". Ergo, if we're running locally,
// just pass the IP unchanged. Otherwise, split off the port from the IP address and only
// display that to the user.
if ss[i].IP[0] == '[' {
ip = ss[i].IP
} else {
ip = strings.Split(ss[i].IP, ":")[0]
}
s := sessionData{
Device: fmt.Sprintf("%s on %s", data.Browser, data.OS),
IP: ip,
Timestamp: utility.FormatTime(ss[i].Timestamp.In(loc), now),
}
sessions = append(sessions, s)
}
obEmail := utility.ObfuscateEmail(u.Email) // we'll obfuscate the email address for privacy
pppOptions := viper.GetStringSlice("ppp_options")
// To display a list of radio buttons for users to select the expiry time for 2FA sessions.
durationOpts := viper.Get("two_factor_auth.duration_options").([]interface{})
durations := make([]int64, len(durationOpts))
// For durations, we want to display them as the number of days, so we'll create Duration objects
// as cast them into int64s.
for i := range durationOpts {
d := time.Duration(durationOpts[i].(float64)) * time.Second
durations[i] = int64(d.Hours())
}
currentDuration := time.Duration(u.AuthDuration) * time.Second
o := struct {
Flash string
ObfuscatedEmail string
Name string
Title string
Avatar string
PPPOptions []string
PPP string
Has2FAEnabled bool
DurationOpts []int64
CurrentDuration int64
Timezones []string
UserTimezone string
Sessions []sessionData
}{
Flash: session.GetFlash(u.ID),
ObfuscatedEmail: obEmail,
Name: u.Name,
Title: u.Title,
Avatar: u.Avatar,
PPPOptions: pppOptions,
PPP: strconv.FormatInt(int64(u.PPP), 10),
Has2FAEnabled: u.Has2FAEnabled,
DurationOpts: durations,
CurrentDuration: int64(currentDuration.Hours()),
Timezones: viper.GetStringSlice("timezones"),
UserTimezone: u.Timezone,
Sessions: sessions,
}
templates.Me.Execute(w, &o)
}
// MeRevokeGetHandler is the handler called from the /me route to destroy a single session by :num,
// or all sessions with "all"
func MeRevokeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
i, err := strconv.ParseInt(chi.URLParam(req, "num"), 10, 32)
if err != nil || i < 0 {
http.Error(w, "", http.StatusBadRequest)
return
}
if err := session.DestroyByIndex(u.ID, db.CountType(i)); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, paths.Get.Me, http.StatusSeeOther)
}
// ForgotGetHandler is the route called to send the user a password reset email.
func ForgotGetHandler(w http.ResponseWriter, req *http.Request) {
templates.Forgot.Execute(w, nil)
}
// ResetPasswordGetHandler is the route called to reset a user's password.
func ResetPasswordGetHandler(w http.ResponseWriter, req *http.Request) {
type data struct {
FlashMessage string
Token string
}
token := req.FormValue("token")
if token == "" {
templates.ResetPassword.Execute(w, data{FlashMessage: "Invalid reset token."})
return
}
valid, _ := pwreset.ValidateToken(token)
if !valid {
templates.ResetPassword.Execute(w, data{FlashMessage: "Reset is expired or doesn't exist."})
return
}
templates.ResetPassword.Execute(w, data{Token: token})
}
| SignInGetHandler | identifier_name |
get.go | package routes
import (
"fmt"
"io"
"log"
"net/http"
"strconv"
"strings"
"time"
"github.com/boatilus/peppercorn/cookie"
"github.com/boatilus/peppercorn/db"
"github.com/boatilus/peppercorn/paths"
"github.com/boatilus/peppercorn/posts"
"github.com/boatilus/peppercorn/pwreset"
"github.com/boatilus/peppercorn/session"
"github.com/boatilus/peppercorn/templates"
"github.com/boatilus/peppercorn/users"
"github.com/boatilus/peppercorn/utility"
"github.com/pressly/chi"
"github.com/spf13/viper"
)
// IndexGetHandler is called for the `/` (index) route and directs the user either to the first
// page, or to the last page the user viewed.
func IndexGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
if len(u.LastViewed) == 0 {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
n, err := posts.GetOffset(u.LastViewed)
if err != nil {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
pn := utility.ComputePage(n, u.PPP)
uri := fmt.Sprintf("/page/%d#%s", pn, u.LastViewed)
http.Redirect(w, req, uri, http.StatusSeeOther)
}
func SignInGetHandler(w http.ResponseWriter, req *http.Request) {
templates.SignIn.Execute(w, nil)
}
func SignOutGetHandler(w http.ResponseWriter, req *http.Request) {
// Destroy session
c, err := req.Cookie(session.GetKey())
if err != nil {
}
sid, err := cookie.Decode(c)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
log.Printf("Destroying session for SID \"%s\"", sid)
// Setting the Max-Age attribute to -1 effectively destroys the cookie, but we'll also null the
// content if the client decides to ignore Max-Age
c.MaxAge = -1
c.Value = ""
http.SetCookie(w, c)
if err = session.Destroy(sid); err != nil {
log.Printf("Error in deleting session with SID \"%s\"", sid)
}
http.Redirect(w, req, paths.Get.SignIn, http.StatusTemporaryRedirect)
}
// Of the format: /page/{num}
func PageGetHandler(w http.ResponseWriter, req *http.Request) {
var data struct {
CurrentUser *users.User
PostCount db.CountType
Posts []posts.Zip
PageNum db.CountType
TotalPages db.CountType
}
data.CurrentUser = users.FromContext(req.Context())
if data.CurrentUser == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
var err error
// TODO: We can run these following two queries in parallel.
data.PostCount, err = posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
data.TotalPages = utility.ComputePage(data.PostCount, data.CurrentUser.PPP)
num := chi.URLParam(req, "num")
if num == "latest" {
data.PageNum = data.TotalPages
} else {
pageNum, err := strconv.ParseInt(num, 10, 32)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
data.PageNum = db.CountType(pageNum)
}
// To get the first post to load for this page, we must take into account the user's
// posts-per-page setting.
begin := ((data.PageNum * data.CurrentUser.PPP) - data.CurrentUser.PPP) + 1
ps, err := posts.GetRange(begin, data.CurrentUser.PPP)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(data.CurrentUser.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for _, p := range ps {
u := users.Users[p.Author]
zip := posts.Zip{
ID: p.ID,
AuthorID: p.Author,
Content: p.Content,
Time: p.Time,
Avatar: u.Avatar,
AuthorName: u.Name,
Title: u.Title,
Count: begin,
PrettyTime: utility.FormatTime(p.Time.In(loc), now),
}
data.Posts = append(data.Posts, zip)
begin++
}
// Now that we've successfully gathered the data needed to render, we want to mark the most
// recent post the user's seen. For now, we'll do this even if it's far back in time, but ideally,
// we should only do so if it's newer than what the `LastViewed` property currently reflects.
numPosts := len(data.Posts)
last := data.Posts[numPosts-1]
if data.CurrentUser.LastViewed != last.ID {
data.CurrentUser.LastViewed = last.ID
if err := users.Update(data.CurrentUser); err != nil {
// This is a non-essential task, so simply log the error.
log.Printf("Could not update property LastViewed [%s] on user %q [%s]: %s", last.ID, data.CurrentUser.ID, data.CurrentUser.Name, err.Error())
}
}
templates.Index.Execute(w, data)
}
// SingleHandler is called for GET requests for the `/post/{num}` route and renders a single post
// by its computed post number.
func SingleGetHandler(w http.ResponseWriter, req *http.Request) {
num := chi.URLParam(req, "num")
n, err := strconv.ParseInt(num, 10, 32)
if err != nil {
msg := fmt.Sprintf("Bad request for route '/post/%v'. Expected '%v' to be a positive integer", num, num)
http.Error(w, msg, http.StatusBadRequest)
return
}
p, err := posts.GetOne(db.CountType(n))
if err != nil {
http.NotFound(w, req)
return
}
io.WriteString(w, p.Content)
}
// SingleRemoveGetHandler is called for GET requests for the `/post/{num}/delete` route and removes
// a single post, if the user is authorized to do so.
func SingleRemoveGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
// BUG: Due to some weirdness in Chi, the param here is "num", but we're actually getting supplied
// a post ID. We can't change the route param name due to this.
// See: https://github.com/pressly/chi/issues/78
id := chi.URLParam(req, "num")
if len(id) == 0 {
http.Error(w, "routes: ID cannot be empty", http.StatusBadRequest)
return
}
p, err := posts.GetByID(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if p.Author != u.ID {
msg := fmt.Sprintf("routes: user %q cannot delete post of user %q", u.ID, p.Author)
http.Error(w, msg, http.StatusUnauthorized)
return
}
if err := posts.Deactivate(id); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, "/page/latest", http.StatusSeeOther)
}
func CountGetHandler(w http.ResponseWriter, _ *http.Request) {
n, err := posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
io.WriteString(w, strconv.Itoa(int(n)))
}
// MeGetHandler is the handler
func MeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
ss, err := session.GetByUser(u.ID)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Reduce the session data retrieved into something more easily-consumable.
type sessionData struct {
Device string
IP string
Timestamp string
}
var sessions []sessionData
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(u.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for i := range ss |
obEmail := utility.ObfuscateEmail(u.Email) // we'll obfuscate the email address for privacy
pppOptions := viper.GetStringSlice("ppp_options")
// To display a list of radio buttons for users to select the expiry time for 2FA sessions.
durationOpts := viper.Get("two_factor_auth.duration_options").([]interface{})
durations := make([]int64, len(durationOpts))
// For durations, we want to display them as the number of days, so we'll create Duration objects
// as cast them into int64s.
for i := range durationOpts {
d := time.Duration(durationOpts[i].(float64)) * time.Second
durations[i] = int64(d.Hours())
}
currentDuration := time.Duration(u.AuthDuration) * time.Second
o := struct {
Flash string
ObfuscatedEmail string
Name string
Title string
Avatar string
PPPOptions []string
PPP string
Has2FAEnabled bool
DurationOpts []int64
CurrentDuration int64
Timezones []string
UserTimezone string
Sessions []sessionData
}{
Flash: session.GetFlash(u.ID),
ObfuscatedEmail: obEmail,
Name: u.Name,
Title: u.Title,
Avatar: u.Avatar,
PPPOptions: pppOptions,
PPP: strconv.FormatInt(int64(u.PPP), 10),
Has2FAEnabled: u.Has2FAEnabled,
DurationOpts: durations,
CurrentDuration: int64(currentDuration.Hours()),
Timezones: viper.GetStringSlice("timezones"),
UserTimezone: u.Timezone,
Sessions: sessions,
}
templates.Me.Execute(w, &o)
}
// MeRevokeGetHandler is the handler called from the /me route to destroy a single session by :num,
// or all sessions with "all"
func MeRevokeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
i, err := strconv.ParseInt(chi.URLParam(req, "num"), 10, 32)
if err != nil || i < 0 {
http.Error(w, "", http.StatusBadRequest)
return
}
if err := session.DestroyByIndex(u.ID, db.CountType(i)); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, paths.Get.Me, http.StatusSeeOther)
}
// ForgotGetHandler is the route called to send the user a password reset email.
func ForgotGetHandler(w http.ResponseWriter, req *http.Request) {
templates.Forgot.Execute(w, nil)
}
// ResetPasswordGetHandler is the route called to reset a user's password.
func ResetPasswordGetHandler(w http.ResponseWriter, req *http.Request) {
type data struct {
FlashMessage string
Token string
}
token := req.FormValue("token")
if token == "" {
templates.ResetPassword.Execute(w, data{FlashMessage: "Invalid reset token."})
return
}
valid, _ := pwreset.ValidateToken(token)
if !valid {
templates.ResetPassword.Execute(w, data{FlashMessage: "Reset is expired or doesn't exist."})
return
}
templates.ResetPassword.Execute(w, data{Token: token})
}
| {
data := utility.ParseUserAgent(ss[i].UserAgent)
var ip string
// Running locally, an IP is displayed like "[::1]:57305". Ergo, if we're running locally,
// just pass the IP unchanged. Otherwise, split off the port from the IP address and only
// display that to the user.
if ss[i].IP[0] == '[' {
ip = ss[i].IP
} else {
ip = strings.Split(ss[i].IP, ":")[0]
}
s := sessionData{
Device: fmt.Sprintf("%s on %s", data.Browser, data.OS),
IP: ip,
Timestamp: utility.FormatTime(ss[i].Timestamp.In(loc), now),
}
sessions = append(sessions, s)
} | conditional_block |
get.go | package routes
import (
"fmt"
"io"
"log"
"net/http"
"strconv"
"strings"
"time"
"github.com/boatilus/peppercorn/cookie"
"github.com/boatilus/peppercorn/db"
"github.com/boatilus/peppercorn/paths"
"github.com/boatilus/peppercorn/posts"
"github.com/boatilus/peppercorn/pwreset"
"github.com/boatilus/peppercorn/session"
"github.com/boatilus/peppercorn/templates"
"github.com/boatilus/peppercorn/users"
"github.com/boatilus/peppercorn/utility"
"github.com/pressly/chi"
"github.com/spf13/viper"
)
// IndexGetHandler is called for the `/` (index) route and directs the user either to the first
// page, or to the last page the user viewed.
func IndexGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
if len(u.LastViewed) == 0 {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
n, err := posts.GetOffset(u.LastViewed)
if err != nil {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
pn := utility.ComputePage(n, u.PPP)
uri := fmt.Sprintf("/page/%d#%s", pn, u.LastViewed)
http.Redirect(w, req, uri, http.StatusSeeOther)
}
func SignInGetHandler(w http.ResponseWriter, req *http.Request) {
templates.SignIn.Execute(w, nil)
}
func SignOutGetHandler(w http.ResponseWriter, req *http.Request) {
// Destroy session
c, err := req.Cookie(session.GetKey())
if err != nil {
}
sid, err := cookie.Decode(c)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
log.Printf("Destroying session for SID \"%s\"", sid)
// Setting the Max-Age attribute to -1 effectively destroys the cookie, but we'll also null the
// content if the client decides to ignore Max-Age
c.MaxAge = -1
c.Value = ""
http.SetCookie(w, c)
if err = session.Destroy(sid); err != nil {
log.Printf("Error in deleting session with SID \"%s\"", sid)
}
http.Redirect(w, req, paths.Get.SignIn, http.StatusTemporaryRedirect)
}
// Of the format: /page/{num}
func PageGetHandler(w http.ResponseWriter, req *http.Request) {
var data struct {
CurrentUser *users.User
PostCount db.CountType
Posts []posts.Zip
PageNum db.CountType
TotalPages db.CountType
}
data.CurrentUser = users.FromContext(req.Context())
if data.CurrentUser == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
var err error
// TODO: We can run these following two queries in parallel.
data.PostCount, err = posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
data.TotalPages = utility.ComputePage(data.PostCount, data.CurrentUser.PPP)
num := chi.URLParam(req, "num")
if num == "latest" {
data.PageNum = data.TotalPages
} else {
pageNum, err := strconv.ParseInt(num, 10, 32)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
data.PageNum = db.CountType(pageNum)
}
// To get the first post to load for this page, we must take into account the user's
// posts-per-page setting.
begin := ((data.PageNum * data.CurrentUser.PPP) - data.CurrentUser.PPP) + 1
ps, err := posts.GetRange(begin, data.CurrentUser.PPP)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(data.CurrentUser.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for _, p := range ps {
u := users.Users[p.Author]
zip := posts.Zip{
ID: p.ID,
AuthorID: p.Author,
Content: p.Content,
Time: p.Time,
Avatar: u.Avatar,
AuthorName: u.Name,
Title: u.Title,
Count: begin,
PrettyTime: utility.FormatTime(p.Time.In(loc), now),
}
data.Posts = append(data.Posts, zip)
begin++
}
// Now that we've successfully gathered the data needed to render, we want to mark the most
// recent post the user's seen. For now, we'll do this even if it's far back in time, but ideally,
// we should only do so if it's newer than what the `LastViewed` property currently reflects.
numPosts := len(data.Posts)
last := data.Posts[numPosts-1]
if data.CurrentUser.LastViewed != last.ID {
data.CurrentUser.LastViewed = last.ID
if err := users.Update(data.CurrentUser); err != nil {
// This is a non-essential task, so simply log the error.
log.Printf("Could not update property LastViewed [%s] on user %q [%s]: %s", last.ID, data.CurrentUser.ID, data.CurrentUser.Name, err.Error())
}
}
templates.Index.Execute(w, data)
}
// SingleHandler is called for GET requests for the `/post/{num}` route and renders a single post
// by its computed post number.
func SingleGetHandler(w http.ResponseWriter, req *http.Request) {
num := chi.URLParam(req, "num")
n, err := strconv.ParseInt(num, 10, 32)
if err != nil {
msg := fmt.Sprintf("Bad request for route '/post/%v'. Expected '%v' to be a positive integer", num, num)
http.Error(w, msg, http.StatusBadRequest)
return
}
p, err := posts.GetOne(db.CountType(n))
if err != nil {
http.NotFound(w, req)
return
}
io.WriteString(w, p.Content)
}
// SingleRemoveGetHandler is called for GET requests for the `/post/{num}/delete` route and removes
// a single post, if the user is authorized to do so.
func SingleRemoveGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
} | id := chi.URLParam(req, "num")
if len(id) == 0 {
http.Error(w, "routes: ID cannot be empty", http.StatusBadRequest)
return
}
p, err := posts.GetByID(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if p.Author != u.ID {
msg := fmt.Sprintf("routes: user %q cannot delete post of user %q", u.ID, p.Author)
http.Error(w, msg, http.StatusUnauthorized)
return
}
if err := posts.Deactivate(id); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, "/page/latest", http.StatusSeeOther)
}
func CountGetHandler(w http.ResponseWriter, _ *http.Request) {
n, err := posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
io.WriteString(w, strconv.Itoa(int(n)))
}
// MeGetHandler is the handler
func MeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
ss, err := session.GetByUser(u.ID)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Reduce the session data retrieved into something more easily-consumable.
type sessionData struct {
Device string
IP string
Timestamp string
}
var sessions []sessionData
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(u.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for i := range ss {
data := utility.ParseUserAgent(ss[i].UserAgent)
var ip string
// Running locally, an IP is displayed like "[::1]:57305". Ergo, if we're running locally,
// just pass the IP unchanged. Otherwise, split off the port from the IP address and only
// display that to the user.
if ss[i].IP[0] == '[' {
ip = ss[i].IP
} else {
ip = strings.Split(ss[i].IP, ":")[0]
}
s := sessionData{
Device: fmt.Sprintf("%s on %s", data.Browser, data.OS),
IP: ip,
Timestamp: utility.FormatTime(ss[i].Timestamp.In(loc), now),
}
sessions = append(sessions, s)
}
obEmail := utility.ObfuscateEmail(u.Email) // we'll obfuscate the email address for privacy
pppOptions := viper.GetStringSlice("ppp_options")
// To display a list of radio buttons for users to select the expiry time for 2FA sessions.
durationOpts := viper.Get("two_factor_auth.duration_options").([]interface{})
durations := make([]int64, len(durationOpts))
// For durations, we want to display them as the number of days, so we'll create Duration objects
// as cast them into int64s.
for i := range durationOpts {
d := time.Duration(durationOpts[i].(float64)) * time.Second
durations[i] = int64(d.Hours())
}
currentDuration := time.Duration(u.AuthDuration) * time.Second
o := struct {
Flash string
ObfuscatedEmail string
Name string
Title string
Avatar string
PPPOptions []string
PPP string
Has2FAEnabled bool
DurationOpts []int64
CurrentDuration int64
Timezones []string
UserTimezone string
Sessions []sessionData
}{
Flash: session.GetFlash(u.ID),
ObfuscatedEmail: obEmail,
Name: u.Name,
Title: u.Title,
Avatar: u.Avatar,
PPPOptions: pppOptions,
PPP: strconv.FormatInt(int64(u.PPP), 10),
Has2FAEnabled: u.Has2FAEnabled,
DurationOpts: durations,
CurrentDuration: int64(currentDuration.Hours()),
Timezones: viper.GetStringSlice("timezones"),
UserTimezone: u.Timezone,
Sessions: sessions,
}
templates.Me.Execute(w, &o)
}
// MeRevokeGetHandler is the handler called from the /me route to destroy a single session by :num,
// or all sessions with "all"
func MeRevokeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
i, err := strconv.ParseInt(chi.URLParam(req, "num"), 10, 32)
if err != nil || i < 0 {
http.Error(w, "", http.StatusBadRequest)
return
}
if err := session.DestroyByIndex(u.ID, db.CountType(i)); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, paths.Get.Me, http.StatusSeeOther)
}
// ForgotGetHandler is the route called to send the user a password reset email.
func ForgotGetHandler(w http.ResponseWriter, req *http.Request) {
templates.Forgot.Execute(w, nil)
}
// ResetPasswordGetHandler is the route called to reset a user's password.
func ResetPasswordGetHandler(w http.ResponseWriter, req *http.Request) {
type data struct {
FlashMessage string
Token string
}
token := req.FormValue("token")
if token == "" {
templates.ResetPassword.Execute(w, data{FlashMessage: "Invalid reset token."})
return
}
valid, _ := pwreset.ValidateToken(token)
if !valid {
templates.ResetPassword.Execute(w, data{FlashMessage: "Reset is expired or doesn't exist."})
return
}
templates.ResetPassword.Execute(w, data{Token: token})
} |
// BUG: Due to some weirdness in Chi, the param here is "num", but we're actually getting supplied
// a post ID. We can't change the route param name due to this.
// See: https://github.com/pressly/chi/issues/78 | random_line_split |
get.go | package routes
import (
"fmt"
"io"
"log"
"net/http"
"strconv"
"strings"
"time"
"github.com/boatilus/peppercorn/cookie"
"github.com/boatilus/peppercorn/db"
"github.com/boatilus/peppercorn/paths"
"github.com/boatilus/peppercorn/posts"
"github.com/boatilus/peppercorn/pwreset"
"github.com/boatilus/peppercorn/session"
"github.com/boatilus/peppercorn/templates"
"github.com/boatilus/peppercorn/users"
"github.com/boatilus/peppercorn/utility"
"github.com/pressly/chi"
"github.com/spf13/viper"
)
// IndexGetHandler is called for the `/` (index) route and directs the user either to the first
// page, or to the last page the user viewed.
func IndexGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
if len(u.LastViewed) == 0 {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
n, err := posts.GetOffset(u.LastViewed)
if err != nil {
// There's no value or we can't read from it, so we'll just sent the user to the first page.
http.Redirect(w, req, "/page/1", http.StatusSeeOther)
return
}
pn := utility.ComputePage(n, u.PPP)
uri := fmt.Sprintf("/page/%d#%s", pn, u.LastViewed)
http.Redirect(w, req, uri, http.StatusSeeOther)
}
func SignInGetHandler(w http.ResponseWriter, req *http.Request) {
templates.SignIn.Execute(w, nil)
}
func SignOutGetHandler(w http.ResponseWriter, req *http.Request) {
// Destroy session
c, err := req.Cookie(session.GetKey())
if err != nil {
}
sid, err := cookie.Decode(c)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
log.Printf("Destroying session for SID \"%s\"", sid)
// Setting the Max-Age attribute to -1 effectively destroys the cookie, but we'll also null the
// content if the client decides to ignore Max-Age
c.MaxAge = -1
c.Value = ""
http.SetCookie(w, c)
if err = session.Destroy(sid); err != nil {
log.Printf("Error in deleting session with SID \"%s\"", sid)
}
http.Redirect(w, req, paths.Get.SignIn, http.StatusTemporaryRedirect)
}
// Of the format: /page/{num}
func PageGetHandler(w http.ResponseWriter, req *http.Request) {
var data struct {
CurrentUser *users.User
PostCount db.CountType
Posts []posts.Zip
PageNum db.CountType
TotalPages db.CountType
}
data.CurrentUser = users.FromContext(req.Context())
if data.CurrentUser == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
var err error
// TODO: We can run these following two queries in parallel.
data.PostCount, err = posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
data.TotalPages = utility.ComputePage(data.PostCount, data.CurrentUser.PPP)
num := chi.URLParam(req, "num")
if num == "latest" {
data.PageNum = data.TotalPages
} else {
pageNum, err := strconv.ParseInt(num, 10, 32)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
data.PageNum = db.CountType(pageNum)
}
// To get the first post to load for this page, we must take into account the user's
// posts-per-page setting.
begin := ((data.PageNum * data.CurrentUser.PPP) - data.CurrentUser.PPP) + 1
ps, err := posts.GetRange(begin, data.CurrentUser.PPP)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(data.CurrentUser.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for _, p := range ps {
u := users.Users[p.Author]
zip := posts.Zip{
ID: p.ID,
AuthorID: p.Author,
Content: p.Content,
Time: p.Time,
Avatar: u.Avatar,
AuthorName: u.Name,
Title: u.Title,
Count: begin,
PrettyTime: utility.FormatTime(p.Time.In(loc), now),
}
data.Posts = append(data.Posts, zip)
begin++
}
// Now that we've successfully gathered the data needed to render, we want to mark the most
// recent post the user's seen. For now, we'll do this even if it's far back in time, but ideally,
// we should only do so if it's newer than what the `LastViewed` property currently reflects.
numPosts := len(data.Posts)
last := data.Posts[numPosts-1]
if data.CurrentUser.LastViewed != last.ID {
data.CurrentUser.LastViewed = last.ID
if err := users.Update(data.CurrentUser); err != nil {
// This is a non-essential task, so simply log the error.
log.Printf("Could not update property LastViewed [%s] on user %q [%s]: %s", last.ID, data.CurrentUser.ID, data.CurrentUser.Name, err.Error())
}
}
templates.Index.Execute(w, data)
}
// SingleHandler is called for GET requests for the `/post/{num}` route and renders a single post
// by its computed post number.
func SingleGetHandler(w http.ResponseWriter, req *http.Request) {
num := chi.URLParam(req, "num")
n, err := strconv.ParseInt(num, 10, 32)
if err != nil {
msg := fmt.Sprintf("Bad request for route '/post/%v'. Expected '%v' to be a positive integer", num, num)
http.Error(w, msg, http.StatusBadRequest)
return
}
p, err := posts.GetOne(db.CountType(n))
if err != nil {
http.NotFound(w, req)
return
}
io.WriteString(w, p.Content)
}
// SingleRemoveGetHandler is called for GET requests for the `/post/{num}/delete` route and removes
// a single post, if the user is authorized to do so.
func SingleRemoveGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
// BUG: Due to some weirdness in Chi, the param here is "num", but we're actually getting supplied
// a post ID. We can't change the route param name due to this.
// See: https://github.com/pressly/chi/issues/78
id := chi.URLParam(req, "num")
if len(id) == 0 {
http.Error(w, "routes: ID cannot be empty", http.StatusBadRequest)
return
}
p, err := posts.GetByID(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if p.Author != u.ID {
msg := fmt.Sprintf("routes: user %q cannot delete post of user %q", u.ID, p.Author)
http.Error(w, msg, http.StatusUnauthorized)
return
}
if err := posts.Deactivate(id); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, "/page/latest", http.StatusSeeOther)
}
func CountGetHandler(w http.ResponseWriter, _ *http.Request) {
n, err := posts.Count()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
io.WriteString(w, strconv.Itoa(int(n)))
}
// MeGetHandler is the handler
func MeGetHandler(w http.ResponseWriter, req *http.Request) |
// MeRevokeGetHandler is the handler called from the /me route to destroy a single session by :num,
// or all sessions with "all"
func MeRevokeGetHandler(w http.ResponseWriter, req *http.Request) {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
i, err := strconv.ParseInt(chi.URLParam(req, "num"), 10, 32)
if err != nil || i < 0 {
http.Error(w, "", http.StatusBadRequest)
return
}
if err := session.DestroyByIndex(u.ID, db.CountType(i)); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
http.Redirect(w, req, paths.Get.Me, http.StatusSeeOther)
}
// ForgotGetHandler is the route called to send the user a password reset email.
func ForgotGetHandler(w http.ResponseWriter, req *http.Request) {
templates.Forgot.Execute(w, nil)
}
// ResetPasswordGetHandler is the route called to reset a user's password.
func ResetPasswordGetHandler(w http.ResponseWriter, req *http.Request) {
type data struct {
FlashMessage string
Token string
}
token := req.FormValue("token")
if token == "" {
templates.ResetPassword.Execute(w, data{FlashMessage: "Invalid reset token."})
return
}
valid, _ := pwreset.ValidateToken(token)
if !valid {
templates.ResetPassword.Execute(w, data{FlashMessage: "Reset is expired or doesn't exist."})
return
}
templates.ResetPassword.Execute(w, data{Token: token})
}
| {
u := users.FromContext(req.Context())
if u == nil {
http.Error(w, "Could not read user data from request context", http.StatusInternalServerError)
return
}
ss, err := session.GetByUser(u.ID)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Reduce the session data retrieved into something more easily-consumable.
type sessionData struct {
Device string
IP string
Timestamp string
}
var sessions []sessionData
// Load the user's timezone setting so we can provide correct post timestamps.
loc, err := time.LoadLocation(u.Timezone)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
now := time.Now()
for i := range ss {
data := utility.ParseUserAgent(ss[i].UserAgent)
var ip string
// Running locally, an IP is displayed like "[::1]:57305". Ergo, if we're running locally,
// just pass the IP unchanged. Otherwise, split off the port from the IP address and only
// display that to the user.
if ss[i].IP[0] == '[' {
ip = ss[i].IP
} else {
ip = strings.Split(ss[i].IP, ":")[0]
}
s := sessionData{
Device: fmt.Sprintf("%s on %s", data.Browser, data.OS),
IP: ip,
Timestamp: utility.FormatTime(ss[i].Timestamp.In(loc), now),
}
sessions = append(sessions, s)
}
obEmail := utility.ObfuscateEmail(u.Email) // we'll obfuscate the email address for privacy
pppOptions := viper.GetStringSlice("ppp_options")
// To display a list of radio buttons for users to select the expiry time for 2FA sessions.
durationOpts := viper.Get("two_factor_auth.duration_options").([]interface{})
durations := make([]int64, len(durationOpts))
// For durations, we want to display them as the number of days, so we'll create Duration objects
// as cast them into int64s.
for i := range durationOpts {
d := time.Duration(durationOpts[i].(float64)) * time.Second
durations[i] = int64(d.Hours())
}
currentDuration := time.Duration(u.AuthDuration) * time.Second
o := struct {
Flash string
ObfuscatedEmail string
Name string
Title string
Avatar string
PPPOptions []string
PPP string
Has2FAEnabled bool
DurationOpts []int64
CurrentDuration int64
Timezones []string
UserTimezone string
Sessions []sessionData
}{
Flash: session.GetFlash(u.ID),
ObfuscatedEmail: obEmail,
Name: u.Name,
Title: u.Title,
Avatar: u.Avatar,
PPPOptions: pppOptions,
PPP: strconv.FormatInt(int64(u.PPP), 10),
Has2FAEnabled: u.Has2FAEnabled,
DurationOpts: durations,
CurrentDuration: int64(currentDuration.Hours()),
Timezones: viper.GetStringSlice("timezones"),
UserTimezone: u.Timezone,
Sessions: sessions,
}
templates.Me.Execute(w, &o)
} | identifier_body |
vm.rs | use rand::random;
/// Core CPU implementation.
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use crate::display::{Display, FONT_SET};
use crate::keypad::{Keypad, Keystate};
/// The default CPU clock, in Hz.
pub const CPU_CLOCK: u32 = 600;
/// The timers clock, in Hz.
pub const TIMERS_CLOCK: u32 = 60;
/// The index of the register used for the 'carry flag'.
/// VF is used according to the CHIP 8 specifications.
pub const FLAG: usize = 15;
/// The size of the stack.
const STACK_SIZE: usize = 16;
/// CHIP 8 virtual machine.
/// The references used to implement this particular interpreter include :
/// http://en.wikipedia.org/wiki/CHIP-8
/// http://mattmik.com/chip8.html
/// http://devernay.free.fr/hacks/chip8/C8TECH10.HTM
pub struct Chip8 {
/// The current opcode.
opcode: u16,
/// The chip's 4096 bytes of memory.
pub memory: [u8; 4096], // TEMPORARY pub for debug purposes
/// The chip's 16 registers, from V0 to VF.
/// VF is used for the 'carry flag'.
v: [u8; 16],
/// Index register.
i: usize,
/// Program counter.
pc: usize,
/// The stack, used for subroutine operations.
/// By default has 16 levels of nesting.
pub stack: [u16; STACK_SIZE],
/// Stack pointer.
pub sp: usize,
// Timer registers, must be updated at 60 Hz by the emulator.
pub delay_timer: u8,
pub sound_timer: u8,
/// Screen component.
pub display: Display,
/// Input component.
pub keypad: Keypad,
/// Is the virtual machine waiting for a keypress ?
/// If so, when any key is pressed store its index in VX where X is
/// the value stored in this tuple.
pub wait_for_key: (bool, u8),
/// Implementation option.
/// Should the shifting opcodes 8XY6 and 8XYE use the original implementation,
/// i.e. set VX to VY shifted respectively right and left by one bit ?
/// If false, the VM will instead consider as many ROMs seem to do that Y=X.
/// See http://mattmik.com/chip8.html for more detail.
shift_op_use_vy: bool,
}
/// Macro for handling invalid/unimplemented opcodes.
/// As of now only prints a error message, could maybe panic in the future.
macro_rules! op_not_implemented {
($op: expr, $pc: expr) => {
println!(
"Not implemented opcode {:0>4X} at {:0>5X}",
$op as usize, $pc
);
};
}
impl Chip8 {
/// Create and return a new, initialized Chip8 virtual machine.
pub fn new() -> Chip8 {
let mut chip8 = Chip8 {
opcode: 0u16,
memory: [0u8; 4096],
v: [0u8; 16],
i: 0usize,
pc: 0usize,
stack: [0u16; STACK_SIZE],
sp: 0usize,
delay_timer: 0u8,
sound_timer: 0u8,
display: Display::new(),
keypad: Keypad::new(),
wait_for_key: (false, 0x0),
shift_op_use_vy: false,
};
// load the font set in memory in the space [0x0, 0x200[ = [0, 80[
for i in 0..80 {
chip8.memory[i] = FONT_SET[i];
}
// the program space starts at 0x200
chip8.pc = 0x200;
chip8
}
/// Reinitialize the virtual machine's state but keep the loaded program
/// in memory.
pub fn reset(&mut self) {
self.opcode = 0u16;
self.v = [0u8; 16];
self.i = 0usize;
self.pc = 0x200;
self.stack = [0u16; STACK_SIZE];
self.sp = 0usize;
self.delay_timer = 0u8;
self.sound_timer = 0u8;
self.display = Display::new();
self.keypad = Keypad::new();
self.wait_for_key = (false, 0x0);
}
/// Set the shift_op_use_vy flag.
pub fn should_shift_op_use_vy(&mut self, b: bool) {
self.shift_op_use_vy = b;
}
/// Is the CPU waiting for a key press ?
pub fn is_waiting_for_key(&self) -> bool {
self.wait_for_key.0
}
/// Called by the emulator application to inform the virtual machine
/// waiting for a key pressed that a key has been pressed.
pub fn end_wait_for_key(&mut self, key_index: usize) {
if !self.is_waiting_for_key() {
warn!(concat!(
"Chip8::end_wait_for_key_press called but the VM ",
"wasn't waiting for a key press - ignoring"
));
return;
}
self.v[self.wait_for_key.1 as usize] = key_index as u8;
self.wait_for_key.0 = false;
self.pc += 2;
}
/// Get the value stored in the register VX.
pub fn register(&self, x: usize) -> u8 {
self.v[x]
}
/// Get the index register.
pub fn index(&self) -> usize {
self.i
}
/// Get the program counter value.
pub fn pc(&self) -> usize {
self.pc
}
/// Load a Chip8 rom from the given filepath.
/// If the operation fails, return a String explaining why.
pub fn load(&mut self, filepath: &Path) -> Option<String> {
let file = match File::open(filepath) {
Ok(f) => f,
Err(ref why) => {
return Some(format!(
"couldn't open rom file \"{}\" : {}",
filepath.display(),
Error::description(why)
));
}
};
for (i, b) in file.bytes().enumerate() {
//if b.is_none() /* EOF */ { break; }
match b {
Ok(byte) => self.memory[self.pc + i] = byte,
Err(e) => {
return Some(format!("error while reading ROM : {}", e.to_string()));
}
}
}
None
}
/// Emulate a Chip8 CPU cycle.
/// Return true if the loaded program is done.
pub fn emulate_cycle(&mut self) -> bool {
// Is the program finished ?
if self.pc >= 4094 {
return true;
}
// Fetch and execute the opcode to execute ;
// an opcode being 2 bytes long, we need to read 2 bytes from memory
let op = (self.memory[self.pc] as u16) << 8 | (self.memory[self.pc + 1] as u16);
// println!("{:0>4X} {:0>4X}", self.opcode, self.pc); // DEBUG
self.opcode = op;
self.execute_opcode(op);
false
}
/// Execute a single opcode.
pub fn execute_opcode(&mut self, op: u16) {
// For easier matching, get the values (nibbles) A, B, C, D
// if the opcode is 0xABCD.
let opcode_tuple = (
((op & 0xF000) >> 12) as u8,
((op & 0x0F00) >> 8) as u8,
((op & 0x00F0) >> 4) as u8,
(op & 0x000F) as u8,
);
//println!("{:0>4X}/{:X},{:X},{:X},{:X}", self.opcode, a, b, c, d);
// Opcode decoding
match opcode_tuple {
(0x0, 0x0, 0xE, 0x0) => self.cls(),
(0x0, 0x0, 0xE, 0xE) => self.ret(),
// 0NNN = sys addr : ignore
(0x1, _, _, _) => self.jump_addr(op & 0x0FFF),
(0x2, _, _, _) => self.call_addr(op & 0x0FFF),
(0x3, x, _, _) => self.se_vx_nn(x, (op & 0x00FF) as u8),
(0x4, x, _, _) => self.sne_vx_nn(x, (op & 0x00FF) as u8),
(0x5, x, y, 0x0) => self.se_vx_vy(x, y),
(0x6, x, _, _) => self.ld_vx_nn(x, (op & 0x00FF) as u8),
(0x7, x, _, _) => self.add_vx_nn(x, (op & 0x00FF) as u8),
(0x8, x, y, 0x0) => self.ld_vx_vy(x, y),
(0x8, x, y, 0x1) => self.or_vx_vy(x, y),
(0x8, x, y, 0x2) => self.and_vx_vy(x, y),
(0x8, x, y, 0x3) => self.xor_vx_vy(x, y),
(0x8, x, y, 0x4) => self.add_vx_vy(x, y),
(0x8, x, y, 0x5) => self.sub_vx_vy(x, y),
(0x8, x, y, 0x6) => self.shr_vx_vy(x, y),
(0x8, x, y, 0x7) => self.subn_vx_vy(x, y),
(0x8, x, y, 0xE) => self.shl_vx_vy(x, y),
(0x9, x, y, 0x0) => self.sne_vx_vy(x, y),
(0xA, _, _, _) => self.ld_i_addr(op & 0x0FFF),
(0xB, _, _, _) => {
let v0 = self.v[0] as u16; // sacrifice to the god of borrows
self.jump_addr(op & 0x0FFF + v0);
}
(0xC, x, _, _) => self.rnd_vx_nn(x, (op & 0x00FF) as u8),
(0xD, x, y, n) => self.drw_vx_vy_n(x, y, n),
(0xE, x, 0x9, 0xE) => self.skp_vx(x),
(0xE, x, 0xA, 0x1) => self.sknp_vx(x),
(0xF, x, 0x0, 0x7) => self.ld_vx_dt(x),
(0xF, x, 0x0, 0xA) => self.ld_vx_key(x),
(0xF, x, 0x1, 0x5) => self.ld_dt_vx(x),
(0xF, x, 0x1, 0x8) => self.ld_st_vx(x),
(0xF, x, 0x1, 0xE) => self.add_i_vx(x),
(0xF, x, 0x2, 0x9) => self.ld_i_font_vx(x),
(0xF, x, 0x3, 0x3) => self.ld_mem_i_bcd_vx(x),
(0xF, x, 0x5, 0x5) => self.ld_mem_i_regs(x),
(0xF, x, 0x6, 0x5) => self.ld_regs_mem_i(x),
_ => op_not_implemented!(op, self.pc),
}
}
/// Clear the screen.
fn cls(&mut self) {
self.display.clear();
self.pc += 2;
}
/// Return from a subroutine, by setting the program counter to the address
/// popped from the stack.
fn ret(&mut self) {
self.sp -= 1;
let addr = self.stack[self.sp];
self.jump_addr(addr);
self.pc += 2;
}
/// Jump to the given address of the form 0x0NNN.
fn jump_addr(&mut self, addr: u16) {
self.pc = addr as usize;
}
/// Execute the subroutine at the provided address pushing the current
/// program counter to the stack and jumping to the given address of the
/// form 0x0NNN.
/// TODO : handle stack overflow error ?
fn call_addr(&mut self, addr: u16) {
self.stack[self.sp] = self.pc as u16;
self.sp += 1;
self.jump_addr(addr);
}
/// Skip the next instruction if the value of register VX is equal to 0xNN.
fn se_vx_nn(&mut self, x: u8, nn: u8) {
self.pc += if self.v[x as usize] == nn { 4 } else { 2 };
}
/// Skip the next instruction if the value of register VX isn't equal to
/// 0xNN.
fn sne_vx_nn(&mut self, x: u8, nn: u8) {
self.pc += if self.v[x as usize] != nn { 4 } else { 2 };
}
/// Skip the next instruction if the value of register VX is equal to the
/// value of register VY.
fn se_vx_vy(&mut self, x: u8, y: u8) {
self.pc += if self.v[x as usize] == self.v[y as usize] {
4
} else {
2
};
}
/// Skip the next instruction if the value of register VX is not equal to
/// the value of register VY.
fn sne_vx_vy(&mut self, x: u8, y: u8) {
self.pc += if self.v[x as usize] != self.v[y as usize] {
4
} else {
2
};
}
/// Skip the next instruction if the key of index VX is currently pressed.
fn skp_vx(&mut self, x: u8) {
self.pc += match self.keypad.get_key_state(self.v[x as usize] as usize) {
Keystate::Pressed => 4,
Keystate::Released => 2,
};
}
/// Skip the next instruction if the key of index VX is currently released.
fn sknp_vx(&mut self, x: u8) {
self.pc += match self.keypad.get_key_state(self.v[x as usize] as usize) {
Keystate::Pressed => 2,
Keystate::Released => 4,
};
}
/// Store the value 0xNN in the the register VX.
fn ld_vx_nn(&mut self, x: u8, nn: u8) {
self.v[x as usize] = nn;
self.pc += 2;
}
/// Store the value of the register VY in the register VX.
fn ld_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] = self.v[y as usize];
self.pc += 2;
}
/// Store the memory address 0x0NNN in the register I.
fn ld_i_addr(&mut self, addr: u16) {
self.i = addr as usize;
self.pc += 2;
}
/// Add the value 0xNN to the register VX, wrapping around the result if
/// needed (VX is an unsigned byte so its maximum value is 255).
fn add_vx_nn(&mut self, x: u8, nn: u8) {
let new_vx_u16 = self.v[x as usize] as u16 + nn as u16; // no overflow
self.v[x as usize] = new_vx_u16 as u8; // wrap around the value
self.pc += 2;
}
/// Add the value of register VX to the value of register I.
fn add_i_vx(&mut self, x: u8) {
self.i += self.v[x as usize] as usize;
self.pc += 2;
}
/// Set VX to (VX OR VY).
fn or_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] |= self.v[y as usize];
self.pc += 2;
}
/// Set VX to (VX AND VY).
fn and_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] &= self.v[y as usize];
self.pc += 2;
}
/// Set VX to (VX XOR VY).
fn xor_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] ^= self.v[y as usize];
self.pc += 2;
}
/// Add the value of register VY to the value of register VX.
/// Set V_FLAG to 0x1 if a carry occurs, and to 0x0 otherwise.
fn add_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_u16 = self.v[x as usize] as u16 + self.v[y as usize] as u16;
self.v[x as usize] = new_vx_u16 as u8;
self.v[FLAG] = if new_vx_u16 > 255 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Substract the value of register VY from the value of register VX, and
/// store the (wrapped) result in register VX.
/// Set V_FLAG to 0x1 if a borrow occurs, and to 0x0 otherwise.
fn sub_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_i8 = self.v[x as usize] as i8 - self.v[y as usize] as i8;
self.v[x as usize] = new_vx_i8 as u8;
self.v[FLAG] = if new_vx_i8 < 0 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Substract the value of register VX from the value of register VY, and | fn subn_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_i8 = self.v[y as usize] as i8 - self.v[x as usize] as i8;
self.v[x as usize] = new_vx_i8 as u8;
self.v[FLAG] = if new_vx_i8 < 0 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Store the value of the register VY shifted right one bit in register VX
/// and set register VF to the least significant bit prior to the shift.
/// NB : references disagree on this opcode, we use the one defined here :
/// http://mattmik.com/chip8.html
/// If shift_op_use_vy is false, will consider VX instead of VY.
fn shr_vx_vy(&mut self, x: u8, y: u8) {
let shift_on = if self.shift_op_use_vy { y } else { x };
self.v[FLAG] = self.v[shift_on as usize] & 0x01;
self.v[x as usize] = self.v[shift_on as usize] >> 1;
self.pc += 2;
}
/// Same as 'shr_vx_vy' but with a left shift.
/// Set register VF to the most significant bit prior to the shift.
/// If shift_op_use_vy is false, will consider VX instead of VY.
fn shl_vx_vy(&mut self, x: u8, y: u8) {
let shift_on = if self.shift_op_use_vy { y } else { x };
self.v[FLAG] = self.v[shift_on as usize] & 0x80;
self.v[x as usize] = self.v[shift_on as usize] << 1;
self.pc += 2;
}
/// Set VX to a random byte with a mask of 0xNN.
fn rnd_vx_nn(&mut self, x: u8, nn: u8) {
self.v[x as usize] = random::<u8>() & nn;
self.pc += 2;
}
/// Draw a sprite at position VX, VY with 0xN bytes of sprite data starting
/// at the address stored in I. N is thus the height of the sprite.
/// The drawing is implemented by 'Display' as a XOR operation.
/// VF will act here as a collision flag, i.e. if any set pixel is erased
/// set it to 0x1, and to 0x0 otherwise.
fn drw_vx_vy_n(&mut self, x: u8, y: u8, n: u8) {
let pos_x = self.v[x as usize] as usize;
let pos_y = self.v[y as usize] as usize;
let mem_start = self.i;
let mem_end = self.i + n as usize;
if self
.display
.draw(pos_x, pos_y, &self.memory[mem_start..mem_end])
{
self.v[FLAG] = 0x1;
} else {
self.v[FLAG] = 0x0;
}
self.pc += 2;
}
/// Store the current value of the delay timer in register VX.
fn ld_vx_dt(&mut self, x: u8) {
self.v[x as usize] = self.delay_timer;
self.pc += 2;
}
/// Set the delay timer to the value stored in register VX.
fn ld_dt_vx(&mut self, x: u8) {
self.delay_timer = self.v[x as usize];
self.pc += 2;
}
/// Set the sound timer to the value stored in register VX.
fn ld_st_vx(&mut self, x: u8) {
self.sound_timer = self.v[x as usize];
self.pc += 2;
}
/// Wait for a key press and store the result in the register VX.
/// Implementation : the emulation application must trigger the
/// 'end_wait_for_key_press' function ; this allows to achieve better
/// decoupling from the framerate.
fn ld_vx_key(&mut self, x: u8) {
self.wait_for_key = (true, x);
/*for i in 0..16 {
match self.keypad.get_key_state(i) {
Keystate::Pressed => {
self.v[x as usize] = i as u8;
self.pc += 2;
break;
}
Keystate::Released => {},
}
}*/
}
/// Set I to the memory address of the sprite data corresponding to the
/// hexadecimal digit (0x0..0xF) stored in register VX.
/// Will use the internal fontset stored in memory.
fn ld_i_font_vx(&mut self, x: u8) {
// the font set is in the memory range 0x0..0x80
// and each character is represented by 5 bytes
self.i = (self.v[x as usize] * 5) as usize;
self.pc += 2;
}
/// Store the Binary-Coded Decimal equivalent of the value stored in
/// register VX in memory at the addresses I, I+1, and I+2.
fn ld_mem_i_bcd_vx(&mut self, x: u8) {
// VX is a byte : its decimal value is in 0..256
let vx = self.v[x as usize];
self.memory[self.i] = vx / 100;
self.memory[self.i + 1] = (vx / 10) % 10;
self.memory[self.i + 2] = (vx % 100) % 10;
self.pc += 2;
}
/// Store the values of registers V0 to VX inclusive in memory starting at
/// the address I, and set I to I + X + 1 after operation.
fn ld_mem_i_regs(&mut self, x: u8) {
let x_usize = x as usize;
for j in 0..(x_usize + 1) {
self.memory[self.i + j] = self.v[j];
}
self.i += x_usize + 1;
self.pc += 2;
}
/// Fill registers V0 to VX inclusive with the values stored in memory
/// starting at the address I.
fn ld_regs_mem_i(&mut self, x: u8) {
let x_usize = x as usize;
for j in 0..(x_usize + 1) {
self.v[j] = self.memory[self.i + j];
}
self.i += x_usize + 1;
self.pc += 2;
}
} | /// store the (wrapped) result in register VX.
/// Set V_FLAG to 0x1 if a borrow occurs, and to 0x0 otherwise. | random_line_split |
vm.rs | use rand::random;
/// Core CPU implementation.
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use crate::display::{Display, FONT_SET};
use crate::keypad::{Keypad, Keystate};
/// The default CPU clock, in Hz.
pub const CPU_CLOCK: u32 = 600;
/// The timers clock, in Hz.
pub const TIMERS_CLOCK: u32 = 60;
/// The index of the register used for the 'carry flag'.
/// VF is used according to the CHIP 8 specifications.
pub const FLAG: usize = 15;
/// The size of the stack.
const STACK_SIZE: usize = 16;
/// CHIP 8 virtual machine.
/// The references used to implement this particular interpreter include :
/// http://en.wikipedia.org/wiki/CHIP-8
/// http://mattmik.com/chip8.html
/// http://devernay.free.fr/hacks/chip8/C8TECH10.HTM
pub struct Chip8 {
/// The current opcode.
opcode: u16,
/// The chip's 4096 bytes of memory.
pub memory: [u8; 4096], // TEMPORARY pub for debug purposes
/// The chip's 16 registers, from V0 to VF.
/// VF is used for the 'carry flag'.
v: [u8; 16],
/// Index register.
i: usize,
/// Program counter.
pc: usize,
/// The stack, used for subroutine operations.
/// By default has 16 levels of nesting.
pub stack: [u16; STACK_SIZE],
/// Stack pointer.
pub sp: usize,
// Timer registers, must be updated at 60 Hz by the emulator.
pub delay_timer: u8,
pub sound_timer: u8,
/// Screen component.
pub display: Display,
/// Input component.
pub keypad: Keypad,
/// Is the virtual machine waiting for a keypress ?
/// If so, when any key is pressed store its index in VX where X is
/// the value stored in this tuple.
pub wait_for_key: (bool, u8),
/// Implementation option.
/// Should the shifting opcodes 8XY6 and 8XYE use the original implementation,
/// i.e. set VX to VY shifted respectively right and left by one bit ?
/// If false, the VM will instead consider as many ROMs seem to do that Y=X.
/// See http://mattmik.com/chip8.html for more detail.
shift_op_use_vy: bool,
}
/// Macro for handling invalid/unimplemented opcodes.
/// As of now only prints a error message, could maybe panic in the future.
macro_rules! op_not_implemented {
($op: expr, $pc: expr) => {
println!(
"Not implemented opcode {:0>4X} at {:0>5X}",
$op as usize, $pc
);
};
}
impl Chip8 {
/// Create and return a new, initialized Chip8 virtual machine.
pub fn new() -> Chip8 {
let mut chip8 = Chip8 {
opcode: 0u16,
memory: [0u8; 4096],
v: [0u8; 16],
i: 0usize,
pc: 0usize,
stack: [0u16; STACK_SIZE],
sp: 0usize,
delay_timer: 0u8,
sound_timer: 0u8,
display: Display::new(),
keypad: Keypad::new(),
wait_for_key: (false, 0x0),
shift_op_use_vy: false,
};
// load the font set in memory in the space [0x0, 0x200[ = [0, 80[
for i in 0..80 {
chip8.memory[i] = FONT_SET[i];
}
// the program space starts at 0x200
chip8.pc = 0x200;
chip8
}
/// Reinitialize the virtual machine's state but keep the loaded program
/// in memory.
pub fn reset(&mut self) {
self.opcode = 0u16;
self.v = [0u8; 16];
self.i = 0usize;
self.pc = 0x200;
self.stack = [0u16; STACK_SIZE];
self.sp = 0usize;
self.delay_timer = 0u8;
self.sound_timer = 0u8;
self.display = Display::new();
self.keypad = Keypad::new();
self.wait_for_key = (false, 0x0);
}
/// Set the shift_op_use_vy flag.
pub fn should_shift_op_use_vy(&mut self, b: bool) {
self.shift_op_use_vy = b;
}
/// Is the CPU waiting for a key press ?
pub fn is_waiting_for_key(&self) -> bool {
self.wait_for_key.0
}
/// Called by the emulator application to inform the virtual machine
/// waiting for a key pressed that a key has been pressed.
pub fn end_wait_for_key(&mut self, key_index: usize) {
if !self.is_waiting_for_key() {
warn!(concat!(
"Chip8::end_wait_for_key_press called but the VM ",
"wasn't waiting for a key press - ignoring"
));
return;
}
self.v[self.wait_for_key.1 as usize] = key_index as u8;
self.wait_for_key.0 = false;
self.pc += 2;
}
/// Get the value stored in the register VX.
pub fn register(&self, x: usize) -> u8 {
self.v[x]
}
/// Get the index register.
pub fn index(&self) -> usize {
self.i
}
/// Get the program counter value.
pub fn pc(&self) -> usize {
self.pc
}
/// Load a Chip8 rom from the given filepath.
/// If the operation fails, return a String explaining why.
pub fn load(&mut self, filepath: &Path) -> Option<String> {
let file = match File::open(filepath) {
Ok(f) => f,
Err(ref why) => {
return Some(format!(
"couldn't open rom file \"{}\" : {}",
filepath.display(),
Error::description(why)
));
}
};
for (i, b) in file.bytes().enumerate() {
//if b.is_none() /* EOF */ { break; }
match b {
Ok(byte) => self.memory[self.pc + i] = byte,
Err(e) => {
return Some(format!("error while reading ROM : {}", e.to_string()));
}
}
}
None
}
/// Emulate a Chip8 CPU cycle.
/// Return true if the loaded program is done.
pub fn emulate_cycle(&mut self) -> bool {
// Is the program finished ?
if self.pc >= 4094 {
return true;
}
// Fetch and execute the opcode to execute ;
// an opcode being 2 bytes long, we need to read 2 bytes from memory
let op = (self.memory[self.pc] as u16) << 8 | (self.memory[self.pc + 1] as u16);
// println!("{:0>4X} {:0>4X}", self.opcode, self.pc); // DEBUG
self.opcode = op;
self.execute_opcode(op);
false
}
/// Execute a single opcode.
pub fn execute_opcode(&mut self, op: u16) |
/// Clear the screen.
fn cls(&mut self) {
self.display.clear();
self.pc += 2;
}
/// Return from a subroutine, by setting the program counter to the address
/// popped from the stack.
fn ret(&mut self) {
self.sp -= 1;
let addr = self.stack[self.sp];
self.jump_addr(addr);
self.pc += 2;
}
/// Jump to the given address of the form 0x0NNN.
fn jump_addr(&mut self, addr: u16) {
self.pc = addr as usize;
}
/// Execute the subroutine at the provided address pushing the current
/// program counter to the stack and jumping to the given address of the
/// form 0x0NNN.
/// TODO : handle stack overflow error ?
fn call_addr(&mut self, addr: u16) {
self.stack[self.sp] = self.pc as u16;
self.sp += 1;
self.jump_addr(addr);
}
/// Skip the next instruction if the value of register VX is equal to 0xNN.
fn se_vx_nn(&mut self, x: u8, nn: u8) {
self.pc += if self.v[x as usize] == nn { 4 } else { 2 };
}
/// Skip the next instruction if the value of register VX isn't equal to
/// 0xNN.
fn sne_vx_nn(&mut self, x: u8, nn: u8) {
self.pc += if self.v[x as usize] != nn { 4 } else { 2 };
}
/// Skip the next instruction if the value of register VX is equal to the
/// value of register VY.
fn se_vx_vy(&mut self, x: u8, y: u8) {
self.pc += if self.v[x as usize] == self.v[y as usize] {
4
} else {
2
};
}
/// Skip the next instruction if the value of register VX is not equal to
/// the value of register VY.
fn sne_vx_vy(&mut self, x: u8, y: u8) {
self.pc += if self.v[x as usize] != self.v[y as usize] {
4
} else {
2
};
}
/// Skip the next instruction if the key of index VX is currently pressed.
fn skp_vx(&mut self, x: u8) {
self.pc += match self.keypad.get_key_state(self.v[x as usize] as usize) {
Keystate::Pressed => 4,
Keystate::Released => 2,
};
}
/// Skip the next instruction if the key of index VX is currently released.
fn sknp_vx(&mut self, x: u8) {
self.pc += match self.keypad.get_key_state(self.v[x as usize] as usize) {
Keystate::Pressed => 2,
Keystate::Released => 4,
};
}
/// Store the value 0xNN in the the register VX.
fn ld_vx_nn(&mut self, x: u8, nn: u8) {
self.v[x as usize] = nn;
self.pc += 2;
}
/// Store the value of the register VY in the register VX.
fn ld_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] = self.v[y as usize];
self.pc += 2;
}
/// Store the memory address 0x0NNN in the register I.
fn ld_i_addr(&mut self, addr: u16) {
self.i = addr as usize;
self.pc += 2;
}
/// Add the value 0xNN to the register VX, wrapping around the result if
/// needed (VX is an unsigned byte so its maximum value is 255).
fn add_vx_nn(&mut self, x: u8, nn: u8) {
let new_vx_u16 = self.v[x as usize] as u16 + nn as u16; // no overflow
self.v[x as usize] = new_vx_u16 as u8; // wrap around the value
self.pc += 2;
}
/// Add the value of register VX to the value of register I.
fn add_i_vx(&mut self, x: u8) {
self.i += self.v[x as usize] as usize;
self.pc += 2;
}
/// Set VX to (VX OR VY).
fn or_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] |= self.v[y as usize];
self.pc += 2;
}
/// Set VX to (VX AND VY).
fn and_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] &= self.v[y as usize];
self.pc += 2;
}
/// Set VX to (VX XOR VY).
fn xor_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] ^= self.v[y as usize];
self.pc += 2;
}
/// Add the value of register VY to the value of register VX.
/// Set V_FLAG to 0x1 if a carry occurs, and to 0x0 otherwise.
fn add_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_u16 = self.v[x as usize] as u16 + self.v[y as usize] as u16;
self.v[x as usize] = new_vx_u16 as u8;
self.v[FLAG] = if new_vx_u16 > 255 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Substract the value of register VY from the value of register VX, and
/// store the (wrapped) result in register VX.
/// Set V_FLAG to 0x1 if a borrow occurs, and to 0x0 otherwise.
fn sub_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_i8 = self.v[x as usize] as i8 - self.v[y as usize] as i8;
self.v[x as usize] = new_vx_i8 as u8;
self.v[FLAG] = if new_vx_i8 < 0 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Substract the value of register VX from the value of register VY, and
/// store the (wrapped) result in register VX.
/// Set V_FLAG to 0x1 if a borrow occurs, and to 0x0 otherwise.
fn subn_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_i8 = self.v[y as usize] as i8 - self.v[x as usize] as i8;
self.v[x as usize] = new_vx_i8 as u8;
self.v[FLAG] = if new_vx_i8 < 0 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Store the value of the register VY shifted right one bit in register VX
/// and set register VF to the least significant bit prior to the shift.
/// NB : references disagree on this opcode, we use the one defined here :
/// http://mattmik.com/chip8.html
/// If shift_op_use_vy is false, will consider VX instead of VY.
fn shr_vx_vy(&mut self, x: u8, y: u8) {
let shift_on = if self.shift_op_use_vy { y } else { x };
self.v[FLAG] = self.v[shift_on as usize] & 0x01;
self.v[x as usize] = self.v[shift_on as usize] >> 1;
self.pc += 2;
}
/// Same as 'shr_vx_vy' but with a left shift.
/// Set register VF to the most significant bit prior to the shift.
/// If shift_op_use_vy is false, will consider VX instead of VY.
fn shl_vx_vy(&mut self, x: u8, y: u8) {
let shift_on = if self.shift_op_use_vy { y } else { x };
self.v[FLAG] = self.v[shift_on as usize] & 0x80;
self.v[x as usize] = self.v[shift_on as usize] << 1;
self.pc += 2;
}
/// Set VX to a random byte with a mask of 0xNN.
fn rnd_vx_nn(&mut self, x: u8, nn: u8) {
self.v[x as usize] = random::<u8>() & nn;
self.pc += 2;
}
/// Draw a sprite at position VX, VY with 0xN bytes of sprite data starting
/// at the address stored in I. N is thus the height of the sprite.
/// The drawing is implemented by 'Display' as a XOR operation.
/// VF will act here as a collision flag, i.e. if any set pixel is erased
/// set it to 0x1, and to 0x0 otherwise.
fn drw_vx_vy_n(&mut self, x: u8, y: u8, n: u8) {
let pos_x = self.v[x as usize] as usize;
let pos_y = self.v[y as usize] as usize;
let mem_start = self.i;
let mem_end = self.i + n as usize;
if self
.display
.draw(pos_x, pos_y, &self.memory[mem_start..mem_end])
{
self.v[FLAG] = 0x1;
} else {
self.v[FLAG] = 0x0;
}
self.pc += 2;
}
/// Store the current value of the delay timer in register VX.
fn ld_vx_dt(&mut self, x: u8) {
self.v[x as usize] = self.delay_timer;
self.pc += 2;
}
/// Set the delay timer to the value stored in register VX.
fn ld_dt_vx(&mut self, x: u8) {
self.delay_timer = self.v[x as usize];
self.pc += 2;
}
/// Set the sound timer to the value stored in register VX.
fn ld_st_vx(&mut self, x: u8) {
self.sound_timer = self.v[x as usize];
self.pc += 2;
}
/// Wait for a key press and store the result in the register VX.
/// Implementation : the emulation application must trigger the
/// 'end_wait_for_key_press' function ; this allows to achieve better
/// decoupling from the framerate.
fn ld_vx_key(&mut self, x: u8) {
self.wait_for_key = (true, x);
/*for i in 0..16 {
match self.keypad.get_key_state(i) {
Keystate::Pressed => {
self.v[x as usize] = i as u8;
self.pc += 2;
break;
}
Keystate::Released => {},
}
}*/
}
/// Set I to the memory address of the sprite data corresponding to the
/// hexadecimal digit (0x0..0xF) stored in register VX.
/// Will use the internal fontset stored in memory.
fn ld_i_font_vx(&mut self, x: u8) {
// the font set is in the memory range 0x0..0x80
// and each character is represented by 5 bytes
self.i = (self.v[x as usize] * 5) as usize;
self.pc += 2;
}
/// Store the Binary-Coded Decimal equivalent of the value stored in
/// register VX in memory at the addresses I, I+1, and I+2.
fn ld_mem_i_bcd_vx(&mut self, x: u8) {
// VX is a byte : its decimal value is in 0..256
let vx = self.v[x as usize];
self.memory[self.i] = vx / 100;
self.memory[self.i + 1] = (vx / 10) % 10;
self.memory[self.i + 2] = (vx % 100) % 10;
self.pc += 2;
}
/// Store the values of registers V0 to VX inclusive in memory starting at
/// the address I, and set I to I + X + 1 after operation.
fn ld_mem_i_regs(&mut self, x: u8) {
let x_usize = x as usize;
for j in 0..(x_usize + 1) {
self.memory[self.i + j] = self.v[j];
}
self.i += x_usize + 1;
self.pc += 2;
}
/// Fill registers V0 to VX inclusive with the values stored in memory
/// starting at the address I.
fn ld_regs_mem_i(&mut self, x: u8) {
let x_usize = x as usize;
for j in 0..(x_usize + 1) {
self.v[j] = self.memory[self.i + j];
}
self.i += x_usize + 1;
self.pc += 2;
}
}
| {
// For easier matching, get the values (nibbles) A, B, C, D
// if the opcode is 0xABCD.
let opcode_tuple = (
((op & 0xF000) >> 12) as u8,
((op & 0x0F00) >> 8) as u8,
((op & 0x00F0) >> 4) as u8,
(op & 0x000F) as u8,
);
//println!("{:0>4X}/{:X},{:X},{:X},{:X}", self.opcode, a, b, c, d);
// Opcode decoding
match opcode_tuple {
(0x0, 0x0, 0xE, 0x0) => self.cls(),
(0x0, 0x0, 0xE, 0xE) => self.ret(),
// 0NNN = sys addr : ignore
(0x1, _, _, _) => self.jump_addr(op & 0x0FFF),
(0x2, _, _, _) => self.call_addr(op & 0x0FFF),
(0x3, x, _, _) => self.se_vx_nn(x, (op & 0x00FF) as u8),
(0x4, x, _, _) => self.sne_vx_nn(x, (op & 0x00FF) as u8),
(0x5, x, y, 0x0) => self.se_vx_vy(x, y),
(0x6, x, _, _) => self.ld_vx_nn(x, (op & 0x00FF) as u8),
(0x7, x, _, _) => self.add_vx_nn(x, (op & 0x00FF) as u8),
(0x8, x, y, 0x0) => self.ld_vx_vy(x, y),
(0x8, x, y, 0x1) => self.or_vx_vy(x, y),
(0x8, x, y, 0x2) => self.and_vx_vy(x, y),
(0x8, x, y, 0x3) => self.xor_vx_vy(x, y),
(0x8, x, y, 0x4) => self.add_vx_vy(x, y),
(0x8, x, y, 0x5) => self.sub_vx_vy(x, y),
(0x8, x, y, 0x6) => self.shr_vx_vy(x, y),
(0x8, x, y, 0x7) => self.subn_vx_vy(x, y),
(0x8, x, y, 0xE) => self.shl_vx_vy(x, y),
(0x9, x, y, 0x0) => self.sne_vx_vy(x, y),
(0xA, _, _, _) => self.ld_i_addr(op & 0x0FFF),
(0xB, _, _, _) => {
let v0 = self.v[0] as u16; // sacrifice to the god of borrows
self.jump_addr(op & 0x0FFF + v0);
}
(0xC, x, _, _) => self.rnd_vx_nn(x, (op & 0x00FF) as u8),
(0xD, x, y, n) => self.drw_vx_vy_n(x, y, n),
(0xE, x, 0x9, 0xE) => self.skp_vx(x),
(0xE, x, 0xA, 0x1) => self.sknp_vx(x),
(0xF, x, 0x0, 0x7) => self.ld_vx_dt(x),
(0xF, x, 0x0, 0xA) => self.ld_vx_key(x),
(0xF, x, 0x1, 0x5) => self.ld_dt_vx(x),
(0xF, x, 0x1, 0x8) => self.ld_st_vx(x),
(0xF, x, 0x1, 0xE) => self.add_i_vx(x),
(0xF, x, 0x2, 0x9) => self.ld_i_font_vx(x),
(0xF, x, 0x3, 0x3) => self.ld_mem_i_bcd_vx(x),
(0xF, x, 0x5, 0x5) => self.ld_mem_i_regs(x),
(0xF, x, 0x6, 0x5) => self.ld_regs_mem_i(x),
_ => op_not_implemented!(op, self.pc),
}
} | identifier_body |
vm.rs | use rand::random;
/// Core CPU implementation.
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use crate::display::{Display, FONT_SET};
use crate::keypad::{Keypad, Keystate};
/// The default CPU clock, in Hz.
pub const CPU_CLOCK: u32 = 600;
/// The timers clock, in Hz.
pub const TIMERS_CLOCK: u32 = 60;
/// The index of the register used for the 'carry flag'.
/// VF is used according to the CHIP 8 specifications.
pub const FLAG: usize = 15;
/// The size of the stack.
const STACK_SIZE: usize = 16;
/// CHIP 8 virtual machine.
/// The references used to implement this particular interpreter include :
/// http://en.wikipedia.org/wiki/CHIP-8
/// http://mattmik.com/chip8.html
/// http://devernay.free.fr/hacks/chip8/C8TECH10.HTM
pub struct Chip8 {
/// The current opcode.
opcode: u16,
/// The chip's 4096 bytes of memory.
pub memory: [u8; 4096], // TEMPORARY pub for debug purposes
/// The chip's 16 registers, from V0 to VF.
/// VF is used for the 'carry flag'.
v: [u8; 16],
/// Index register.
i: usize,
/// Program counter.
pc: usize,
/// The stack, used for subroutine operations.
/// By default has 16 levels of nesting.
pub stack: [u16; STACK_SIZE],
/// Stack pointer.
pub sp: usize,
// Timer registers, must be updated at 60 Hz by the emulator.
pub delay_timer: u8,
pub sound_timer: u8,
/// Screen component.
pub display: Display,
/// Input component.
pub keypad: Keypad,
/// Is the virtual machine waiting for a keypress ?
/// If so, when any key is pressed store its index in VX where X is
/// the value stored in this tuple.
pub wait_for_key: (bool, u8),
/// Implementation option.
/// Should the shifting opcodes 8XY6 and 8XYE use the original implementation,
/// i.e. set VX to VY shifted respectively right and left by one bit ?
/// If false, the VM will instead consider as many ROMs seem to do that Y=X.
/// See http://mattmik.com/chip8.html for more detail.
shift_op_use_vy: bool,
}
/// Macro for handling invalid/unimplemented opcodes.
/// As of now only prints a error message, could maybe panic in the future.
macro_rules! op_not_implemented {
($op: expr, $pc: expr) => {
println!(
"Not implemented opcode {:0>4X} at {:0>5X}",
$op as usize, $pc
);
};
}
impl Chip8 {
/// Create and return a new, initialized Chip8 virtual machine.
pub fn new() -> Chip8 {
let mut chip8 = Chip8 {
opcode: 0u16,
memory: [0u8; 4096],
v: [0u8; 16],
i: 0usize,
pc: 0usize,
stack: [0u16; STACK_SIZE],
sp: 0usize,
delay_timer: 0u8,
sound_timer: 0u8,
display: Display::new(),
keypad: Keypad::new(),
wait_for_key: (false, 0x0),
shift_op_use_vy: false,
};
// load the font set in memory in the space [0x0, 0x200[ = [0, 80[
for i in 0..80 {
chip8.memory[i] = FONT_SET[i];
}
// the program space starts at 0x200
chip8.pc = 0x200;
chip8
}
/// Reinitialize the virtual machine's state but keep the loaded program
/// in memory.
pub fn reset(&mut self) {
self.opcode = 0u16;
self.v = [0u8; 16];
self.i = 0usize;
self.pc = 0x200;
self.stack = [0u16; STACK_SIZE];
self.sp = 0usize;
self.delay_timer = 0u8;
self.sound_timer = 0u8;
self.display = Display::new();
self.keypad = Keypad::new();
self.wait_for_key = (false, 0x0);
}
/// Set the shift_op_use_vy flag.
pub fn | (&mut self, b: bool) {
self.shift_op_use_vy = b;
}
/// Is the CPU waiting for a key press ?
pub fn is_waiting_for_key(&self) -> bool {
self.wait_for_key.0
}
/// Called by the emulator application to inform the virtual machine
/// waiting for a key pressed that a key has been pressed.
pub fn end_wait_for_key(&mut self, key_index: usize) {
if !self.is_waiting_for_key() {
warn!(concat!(
"Chip8::end_wait_for_key_press called but the VM ",
"wasn't waiting for a key press - ignoring"
));
return;
}
self.v[self.wait_for_key.1 as usize] = key_index as u8;
self.wait_for_key.0 = false;
self.pc += 2;
}
/// Get the value stored in the register VX.
pub fn register(&self, x: usize) -> u8 {
self.v[x]
}
/// Get the index register.
pub fn index(&self) -> usize {
self.i
}
/// Get the program counter value.
pub fn pc(&self) -> usize {
self.pc
}
/// Load a Chip8 rom from the given filepath.
/// If the operation fails, return a String explaining why.
pub fn load(&mut self, filepath: &Path) -> Option<String> {
let file = match File::open(filepath) {
Ok(f) => f,
Err(ref why) => {
return Some(format!(
"couldn't open rom file \"{}\" : {}",
filepath.display(),
Error::description(why)
));
}
};
for (i, b) in file.bytes().enumerate() {
//if b.is_none() /* EOF */ { break; }
match b {
Ok(byte) => self.memory[self.pc + i] = byte,
Err(e) => {
return Some(format!("error while reading ROM : {}", e.to_string()));
}
}
}
None
}
/// Emulate a Chip8 CPU cycle.
/// Return true if the loaded program is done.
pub fn emulate_cycle(&mut self) -> bool {
// Is the program finished ?
if self.pc >= 4094 {
return true;
}
// Fetch and execute the opcode to execute ;
// an opcode being 2 bytes long, we need to read 2 bytes from memory
let op = (self.memory[self.pc] as u16) << 8 | (self.memory[self.pc + 1] as u16);
// println!("{:0>4X} {:0>4X}", self.opcode, self.pc); // DEBUG
self.opcode = op;
self.execute_opcode(op);
false
}
/// Execute a single opcode.
pub fn execute_opcode(&mut self, op: u16) {
// For easier matching, get the values (nibbles) A, B, C, D
// if the opcode is 0xABCD.
let opcode_tuple = (
((op & 0xF000) >> 12) as u8,
((op & 0x0F00) >> 8) as u8,
((op & 0x00F0) >> 4) as u8,
(op & 0x000F) as u8,
);
//println!("{:0>4X}/{:X},{:X},{:X},{:X}", self.opcode, a, b, c, d);
// Opcode decoding
match opcode_tuple {
(0x0, 0x0, 0xE, 0x0) => self.cls(),
(0x0, 0x0, 0xE, 0xE) => self.ret(),
// 0NNN = sys addr : ignore
(0x1, _, _, _) => self.jump_addr(op & 0x0FFF),
(0x2, _, _, _) => self.call_addr(op & 0x0FFF),
(0x3, x, _, _) => self.se_vx_nn(x, (op & 0x00FF) as u8),
(0x4, x, _, _) => self.sne_vx_nn(x, (op & 0x00FF) as u8),
(0x5, x, y, 0x0) => self.se_vx_vy(x, y),
(0x6, x, _, _) => self.ld_vx_nn(x, (op & 0x00FF) as u8),
(0x7, x, _, _) => self.add_vx_nn(x, (op & 0x00FF) as u8),
(0x8, x, y, 0x0) => self.ld_vx_vy(x, y),
(0x8, x, y, 0x1) => self.or_vx_vy(x, y),
(0x8, x, y, 0x2) => self.and_vx_vy(x, y),
(0x8, x, y, 0x3) => self.xor_vx_vy(x, y),
(0x8, x, y, 0x4) => self.add_vx_vy(x, y),
(0x8, x, y, 0x5) => self.sub_vx_vy(x, y),
(0x8, x, y, 0x6) => self.shr_vx_vy(x, y),
(0x8, x, y, 0x7) => self.subn_vx_vy(x, y),
(0x8, x, y, 0xE) => self.shl_vx_vy(x, y),
(0x9, x, y, 0x0) => self.sne_vx_vy(x, y),
(0xA, _, _, _) => self.ld_i_addr(op & 0x0FFF),
(0xB, _, _, _) => {
let v0 = self.v[0] as u16; // sacrifice to the god of borrows
self.jump_addr(op & 0x0FFF + v0);
}
(0xC, x, _, _) => self.rnd_vx_nn(x, (op & 0x00FF) as u8),
(0xD, x, y, n) => self.drw_vx_vy_n(x, y, n),
(0xE, x, 0x9, 0xE) => self.skp_vx(x),
(0xE, x, 0xA, 0x1) => self.sknp_vx(x),
(0xF, x, 0x0, 0x7) => self.ld_vx_dt(x),
(0xF, x, 0x0, 0xA) => self.ld_vx_key(x),
(0xF, x, 0x1, 0x5) => self.ld_dt_vx(x),
(0xF, x, 0x1, 0x8) => self.ld_st_vx(x),
(0xF, x, 0x1, 0xE) => self.add_i_vx(x),
(0xF, x, 0x2, 0x9) => self.ld_i_font_vx(x),
(0xF, x, 0x3, 0x3) => self.ld_mem_i_bcd_vx(x),
(0xF, x, 0x5, 0x5) => self.ld_mem_i_regs(x),
(0xF, x, 0x6, 0x5) => self.ld_regs_mem_i(x),
_ => op_not_implemented!(op, self.pc),
}
}
/// Clear the screen.
fn cls(&mut self) {
self.display.clear();
self.pc += 2;
}
/// Return from a subroutine, by setting the program counter to the address
/// popped from the stack.
fn ret(&mut self) {
self.sp -= 1;
let addr = self.stack[self.sp];
self.jump_addr(addr);
self.pc += 2;
}
/// Jump to the given address of the form 0x0NNN.
fn jump_addr(&mut self, addr: u16) {
self.pc = addr as usize;
}
/// Execute the subroutine at the provided address pushing the current
/// program counter to the stack and jumping to the given address of the
/// form 0x0NNN.
/// TODO : handle stack overflow error ?
fn call_addr(&mut self, addr: u16) {
self.stack[self.sp] = self.pc as u16;
self.sp += 1;
self.jump_addr(addr);
}
/// Skip the next instruction if the value of register VX is equal to 0xNN.
fn se_vx_nn(&mut self, x: u8, nn: u8) {
self.pc += if self.v[x as usize] == nn { 4 } else { 2 };
}
/// Skip the next instruction if the value of register VX isn't equal to
/// 0xNN.
fn sne_vx_nn(&mut self, x: u8, nn: u8) {
self.pc += if self.v[x as usize] != nn { 4 } else { 2 };
}
/// Skip the next instruction if the value of register VX is equal to the
/// value of register VY.
fn se_vx_vy(&mut self, x: u8, y: u8) {
self.pc += if self.v[x as usize] == self.v[y as usize] {
4
} else {
2
};
}
/// Skip the next instruction if the value of register VX is not equal to
/// the value of register VY.
fn sne_vx_vy(&mut self, x: u8, y: u8) {
self.pc += if self.v[x as usize] != self.v[y as usize] {
4
} else {
2
};
}
/// Skip the next instruction if the key of index VX is currently pressed.
fn skp_vx(&mut self, x: u8) {
self.pc += match self.keypad.get_key_state(self.v[x as usize] as usize) {
Keystate::Pressed => 4,
Keystate::Released => 2,
};
}
/// Skip the next instruction if the key of index VX is currently released.
fn sknp_vx(&mut self, x: u8) {
self.pc += match self.keypad.get_key_state(self.v[x as usize] as usize) {
Keystate::Pressed => 2,
Keystate::Released => 4,
};
}
/// Store the value 0xNN in the the register VX.
fn ld_vx_nn(&mut self, x: u8, nn: u8) {
self.v[x as usize] = nn;
self.pc += 2;
}
/// Store the value of the register VY in the register VX.
fn ld_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] = self.v[y as usize];
self.pc += 2;
}
/// Store the memory address 0x0NNN in the register I.
fn ld_i_addr(&mut self, addr: u16) {
self.i = addr as usize;
self.pc += 2;
}
/// Add the value 0xNN to the register VX, wrapping around the result if
/// needed (VX is an unsigned byte so its maximum value is 255).
fn add_vx_nn(&mut self, x: u8, nn: u8) {
let new_vx_u16 = self.v[x as usize] as u16 + nn as u16; // no overflow
self.v[x as usize] = new_vx_u16 as u8; // wrap around the value
self.pc += 2;
}
/// Add the value of register VX to the value of register I.
fn add_i_vx(&mut self, x: u8) {
self.i += self.v[x as usize] as usize;
self.pc += 2;
}
/// Set VX to (VX OR VY).
fn or_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] |= self.v[y as usize];
self.pc += 2;
}
/// Set VX to (VX AND VY).
fn and_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] &= self.v[y as usize];
self.pc += 2;
}
/// Set VX to (VX XOR VY).
fn xor_vx_vy(&mut self, x: u8, y: u8) {
self.v[x as usize] ^= self.v[y as usize];
self.pc += 2;
}
/// Add the value of register VY to the value of register VX.
/// Set V_FLAG to 0x1 if a carry occurs, and to 0x0 otherwise.
fn add_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_u16 = self.v[x as usize] as u16 + self.v[y as usize] as u16;
self.v[x as usize] = new_vx_u16 as u8;
self.v[FLAG] = if new_vx_u16 > 255 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Substract the value of register VY from the value of register VX, and
/// store the (wrapped) result in register VX.
/// Set V_FLAG to 0x1 if a borrow occurs, and to 0x0 otherwise.
fn sub_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_i8 = self.v[x as usize] as i8 - self.v[y as usize] as i8;
self.v[x as usize] = new_vx_i8 as u8;
self.v[FLAG] = if new_vx_i8 < 0 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Substract the value of register VX from the value of register VY, and
/// store the (wrapped) result in register VX.
/// Set V_FLAG to 0x1 if a borrow occurs, and to 0x0 otherwise.
fn subn_vx_vy(&mut self, x: u8, y: u8) {
let new_vx_i8 = self.v[y as usize] as i8 - self.v[x as usize] as i8;
self.v[x as usize] = new_vx_i8 as u8;
self.v[FLAG] = if new_vx_i8 < 0 { 0x1 } else { 0x0 };
self.pc += 2;
}
/// Store the value of the register VY shifted right one bit in register VX
/// and set register VF to the least significant bit prior to the shift.
/// NB : references disagree on this opcode, we use the one defined here :
/// http://mattmik.com/chip8.html
/// If shift_op_use_vy is false, will consider VX instead of VY.
fn shr_vx_vy(&mut self, x: u8, y: u8) {
let shift_on = if self.shift_op_use_vy { y } else { x };
self.v[FLAG] = self.v[shift_on as usize] & 0x01;
self.v[x as usize] = self.v[shift_on as usize] >> 1;
self.pc += 2;
}
/// Same as 'shr_vx_vy' but with a left shift.
/// Set register VF to the most significant bit prior to the shift.
/// If shift_op_use_vy is false, will consider VX instead of VY.
fn shl_vx_vy(&mut self, x: u8, y: u8) {
let shift_on = if self.shift_op_use_vy { y } else { x };
self.v[FLAG] = self.v[shift_on as usize] & 0x80;
self.v[x as usize] = self.v[shift_on as usize] << 1;
self.pc += 2;
}
/// Set VX to a random byte with a mask of 0xNN.
fn rnd_vx_nn(&mut self, x: u8, nn: u8) {
self.v[x as usize] = random::<u8>() & nn;
self.pc += 2;
}
/// Draw a sprite at position VX, VY with 0xN bytes of sprite data starting
/// at the address stored in I. N is thus the height of the sprite.
/// The drawing is implemented by 'Display' as a XOR operation.
/// VF will act here as a collision flag, i.e. if any set pixel is erased
/// set it to 0x1, and to 0x0 otherwise.
fn drw_vx_vy_n(&mut self, x: u8, y: u8, n: u8) {
let pos_x = self.v[x as usize] as usize;
let pos_y = self.v[y as usize] as usize;
let mem_start = self.i;
let mem_end = self.i + n as usize;
if self
.display
.draw(pos_x, pos_y, &self.memory[mem_start..mem_end])
{
self.v[FLAG] = 0x1;
} else {
self.v[FLAG] = 0x0;
}
self.pc += 2;
}
/// Store the current value of the delay timer in register VX.
fn ld_vx_dt(&mut self, x: u8) {
self.v[x as usize] = self.delay_timer;
self.pc += 2;
}
/// Set the delay timer to the value stored in register VX.
fn ld_dt_vx(&mut self, x: u8) {
self.delay_timer = self.v[x as usize];
self.pc += 2;
}
/// Set the sound timer to the value stored in register VX.
fn ld_st_vx(&mut self, x: u8) {
self.sound_timer = self.v[x as usize];
self.pc += 2;
}
/// Wait for a key press and store the result in the register VX.
/// Implementation : the emulation application must trigger the
/// 'end_wait_for_key_press' function ; this allows to achieve better
/// decoupling from the framerate.
fn ld_vx_key(&mut self, x: u8) {
self.wait_for_key = (true, x);
/*for i in 0..16 {
match self.keypad.get_key_state(i) {
Keystate::Pressed => {
self.v[x as usize] = i as u8;
self.pc += 2;
break;
}
Keystate::Released => {},
}
}*/
}
/// Set I to the memory address of the sprite data corresponding to the
/// hexadecimal digit (0x0..0xF) stored in register VX.
/// Will use the internal fontset stored in memory.
fn ld_i_font_vx(&mut self, x: u8) {
// the font set is in the memory range 0x0..0x80
// and each character is represented by 5 bytes
self.i = (self.v[x as usize] * 5) as usize;
self.pc += 2;
}
/// Store the Binary-Coded Decimal equivalent of the value stored in
/// register VX in memory at the addresses I, I+1, and I+2.
fn ld_mem_i_bcd_vx(&mut self, x: u8) {
// VX is a byte : its decimal value is in 0..256
let vx = self.v[x as usize];
self.memory[self.i] = vx / 100;
self.memory[self.i + 1] = (vx / 10) % 10;
self.memory[self.i + 2] = (vx % 100) % 10;
self.pc += 2;
}
/// Store the values of registers V0 to VX inclusive in memory starting at
/// the address I, and set I to I + X + 1 after operation.
fn ld_mem_i_regs(&mut self, x: u8) {
let x_usize = x as usize;
for j in 0..(x_usize + 1) {
self.memory[self.i + j] = self.v[j];
}
self.i += x_usize + 1;
self.pc += 2;
}
/// Fill registers V0 to VX inclusive with the values stored in memory
/// starting at the address I.
fn ld_regs_mem_i(&mut self, x: u8) {
let x_usize = x as usize;
for j in 0..(x_usize + 1) {
self.v[j] = self.memory[self.i + j];
}
self.i += x_usize + 1;
self.pc += 2;
}
}
| should_shift_op_use_vy | identifier_name |
base-command.ts | import AWS, { Organizations, STS } from 'aws-sdk';
import { AssumeRoleRequest } from 'aws-sdk/clients/sts';
import { SharedIniFileCredentialsOptions } from 'aws-sdk/lib/credentials/shared_ini_file_credentials';
import { Command } from 'commander';
import { existsSync, readFileSync } from 'fs';
import * as ini from 'ini';
import { AwsOrganization } from '../aws-provider/aws-organization';
import { AwsOrganizationReader } from '../aws-provider/aws-organization-reader';
import { AwsOrganizationWriter } from '../aws-provider/aws-organization-writer';
import { AwsUtil } from '../aws-util';
import { ConsoleUtil } from '../console-util';
import { OrganizationBinder } from '../org-binder/org-binder';
import { TaskProvider } from '../org-binder/org-tasks-provider';
import { OrgFormationError } from '../org-formation-error';
import { TemplateRoot } from '../parser/parser';
import { PersistedState } from '../state/persisted-state';
import { S3StorageProvider } from '../state/storage-provider';
import { DefaultTemplate, DefaultTemplateWriter } from '../writer/default-template-writer';
export abstract class BaseCliCommand<T extends ICommandArgs> {
protected command: Command;
protected firstArg: any;
constructor(command?: Command, name?: string, description?: string, firstArgName?: string) {
if (command !== undefined && name !== undefined) {
this.command = command.command(name);
if (description !== undefined) {
this.command.description(description);
}
this.command.allowUnknownOption(false);
this.addOptions(this.command);
this.command.action(async (firstArg: string) => {
if (firstArgName && (typeof firstArg !== 'object')) {
this.command[firstArgName] = firstArg;
}
this.invoke();
});
}
}
public async generateDefaultTemplate(): Promise<DefaultTemplate> {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
const writer = new DefaultTemplateWriter(awsOrganization);
const template = await writer.generateDefaultTemplate();
template.template = template.template.replace(/( *)-\n\1 {2}/g, '$1- ');
const parsedTemplate = TemplateRoot.createFromContents(template.template, './');
template.state.setPreviousTemplate(parsedTemplate.source);
return template;
}
public async getState(command: ICommandArgs): Promise<PersistedState> {
if (command.state) {
return command.state;
}
const storageProvider = await this.getStateBucket(command);
const accountId = await AwsUtil.GetMasterAccountId();
try {
const state = await PersistedState.Load(storageProvider, accountId);
command.state = state;
return state;
} catch (err) {
if (err && err.code === 'NoSuchBucket') {
throw new OrgFormationError(`unable to load previously committed state, reason: bucket '${storageProvider.bucketName}' does not exist in current account.`);
}
throw err;
}
}
public async invoke() {
try {
await this.initialize(this.command as any as ICommandArgs);
await this.performCommand(this.command as any as T);
} catch (err) {
if (err instanceof OrgFormationError) {
ConsoleUtil.LogError(err.message);
} else {
if (err.code && err.requestId) {
ConsoleUtil.LogError(`error: ${err.code}, aws-request-id: ${err.requestId}`);
ConsoleUtil.LogError(err.message);
} else {
ConsoleUtil.LogError(`unexpected error occurred...`, err);
}
}
process.exitCode = 1;
}
}
protected abstract async performCommand(command: T): Promise<void>;
protected addOptions(command: Command) {
command.option('--state-bucket-name [state-bucket-name]', 'bucket name that contains state file', 'organization-formation-${AWS::AccountId}');
command.option('--state-object [state-object]', 'key for object used to store state', 'state.json');
command.option('--profile [profile]', 'aws profile to use');
}
protected async getOrganizationBinder(template: TemplateRoot, state: PersistedState) {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
await awsOrganization.initialize();
const awsWriter = new AwsOrganizationWriter(organizations, awsOrganization);
const taskProvider = new TaskProvider(template, state, awsWriter);
const binder = new OrganizationBinder(template, state, taskProvider);
return binder;
}
protected async createOrGetStateBucket(command: ICommandArgs, region: string): Promise<S3StorageProvider> { | try {
await storageProvider.create(region);
} catch (err) {
if (err && err.code === 'BucketAlreadyOwnedByYou') {
return storageProvider;
}
throw err;
}
return storageProvider;
}
protected async getStateBucket(command: ICommandArgs): Promise<S3StorageProvider> {
const objectKey = command.stateObject;
const stateBucketName = await this.GetStateBucketName(command);
const storageProvider = await S3StorageProvider.Create(stateBucketName, objectKey);
return storageProvider;
}
protected async GetStateBucketName(command: ICommandArgs): Promise<string> {
const bucketName = command.stateBucketName || 'organization-formation-${AWS::AccountId}';
if (bucketName.indexOf('${AWS::AccountId}') >= 0) {
const accountId = await AwsUtil.GetMasterAccountId();
return bucketName.replace('${AWS::AccountId}', accountId);
}
return bucketName;
}
protected parseStackParameters(commandParameters?: string | {}) {
if (commandParameters && typeof commandParameters === 'object') {
return commandParameters;
}
const parameters: Record<string, string> = {};
if (commandParameters && typeof commandParameters === 'string') {
const parameterParts = commandParameters.split(' ');
for (const parameterPart of parameterParts) {
const parameterAttributes = parameterPart.split(',');
if (parameterAttributes.length === 1) {
const parts = parameterAttributes[0].split('=');
if (parts.length !== 2) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected either key=val or ParameterKey=key,ParameterVaue=val.`);
}
parameters[parts[0]] = parts[1];
} else {
const key = parameterAttributes.find((x) => x.startsWith('ParameterKey='));
const value = parameterAttributes.find((x) => x.startsWith('ParameterValue='));
if (key === undefined || value === undefined) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected ParameterKey=key,ParameterVaue=val`);
}
const paramKey = key.substr(13);
const paramVal = value.substr(15);
parameters[paramKey] = paramVal;
}
}
}
return parameters;
}
private async customInitializationIncludingMFASupport(command: ICommandArgs) {
const profileName = command.profile ? command.profile : 'default';
const homeDir = require('os').homedir();
// todo: add support for windows?
if (!existsSync(homeDir + '/.aws/config')) {
return;
}
const awsconfig = readFileSync(homeDir + '/.aws/config').toString('utf8');
const contents = ini.parse(awsconfig);
const profile = contents['profile ' + profileName];
if (profile && profile.source_profile) {
const awssecrets = readFileSync(homeDir + '/.aws/credentials').toString('utf8');
const secrets = ini.parse(awssecrets);
const creds = secrets[profile.source_profile];
const sts = new STS({ credentials: { accessKeyId: creds.aws_access_key_id, secretAccessKey: creds.aws_secret_access_key } });
const token = await ConsoleUtil.Readline(`👋 Enter MFA code for ${profile.mfa_serial}`);
const assumeRoleReq: AssumeRoleRequest = {
RoleArn: profile.role_arn,
RoleSessionName: 'organization-build',
SerialNumber: profile.mfa_serial,
TokenCode: token,
};
try {
const tokens = await sts.assumeRole(assumeRoleReq).promise();
AWS.config.credentials = { accessKeyId: tokens.Credentials.AccessKeyId, secretAccessKey: tokens.Credentials.SecretAccessKey, sessionToken: tokens.Credentials.SessionToken };
} catch (err) {
throw new OrgFormationError(`unable to assume role, error: \n${err}`);
}
}
}
private async initialize(command: ICommandArgs) {
if (command.initialized) { return; }
try {
await this.customInitializationIncludingMFASupport(command);
} catch (err) {
if (err instanceof OrgFormationError) {
throw err;
}
ConsoleUtil.LogInfo(`custom initialization failed, not support for MFA token\n${err}`);
}
const options: SharedIniFileCredentialsOptions = {};
if (command.profile) {
options.profile = command.profile;
}
const credentials = new AWS.SharedIniFileCredentials(options);
if (credentials.accessKeyId) {
AWS.config.credentials = credentials;
}
command.initialized = true;
}
}
export interface ICommandArgs {
stateBucketName: string;
stateObject: string;
profile?: string;
state?: PersistedState;
initialized?: boolean;
} | const storageProvider = await this.getStateBucket(command); | random_line_split |
base-command.ts | import AWS, { Organizations, STS } from 'aws-sdk';
import { AssumeRoleRequest } from 'aws-sdk/clients/sts';
import { SharedIniFileCredentialsOptions } from 'aws-sdk/lib/credentials/shared_ini_file_credentials';
import { Command } from 'commander';
import { existsSync, readFileSync } from 'fs';
import * as ini from 'ini';
import { AwsOrganization } from '../aws-provider/aws-organization';
import { AwsOrganizationReader } from '../aws-provider/aws-organization-reader';
import { AwsOrganizationWriter } from '../aws-provider/aws-organization-writer';
import { AwsUtil } from '../aws-util';
import { ConsoleUtil } from '../console-util';
import { OrganizationBinder } from '../org-binder/org-binder';
import { TaskProvider } from '../org-binder/org-tasks-provider';
import { OrgFormationError } from '../org-formation-error';
import { TemplateRoot } from '../parser/parser';
import { PersistedState } from '../state/persisted-state';
import { S3StorageProvider } from '../state/storage-provider';
import { DefaultTemplate, DefaultTemplateWriter } from '../writer/default-template-writer';
export abstract class BaseCliCommand<T extends ICommandArgs> {
protected command: Command;
protected firstArg: any;
constructor(command?: Command, name?: string, description?: string, firstArgName?: string) |
public async generateDefaultTemplate(): Promise<DefaultTemplate> {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
const writer = new DefaultTemplateWriter(awsOrganization);
const template = await writer.generateDefaultTemplate();
template.template = template.template.replace(/( *)-\n\1 {2}/g, '$1- ');
const parsedTemplate = TemplateRoot.createFromContents(template.template, './');
template.state.setPreviousTemplate(parsedTemplate.source);
return template;
}
public async getState(command: ICommandArgs): Promise<PersistedState> {
if (command.state) {
return command.state;
}
const storageProvider = await this.getStateBucket(command);
const accountId = await AwsUtil.GetMasterAccountId();
try {
const state = await PersistedState.Load(storageProvider, accountId);
command.state = state;
return state;
} catch (err) {
if (err && err.code === 'NoSuchBucket') {
throw new OrgFormationError(`unable to load previously committed state, reason: bucket '${storageProvider.bucketName}' does not exist in current account.`);
}
throw err;
}
}
public async invoke() {
try {
await this.initialize(this.command as any as ICommandArgs);
await this.performCommand(this.command as any as T);
} catch (err) {
if (err instanceof OrgFormationError) {
ConsoleUtil.LogError(err.message);
} else {
if (err.code && err.requestId) {
ConsoleUtil.LogError(`error: ${err.code}, aws-request-id: ${err.requestId}`);
ConsoleUtil.LogError(err.message);
} else {
ConsoleUtil.LogError(`unexpected error occurred...`, err);
}
}
process.exitCode = 1;
}
}
protected abstract async performCommand(command: T): Promise<void>;
protected addOptions(command: Command) {
command.option('--state-bucket-name [state-bucket-name]', 'bucket name that contains state file', 'organization-formation-${AWS::AccountId}');
command.option('--state-object [state-object]', 'key for object used to store state', 'state.json');
command.option('--profile [profile]', 'aws profile to use');
}
protected async getOrganizationBinder(template: TemplateRoot, state: PersistedState) {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
await awsOrganization.initialize();
const awsWriter = new AwsOrganizationWriter(organizations, awsOrganization);
const taskProvider = new TaskProvider(template, state, awsWriter);
const binder = new OrganizationBinder(template, state, taskProvider);
return binder;
}
protected async createOrGetStateBucket(command: ICommandArgs, region: string): Promise<S3StorageProvider> {
const storageProvider = await this.getStateBucket(command);
try {
await storageProvider.create(region);
} catch (err) {
if (err && err.code === 'BucketAlreadyOwnedByYou') {
return storageProvider;
}
throw err;
}
return storageProvider;
}
protected async getStateBucket(command: ICommandArgs): Promise<S3StorageProvider> {
const objectKey = command.stateObject;
const stateBucketName = await this.GetStateBucketName(command);
const storageProvider = await S3StorageProvider.Create(stateBucketName, objectKey);
return storageProvider;
}
protected async GetStateBucketName(command: ICommandArgs): Promise<string> {
const bucketName = command.stateBucketName || 'organization-formation-${AWS::AccountId}';
if (bucketName.indexOf('${AWS::AccountId}') >= 0) {
const accountId = await AwsUtil.GetMasterAccountId();
return bucketName.replace('${AWS::AccountId}', accountId);
}
return bucketName;
}
protected parseStackParameters(commandParameters?: string | {}) {
if (commandParameters && typeof commandParameters === 'object') {
return commandParameters;
}
const parameters: Record<string, string> = {};
if (commandParameters && typeof commandParameters === 'string') {
const parameterParts = commandParameters.split(' ');
for (const parameterPart of parameterParts) {
const parameterAttributes = parameterPart.split(',');
if (parameterAttributes.length === 1) {
const parts = parameterAttributes[0].split('=');
if (parts.length !== 2) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected either key=val or ParameterKey=key,ParameterVaue=val.`);
}
parameters[parts[0]] = parts[1];
} else {
const key = parameterAttributes.find((x) => x.startsWith('ParameterKey='));
const value = parameterAttributes.find((x) => x.startsWith('ParameterValue='));
if (key === undefined || value === undefined) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected ParameterKey=key,ParameterVaue=val`);
}
const paramKey = key.substr(13);
const paramVal = value.substr(15);
parameters[paramKey] = paramVal;
}
}
}
return parameters;
}
private async customInitializationIncludingMFASupport(command: ICommandArgs) {
const profileName = command.profile ? command.profile : 'default';
const homeDir = require('os').homedir();
// todo: add support for windows?
if (!existsSync(homeDir + '/.aws/config')) {
return;
}
const awsconfig = readFileSync(homeDir + '/.aws/config').toString('utf8');
const contents = ini.parse(awsconfig);
const profile = contents['profile ' + profileName];
if (profile && profile.source_profile) {
const awssecrets = readFileSync(homeDir + '/.aws/credentials').toString('utf8');
const secrets = ini.parse(awssecrets);
const creds = secrets[profile.source_profile];
const sts = new STS({ credentials: { accessKeyId: creds.aws_access_key_id, secretAccessKey: creds.aws_secret_access_key } });
const token = await ConsoleUtil.Readline(`👋 Enter MFA code for ${profile.mfa_serial}`);
const assumeRoleReq: AssumeRoleRequest = {
RoleArn: profile.role_arn,
RoleSessionName: 'organization-build',
SerialNumber: profile.mfa_serial,
TokenCode: token,
};
try {
const tokens = await sts.assumeRole(assumeRoleReq).promise();
AWS.config.credentials = { accessKeyId: tokens.Credentials.AccessKeyId, secretAccessKey: tokens.Credentials.SecretAccessKey, sessionToken: tokens.Credentials.SessionToken };
} catch (err) {
throw new OrgFormationError(`unable to assume role, error: \n${err}`);
}
}
}
private async initialize(command: ICommandArgs) {
if (command.initialized) { return; }
try {
await this.customInitializationIncludingMFASupport(command);
} catch (err) {
if (err instanceof OrgFormationError) {
throw err;
}
ConsoleUtil.LogInfo(`custom initialization failed, not support for MFA token\n${err}`);
}
const options: SharedIniFileCredentialsOptions = {};
if (command.profile) {
options.profile = command.profile;
}
const credentials = new AWS.SharedIniFileCredentials(options);
if (credentials.accessKeyId) {
AWS.config.credentials = credentials;
}
command.initialized = true;
}
}
export interface ICommandArgs {
stateBucketName: string;
stateObject: string;
profile?: string;
state?: PersistedState;
initialized?: boolean;
}
| {
if (command !== undefined && name !== undefined) {
this.command = command.command(name);
if (description !== undefined) {
this.command.description(description);
}
this.command.allowUnknownOption(false);
this.addOptions(this.command);
this.command.action(async (firstArg: string) => {
if (firstArgName && (typeof firstArg !== 'object')) {
this.command[firstArgName] = firstArg;
}
this.invoke();
});
}
} | identifier_body |
base-command.ts | import AWS, { Organizations, STS } from 'aws-sdk';
import { AssumeRoleRequest } from 'aws-sdk/clients/sts';
import { SharedIniFileCredentialsOptions } from 'aws-sdk/lib/credentials/shared_ini_file_credentials';
import { Command } from 'commander';
import { existsSync, readFileSync } from 'fs';
import * as ini from 'ini';
import { AwsOrganization } from '../aws-provider/aws-organization';
import { AwsOrganizationReader } from '../aws-provider/aws-organization-reader';
import { AwsOrganizationWriter } from '../aws-provider/aws-organization-writer';
import { AwsUtil } from '../aws-util';
import { ConsoleUtil } from '../console-util';
import { OrganizationBinder } from '../org-binder/org-binder';
import { TaskProvider } from '../org-binder/org-tasks-provider';
import { OrgFormationError } from '../org-formation-error';
import { TemplateRoot } from '../parser/parser';
import { PersistedState } from '../state/persisted-state';
import { S3StorageProvider } from '../state/storage-provider';
import { DefaultTemplate, DefaultTemplateWriter } from '../writer/default-template-writer';
export abstract class BaseCliCommand<T extends ICommandArgs> {
protected command: Command;
protected firstArg: any;
constructor(command?: Command, name?: string, description?: string, firstArgName?: string) {
if (command !== undefined && name !== undefined) {
this.command = command.command(name);
if (description !== undefined) {
this.command.description(description);
}
this.command.allowUnknownOption(false);
this.addOptions(this.command);
this.command.action(async (firstArg: string) => {
if (firstArgName && (typeof firstArg !== 'object')) {
this.command[firstArgName] = firstArg;
}
this.invoke();
});
}
}
public async generateDefaultTemplate(): Promise<DefaultTemplate> {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
const writer = new DefaultTemplateWriter(awsOrganization);
const template = await writer.generateDefaultTemplate();
template.template = template.template.replace(/( *)-\n\1 {2}/g, '$1- ');
const parsedTemplate = TemplateRoot.createFromContents(template.template, './');
template.state.setPreviousTemplate(parsedTemplate.source);
return template;
}
public async getState(command: ICommandArgs): Promise<PersistedState> {
if (command.state) {
return command.state;
}
const storageProvider = await this.getStateBucket(command);
const accountId = await AwsUtil.GetMasterAccountId();
try {
const state = await PersistedState.Load(storageProvider, accountId);
command.state = state;
return state;
} catch (err) {
if (err && err.code === 'NoSuchBucket') {
throw new OrgFormationError(`unable to load previously committed state, reason: bucket '${storageProvider.bucketName}' does not exist in current account.`);
}
throw err;
}
}
public async invoke() {
try {
await this.initialize(this.command as any as ICommandArgs);
await this.performCommand(this.command as any as T);
} catch (err) {
if (err instanceof OrgFormationError) {
ConsoleUtil.LogError(err.message);
} else {
if (err.code && err.requestId) {
ConsoleUtil.LogError(`error: ${err.code}, aws-request-id: ${err.requestId}`);
ConsoleUtil.LogError(err.message);
} else {
ConsoleUtil.LogError(`unexpected error occurred...`, err);
}
}
process.exitCode = 1;
}
}
protected abstract async performCommand(command: T): Promise<void>;
protected addOptions(command: Command) {
command.option('--state-bucket-name [state-bucket-name]', 'bucket name that contains state file', 'organization-formation-${AWS::AccountId}');
command.option('--state-object [state-object]', 'key for object used to store state', 'state.json');
command.option('--profile [profile]', 'aws profile to use');
}
protected async getOrganizationBinder(template: TemplateRoot, state: PersistedState) {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
await awsOrganization.initialize();
const awsWriter = new AwsOrganizationWriter(organizations, awsOrganization);
const taskProvider = new TaskProvider(template, state, awsWriter);
const binder = new OrganizationBinder(template, state, taskProvider);
return binder;
}
protected async createOrGetStateBucket(command: ICommandArgs, region: string): Promise<S3StorageProvider> {
const storageProvider = await this.getStateBucket(command);
try {
await storageProvider.create(region);
} catch (err) {
if (err && err.code === 'BucketAlreadyOwnedByYou') {
return storageProvider;
}
throw err;
}
return storageProvider;
}
protected async getStateBucket(command: ICommandArgs): Promise<S3StorageProvider> {
const objectKey = command.stateObject;
const stateBucketName = await this.GetStateBucketName(command);
const storageProvider = await S3StorageProvider.Create(stateBucketName, objectKey);
return storageProvider;
}
protected async GetStateBucketName(command: ICommandArgs): Promise<string> {
const bucketName = command.stateBucketName || 'organization-formation-${AWS::AccountId}';
if (bucketName.indexOf('${AWS::AccountId}') >= 0) {
const accountId = await AwsUtil.GetMasterAccountId();
return bucketName.replace('${AWS::AccountId}', accountId);
}
return bucketName;
}
protected parseStackParameters(commandParameters?: string | {}) {
if (commandParameters && typeof commandParameters === 'object') {
return commandParameters;
}
const parameters: Record<string, string> = {};
if (commandParameters && typeof commandParameters === 'string') {
const parameterParts = commandParameters.split(' ');
for (const parameterPart of parameterParts) {
const parameterAttributes = parameterPart.split(',');
if (parameterAttributes.length === 1) {
const parts = parameterAttributes[0].split('=');
if (parts.length !== 2) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected either key=val or ParameterKey=key,ParameterVaue=val.`);
}
parameters[parts[0]] = parts[1];
} else |
}
}
return parameters;
}
private async customInitializationIncludingMFASupport(command: ICommandArgs) {
const profileName = command.profile ? command.profile : 'default';
const homeDir = require('os').homedir();
// todo: add support for windows?
if (!existsSync(homeDir + '/.aws/config')) {
return;
}
const awsconfig = readFileSync(homeDir + '/.aws/config').toString('utf8');
const contents = ini.parse(awsconfig);
const profile = contents['profile ' + profileName];
if (profile && profile.source_profile) {
const awssecrets = readFileSync(homeDir + '/.aws/credentials').toString('utf8');
const secrets = ini.parse(awssecrets);
const creds = secrets[profile.source_profile];
const sts = new STS({ credentials: { accessKeyId: creds.aws_access_key_id, secretAccessKey: creds.aws_secret_access_key } });
const token = await ConsoleUtil.Readline(`👋 Enter MFA code for ${profile.mfa_serial}`);
const assumeRoleReq: AssumeRoleRequest = {
RoleArn: profile.role_arn,
RoleSessionName: 'organization-build',
SerialNumber: profile.mfa_serial,
TokenCode: token,
};
try {
const tokens = await sts.assumeRole(assumeRoleReq).promise();
AWS.config.credentials = { accessKeyId: tokens.Credentials.AccessKeyId, secretAccessKey: tokens.Credentials.SecretAccessKey, sessionToken: tokens.Credentials.SessionToken };
} catch (err) {
throw new OrgFormationError(`unable to assume role, error: \n${err}`);
}
}
}
private async initialize(command: ICommandArgs) {
if (command.initialized) { return; }
try {
await this.customInitializationIncludingMFASupport(command);
} catch (err) {
if (err instanceof OrgFormationError) {
throw err;
}
ConsoleUtil.LogInfo(`custom initialization failed, not support for MFA token\n${err}`);
}
const options: SharedIniFileCredentialsOptions = {};
if (command.profile) {
options.profile = command.profile;
}
const credentials = new AWS.SharedIniFileCredentials(options);
if (credentials.accessKeyId) {
AWS.config.credentials = credentials;
}
command.initialized = true;
}
}
export interface ICommandArgs {
stateBucketName: string;
stateObject: string;
profile?: string;
state?: PersistedState;
initialized?: boolean;
}
| {
const key = parameterAttributes.find((x) => x.startsWith('ParameterKey='));
const value = parameterAttributes.find((x) => x.startsWith('ParameterValue='));
if (key === undefined || value === undefined) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected ParameterKey=key,ParameterVaue=val`);
}
const paramKey = key.substr(13);
const paramVal = value.substr(15);
parameters[paramKey] = paramVal;
} | conditional_block |
base-command.ts | import AWS, { Organizations, STS } from 'aws-sdk';
import { AssumeRoleRequest } from 'aws-sdk/clients/sts';
import { SharedIniFileCredentialsOptions } from 'aws-sdk/lib/credentials/shared_ini_file_credentials';
import { Command } from 'commander';
import { existsSync, readFileSync } from 'fs';
import * as ini from 'ini';
import { AwsOrganization } from '../aws-provider/aws-organization';
import { AwsOrganizationReader } from '../aws-provider/aws-organization-reader';
import { AwsOrganizationWriter } from '../aws-provider/aws-organization-writer';
import { AwsUtil } from '../aws-util';
import { ConsoleUtil } from '../console-util';
import { OrganizationBinder } from '../org-binder/org-binder';
import { TaskProvider } from '../org-binder/org-tasks-provider';
import { OrgFormationError } from '../org-formation-error';
import { TemplateRoot } from '../parser/parser';
import { PersistedState } from '../state/persisted-state';
import { S3StorageProvider } from '../state/storage-provider';
import { DefaultTemplate, DefaultTemplateWriter } from '../writer/default-template-writer';
export abstract class BaseCliCommand<T extends ICommandArgs> {
protected command: Command;
protected firstArg: any;
| (command?: Command, name?: string, description?: string, firstArgName?: string) {
if (command !== undefined && name !== undefined) {
this.command = command.command(name);
if (description !== undefined) {
this.command.description(description);
}
this.command.allowUnknownOption(false);
this.addOptions(this.command);
this.command.action(async (firstArg: string) => {
if (firstArgName && (typeof firstArg !== 'object')) {
this.command[firstArgName] = firstArg;
}
this.invoke();
});
}
}
public async generateDefaultTemplate(): Promise<DefaultTemplate> {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
const writer = new DefaultTemplateWriter(awsOrganization);
const template = await writer.generateDefaultTemplate();
template.template = template.template.replace(/( *)-\n\1 {2}/g, '$1- ');
const parsedTemplate = TemplateRoot.createFromContents(template.template, './');
template.state.setPreviousTemplate(parsedTemplate.source);
return template;
}
public async getState(command: ICommandArgs): Promise<PersistedState> {
if (command.state) {
return command.state;
}
const storageProvider = await this.getStateBucket(command);
const accountId = await AwsUtil.GetMasterAccountId();
try {
const state = await PersistedState.Load(storageProvider, accountId);
command.state = state;
return state;
} catch (err) {
if (err && err.code === 'NoSuchBucket') {
throw new OrgFormationError(`unable to load previously committed state, reason: bucket '${storageProvider.bucketName}' does not exist in current account.`);
}
throw err;
}
}
public async invoke() {
try {
await this.initialize(this.command as any as ICommandArgs);
await this.performCommand(this.command as any as T);
} catch (err) {
if (err instanceof OrgFormationError) {
ConsoleUtil.LogError(err.message);
} else {
if (err.code && err.requestId) {
ConsoleUtil.LogError(`error: ${err.code}, aws-request-id: ${err.requestId}`);
ConsoleUtil.LogError(err.message);
} else {
ConsoleUtil.LogError(`unexpected error occurred...`, err);
}
}
process.exitCode = 1;
}
}
protected abstract async performCommand(command: T): Promise<void>;
protected addOptions(command: Command) {
command.option('--state-bucket-name [state-bucket-name]', 'bucket name that contains state file', 'organization-formation-${AWS::AccountId}');
command.option('--state-object [state-object]', 'key for object used to store state', 'state.json');
command.option('--profile [profile]', 'aws profile to use');
}
protected async getOrganizationBinder(template: TemplateRoot, state: PersistedState) {
const organizations = new Organizations({ region: 'us-east-1' });
const awsReader = new AwsOrganizationReader(organizations);
const awsOrganization = new AwsOrganization(awsReader);
await awsOrganization.initialize();
const awsWriter = new AwsOrganizationWriter(organizations, awsOrganization);
const taskProvider = new TaskProvider(template, state, awsWriter);
const binder = new OrganizationBinder(template, state, taskProvider);
return binder;
}
protected async createOrGetStateBucket(command: ICommandArgs, region: string): Promise<S3StorageProvider> {
const storageProvider = await this.getStateBucket(command);
try {
await storageProvider.create(region);
} catch (err) {
if (err && err.code === 'BucketAlreadyOwnedByYou') {
return storageProvider;
}
throw err;
}
return storageProvider;
}
protected async getStateBucket(command: ICommandArgs): Promise<S3StorageProvider> {
const objectKey = command.stateObject;
const stateBucketName = await this.GetStateBucketName(command);
const storageProvider = await S3StorageProvider.Create(stateBucketName, objectKey);
return storageProvider;
}
protected async GetStateBucketName(command: ICommandArgs): Promise<string> {
const bucketName = command.stateBucketName || 'organization-formation-${AWS::AccountId}';
if (bucketName.indexOf('${AWS::AccountId}') >= 0) {
const accountId = await AwsUtil.GetMasterAccountId();
return bucketName.replace('${AWS::AccountId}', accountId);
}
return bucketName;
}
protected parseStackParameters(commandParameters?: string | {}) {
if (commandParameters && typeof commandParameters === 'object') {
return commandParameters;
}
const parameters: Record<string, string> = {};
if (commandParameters && typeof commandParameters === 'string') {
const parameterParts = commandParameters.split(' ');
for (const parameterPart of parameterParts) {
const parameterAttributes = parameterPart.split(',');
if (parameterAttributes.length === 1) {
const parts = parameterAttributes[0].split('=');
if (parts.length !== 2) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected either key=val or ParameterKey=key,ParameterVaue=val.`);
}
parameters[parts[0]] = parts[1];
} else {
const key = parameterAttributes.find((x) => x.startsWith('ParameterKey='));
const value = parameterAttributes.find((x) => x.startsWith('ParameterValue='));
if (key === undefined || value === undefined) {
throw new OrgFormationError(`error reading parameter ${parameterAttributes[0]}. Expected ParameterKey=key,ParameterVaue=val`);
}
const paramKey = key.substr(13);
const paramVal = value.substr(15);
parameters[paramKey] = paramVal;
}
}
}
return parameters;
}
private async customInitializationIncludingMFASupport(command: ICommandArgs) {
const profileName = command.profile ? command.profile : 'default';
const homeDir = require('os').homedir();
// todo: add support for windows?
if (!existsSync(homeDir + '/.aws/config')) {
return;
}
const awsconfig = readFileSync(homeDir + '/.aws/config').toString('utf8');
const contents = ini.parse(awsconfig);
const profile = contents['profile ' + profileName];
if (profile && profile.source_profile) {
const awssecrets = readFileSync(homeDir + '/.aws/credentials').toString('utf8');
const secrets = ini.parse(awssecrets);
const creds = secrets[profile.source_profile];
const sts = new STS({ credentials: { accessKeyId: creds.aws_access_key_id, secretAccessKey: creds.aws_secret_access_key } });
const token = await ConsoleUtil.Readline(`👋 Enter MFA code for ${profile.mfa_serial}`);
const assumeRoleReq: AssumeRoleRequest = {
RoleArn: profile.role_arn,
RoleSessionName: 'organization-build',
SerialNumber: profile.mfa_serial,
TokenCode: token,
};
try {
const tokens = await sts.assumeRole(assumeRoleReq).promise();
AWS.config.credentials = { accessKeyId: tokens.Credentials.AccessKeyId, secretAccessKey: tokens.Credentials.SecretAccessKey, sessionToken: tokens.Credentials.SessionToken };
} catch (err) {
throw new OrgFormationError(`unable to assume role, error: \n${err}`);
}
}
}
private async initialize(command: ICommandArgs) {
if (command.initialized) { return; }
try {
await this.customInitializationIncludingMFASupport(command);
} catch (err) {
if (err instanceof OrgFormationError) {
throw err;
}
ConsoleUtil.LogInfo(`custom initialization failed, not support for MFA token\n${err}`);
}
const options: SharedIniFileCredentialsOptions = {};
if (command.profile) {
options.profile = command.profile;
}
const credentials = new AWS.SharedIniFileCredentials(options);
if (credentials.accessKeyId) {
AWS.config.credentials = credentials;
}
command.initialized = true;
}
}
export interface ICommandArgs {
stateBucketName: string;
stateObject: string;
profile?: string;
state?: PersistedState;
initialized?: boolean;
}
| constructor | identifier_name |
converter.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package stanza
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"math"
"runtime"
"sync"
"time"
"github.com/open-telemetry/opentelemetry-log-collection/entry"
"go.opentelemetry.io/collector/consumer/pdata"
"go.uber.org/zap"
)
const (
// DefaultFlushInterval is the default flush interval.
DefaultFlushInterval = 100 * time.Millisecond
// DefaultMaxFlushCount is the default max flush count.
DefaultMaxFlushCount = 100
)
// Converter converts entry.Entry into pdata.Logs aggregating translated
// entries into logs coming from the same Resource.
// Logs are being sent out based on the flush interval and/or the maximum
// batch size.
//
// The diagram below illustrates the internal communication inside the Converter:
//
// ┌─────────────────────────────────┐
// │ Batch() │
// ┌─────────┤ Ingests log entries and sends │
// │ │ them onto a workerChan │
// │ └─────────────────────────────────┘
// │
// │ ┌───────────────────────────────────────────────────┐
// ├─► workerLoop() │
// │ │ ┌─────────────────────────────────────────────────┴─┐
// ├─┼─► workerLoop() │
// │ │ │ ┌─────────────────────────────────────────────────┴─┐
// └─┼─┼─► workerLoop() │
// └─┤ │ consumes sent log entries from workerChan, │
// │ │ translates received entries to pdata.LogRecords,│
// └─┤ marshalls them to JSON and send them onto │
// │ batchChan │
// └─────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ batchLoop() │
// │ consumes from batchChan, aggregates log records │
// │ by marshaled Resource and based on flush interval │
// │ and maxFlushCount decides whether to send the │
// │ aggregated buffer to flushChan │
// └───────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ flushLoop() │
// │ receives log records from flushChan and sends │
// │ them onto pLogsChan which is consumed by │
// │ downstream consumers via OutChannel() │
// └─────────────────────────────────────────────────────┘
//
type Converter struct {
// pLogsChan is a channel on which batched logs will be sent to.
pLogsChan chan pdata.Logs
stopOnce sync.Once
stopChan chan struct{}
// workerChan is an internal communication channel that gets the log
// entries from Batch() calls and it receives the data in workerLoop().
workerChan chan *entry.Entry
// workerCount configures the amount of workers started.
workerCount int
// batchChan obtains log entries converted by the pool of workers,
// in a form of logRecords grouped by Resource and then after aggregating
// them decides based on maxFlushCount if the flush should be triggered.
// If also serves the ticker flushes configured by flushInterval.
batchChan chan *workerItem
// flushInterval defines how often we flush the aggregated log entries.
flushInterval time.Duration
// maxFlushCount defines what's the amount of entries in the buffer that
// will trigger a flush of log entries.
maxFlushCount uint
// flushChan is an internal channel used for transporting batched pdata.Logs.
flushChan chan pdata.Logs
// data holds currently converted and aggregated log entries, grouped by Resource.
data map[string]pdata.Logs
// logRecordCount holds the number of translated and accumulated log Records
// and is compared against maxFlushCount to make a decision whether to flush.
logRecordCount uint
// wg is a WaitGroup that makes sure that we wait for spun up goroutines exit
// when Stop() is called.
wg sync.WaitGroup
logger *zap.Logger
}
type ConverterOption interface {
apply(*Converter)
}
type optionFunc func(*Converter)
func (f optionFunc) apply(c *Converter) {
f(c)
}
func WithFlushInterval(interval time.Duration) ConverterOption {
return optionFunc(func(c *Converter) {
c.flushInterval = interval
})
}
func WithMaxFlushCount(count uint) ConverterOption {
return optionFunc(func(c *Converter) {
c.maxFlushCount = count
})
}
func WithLogger(logger *zap.Logger) ConverterOption {
return optionFunc(func(c *Converter) {
c.logger = logger
})
}
func WithWorkerCount(workerCount int) ConverterOption {
return optionFunc(func(c *Converter) {
c.workerCount = workerCount
})
}
func NewConverter(opts ...ConverterOption) *Converter {
c := &Converter{
workerChan: make(chan *entry.Entry),
workerCount: int(math.Max(1, float64(runtime.NumCPU()/4))),
batchChan: make(chan *workerItem),
data: make(map[string]pdata.Logs),
pLogsChan: make(chan pdata.Logs),
stopChan: make(chan struct{}),
logger: zap.NewNop(),
flushChan: make(chan pdata.Logs),
flushInterval: DefaultFlushInterval,
maxFlushCount: DefaultMaxFlushCount,
}
for _, opt := range opts {
opt.apply(c)
}
return c
}
func (c *Converter) Start() {
c.logger.Debug("Starting log converter", zap.Int("worker_count", c.workerCount))
for i := 0; i < c.workerCount; i++ {
c.wg.Add(1)
go c.workerLoop()
}
c.wg.Add(1)
go c.batchLoop()
c.wg.Add(1)
go c.flushLoop()
}
func (c *Converter) Stop() {
c.stopOnce.Do(func() {
close(c.stopChan)
c.wg.Wait()
close(c.pLogsChan)
})
}
// OutChannel returns the channel on which converted entries will be sent to.
func (c *Converter) OutChannel() <-chan pdata.Logs {
return c.pLogsChan
}
type workerItem struct {
Resource map[string]string
LogRecord pdata.LogRecord
ResourceString string
}
// workerLoop is responsible for obtaining log entries from Batch() calls,
// converting them to pdata.LogRecords and sending them together with the
// associated Resource through the batchChan for aggregation.
func (c *Converter) workerLoop() {
defer c.wg.Done()
var (
buff = bytes.Buffer{}
encoder = json.NewEncoder(&buff)
)
for {
select {
case <-c.stopChan:
return
case e, ok := <-c.workerChan:
if !ok {
return
}
buff.Reset()
lr := convert(e)
if err := encoder.Encode(e.Resource); err != nil {
c.logger.Debug("Failed marshaling entry.Resource to JSON",
zap.Any("resource", e.Resource),
)
continue
}
select {
case c.batchChan <- &workerItem{
Resource: e.Resource,
ResourceString: buff.String(),
LogRecord: lr,
}:
case <-c.stopChan:
}
}
}
}
// batchLoop is responsible for receiving the converted log entries and aggregating
// them by Resource.
// Whenever maxFlushCount is reached or the ticker ticks a flush is triggered.
func (c *Converter) batchLoop() {
defer c.wg.Done()
ticker := time.NewTicker(c.flushInterval)
defer ticker.Stop()
for {
select {
case wi, ok := <-c.batchChan:
if !ok {
return
}
pLogs, ok := c.data[wi.ResourceString]
if ok {
pLogs.ResourceLogs().
At(0).InstrumentationLibraryLogs().
At(0).Logs().Append(wi.LogRecord)
} else {
pLogs = pdata.NewLogs()
logs := pLogs.ResourceLogs()
logs.Resize(1)
rls := logs.At(0)
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(wi.Resource))
for k, v := range wi.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs()
ills.Resize(1)
ills.At(0).Logs().Append(wi.LogRecord)
}
c.data[wi.ResourceString] = pLogs
c.logRecordCount++
if c.logRecordCount >= c.maxFlushCount {
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
}
case <-ticker.C:
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
case <-c.stopChan:
return
}
}
}
func (c *Converter) flushLoop() {
defer c.wg.Done()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
for {
select {
case <-c.stopChan:
return
case pLogs := <-c.flushChan:
if err := c.flush(ctx, pLogs); err != nil {
c.logger.Debug("Problem sending log entries",
zap.Error(err),
)
}
}
}
}
// flush flushes provided pdata.Logs entries onto a channel.
func (c *Converter) flush(ctx context.Context, pLogs pdata.Logs) error {
doneChan := ctx.Done()
select {
case <-doneChan:
return fmt.Errorf("flushing log entries interrupted, err: %w", ctx.Err())
case c.pLogsChan <- pLogs:
// The converter has been stopped so bail the flush.
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
return nil
}
// Batch takes in an entry.Entry and sends it to an available worker for processing.
func (c *Converter) Batch(e *entry.Entry) error {
select {
case c.workerChan <- e:
return nil
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
}
// convert converts one entry.Entry into pdata.LogRecord allocating it.
func convert(ent *entry.Entry) pdata.LogRecord {
dest := pdata.NewLogRecord()
convertInto(ent, dest)
return dest
}
// Convert converts one entry.Entry into pdata.Logs.
// To be used in a stateless setting like tests where ease of use is more
// important than performance or throughput.
func Convert(ent *entry.Entry) pdata.Logs {
pLogs := pdata.NewLogs()
logs := pLogs.ResourceLogs()
rls := logs.AppendEmpty()
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(ent.Resource))
for k, v := range ent.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs().AppendEmpty()
lr := ills.Logs().AppendEmpty()
convertInto(ent, lr)
return pLogs
}
// convertInto converts entry.Entry into provided pdata.LogRecord.
func convertInto(ent *entry.Entry, dest pdata.LogRecord) {
dest.SetTimestamp(pdata.TimestampFromTime(ent.Timestamp))
sevText, sevNum := convertSeverity(ent.Severity)
dest.SetSeverityText(sevText)
dest.SetSeverityNumber(sevNum)
if l := len(ent.Attributes); l > 0 {
attributes := dest.Attributes()
attributes.EnsureCapacity(l)
for k, v := range ent.Attributes {
attributes.InsertString(k, v)
}
}
insertToAttributeVal(ent.Body, dest.Body())
if ent.TraceId != nil {
var buffer [16]byte
copy(buffer[0:16], ent.TraceId)
dest.SetTraceID(pdata.NewTraceID(buffer))
}
if ent.SpanId != nil {
var buffer [8]byte
copy(buffer[0:8], ent.SpanId)
dest.SetSpanID(pdata.NewSp | value.(type) {
case bool:
dest.SetBoolVal(t)
case string:
dest.SetStringVal(t)
case []byte:
dest.SetStringVal(string(t))
case int64:
dest.SetIntVal(t)
case int32:
dest.SetIntVal(int64(t))
case int16:
dest.SetIntVal(int64(t))
case int8:
dest.SetIntVal(int64(t))
case int:
dest.SetIntVal(int64(t))
case uint64:
dest.SetIntVal(int64(t))
case uint32:
dest.SetIntVal(int64(t))
case uint16:
dest.SetIntVal(int64(t))
case uint8:
dest.SetIntVal(int64(t))
case uint:
dest.SetIntVal(int64(t))
case float64:
dest.SetDoubleVal(t)
case float32:
dest.SetDoubleVal(float64(t))
case map[string]interface{}:
toAttributeMap(t).CopyTo(dest)
case []interface{}:
toAttributeArray(t).CopyTo(dest)
default:
dest.SetStringVal(fmt.Sprintf("%v", t))
}
}
func toAttributeMap(obsMap map[string]interface{}) pdata.AttributeValue {
attVal := pdata.NewAttributeValueMap()
attMap := attVal.MapVal()
attMap.EnsureCapacity(len(obsMap))
for k, v := range obsMap {
switch t := v.(type) {
case bool:
attMap.InsertBool(k, t)
case string:
attMap.InsertString(k, t)
case []byte:
attMap.InsertString(k, string(t))
case int64:
attMap.InsertInt(k, t)
case int32:
attMap.InsertInt(k, int64(t))
case int16:
attMap.InsertInt(k, int64(t))
case int8:
attMap.InsertInt(k, int64(t))
case int:
attMap.InsertInt(k, int64(t))
case uint64:
attMap.InsertInt(k, int64(t))
case uint32:
attMap.InsertInt(k, int64(t))
case uint16:
attMap.InsertInt(k, int64(t))
case uint8:
attMap.InsertInt(k, int64(t))
case uint:
attMap.InsertInt(k, int64(t))
case float64:
attMap.InsertDouble(k, t)
case float32:
attMap.InsertDouble(k, float64(t))
case map[string]interface{}:
subMap := toAttributeMap(t)
attMap.Insert(k, subMap)
case []interface{}:
arr := toAttributeArray(t)
attMap.Insert(k, arr)
default:
attMap.InsertString(k, fmt.Sprintf("%v", t))
}
}
return attVal
}
func toAttributeArray(obsArr []interface{}) pdata.AttributeValue {
arrVal := pdata.NewAttributeValueArray()
arr := arrVal.ArrayVal()
arr.Resize(len(obsArr))
for i, v := range obsArr {
insertToAttributeVal(v, arr.At(i))
}
return arrVal
}
func convertSeverity(s entry.Severity) (string, pdata.SeverityNumber) {
switch {
// Handle standard severity levels
case s == entry.Catastrophe:
return "Fatal", pdata.SeverityNumberFATAL4
case s == entry.Emergency:
return "Error", pdata.SeverityNumberFATAL
case s == entry.Alert:
return "Error", pdata.SeverityNumberERROR3
case s == entry.Critical:
return "Error", pdata.SeverityNumberERROR2
case s == entry.Error:
return "Error", pdata.SeverityNumberERROR
case s == entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s == entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s == entry.Info:
return "Info", pdata.SeverityNumberINFO
case s == entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG
case s == entry.Trace:
return "Trace", pdata.SeverityNumberTRACE2
// Handle custom severity levels
case s > entry.Emergency:
return "Fatal", pdata.SeverityNumberFATAL2
case s > entry.Alert:
return "Error", pdata.SeverityNumberERROR4
case s > entry.Critical:
return "Error", pdata.SeverityNumberERROR3
case s > entry.Error:
return "Error", pdata.SeverityNumberERROR2
case s > entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s > entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s > entry.Info:
return "Info", pdata.SeverityNumberINFO2
case s > entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG2
case s > entry.Trace:
return "Trace", pdata.SeverityNumberTRACE3
case s > entry.Default:
return "Trace", pdata.SeverityNumberTRACE
default:
return "Undefined", pdata.SeverityNumberUNDEFINED
}
}
| anID(buffer))
}
if ent.TraceFlags != nil {
// The 8 least significant bits are the trace flags as defined in W3C Trace
// Context specification. Don't override the 24 reserved bits.
flags := dest.Flags()
flags = flags & 0xFFFFFF00
flags = flags | uint32(ent.TraceFlags[0])
dest.SetFlags(flags)
}
}
func insertToAttributeVal(value interface{}, dest pdata.AttributeValue) {
switch t := | identifier_body |
converter.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package stanza
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"math"
"runtime"
"sync"
"time"
"github.com/open-telemetry/opentelemetry-log-collection/entry"
"go.opentelemetry.io/collector/consumer/pdata"
"go.uber.org/zap"
)
const (
// DefaultFlushInterval is the default flush interval.
DefaultFlushInterval = 100 * time.Millisecond
// DefaultMaxFlushCount is the default max flush count.
DefaultMaxFlushCount = 100
)
// Converter converts entry.Entry into pdata.Logs aggregating translated
// entries into logs coming from the same Resource.
// Logs are being sent out based on the flush interval and/or the maximum
// batch size.
//
// The diagram below illustrates the internal communication inside the Converter:
//
// ┌─────────────────────────────────┐
// │ Batch() │
// ┌─────────┤ Ingests log entries and sends │
// │ │ them onto a workerChan │
// │ └─────────────────────────────────┘
// │
// │ ┌───────────────────────────────────────────────────┐
// ├─► workerLoop() │
// │ │ ┌─────────────────────────────────────────────────┴─┐
// ├─┼─► workerLoop() │
// │ │ │ ┌─────────────────────────────────────────────────┴─┐
// └─┼─┼─► workerLoop() │
// └─┤ │ consumes sent log entries from workerChan, │
// │ │ translates received entries to pdata.LogRecords,│
// └─┤ marshalls them to JSON and send them onto │
// │ batchChan │
// └─────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ batchLoop() │
// │ consumes from batchChan, aggregates log records │ | // │ by marshaled Resource and based on flush interval │
// │ and maxFlushCount decides whether to send the │
// │ aggregated buffer to flushChan │
// └───────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ flushLoop() │
// │ receives log records from flushChan and sends │
// │ them onto pLogsChan which is consumed by │
// │ downstream consumers via OutChannel() │
// └─────────────────────────────────────────────────────┘
//
type Converter struct {
// pLogsChan is a channel on which batched logs will be sent to.
pLogsChan chan pdata.Logs
stopOnce sync.Once
stopChan chan struct{}
// workerChan is an internal communication channel that gets the log
// entries from Batch() calls and it receives the data in workerLoop().
workerChan chan *entry.Entry
// workerCount configures the amount of workers started.
workerCount int
// batchChan obtains log entries converted by the pool of workers,
// in a form of logRecords grouped by Resource and then after aggregating
// them decides based on maxFlushCount if the flush should be triggered.
// If also serves the ticker flushes configured by flushInterval.
batchChan chan *workerItem
// flushInterval defines how often we flush the aggregated log entries.
flushInterval time.Duration
// maxFlushCount defines what's the amount of entries in the buffer that
// will trigger a flush of log entries.
maxFlushCount uint
// flushChan is an internal channel used for transporting batched pdata.Logs.
flushChan chan pdata.Logs
// data holds currently converted and aggregated log entries, grouped by Resource.
data map[string]pdata.Logs
// logRecordCount holds the number of translated and accumulated log Records
// and is compared against maxFlushCount to make a decision whether to flush.
logRecordCount uint
// wg is a WaitGroup that makes sure that we wait for spun up goroutines exit
// when Stop() is called.
wg sync.WaitGroup
logger *zap.Logger
}
type ConverterOption interface {
apply(*Converter)
}
type optionFunc func(*Converter)
func (f optionFunc) apply(c *Converter) {
f(c)
}
func WithFlushInterval(interval time.Duration) ConverterOption {
return optionFunc(func(c *Converter) {
c.flushInterval = interval
})
}
func WithMaxFlushCount(count uint) ConverterOption {
return optionFunc(func(c *Converter) {
c.maxFlushCount = count
})
}
func WithLogger(logger *zap.Logger) ConverterOption {
return optionFunc(func(c *Converter) {
c.logger = logger
})
}
func WithWorkerCount(workerCount int) ConverterOption {
return optionFunc(func(c *Converter) {
c.workerCount = workerCount
})
}
func NewConverter(opts ...ConverterOption) *Converter {
c := &Converter{
workerChan: make(chan *entry.Entry),
workerCount: int(math.Max(1, float64(runtime.NumCPU()/4))),
batchChan: make(chan *workerItem),
data: make(map[string]pdata.Logs),
pLogsChan: make(chan pdata.Logs),
stopChan: make(chan struct{}),
logger: zap.NewNop(),
flushChan: make(chan pdata.Logs),
flushInterval: DefaultFlushInterval,
maxFlushCount: DefaultMaxFlushCount,
}
for _, opt := range opts {
opt.apply(c)
}
return c
}
func (c *Converter) Start() {
c.logger.Debug("Starting log converter", zap.Int("worker_count", c.workerCount))
for i := 0; i < c.workerCount; i++ {
c.wg.Add(1)
go c.workerLoop()
}
c.wg.Add(1)
go c.batchLoop()
c.wg.Add(1)
go c.flushLoop()
}
func (c *Converter) Stop() {
c.stopOnce.Do(func() {
close(c.stopChan)
c.wg.Wait()
close(c.pLogsChan)
})
}
// OutChannel returns the channel on which converted entries will be sent to.
func (c *Converter) OutChannel() <-chan pdata.Logs {
return c.pLogsChan
}
type workerItem struct {
Resource map[string]string
LogRecord pdata.LogRecord
ResourceString string
}
// workerLoop is responsible for obtaining log entries from Batch() calls,
// converting them to pdata.LogRecords and sending them together with the
// associated Resource through the batchChan for aggregation.
func (c *Converter) workerLoop() {
defer c.wg.Done()
var (
buff = bytes.Buffer{}
encoder = json.NewEncoder(&buff)
)
for {
select {
case <-c.stopChan:
return
case e, ok := <-c.workerChan:
if !ok {
return
}
buff.Reset()
lr := convert(e)
if err := encoder.Encode(e.Resource); err != nil {
c.logger.Debug("Failed marshaling entry.Resource to JSON",
zap.Any("resource", e.Resource),
)
continue
}
select {
case c.batchChan <- &workerItem{
Resource: e.Resource,
ResourceString: buff.String(),
LogRecord: lr,
}:
case <-c.stopChan:
}
}
}
}
// batchLoop is responsible for receiving the converted log entries and aggregating
// them by Resource.
// Whenever maxFlushCount is reached or the ticker ticks a flush is triggered.
func (c *Converter) batchLoop() {
defer c.wg.Done()
ticker := time.NewTicker(c.flushInterval)
defer ticker.Stop()
for {
select {
case wi, ok := <-c.batchChan:
if !ok {
return
}
pLogs, ok := c.data[wi.ResourceString]
if ok {
pLogs.ResourceLogs().
At(0).InstrumentationLibraryLogs().
At(0).Logs().Append(wi.LogRecord)
} else {
pLogs = pdata.NewLogs()
logs := pLogs.ResourceLogs()
logs.Resize(1)
rls := logs.At(0)
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(wi.Resource))
for k, v := range wi.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs()
ills.Resize(1)
ills.At(0).Logs().Append(wi.LogRecord)
}
c.data[wi.ResourceString] = pLogs
c.logRecordCount++
if c.logRecordCount >= c.maxFlushCount {
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
}
case <-ticker.C:
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
case <-c.stopChan:
return
}
}
}
func (c *Converter) flushLoop() {
defer c.wg.Done()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
for {
select {
case <-c.stopChan:
return
case pLogs := <-c.flushChan:
if err := c.flush(ctx, pLogs); err != nil {
c.logger.Debug("Problem sending log entries",
zap.Error(err),
)
}
}
}
}
// flush flushes provided pdata.Logs entries onto a channel.
func (c *Converter) flush(ctx context.Context, pLogs pdata.Logs) error {
doneChan := ctx.Done()
select {
case <-doneChan:
return fmt.Errorf("flushing log entries interrupted, err: %w", ctx.Err())
case c.pLogsChan <- pLogs:
// The converter has been stopped so bail the flush.
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
return nil
}
// Batch takes in an entry.Entry and sends it to an available worker for processing.
func (c *Converter) Batch(e *entry.Entry) error {
select {
case c.workerChan <- e:
return nil
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
}
// convert converts one entry.Entry into pdata.LogRecord allocating it.
func convert(ent *entry.Entry) pdata.LogRecord {
dest := pdata.NewLogRecord()
convertInto(ent, dest)
return dest
}
// Convert converts one entry.Entry into pdata.Logs.
// To be used in a stateless setting like tests where ease of use is more
// important than performance or throughput.
func Convert(ent *entry.Entry) pdata.Logs {
pLogs := pdata.NewLogs()
logs := pLogs.ResourceLogs()
rls := logs.AppendEmpty()
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(ent.Resource))
for k, v := range ent.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs().AppendEmpty()
lr := ills.Logs().AppendEmpty()
convertInto(ent, lr)
return pLogs
}
// convertInto converts entry.Entry into provided pdata.LogRecord.
func convertInto(ent *entry.Entry, dest pdata.LogRecord) {
dest.SetTimestamp(pdata.TimestampFromTime(ent.Timestamp))
sevText, sevNum := convertSeverity(ent.Severity)
dest.SetSeverityText(sevText)
dest.SetSeverityNumber(sevNum)
if l := len(ent.Attributes); l > 0 {
attributes := dest.Attributes()
attributes.EnsureCapacity(l)
for k, v := range ent.Attributes {
attributes.InsertString(k, v)
}
}
insertToAttributeVal(ent.Body, dest.Body())
if ent.TraceId != nil {
var buffer [16]byte
copy(buffer[0:16], ent.TraceId)
dest.SetTraceID(pdata.NewTraceID(buffer))
}
if ent.SpanId != nil {
var buffer [8]byte
copy(buffer[0:8], ent.SpanId)
dest.SetSpanID(pdata.NewSpanID(buffer))
}
if ent.TraceFlags != nil {
// The 8 least significant bits are the trace flags as defined in W3C Trace
// Context specification. Don't override the 24 reserved bits.
flags := dest.Flags()
flags = flags & 0xFFFFFF00
flags = flags | uint32(ent.TraceFlags[0])
dest.SetFlags(flags)
}
}
func insertToAttributeVal(value interface{}, dest pdata.AttributeValue) {
switch t := value.(type) {
case bool:
dest.SetBoolVal(t)
case string:
dest.SetStringVal(t)
case []byte:
dest.SetStringVal(string(t))
case int64:
dest.SetIntVal(t)
case int32:
dest.SetIntVal(int64(t))
case int16:
dest.SetIntVal(int64(t))
case int8:
dest.SetIntVal(int64(t))
case int:
dest.SetIntVal(int64(t))
case uint64:
dest.SetIntVal(int64(t))
case uint32:
dest.SetIntVal(int64(t))
case uint16:
dest.SetIntVal(int64(t))
case uint8:
dest.SetIntVal(int64(t))
case uint:
dest.SetIntVal(int64(t))
case float64:
dest.SetDoubleVal(t)
case float32:
dest.SetDoubleVal(float64(t))
case map[string]interface{}:
toAttributeMap(t).CopyTo(dest)
case []interface{}:
toAttributeArray(t).CopyTo(dest)
default:
dest.SetStringVal(fmt.Sprintf("%v", t))
}
}
func toAttributeMap(obsMap map[string]interface{}) pdata.AttributeValue {
attVal := pdata.NewAttributeValueMap()
attMap := attVal.MapVal()
attMap.EnsureCapacity(len(obsMap))
for k, v := range obsMap {
switch t := v.(type) {
case bool:
attMap.InsertBool(k, t)
case string:
attMap.InsertString(k, t)
case []byte:
attMap.InsertString(k, string(t))
case int64:
attMap.InsertInt(k, t)
case int32:
attMap.InsertInt(k, int64(t))
case int16:
attMap.InsertInt(k, int64(t))
case int8:
attMap.InsertInt(k, int64(t))
case int:
attMap.InsertInt(k, int64(t))
case uint64:
attMap.InsertInt(k, int64(t))
case uint32:
attMap.InsertInt(k, int64(t))
case uint16:
attMap.InsertInt(k, int64(t))
case uint8:
attMap.InsertInt(k, int64(t))
case uint:
attMap.InsertInt(k, int64(t))
case float64:
attMap.InsertDouble(k, t)
case float32:
attMap.InsertDouble(k, float64(t))
case map[string]interface{}:
subMap := toAttributeMap(t)
attMap.Insert(k, subMap)
case []interface{}:
arr := toAttributeArray(t)
attMap.Insert(k, arr)
default:
attMap.InsertString(k, fmt.Sprintf("%v", t))
}
}
return attVal
}
func toAttributeArray(obsArr []interface{}) pdata.AttributeValue {
arrVal := pdata.NewAttributeValueArray()
arr := arrVal.ArrayVal()
arr.Resize(len(obsArr))
for i, v := range obsArr {
insertToAttributeVal(v, arr.At(i))
}
return arrVal
}
func convertSeverity(s entry.Severity) (string, pdata.SeverityNumber) {
switch {
// Handle standard severity levels
case s == entry.Catastrophe:
return "Fatal", pdata.SeverityNumberFATAL4
case s == entry.Emergency:
return "Error", pdata.SeverityNumberFATAL
case s == entry.Alert:
return "Error", pdata.SeverityNumberERROR3
case s == entry.Critical:
return "Error", pdata.SeverityNumberERROR2
case s == entry.Error:
return "Error", pdata.SeverityNumberERROR
case s == entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s == entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s == entry.Info:
return "Info", pdata.SeverityNumberINFO
case s == entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG
case s == entry.Trace:
return "Trace", pdata.SeverityNumberTRACE2
// Handle custom severity levels
case s > entry.Emergency:
return "Fatal", pdata.SeverityNumberFATAL2
case s > entry.Alert:
return "Error", pdata.SeverityNumberERROR4
case s > entry.Critical:
return "Error", pdata.SeverityNumberERROR3
case s > entry.Error:
return "Error", pdata.SeverityNumberERROR2
case s > entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s > entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s > entry.Info:
return "Info", pdata.SeverityNumberINFO2
case s > entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG2
case s > entry.Trace:
return "Trace", pdata.SeverityNumberTRACE3
case s > entry.Default:
return "Trace", pdata.SeverityNumberTRACE
default:
return "Undefined", pdata.SeverityNumberUNDEFINED
}
} | random_line_split | |
converter.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package stanza
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"math"
"runtime"
"sync"
"time"
"github.com/open-telemetry/opentelemetry-log-collection/entry"
"go.opentelemetry.io/collector/consumer/pdata"
"go.uber.org/zap"
)
const (
// DefaultFlushInterval is the default flush interval.
DefaultFlushInterval = 100 * time.Millisecond
// DefaultMaxFlushCount is the default max flush count.
DefaultMaxFlushCount = 100
)
// Converter converts entry.Entry into pdata.Logs aggregating translated
// entries into logs coming from the same Resource.
// Logs are being sent out based on the flush interval and/or the maximum
// batch size.
//
// The diagram below illustrates the internal communication inside the Converter:
//
// ┌─────────────────────────────────┐
// │ Batch() │
// ┌─────────┤ Ingests log entries and sends │
// │ │ them onto a workerChan │
// │ └─────────────────────────────────┘
// │
// │ ┌───────────────────────────────────────────────────┐
// ├─► workerLoop() │
// │ │ ┌─────────────────────────────────────────────────┴─┐
// ├─┼─► workerLoop() │
// │ │ │ ┌─────────────────────────────────────────────────┴─┐
// └─┼─┼─► workerLoop() │
// └─┤ │ consumes sent log entries from workerChan, │
// │ │ translates received entries to pdata.LogRecords,│
// └─┤ marshalls them to JSON and send them onto │
// │ batchChan │
// └─────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ batchLoop() │
// │ consumes from batchChan, aggregates log records │
// │ by marshaled Resource and based on flush interval │
// │ and maxFlushCount decides whether to send the │
// │ aggregated buffer to flushChan │
// └───────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ flushLoop() │
// │ receives log records from flushChan and sends │
// │ them onto pLogsChan which is consumed by │
// │ downstream consumers via OutChannel() │
// └─────────────────────────────────────────────────────┘
//
type Converter struct {
// pLogsChan is a channel on which batched logs will be sent to.
pLogsChan chan pdata.Logs
stopOnce sync.Once
stopChan chan struct{}
// workerChan is an internal communication channel that gets the log
// entries from Batch() calls and it receives the data in workerLoop().
workerChan chan *entry.Entry
// workerCount configures the amount of workers started.
workerCount int
// batchChan obtains log entries converted by the pool of workers,
// in a form of logRecords grouped by Resource and then after aggregating
// them decides based on maxFlushCount if the flush should be triggered.
// If also serves the ticker flushes configured by flushInterval.
batchChan chan *workerItem
// flushInterval defines how often we flush the aggregated log entries.
flushInterval time.Duration
// maxFlushCount defines what's the amount of entries in the buffer that
// will trigger a flush of log entries.
maxFlushCount uint
// flushChan is an internal channel used for transporting batched pdata.Logs.
flushChan chan pdata.Logs
// data holds currently converted and aggregated log entries, grouped by Resource.
data map[string]pdata.Logs
// logRecordCount holds the number of translated and accumulated log Records
// and is compared against maxFlushCount to make a decision whether to flush.
logRecordCount uint
// wg is a WaitGroup that makes sure that we wait for spun up goroutines exit
// when Stop() is called.
wg sync.WaitGroup
logger *zap.Logger
}
type ConverterOption interface {
apply(*Converter)
}
type optionFunc func(*Converter)
func (f optionFunc) apply(c *Converter) {
f(c)
}
func WithFlushInterval(interval time.Duration) ConverterOption {
return optionFunc(func(c *Converter) {
c.flushInterval = interval
})
}
func WithMaxFlushCount(count uint) ConverterOption {
return optionFunc(func(c *Converter) {
c.maxFlushCount = count
})
}
func WithLogger(logger *zap.Logger) ConverterOption {
return optionFunc(func(c *Converter) {
c.logger = logger
})
}
func WithWorkerCount(workerCount int) ConverterOption {
return optionFunc(func(c *Converter) {
c.workerCount = workerCount
})
}
func NewConverter(opts ...ConverterOption) *Converter {
c := &Converter{
workerChan: make(chan *entry.Entry),
workerCount: int(math.Max(1, float64(runtime.NumCPU()/4))),
batchChan: make(chan *workerItem),
data: make(map[string]pdata.Logs),
pLogsChan: make(chan pdata.Logs),
stopChan: make(chan struct{}),
logger: zap.NewNop(),
flushChan: make(chan pdata.Logs),
flushInterval: DefaultFlushInterval,
maxFlushCount: DefaultMaxFlushCount,
}
for _, opt := range opts {
opt.apply(c)
}
return c
}
func (c *Converter) Start() {
c.logger.Debug("Starting log converter", zap.Int("worker_count", c.workerCount))
for i := 0; i < c.workerCount; i++ {
c.wg.Add(1)
go c.workerLoop()
}
c.wg.Add(1)
go c.batchLoop()
c.wg.Add(1)
go c.flushLoop()
}
func (c *Converter) Stop() {
c.stopOnce.Do(func() {
close(c.stopChan)
c.wg.Wait()
close(c.pLogsChan)
})
}
// OutChannel returns the channel on which converted entries will be sent to.
func (c *Converter) OutChannel() <-chan pdata.Logs {
return c.pLogsChan
}
type workerItem struct {
Resource map[string]string
LogRecord pdata.LogRecord
ResourceString string
}
// workerLoop is responsible for obtaining log entries from Batch() calls,
// converting them to pdata.LogRecords and sending them together with the
// associated Resource through the batchChan for aggregation.
func (c *Converter) workerLoop() {
defer c.wg.Done()
var (
buff = bytes.Buffer{}
encoder = json.NewEncoder(&buff)
)
for {
select {
case <-c.stopChan:
return
case e, ok := <-c.workerChan:
if !ok {
return
}
buff.Reset()
lr := convert(e)
if err := encoder.Encode(e.Resource); err != nil {
c.logger.Debug("Failed marshaling entry.Resource to JSON",
zap.Any("resource", e.Resource),
)
continue
}
select {
case c.batchChan <- &workerItem{
Resource: e.Resource,
ResourceString: buff.String(),
LogRecord: lr,
}:
case <-c.stopChan:
}
}
}
}
// batchLoop is responsible for receiving the converted log entries and aggregating
// them by Resource.
// Whenever maxFlushCount is reached or the ticker ticks a flush is triggered.
func (c *Converter) batchLoop() {
defer c.wg.Done()
ticker := time.NewTicker(c.flushInterval)
defer ticker.Stop()
for {
select {
case wi, ok := <-c.batchChan:
if !ok {
return
}
pLogs, ok := c.data[wi.ResourceString]
if ok {
pLogs.ResourceLogs().
At(0).InstrumentationLibraryLogs().
At(0).Logs().Append(wi.LogRecord)
| pLogs = pdata.NewLogs()
logs := pLogs.ResourceLogs()
logs.Resize(1)
rls := logs.At(0)
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(wi.Resource))
for k, v := range wi.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs()
ills.Resize(1)
ills.At(0).Logs().Append(wi.LogRecord)
}
c.data[wi.ResourceString] = pLogs
c.logRecordCount++
if c.logRecordCount >= c.maxFlushCount {
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
}
case <-ticker.C:
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
case <-c.stopChan:
return
}
}
}
func (c *Converter) flushLoop() {
defer c.wg.Done()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
for {
select {
case <-c.stopChan:
return
case pLogs := <-c.flushChan:
if err := c.flush(ctx, pLogs); err != nil {
c.logger.Debug("Problem sending log entries",
zap.Error(err),
)
}
}
}
}
// flush flushes provided pdata.Logs entries onto a channel.
func (c *Converter) flush(ctx context.Context, pLogs pdata.Logs) error {
doneChan := ctx.Done()
select {
case <-doneChan:
return fmt.Errorf("flushing log entries interrupted, err: %w", ctx.Err())
case c.pLogsChan <- pLogs:
// The converter has been stopped so bail the flush.
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
return nil
}
// Batch takes in an entry.Entry and sends it to an available worker for processing.
func (c *Converter) Batch(e *entry.Entry) error {
select {
case c.workerChan <- e:
return nil
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
}
// convert converts one entry.Entry into pdata.LogRecord allocating it.
func convert(ent *entry.Entry) pdata.LogRecord {
dest := pdata.NewLogRecord()
convertInto(ent, dest)
return dest
}
// Convert converts one entry.Entry into pdata.Logs.
// To be used in a stateless setting like tests where ease of use is more
// important than performance or throughput.
func Convert(ent *entry.Entry) pdata.Logs {
pLogs := pdata.NewLogs()
logs := pLogs.ResourceLogs()
rls := logs.AppendEmpty()
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(ent.Resource))
for k, v := range ent.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs().AppendEmpty()
lr := ills.Logs().AppendEmpty()
convertInto(ent, lr)
return pLogs
}
// convertInto converts entry.Entry into provided pdata.LogRecord.
func convertInto(ent *entry.Entry, dest pdata.LogRecord) {
dest.SetTimestamp(pdata.TimestampFromTime(ent.Timestamp))
sevText, sevNum := convertSeverity(ent.Severity)
dest.SetSeverityText(sevText)
dest.SetSeverityNumber(sevNum)
if l := len(ent.Attributes); l > 0 {
attributes := dest.Attributes()
attributes.EnsureCapacity(l)
for k, v := range ent.Attributes {
attributes.InsertString(k, v)
}
}
insertToAttributeVal(ent.Body, dest.Body())
if ent.TraceId != nil {
var buffer [16]byte
copy(buffer[0:16], ent.TraceId)
dest.SetTraceID(pdata.NewTraceID(buffer))
}
if ent.SpanId != nil {
var buffer [8]byte
copy(buffer[0:8], ent.SpanId)
dest.SetSpanID(pdata.NewSpanID(buffer))
}
if ent.TraceFlags != nil {
// The 8 least significant bits are the trace flags as defined in W3C Trace
// Context specification. Don't override the 24 reserved bits.
flags := dest.Flags()
flags = flags & 0xFFFFFF00
flags = flags | uint32(ent.TraceFlags[0])
dest.SetFlags(flags)
}
}
func insertToAttributeVal(value interface{}, dest pdata.AttributeValue) {
switch t := value.(type) {
case bool:
dest.SetBoolVal(t)
case string:
dest.SetStringVal(t)
case []byte:
dest.SetStringVal(string(t))
case int64:
dest.SetIntVal(t)
case int32:
dest.SetIntVal(int64(t))
case int16:
dest.SetIntVal(int64(t))
case int8:
dest.SetIntVal(int64(t))
case int:
dest.SetIntVal(int64(t))
case uint64:
dest.SetIntVal(int64(t))
case uint32:
dest.SetIntVal(int64(t))
case uint16:
dest.SetIntVal(int64(t))
case uint8:
dest.SetIntVal(int64(t))
case uint:
dest.SetIntVal(int64(t))
case float64:
dest.SetDoubleVal(t)
case float32:
dest.SetDoubleVal(float64(t))
case map[string]interface{}:
toAttributeMap(t).CopyTo(dest)
case []interface{}:
toAttributeArray(t).CopyTo(dest)
default:
dest.SetStringVal(fmt.Sprintf("%v", t))
}
}
func toAttributeMap(obsMap map[string]interface{}) pdata.AttributeValue {
attVal := pdata.NewAttributeValueMap()
attMap := attVal.MapVal()
attMap.EnsureCapacity(len(obsMap))
for k, v := range obsMap {
switch t := v.(type) {
case bool:
attMap.InsertBool(k, t)
case string:
attMap.InsertString(k, t)
case []byte:
attMap.InsertString(k, string(t))
case int64:
attMap.InsertInt(k, t)
case int32:
attMap.InsertInt(k, int64(t))
case int16:
attMap.InsertInt(k, int64(t))
case int8:
attMap.InsertInt(k, int64(t))
case int:
attMap.InsertInt(k, int64(t))
case uint64:
attMap.InsertInt(k, int64(t))
case uint32:
attMap.InsertInt(k, int64(t))
case uint16:
attMap.InsertInt(k, int64(t))
case uint8:
attMap.InsertInt(k, int64(t))
case uint:
attMap.InsertInt(k, int64(t))
case float64:
attMap.InsertDouble(k, t)
case float32:
attMap.InsertDouble(k, float64(t))
case map[string]interface{}:
subMap := toAttributeMap(t)
attMap.Insert(k, subMap)
case []interface{}:
arr := toAttributeArray(t)
attMap.Insert(k, arr)
default:
attMap.InsertString(k, fmt.Sprintf("%v", t))
}
}
return attVal
}
func toAttributeArray(obsArr []interface{}) pdata.AttributeValue {
arrVal := pdata.NewAttributeValueArray()
arr := arrVal.ArrayVal()
arr.Resize(len(obsArr))
for i, v := range obsArr {
insertToAttributeVal(v, arr.At(i))
}
return arrVal
}
func convertSeverity(s entry.Severity) (string, pdata.SeverityNumber) {
switch {
// Handle standard severity levels
case s == entry.Catastrophe:
return "Fatal", pdata.SeverityNumberFATAL4
case s == entry.Emergency:
return "Error", pdata.SeverityNumberFATAL
case s == entry.Alert:
return "Error", pdata.SeverityNumberERROR3
case s == entry.Critical:
return "Error", pdata.SeverityNumberERROR2
case s == entry.Error:
return "Error", pdata.SeverityNumberERROR
case s == entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s == entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s == entry.Info:
return "Info", pdata.SeverityNumberINFO
case s == entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG
case s == entry.Trace:
return "Trace", pdata.SeverityNumberTRACE2
// Handle custom severity levels
case s > entry.Emergency:
return "Fatal", pdata.SeverityNumberFATAL2
case s > entry.Alert:
return "Error", pdata.SeverityNumberERROR4
case s > entry.Critical:
return "Error", pdata.SeverityNumberERROR3
case s > entry.Error:
return "Error", pdata.SeverityNumberERROR2
case s > entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s > entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s > entry.Info:
return "Info", pdata.SeverityNumberINFO2
case s > entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG2
case s > entry.Trace:
return "Trace", pdata.SeverityNumberTRACE3
case s > entry.Default:
return "Trace", pdata.SeverityNumberTRACE
default:
return "Undefined", pdata.SeverityNumberUNDEFINED
}
}
| } else {
| identifier_name |
converter.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package stanza
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"math"
"runtime"
"sync"
"time"
"github.com/open-telemetry/opentelemetry-log-collection/entry"
"go.opentelemetry.io/collector/consumer/pdata"
"go.uber.org/zap"
)
const (
// DefaultFlushInterval is the default flush interval.
DefaultFlushInterval = 100 * time.Millisecond
// DefaultMaxFlushCount is the default max flush count.
DefaultMaxFlushCount = 100
)
// Converter converts entry.Entry into pdata.Logs aggregating translated
// entries into logs coming from the same Resource.
// Logs are being sent out based on the flush interval and/or the maximum
// batch size.
//
// The diagram below illustrates the internal communication inside the Converter:
//
// ┌─────────────────────────────────┐
// │ Batch() │
// ┌─────────┤ Ingests log entries and sends │
// │ │ them onto a workerChan │
// │ └─────────────────────────────────┘
// │
// │ ┌───────────────────────────────────────────────────┐
// ├─► workerLoop() │
// │ │ ┌─────────────────────────────────────────────────┴─┐
// ├─┼─► workerLoop() │
// │ │ │ ┌─────────────────────────────────────────────────┴─┐
// └─┼─┼─► workerLoop() │
// └─┤ │ consumes sent log entries from workerChan, │
// │ │ translates received entries to pdata.LogRecords,│
// └─┤ marshalls them to JSON and send them onto │
// │ batchChan │
// └─────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ batchLoop() │
// │ consumes from batchChan, aggregates log records │
// │ by marshaled Resource and based on flush interval │
// │ and maxFlushCount decides whether to send the │
// │ aggregated buffer to flushChan │
// └───────────────────────────┬─────────────────────────┘
// │
// ▼
// ┌─────────────────────────────────────────────────────┐
// │ flushLoop() │
// │ receives log records from flushChan and sends │
// │ them onto pLogsChan which is consumed by │
// │ downstream consumers via OutChannel() │
// └─────────────────────────────────────────────────────┘
//
type Converter struct {
// pLogsChan is a channel on which batched logs will be sent to.
pLogsChan chan pdata.Logs
stopOnce sync.Once
stopChan chan struct{}
// workerChan is an internal communication channel that gets the log
// entries from Batch() calls and it receives the data in workerLoop().
workerChan chan *entry.Entry
// workerCount configures the amount of workers started.
workerCount int
// batchChan obtains log entries converted by the pool of workers,
// in a form of logRecords grouped by Resource and then after aggregating
// them decides based on maxFlushCount if the flush should be triggered.
// If also serves the ticker flushes configured by flushInterval.
batchChan chan *workerItem
// flushInterval defines how often we flush the aggregated log entries.
flushInterval time.Duration
// maxFlushCount defines what's the amount of entries in the buffer that
// will trigger a flush of log entries.
maxFlushCount uint
// flushChan is an internal channel used for transporting batched pdata.Logs.
flushChan chan pdata.Logs
// data holds currently converted and aggregated log entries, grouped by Resource.
data map[string]pdata.Logs
// logRecordCount holds the number of translated and accumulated log Records
// and is compared against maxFlushCount to make a decision whether to flush.
logRecordCount uint
// wg is a WaitGroup that makes sure that we wait for spun up goroutines exit
// when Stop() is called.
wg sync.WaitGroup
logger *zap.Logger
}
type ConverterOption interface {
apply(*Converter)
}
type optionFunc func(*Converter)
func (f optionFunc) apply(c *Converter) {
f(c)
}
func WithFlushInterval(interval time.Duration) ConverterOption {
return optionFunc(func(c *Converter) {
c.flushInterval = interval
})
}
func WithMaxFlushCount(count uint) ConverterOption {
return optionFunc(func(c *Converter) {
c.maxFlushCount = count
})
}
func WithLogger(logger *zap.Logger) ConverterOption {
return optionFunc(func(c *Converter) {
c.logger = logger
})
}
func WithWorkerCount(workerCount int) ConverterOption {
return optionFunc(func(c *Converter) {
c.workerCount = workerCount
})
}
func NewConverter(opts ...ConverterOption) *Converter {
c := &Converter{
workerChan: make(chan *entry.Entry),
workerCount: int(math.Max(1, float64(runtime.NumCPU()/4))),
batchChan: make(chan *workerItem),
data: make(map[string]pdata.Logs),
pLogsChan: make(chan pdata.Logs),
stopChan: make(chan struct{}),
logger: zap.NewNop(),
flushChan: make(chan pdata.Logs),
flushInterval: DefaultFlushInterval,
maxFlushCount: DefaultMaxFlushCount,
}
for _, opt := range opts {
opt.apply(c)
}
return c
}
func (c *Converter) Start() {
c.logger.Debug("Starting log converter", zap.Int("worker_count", c.workerCount))
for i := 0; i < c.workerCount; i++ {
c.wg.Add(1)
go c.workerLoop()
}
c.wg.Add(1)
go c.batchLoop()
c.wg.Add(1)
go c.flushLoop()
}
func (c *Converter) Stop() {
c.stopOnce.Do(func() {
close(c.stopChan)
c.wg.Wait()
close(c.pLogsChan)
})
}
// OutChannel returns the channel on which converted entries will be sent to.
func (c *Converter) OutChannel() <-chan pdata.Logs {
return c.pLogsChan
}
type workerItem struct {
Resource map[string]string
LogRecord pdata.LogRecord
ResourceString string
}
// workerLoop is responsible for obtaining log entries from Batch() calls,
// converting them to pdata.LogRecords and sending them together with the
// associated Resource through the batchChan for aggregation.
func (c *Converter) workerLoop() {
defer c.wg.Done()
var (
buff = bytes.Buffer{}
encoder = json.NewEncoder(&buff)
)
for {
select {
case <-c.stopChan:
return
case e, ok := <-c.workerChan:
if !ok {
return
}
buff.Reset()
lr := convert(e)
if err := encoder.Encode(e.Resource); err != nil {
c.logger.Debug("Failed marshaling entry.Resource to JSON",
zap.Any("resource", e.Resource),
)
continue
}
select {
case c.batchChan <- &workerItem{
Resource: e.Resource,
ResourceString: buff.String(),
LogRecord: lr,
}:
case <-c.stopChan:
}
}
}
}
// batchLoop is responsible for receiving the converted log entries and aggregating
// them by Resource.
// Whenever maxFlushCount is reached or the ticker ticks a flush is triggered.
func (c *Converter) batchLoop() {
defer c.wg.Done()
ticker := time.NewTicker(c.flushInterval)
defer ticker.Stop()
for {
select {
case wi, ok := <-c.batchChan:
if !ok {
return
}
pLogs, ok := c.data[wi.ResourceString]
if ok {
pLogs.ResourceLogs().
At(0).InstrumentationLibraryLogs().
At(0).Logs().Append(wi.LogRecord)
} else {
pLogs = pdata.NewLogs()
logs := pLogs.ResourceLogs()
logs.Resize(1)
rls := logs.At(0)
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(wi.Resource))
for k, v := range wi.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs()
ills.Resize(1)
ills.At(0).Logs().Append(wi.LogRecord)
}
c.data[wi.ResourceString] = pLogs
c.logRecordCount++
if c.logRecordCount >= c.maxFlushCount {
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
}
case <-ticker.C:
for r, pLogs := range c.data {
c.flushChan <- pLogs
delete(c.data, r)
}
c.logRecordCount = 0
case <-c.stopChan:
return
}
}
}
func (c *Converter) flushLoop() {
defer c.wg.Done()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
for {
select {
case <-c.stopChan:
return
case pLogs := <-c.flushChan:
if err := c.flush(ctx, pLogs); err != nil {
c.logger.Debug("Problem sending log entries",
zap.Error(err),
)
}
}
}
}
// flush flushes provided pdata.Logs entries onto a channel.
func (c *Converter) flush(ctx context.Context, pLogs pdata.Logs) error {
doneChan := ctx.Done()
select {
case <-doneChan:
return fmt.Errorf("flushing log entries interrupted, err: %w", ctx.Err())
case c.pLogsChan <- pLogs:
// The converter has been stopped so bail the flush.
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
return nil
}
// Batch takes in an entry.Entry and sends it to an available worker for processing.
func (c *Converter) Batch(e *entry.Entry) error {
select {
case c.workerChan <- e:
return nil
case <-c.stopChan:
return errors.New("logs converter has been stopped")
}
}
// convert converts one entry.Entry into pdata.LogRecord allocating it.
func convert(ent *entry.Entry) pdata.LogRecord {
dest := pdata.NewLogRecord()
convertInto(ent, dest)
return dest
}
// Convert converts one entry.Entry into pdata.Logs.
// To be used in a stateless setting like tests where ease of use is more
// important than performance or throughput.
func Convert(ent *entry.Entry) pdata.Logs {
pLogs := pdata.NewLogs()
logs := pLogs.ResourceLogs()
rls := logs.AppendEmpty()
resource := rls.Resource()
resourceAtts := resource.Attributes()
resourceAtts.EnsureCapacity(len(ent.Resource))
for k, v := range ent.Resource {
resourceAtts.InsertString(k, v)
}
ills := rls.InstrumentationLibraryLogs().AppendEmpty()
lr := ills.Logs().AppendEmpty()
convertInto(ent, lr)
return pLogs
}
// convertInto converts entry.Entry into provided pdata.LogRecord.
func convertInto(ent *entry.Entry, dest pdata.LogRecord) {
dest.SetTimestamp(pdata.TimestampFromTime(ent.Timestamp))
sevText, sevNum := convertSeverity(ent.Severity)
dest.SetSeverityText(sevText)
dest.SetSeverityNumber(sevNum)
if l := len(ent.Attributes); l > 0 {
attributes := dest.Attributes()
attributes.EnsureCapacity(l)
for k, v := range ent.Attributes {
attributes.InsertString(k, v)
}
}
insertToAttributeVal(ent.Body, dest.Body())
if ent.TraceId != nil {
var buffer [16]byte
copy(buffer[0:16], ent.TraceId)
dest.SetTraceID(pdata.NewTraceID(buffer))
}
if ent.SpanId != nil {
var buffer [8]byte
copy(buffer[0:8], ent.SpanId)
dest.SetSpanID(pdata.NewSpanID(buffer))
}
if ent.TraceFlags != nil {
// The 8 least significant bits are the trace flags as defined in W3C Trace
// Context specification. Don't override the 24 reserved bits.
flags := dest.Flags()
flags = flags & 0xFFFFFF00
flags = flags | uint32(ent.TraceFlags[0])
dest.SetFlags(flags)
}
}
func insertToAttributeVal(value interface{}, dest pdata.AttributeValue) {
switch t := value.(type) {
case bool:
dest.SetBoolVal(t)
case string:
dest.SetStringVal(t)
case []byte:
dest.SetStringVal(string(t))
case int64:
dest.SetIntVal(t)
case int32:
dest.SetIntVal(int64(t))
case int16:
dest.SetIntVal(int64(t))
case int8:
dest.SetIntVal(int64(t))
case int:
dest.SetIntVal(int64(t))
case uint64:
dest.SetIntVal(int64(t))
case uint32:
dest.SetIntVal(int64(t))
case uint16:
dest.SetIntVal(int64(t))
case uint8:
dest.SetIntVal(int64(t))
case uint:
dest.SetIntVal(int64(t))
case float64:
dest.SetDoubleVal(t)
case float32:
dest.SetDoubleVal(float64(t))
case map[string]interface{}:
toAttributeMap(t).CopyTo(dest)
case []interface{}:
toAttributeArray(t).CopyTo(dest)
default:
dest.SetStringVal(fmt.Sprintf("%v", t))
}
}
func toAttributeMap( | attMap.InsertString(k, t)
case []byte:
attMap.InsertString(k, string(t))
case int64:
attMap.InsertInt(k, t)
case int32:
attMap.InsertInt(k, int64(t))
case int16:
attMap.InsertInt(k, int64(t))
case int8:
attMap.InsertInt(k, int64(t))
case int:
attMap.InsertInt(k, int64(t))
case uint64:
attMap.InsertInt(k, int64(t))
case uint32:
attMap.InsertInt(k, int64(t))
case uint16:
attMap.InsertInt(k, int64(t))
case uint8:
attMap.InsertInt(k, int64(t))
case uint:
attMap.InsertInt(k, int64(t))
case float64:
attMap.InsertDouble(k, t)
case float32:
attMap.InsertDouble(k, float64(t))
case map[string]interface{}:
subMap := toAttributeMap(t)
attMap.Insert(k, subMap)
case []interface{}:
arr := toAttributeArray(t)
attMap.Insert(k, arr)
default:
attMap.InsertString(k, fmt.Sprintf("%v", t))
}
}
return attVal
}
func toAttributeArray(obsArr []interface{}) pdata.AttributeValue {
arrVal := pdata.NewAttributeValueArray()
arr := arrVal.ArrayVal()
arr.Resize(len(obsArr))
for i, v := range obsArr {
insertToAttributeVal(v, arr.At(i))
}
return arrVal
}
func convertSeverity(s entry.Severity) (string, pdata.SeverityNumber) {
switch {
// Handle standard severity levels
case s == entry.Catastrophe:
return "Fatal", pdata.SeverityNumberFATAL4
case s == entry.Emergency:
return "Error", pdata.SeverityNumberFATAL
case s == entry.Alert:
return "Error", pdata.SeverityNumberERROR3
case s == entry.Critical:
return "Error", pdata.SeverityNumberERROR2
case s == entry.Error:
return "Error", pdata.SeverityNumberERROR
case s == entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s == entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s == entry.Info:
return "Info", pdata.SeverityNumberINFO
case s == entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG
case s == entry.Trace:
return "Trace", pdata.SeverityNumberTRACE2
// Handle custom severity levels
case s > entry.Emergency:
return "Fatal", pdata.SeverityNumberFATAL2
case s > entry.Alert:
return "Error", pdata.SeverityNumberERROR4
case s > entry.Critical:
return "Error", pdata.SeverityNumberERROR3
case s > entry.Error:
return "Error", pdata.SeverityNumberERROR2
case s > entry.Warning:
return "Info", pdata.SeverityNumberINFO4
case s > entry.Notice:
return "Info", pdata.SeverityNumberINFO3
case s > entry.Info:
return "Info", pdata.SeverityNumberINFO2
case s > entry.Debug:
return "Debug", pdata.SeverityNumberDEBUG2
case s > entry.Trace:
return "Trace", pdata.SeverityNumberTRACE3
case s > entry.Default:
return "Trace", pdata.SeverityNumberTRACE
default:
return "Undefined", pdata.SeverityNumberUNDEFINED
}
}
| obsMap map[string]interface{}) pdata.AttributeValue {
attVal := pdata.NewAttributeValueMap()
attMap := attVal.MapVal()
attMap.EnsureCapacity(len(obsMap))
for k, v := range obsMap {
switch t := v.(type) {
case bool:
attMap.InsertBool(k, t)
case string:
| conditional_block |
hospital-info.component.ts | /* eslint-disable @typescript-eslint/quotes, quote-props */
import { Component, Input, OnInit } from '@angular/core';
import moment, { Moment } from 'moment';
import { Observable, of } from 'rxjs';
import { mergeMap, map, max } from 'rxjs/operators';
import { BedBackgroundOptions } from '../map/options/bed-background-options';
import { BedGlyphOptions } from '../map/options/bed-glyph-options';
import { BedType } from "../map/options/bed-type.enum";
import { QualitativeDiviDevelopmentRepository } from '../repositories/qualitative-divi-development.respository';
import { QualitativeTimedStatus } from '../repositories/types/in/qualitative-hospitals-development';
import { AggregatedHospitalOut } from '../repositories/types/out/aggregated-hospital-out';
import { SingleHospitalOut } from '../repositories/types/out/single-hospital-out';
import { HospitalUtilService } from '../services/hospital-util.service';
import { I18nService, SupportedLocales } from '../services/i18n.service';
import { QualitativeColormapService } from '../services/qualitative-colormap.service';
import { TranslationService } from '../services/translation.service';
import { VegaBarchartService } from '../services/vega-barchart.service';
import { getMoment, getStrDate } from '../util/date-util';
@Component({
selector: 'app-hospital-info',
templateUrl: './hospital-info.component.html',
styleUrls: ['./hospital-info.component.less']
})
export class HospitalInfoComponent implements OnInit {
contact: string;
url: boolean;
contactMsg: string;
public eBedType = BedType;
@Input()
mode: 'dialog' | 'tooltip';
private _data: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
private _options: BedGlyphOptions | BedBackgroundOptions;
@Input()
set options(o: BedGlyphOptions | BedBackgroundOptions) {
this._options = o;
this.updateData();
}
get options(): BedGlyphOptions | BedBackgroundOptions {
return this._options;
}
@Input()
set data(d: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>) {
this._data = d;
this.updateData();
}
get data(): SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus> {
return this._data;
}
private fullData: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
glyphLegendColors = QualitativeColormapService.bedStati;
temporalChartTemplateSpec = {
"$schema": "https://vega.github.io/schema/vega-lite/v4.json",
"width": 260, "height": 50,
"aggregated": true,
"data": {
"values": []
},
"mark": {"type": "area", "interpolate": "step-after"},
"encoding": {
"x": {
"field": "Datum", "type": "temporal",
"axis": {
"domain": false,
"tickSize": 2, "tickCount": 8,
"format": "%d.%m"
}
},
"y": {
"field": "num", "type": "quantitative",
"axis": {"title": "Anzahl KH", "tickMinStep": 1}
},
"color": {"type": "nominal", "field": "Kategorie", "scale": {"domain": [], "range": []}, "legend": false}
}
};
tempChartSpecs$: Observable<any[]>;
barChartSpecs$: Observable<any[]>;
bedAccessors = ['icu_low_state', 'icu_high_state', 'ecmo_state'];
bedAccessorsMapping = {'icu_low_state': 'ICU - Low Care', 'icu_high_state': 'ICU - High Care', 'ecmo_state': 'ECMO'};
isSingleHospital = false;
singleHospital: SingleHospitalOut<QualitativeTimedStatus>;
latestDevelopment: QualitativeTimedStatus;
lastUpdate: Date;
firstTimestamp: Moment;
warnOfOutdatedData: boolean;
totalNumberOfHospitals = 0;
now = new Date();
constructor(private colormapService: QualitativeColormapService,
private translationService: TranslationService,
private vegaBarchartService: VegaBarchartService,
private i18nService: I18nService,
private diviRepo: QualitativeDiviDevelopmentRepository,
private hospitalUtil: HospitalUtilService
) {
}
ngOnInit(): void {
this.updateData();
}
private async updateData() {
this.tempChartSpecs$ = undefined;
this.barChartSpecs$ = undefined;
if (!this.data || !this.options) {
return;
}
// eslint-disable-next-line prefer-const
let [from, to] = this.hospitalUtil.getFromToTupleFromOptions(this.options);
from = getStrDate(getMoment(to).subtract(11, 'days'));
if (this.hospitalUtil.isSingleHospital(this.data)) {
this.isSingleHospital = true;
this.fullData = (await this.diviRepo.getDiviDevelopmentSingleHospital(this.data.id, from, to).toPromise()).properties;
this.singleHospital = this.fullData as SingleHospitalOut<QualitativeTimedStatus>;
} else {
this.fullData = (await this.diviRepo.getDiviDevelopmentForAggLevelSingle(this.options.aggregationLevel, this.data.id, from, to).toPromise()).properties;
}
if (this.fullData.developments) {
this.latestDevelopment = this.fullData.developments[this.fullData.developments.length - 1];
this.totalNumberOfHospitals = this.latestDevelopment.num_hospitals;
const lastUpdateM = getMoment(this.latestDevelopment.last_updated);
this.lastUpdate = lastUpdateM.toDate();
const tenDaysAgo = moment().subtract(10, 'day');
this.firstTimestamp = moment.max(getMoment(this.fullData.developments[0].timestamp), tenDaysAgo);
this.warnOfOutdatedData = moment().subtract(1, 'day').isAfter(lastUpdateM);
}
this.tempChartSpecs$ = this.getTemporalCharts();
if (this.isSingleHospital) {
this.prepareAddressAndContactInformation();
} else {
this.barChartSpecs$ = this.getBarChartSpecs();
}
}
// getTrendIcon(entries: TimestampedValue[]): string {
// const latest = getLatest(entries);
// return latest >= 0 ? (latest == 0 ? 'trending_flat' : 'trending_up') : 'trending_down';
// }
getCapacityStateColor(bedStatus: string) {
return this.colormapService.getSingleHospitalColormap()(bedStatus);
}
getStatusColorFor(bedStatus: BedType) {
return this.colormapService.getLatestBedStatusColor(this.fullData, bedStatus);
}
getStatusDescriptionFor(bedStatus: BedType) {
const latest = this.fullData.developments[this.fullData.developments.length - 1];
const counts = this.colormapService.propertyAccessor(bedStatus)(latest);
return Object.keys(counts).find(key => key !== '' && counts[key] > 0) ?? "Keine Information";
}
private getBarChartSpecs(): Observable<any[]> {
const barChartSpecs = [];
if (!this.latestDevelopment) {
return;
}
const bedStati = this.glyphLegendColors;
for (const bedAccessor of this.bedAccessors) {
const spec = this.vegaBarchartService.compileChart(this.latestDevelopment, bedAccessor, bedStati, {
xAxisTitle: '',
yAxisTitle: this.translationService.translate('Anzahl Krankenhäuser'),
width: 55
});
barChartSpecs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec
});
}
return of(barChartSpecs)
.pipe(
mergeMap(d => d),
mergeMap(d => d.chart.data.values),
map((d: any) => d.num as number),
max(),
map(maxNum => {
barChartSpecs.forEach(spec => {
spec.chart.encoding.y.scale.domain = [0, maxNum + 1];
spec.chart.encoding.y.axis.tickCount = Math.min(maxNum + 1, 5);
});
return barChartSpecs;
})
);
}
private prepareAddressAndContactInformation() {
if (!this.isSingleHospital) {
return false;
}
if (this.singleHospital.contact.indexOf('http') > -1) {
this.contact = 'http' + this.singleHospital.contact.split('http')[1];
this.url = true;
this.contactMsg = this.singleHospital.contact.replace(this.contact, '').replace('Website', '').trim();
if (this.contactMsg === '') {
this.contactMsg = 'Webseite';
}
} else {
this.contact = this.singleHospital.contact;
this.url = false;
this.contactMsg = this.singleHospital.contact;
}
}
private existsInDataValues(date: Moment, category, dataValues){
for (let i = dataValues.length - 1; i >= 0; i--) {
if (moment(dataValues[i].Datum).isSame(date) && dataValues[i].Kategorie === category){
return true;
}
}
return false;
}
private getTemporalCharts(): Observable<any[]> {
return of(true)
.pipe(
map(() => {
const bedStati = this.glyphLegendColors;
const colors = [];
for (const bedStatus of bedStati) {
colors.push(this.getCapacityStateColor(bedStatus));
}
const specs = [];
let maxNum = 0;
let maxNumSlices = 0;
if (!this.fullData.developments) {
return null;
}
const tenDaysAgo = moment().subtract(10, 'day');
// const data = this.fullData.developments.filter(d => tenDaysAgo.isBefore(moment(d.timestamp)));
for (const bedAccessor of this.bedAccessors) {
let summedbedcounts = 0;
const dataValues = [];
if (this.firstTimestamp.isSameOrAfter(tenDaysAgo)) {
dataValues.push(
{
Kategorie: "Keine Information",
num: this.totalNumberOfHospitals,
color: this.getCapacityStateColor("Keine Information"),
Datum: tenDaysAgo
}
);
}
let counter = 0;
for ( const d of this.fullData.developments) {
let sumOfOneSlice = 0;
// fill the data object
for (const bedStatus of bedStati) {
const v = d[bedAccessor][bedStatus] || 0;
summedbedcounts++;
sumOfOneSlice += v;
if (!this.existsInDataValues(moment.max(getMoment(d.timestamp), tenDaysAgo), bedStatus, dataValues)) {
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment.max(getMoment(d.timestamp), tenDaysAgo).toDate()
}
);
if (v > maxNum) {
maxNum = v;
}
}
// add last data point once again
if (counter === this.fullData.developments.length - 1){
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment()
}
);
}
}
if (sumOfOneSlice > maxNumSlices) {
maxNumSlices = sumOfOneSlice;
}
counter++;
}
// hack deep clone spec
const spec = JSON.parse(JSON.stringify(this.temporalChartTemplateSpec));
// inject data values
spec.data.values = dataValues;
// if (this.isSingleHospital && (new Date(this.lastUpdate).getTime() - new Date(this.firstTimestamp).getTime() < 2 * 24 * 60 * 60 * 1000)) {
// spec.encoding.x.axis.labelExpr = "[timeFormat(datum.value, '%d.%m'), false ? ' ' : timeFormat(datum.value, '(%H:%M)')]";
// }
spec.encoding.y.scale = {
domain: [0, maxNumSlices]
};
if (!this.isSingleHospital) {
spec.mark.interpolate = 'step-after';
spec.encoding.y.axis.title = this.translationService.translate('Anzahl KH');
spec.encoding.x.axis.tickCount = 5;
// spec.width = 370;
} else {
// is single hospital
spec.encoding.y.axis = false;
spec.height = spec.height * 0.3;
spec.width = 200;
}
// also overwrite the title
spec.encoding.x.title = '';
if (this.i18nService.getCurrentLocale() === SupportedLocales.DE_DE) {
spec.encoding.x.axis.format = '%d.%m';
} else {
spec.encoding.x.axis.format = '%m/%d';
}
if (summedbedcounts > 0) {
const bedType = bedAccessor === 'icu_low_state' ? this.eBedType.icuLow : (bedAccessor === 'icu_high_state' ? this.eBedType.icuHigh : this.eBedType.ecmo);
specs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec,
bedtype: bedType, | bedStatusDesc: this.getStatusDescriptionFor(bedType)
});
}
// set the max value
specs.forEach(s => {
s.chart.encoding.color.scale.domain = bedStati;
s.chart.encoding.color.scale.range = colors;
// spec.encoding.color.range = Math.min(maxNum+1, 5);
});
}
return specs;
}));
}
} | random_line_split | |
hospital-info.component.ts | /* eslint-disable @typescript-eslint/quotes, quote-props */
import { Component, Input, OnInit } from '@angular/core';
import moment, { Moment } from 'moment';
import { Observable, of } from 'rxjs';
import { mergeMap, map, max } from 'rxjs/operators';
import { BedBackgroundOptions } from '../map/options/bed-background-options';
import { BedGlyphOptions } from '../map/options/bed-glyph-options';
import { BedType } from "../map/options/bed-type.enum";
import { QualitativeDiviDevelopmentRepository } from '../repositories/qualitative-divi-development.respository';
import { QualitativeTimedStatus } from '../repositories/types/in/qualitative-hospitals-development';
import { AggregatedHospitalOut } from '../repositories/types/out/aggregated-hospital-out';
import { SingleHospitalOut } from '../repositories/types/out/single-hospital-out';
import { HospitalUtilService } from '../services/hospital-util.service';
import { I18nService, SupportedLocales } from '../services/i18n.service';
import { QualitativeColormapService } from '../services/qualitative-colormap.service';
import { TranslationService } from '../services/translation.service';
import { VegaBarchartService } from '../services/vega-barchart.service';
import { getMoment, getStrDate } from '../util/date-util';
@Component({
selector: 'app-hospital-info',
templateUrl: './hospital-info.component.html',
styleUrls: ['./hospital-info.component.less']
})
export class HospitalInfoComponent implements OnInit {
contact: string;
url: boolean;
contactMsg: string;
public eBedType = BedType;
@Input()
mode: 'dialog' | 'tooltip';
private _data: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
private _options: BedGlyphOptions | BedBackgroundOptions;
@Input()
set options(o: BedGlyphOptions | BedBackgroundOptions) {
this._options = o;
this.updateData();
}
get options(): BedGlyphOptions | BedBackgroundOptions {
return this._options;
}
@Input()
set data(d: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>) {
this._data = d;
this.updateData();
}
get data(): SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus> {
return this._data;
}
private fullData: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
glyphLegendColors = QualitativeColormapService.bedStati;
temporalChartTemplateSpec = {
"$schema": "https://vega.github.io/schema/vega-lite/v4.json",
"width": 260, "height": 50,
"aggregated": true,
"data": {
"values": []
},
"mark": {"type": "area", "interpolate": "step-after"},
"encoding": {
"x": {
"field": "Datum", "type": "temporal",
"axis": {
"domain": false,
"tickSize": 2, "tickCount": 8,
"format": "%d.%m"
}
},
"y": {
"field": "num", "type": "quantitative",
"axis": {"title": "Anzahl KH", "tickMinStep": 1}
},
"color": {"type": "nominal", "field": "Kategorie", "scale": {"domain": [], "range": []}, "legend": false}
}
};
tempChartSpecs$: Observable<any[]>;
barChartSpecs$: Observable<any[]>;
bedAccessors = ['icu_low_state', 'icu_high_state', 'ecmo_state'];
bedAccessorsMapping = {'icu_low_state': 'ICU - Low Care', 'icu_high_state': 'ICU - High Care', 'ecmo_state': 'ECMO'};
isSingleHospital = false;
singleHospital: SingleHospitalOut<QualitativeTimedStatus>;
latestDevelopment: QualitativeTimedStatus;
lastUpdate: Date;
firstTimestamp: Moment;
warnOfOutdatedData: boolean;
totalNumberOfHospitals = 0;
now = new Date();
constructor(private colormapService: QualitativeColormapService,
private translationService: TranslationService,
private vegaBarchartService: VegaBarchartService,
private i18nService: I18nService,
private diviRepo: QualitativeDiviDevelopmentRepository,
private hospitalUtil: HospitalUtilService
) {
}
ngOnInit(): void {
this.updateData();
}
private async updateData() {
this.tempChartSpecs$ = undefined;
this.barChartSpecs$ = undefined;
if (!this.data || !this.options) {
return;
}
// eslint-disable-next-line prefer-const
let [from, to] = this.hospitalUtil.getFromToTupleFromOptions(this.options);
from = getStrDate(getMoment(to).subtract(11, 'days'));
if (this.hospitalUtil.isSingleHospital(this.data)) {
this.isSingleHospital = true;
this.fullData = (await this.diviRepo.getDiviDevelopmentSingleHospital(this.data.id, from, to).toPromise()).properties;
this.singleHospital = this.fullData as SingleHospitalOut<QualitativeTimedStatus>;
} else {
this.fullData = (await this.diviRepo.getDiviDevelopmentForAggLevelSingle(this.options.aggregationLevel, this.data.id, from, to).toPromise()).properties;
}
if (this.fullData.developments) {
this.latestDevelopment = this.fullData.developments[this.fullData.developments.length - 1];
this.totalNumberOfHospitals = this.latestDevelopment.num_hospitals;
const lastUpdateM = getMoment(this.latestDevelopment.last_updated);
this.lastUpdate = lastUpdateM.toDate();
const tenDaysAgo = moment().subtract(10, 'day');
this.firstTimestamp = moment.max(getMoment(this.fullData.developments[0].timestamp), tenDaysAgo);
this.warnOfOutdatedData = moment().subtract(1, 'day').isAfter(lastUpdateM);
}
this.tempChartSpecs$ = this.getTemporalCharts();
if (this.isSingleHospital) {
this.prepareAddressAndContactInformation();
} else {
this.barChartSpecs$ = this.getBarChartSpecs();
}
}
// getTrendIcon(entries: TimestampedValue[]): string {
// const latest = getLatest(entries);
// return latest >= 0 ? (latest == 0 ? 'trending_flat' : 'trending_up') : 'trending_down';
// }
getCapacityStateColor(bedStatus: string) {
return this.colormapService.getSingleHospitalColormap()(bedStatus);
}
getStatusColorFor(bedStatus: BedType) {
return this.colormapService.getLatestBedStatusColor(this.fullData, bedStatus);
}
getStatusDescriptionFor(bedStatus: BedType) {
const latest = this.fullData.developments[this.fullData.developments.length - 1];
const counts = this.colormapService.propertyAccessor(bedStatus)(latest);
return Object.keys(counts).find(key => key !== '' && counts[key] > 0) ?? "Keine Information";
}
private getBarChartSpecs(): Observable<any[]> {
const barChartSpecs = [];
if (!this.latestDevelopment) {
return;
}
const bedStati = this.glyphLegendColors;
for (const bedAccessor of this.bedAccessors) {
const spec = this.vegaBarchartService.compileChart(this.latestDevelopment, bedAccessor, bedStati, {
xAxisTitle: '',
yAxisTitle: this.translationService.translate('Anzahl Krankenhäuser'),
width: 55
});
barChartSpecs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec
});
}
return of(barChartSpecs)
.pipe(
mergeMap(d => d),
mergeMap(d => d.chart.data.values),
map((d: any) => d.num as number),
max(),
map(maxNum => {
barChartSpecs.forEach(spec => {
spec.chart.encoding.y.scale.domain = [0, maxNum + 1];
spec.chart.encoding.y.axis.tickCount = Math.min(maxNum + 1, 5);
});
return barChartSpecs;
})
);
}
private prepareAddressAndContactInformation() {
if (!this.isSingleHospital) {
return false;
}
if (this.singleHospital.contact.indexOf('http') > -1) {
this.contact = 'http' + this.singleHospital.contact.split('http')[1];
this.url = true;
this.contactMsg = this.singleHospital.contact.replace(this.contact, '').replace('Website', '').trim();
if (this.contactMsg === '') {
this.contactMsg = 'Webseite';
}
} else {
this.contact = this.singleHospital.contact;
this.url = false;
this.contactMsg = this.singleHospital.contact;
}
}
private existsInDataValues(date: Moment, category, dataValues){
for (let i = dataValues.length - 1; i >= 0; i--) {
if (moment(dataValues[i].Datum).isSame(date) && dataValues[i].Kategorie === category){
return true;
}
}
return false;
}
private g | ): Observable<any[]> {
return of(true)
.pipe(
map(() => {
const bedStati = this.glyphLegendColors;
const colors = [];
for (const bedStatus of bedStati) {
colors.push(this.getCapacityStateColor(bedStatus));
}
const specs = [];
let maxNum = 0;
let maxNumSlices = 0;
if (!this.fullData.developments) {
return null;
}
const tenDaysAgo = moment().subtract(10, 'day');
// const data = this.fullData.developments.filter(d => tenDaysAgo.isBefore(moment(d.timestamp)));
for (const bedAccessor of this.bedAccessors) {
let summedbedcounts = 0;
const dataValues = [];
if (this.firstTimestamp.isSameOrAfter(tenDaysAgo)) {
dataValues.push(
{
Kategorie: "Keine Information",
num: this.totalNumberOfHospitals,
color: this.getCapacityStateColor("Keine Information"),
Datum: tenDaysAgo
}
);
}
let counter = 0;
for ( const d of this.fullData.developments) {
let sumOfOneSlice = 0;
// fill the data object
for (const bedStatus of bedStati) {
const v = d[bedAccessor][bedStatus] || 0;
summedbedcounts++;
sumOfOneSlice += v;
if (!this.existsInDataValues(moment.max(getMoment(d.timestamp), tenDaysAgo), bedStatus, dataValues)) {
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment.max(getMoment(d.timestamp), tenDaysAgo).toDate()
}
);
if (v > maxNum) {
maxNum = v;
}
}
// add last data point once again
if (counter === this.fullData.developments.length - 1){
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment()
}
);
}
}
if (sumOfOneSlice > maxNumSlices) {
maxNumSlices = sumOfOneSlice;
}
counter++;
}
// hack deep clone spec
const spec = JSON.parse(JSON.stringify(this.temporalChartTemplateSpec));
// inject data values
spec.data.values = dataValues;
// if (this.isSingleHospital && (new Date(this.lastUpdate).getTime() - new Date(this.firstTimestamp).getTime() < 2 * 24 * 60 * 60 * 1000)) {
// spec.encoding.x.axis.labelExpr = "[timeFormat(datum.value, '%d.%m'), false ? ' ' : timeFormat(datum.value, '(%H:%M)')]";
// }
spec.encoding.y.scale = {
domain: [0, maxNumSlices]
};
if (!this.isSingleHospital) {
spec.mark.interpolate = 'step-after';
spec.encoding.y.axis.title = this.translationService.translate('Anzahl KH');
spec.encoding.x.axis.tickCount = 5;
// spec.width = 370;
} else {
// is single hospital
spec.encoding.y.axis = false;
spec.height = spec.height * 0.3;
spec.width = 200;
}
// also overwrite the title
spec.encoding.x.title = '';
if (this.i18nService.getCurrentLocale() === SupportedLocales.DE_DE) {
spec.encoding.x.axis.format = '%d.%m';
} else {
spec.encoding.x.axis.format = '%m/%d';
}
if (summedbedcounts > 0) {
const bedType = bedAccessor === 'icu_low_state' ? this.eBedType.icuLow : (bedAccessor === 'icu_high_state' ? this.eBedType.icuHigh : this.eBedType.ecmo);
specs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec,
bedtype: bedType,
bedStatusDesc: this.getStatusDescriptionFor(bedType)
});
}
// set the max value
specs.forEach(s => {
s.chart.encoding.color.scale.domain = bedStati;
s.chart.encoding.color.scale.range = colors;
// spec.encoding.color.range = Math.min(maxNum+1, 5);
});
}
return specs;
}));
}
}
| etTemporalCharts( | identifier_name |
hospital-info.component.ts | /* eslint-disable @typescript-eslint/quotes, quote-props */
import { Component, Input, OnInit } from '@angular/core';
import moment, { Moment } from 'moment';
import { Observable, of } from 'rxjs';
import { mergeMap, map, max } from 'rxjs/operators';
import { BedBackgroundOptions } from '../map/options/bed-background-options';
import { BedGlyphOptions } from '../map/options/bed-glyph-options';
import { BedType } from "../map/options/bed-type.enum";
import { QualitativeDiviDevelopmentRepository } from '../repositories/qualitative-divi-development.respository';
import { QualitativeTimedStatus } from '../repositories/types/in/qualitative-hospitals-development';
import { AggregatedHospitalOut } from '../repositories/types/out/aggregated-hospital-out';
import { SingleHospitalOut } from '../repositories/types/out/single-hospital-out';
import { HospitalUtilService } from '../services/hospital-util.service';
import { I18nService, SupportedLocales } from '../services/i18n.service';
import { QualitativeColormapService } from '../services/qualitative-colormap.service';
import { TranslationService } from '../services/translation.service';
import { VegaBarchartService } from '../services/vega-barchart.service';
import { getMoment, getStrDate } from '../util/date-util';
@Component({
selector: 'app-hospital-info',
templateUrl: './hospital-info.component.html',
styleUrls: ['./hospital-info.component.less']
})
export class HospitalInfoComponent implements OnInit {
contact: string;
url: boolean;
contactMsg: string;
public eBedType = BedType;
@Input()
mode: 'dialog' | 'tooltip';
private _data: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
private _options: BedGlyphOptions | BedBackgroundOptions;
@Input()
set options(o: BedGlyphOptions | BedBackgroundOptions) {
this._options = o;
this.updateData();
}
get options(): BedGlyphOptions | BedBackgroundOptions {
return this._options;
}
@Input()
set data(d: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>) {
this._data = d;
this.updateData();
}
get data(): SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus> {
return this._data;
}
private fullData: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
glyphLegendColors = QualitativeColormapService.bedStati;
temporalChartTemplateSpec = {
"$schema": "https://vega.github.io/schema/vega-lite/v4.json",
"width": 260, "height": 50,
"aggregated": true,
"data": {
"values": []
},
"mark": {"type": "area", "interpolate": "step-after"},
"encoding": {
"x": {
"field": "Datum", "type": "temporal",
"axis": {
"domain": false,
"tickSize": 2, "tickCount": 8,
"format": "%d.%m"
}
},
"y": {
"field": "num", "type": "quantitative",
"axis": {"title": "Anzahl KH", "tickMinStep": 1}
},
"color": {"type": "nominal", "field": "Kategorie", "scale": {"domain": [], "range": []}, "legend": false}
}
};
tempChartSpecs$: Observable<any[]>;
barChartSpecs$: Observable<any[]>;
bedAccessors = ['icu_low_state', 'icu_high_state', 'ecmo_state'];
bedAccessorsMapping = {'icu_low_state': 'ICU - Low Care', 'icu_high_state': 'ICU - High Care', 'ecmo_state': 'ECMO'};
isSingleHospital = false;
singleHospital: SingleHospitalOut<QualitativeTimedStatus>;
latestDevelopment: QualitativeTimedStatus;
lastUpdate: Date;
firstTimestamp: Moment;
warnOfOutdatedData: boolean;
totalNumberOfHospitals = 0;
now = new Date();
constructor(private colormapService: QualitativeColormapService,
private translationService: TranslationService,
private vegaBarchartService: VegaBarchartService,
private i18nService: I18nService,
private diviRepo: QualitativeDiviDevelopmentRepository,
private hospitalUtil: HospitalUtilService
) {
}
ngOnInit(): void {
this.updateData();
}
private async updateData() {
this.tempChartSpecs$ = undefined;
this.barChartSpecs$ = undefined;
if (!this.data || !this.options) {
return;
}
// eslint-disable-next-line prefer-const
let [from, to] = this.hospitalUtil.getFromToTupleFromOptions(this.options);
from = getStrDate(getMoment(to).subtract(11, 'days'));
if (this.hospitalUtil.isSingleHospital(this.data)) {
this.isSingleHospital = true;
this.fullData = (await this.diviRepo.getDiviDevelopmentSingleHospital(this.data.id, from, to).toPromise()).properties;
this.singleHospital = this.fullData as SingleHospitalOut<QualitativeTimedStatus>;
} else {
this.fullData = (await this.diviRepo.getDiviDevelopmentForAggLevelSingle(this.options.aggregationLevel, this.data.id, from, to).toPromise()).properties;
}
if (this.fullData.developments) {
this.latestDevelopment = this.fullData.developments[this.fullData.developments.length - 1];
this.totalNumberOfHospitals = this.latestDevelopment.num_hospitals;
const lastUpdateM = getMoment(this.latestDevelopment.last_updated);
this.lastUpdate = lastUpdateM.toDate();
const tenDaysAgo = moment().subtract(10, 'day');
this.firstTimestamp = moment.max(getMoment(this.fullData.developments[0].timestamp), tenDaysAgo);
this.warnOfOutdatedData = moment().subtract(1, 'day').isAfter(lastUpdateM);
}
this.tempChartSpecs$ = this.getTemporalCharts();
if (this.isSingleHospital) {
this.prepareAddressAndContactInformation();
} else {
this.barChartSpecs$ = this.getBarChartSpecs();
}
}
// getTrendIcon(entries: TimestampedValue[]): string {
// const latest = getLatest(entries);
// return latest >= 0 ? (latest == 0 ? 'trending_flat' : 'trending_up') : 'trending_down';
// }
getCapacityStateColor(bedStatus: string) {
return this.colormapService.getSingleHospitalColormap()(bedStatus);
}
getStatusColorFor(bedStatus: BedType) {
return this.colormapService.getLatestBedStatusColor(this.fullData, bedStatus);
}
getStatusDescriptionFor(bedStatus: BedType) {
const latest = this.fullData.developments[this.fullData.developments.length - 1];
const counts = this.colormapService.propertyAccessor(bedStatus)(latest);
return Object.keys(counts).find(key => key !== '' && counts[key] > 0) ?? "Keine Information";
}
private getBarChartSpecs(): Observable<any[]> {
const barChartSpecs = [];
if (!this.latestDevelopment) {
return;
}
const bedStati = this.glyphLegendColors;
for (const bedAccessor of this.bedAccessors) {
const spec = this.vegaBarchartService.compileChart(this.latestDevelopment, bedAccessor, bedStati, {
xAxisTitle: '',
yAxisTitle: this.translationService.translate('Anzahl Krankenhäuser'),
width: 55
});
barChartSpecs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec
});
}
return of(barChartSpecs)
.pipe(
mergeMap(d => d),
mergeMap(d => d.chart.data.values),
map((d: any) => d.num as number),
max(),
map(maxNum => {
barChartSpecs.forEach(spec => {
spec.chart.encoding.y.scale.domain = [0, maxNum + 1];
spec.chart.encoding.y.axis.tickCount = Math.min(maxNum + 1, 5);
});
return barChartSpecs;
})
);
}
private prepareAddressAndContactInformation() {
if (!this.isSingleHospital) {
return false;
}
if (this.singleHospital.contact.indexOf('http') > -1) {
this.contact = 'http' + this.singleHospital.contact.split('http')[1];
this.url = true;
this.contactMsg = this.singleHospital.contact.replace(this.contact, '').replace('Website', '').trim();
if (this.contactMsg === '') {
this.contactMsg = 'Webseite';
}
} else {
this.contact = this.singleHospital.contact;
this.url = false;
this.contactMsg = this.singleHospital.contact;
}
}
private existsInDataValues(date: Moment, category, dataValues){
for (let i = dataValues.length - 1; i >= 0; i--) {
if (moment(dataValues[i].Datum).isSame(date) && dataValues[i].Kategorie === category){
return true;
}
}
return false;
}
private getTemporalCharts(): Observable<any[]> { | }
|
return of(true)
.pipe(
map(() => {
const bedStati = this.glyphLegendColors;
const colors = [];
for (const bedStatus of bedStati) {
colors.push(this.getCapacityStateColor(bedStatus));
}
const specs = [];
let maxNum = 0;
let maxNumSlices = 0;
if (!this.fullData.developments) {
return null;
}
const tenDaysAgo = moment().subtract(10, 'day');
// const data = this.fullData.developments.filter(d => tenDaysAgo.isBefore(moment(d.timestamp)));
for (const bedAccessor of this.bedAccessors) {
let summedbedcounts = 0;
const dataValues = [];
if (this.firstTimestamp.isSameOrAfter(tenDaysAgo)) {
dataValues.push(
{
Kategorie: "Keine Information",
num: this.totalNumberOfHospitals,
color: this.getCapacityStateColor("Keine Information"),
Datum: tenDaysAgo
}
);
}
let counter = 0;
for ( const d of this.fullData.developments) {
let sumOfOneSlice = 0;
// fill the data object
for (const bedStatus of bedStati) {
const v = d[bedAccessor][bedStatus] || 0;
summedbedcounts++;
sumOfOneSlice += v;
if (!this.existsInDataValues(moment.max(getMoment(d.timestamp), tenDaysAgo), bedStatus, dataValues)) {
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment.max(getMoment(d.timestamp), tenDaysAgo).toDate()
}
);
if (v > maxNum) {
maxNum = v;
}
}
// add last data point once again
if (counter === this.fullData.developments.length - 1){
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment()
}
);
}
}
if (sumOfOneSlice > maxNumSlices) {
maxNumSlices = sumOfOneSlice;
}
counter++;
}
// hack deep clone spec
const spec = JSON.parse(JSON.stringify(this.temporalChartTemplateSpec));
// inject data values
spec.data.values = dataValues;
// if (this.isSingleHospital && (new Date(this.lastUpdate).getTime() - new Date(this.firstTimestamp).getTime() < 2 * 24 * 60 * 60 * 1000)) {
// spec.encoding.x.axis.labelExpr = "[timeFormat(datum.value, '%d.%m'), false ? ' ' : timeFormat(datum.value, '(%H:%M)')]";
// }
spec.encoding.y.scale = {
domain: [0, maxNumSlices]
};
if (!this.isSingleHospital) {
spec.mark.interpolate = 'step-after';
spec.encoding.y.axis.title = this.translationService.translate('Anzahl KH');
spec.encoding.x.axis.tickCount = 5;
// spec.width = 370;
} else {
// is single hospital
spec.encoding.y.axis = false;
spec.height = spec.height * 0.3;
spec.width = 200;
}
// also overwrite the title
spec.encoding.x.title = '';
if (this.i18nService.getCurrentLocale() === SupportedLocales.DE_DE) {
spec.encoding.x.axis.format = '%d.%m';
} else {
spec.encoding.x.axis.format = '%m/%d';
}
if (summedbedcounts > 0) {
const bedType = bedAccessor === 'icu_low_state' ? this.eBedType.icuLow : (bedAccessor === 'icu_high_state' ? this.eBedType.icuHigh : this.eBedType.ecmo);
specs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec,
bedtype: bedType,
bedStatusDesc: this.getStatusDescriptionFor(bedType)
});
}
// set the max value
specs.forEach(s => {
s.chart.encoding.color.scale.domain = bedStati;
s.chart.encoding.color.scale.range = colors;
// spec.encoding.color.range = Math.min(maxNum+1, 5);
});
}
return specs;
}));
}
| identifier_body |
hospital-info.component.ts | /* eslint-disable @typescript-eslint/quotes, quote-props */
import { Component, Input, OnInit } from '@angular/core';
import moment, { Moment } from 'moment';
import { Observable, of } from 'rxjs';
import { mergeMap, map, max } from 'rxjs/operators';
import { BedBackgroundOptions } from '../map/options/bed-background-options';
import { BedGlyphOptions } from '../map/options/bed-glyph-options';
import { BedType } from "../map/options/bed-type.enum";
import { QualitativeDiviDevelopmentRepository } from '../repositories/qualitative-divi-development.respository';
import { QualitativeTimedStatus } from '../repositories/types/in/qualitative-hospitals-development';
import { AggregatedHospitalOut } from '../repositories/types/out/aggregated-hospital-out';
import { SingleHospitalOut } from '../repositories/types/out/single-hospital-out';
import { HospitalUtilService } from '../services/hospital-util.service';
import { I18nService, SupportedLocales } from '../services/i18n.service';
import { QualitativeColormapService } from '../services/qualitative-colormap.service';
import { TranslationService } from '../services/translation.service';
import { VegaBarchartService } from '../services/vega-barchart.service';
import { getMoment, getStrDate } from '../util/date-util';
@Component({
selector: 'app-hospital-info',
templateUrl: './hospital-info.component.html',
styleUrls: ['./hospital-info.component.less']
})
export class HospitalInfoComponent implements OnInit {
contact: string;
url: boolean;
contactMsg: string;
public eBedType = BedType;
@Input()
mode: 'dialog' | 'tooltip';
private _data: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
private _options: BedGlyphOptions | BedBackgroundOptions;
@Input()
set options(o: BedGlyphOptions | BedBackgroundOptions) {
this._options = o;
this.updateData();
}
get options(): BedGlyphOptions | BedBackgroundOptions {
return this._options;
}
@Input()
set data(d: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>) {
this._data = d;
this.updateData();
}
get data(): SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus> {
return this._data;
}
private fullData: SingleHospitalOut<QualitativeTimedStatus> | AggregatedHospitalOut<QualitativeTimedStatus>;
glyphLegendColors = QualitativeColormapService.bedStati;
temporalChartTemplateSpec = {
"$schema": "https://vega.github.io/schema/vega-lite/v4.json",
"width": 260, "height": 50,
"aggregated": true,
"data": {
"values": []
},
"mark": {"type": "area", "interpolate": "step-after"},
"encoding": {
"x": {
"field": "Datum", "type": "temporal",
"axis": {
"domain": false,
"tickSize": 2, "tickCount": 8,
"format": "%d.%m"
}
},
"y": {
"field": "num", "type": "quantitative",
"axis": {"title": "Anzahl KH", "tickMinStep": 1}
},
"color": {"type": "nominal", "field": "Kategorie", "scale": {"domain": [], "range": []}, "legend": false}
}
};
tempChartSpecs$: Observable<any[]>;
barChartSpecs$: Observable<any[]>;
bedAccessors = ['icu_low_state', 'icu_high_state', 'ecmo_state'];
bedAccessorsMapping = {'icu_low_state': 'ICU - Low Care', 'icu_high_state': 'ICU - High Care', 'ecmo_state': 'ECMO'};
isSingleHospital = false;
singleHospital: SingleHospitalOut<QualitativeTimedStatus>;
latestDevelopment: QualitativeTimedStatus;
lastUpdate: Date;
firstTimestamp: Moment;
warnOfOutdatedData: boolean;
totalNumberOfHospitals = 0;
now = new Date();
constructor(private colormapService: QualitativeColormapService,
private translationService: TranslationService,
private vegaBarchartService: VegaBarchartService,
private i18nService: I18nService,
private diviRepo: QualitativeDiviDevelopmentRepository,
private hospitalUtil: HospitalUtilService
) {
}
ngOnInit(): void {
this.updateData();
}
private async updateData() {
this.tempChartSpecs$ = undefined;
this.barChartSpecs$ = undefined;
if (!this.data || !this.options) {
return;
}
// eslint-disable-next-line prefer-const
let [from, to] = this.hospitalUtil.getFromToTupleFromOptions(this.options);
from = getStrDate(getMoment(to).subtract(11, 'days'));
if (this.hospitalUtil.isSingleHospital(this.data)) {
this.isSingleHospital = true;
this.fullData = (await this.diviRepo.getDiviDevelopmentSingleHospital(this.data.id, from, to).toPromise()).properties;
this.singleHospital = this.fullData as SingleHospitalOut<QualitativeTimedStatus>;
} else {
this.fullData = (await this.diviRepo.getDiviDevelopmentForAggLevelSingle(this.options.aggregationLevel, this.data.id, from, to).toPromise()).properties;
}
if (this.fullData.developments) {
this.latestDevelopment = this.fullData.developments[this.fullData.developments.length - 1];
this.totalNumberOfHospitals = this.latestDevelopment.num_hospitals;
const lastUpdateM = getMoment(this.latestDevelopment.last_updated);
this.lastUpdate = lastUpdateM.toDate();
const tenDaysAgo = moment().subtract(10, 'day');
this.firstTimestamp = moment.max(getMoment(this.fullData.developments[0].timestamp), tenDaysAgo);
this.warnOfOutdatedData = moment().subtract(1, 'day').isAfter(lastUpdateM);
}
this.tempChartSpecs$ = this.getTemporalCharts();
if (this.isSingleHospital) {
this.prepareAddressAndContactInformation();
} else {
this.barChartSpecs$ = this.getBarChartSpecs();
}
}
// getTrendIcon(entries: TimestampedValue[]): string {
// const latest = getLatest(entries);
// return latest >= 0 ? (latest == 0 ? 'trending_flat' : 'trending_up') : 'trending_down';
// }
getCapacityStateColor(bedStatus: string) {
return this.colormapService.getSingleHospitalColormap()(bedStatus);
}
getStatusColorFor(bedStatus: BedType) {
return this.colormapService.getLatestBedStatusColor(this.fullData, bedStatus);
}
getStatusDescriptionFor(bedStatus: BedType) {
const latest = this.fullData.developments[this.fullData.developments.length - 1];
const counts = this.colormapService.propertyAccessor(bedStatus)(latest);
return Object.keys(counts).find(key => key !== '' && counts[key] > 0) ?? "Keine Information";
}
private getBarChartSpecs(): Observable<any[]> {
const barChartSpecs = [];
if (!this.latestDevelopment) {
return;
}
const bedStati = this.glyphLegendColors;
for (const bedAccessor of this.bedAccessors) {
const spec = this.vegaBarchartService.compileChart(this.latestDevelopment, bedAccessor, bedStati, {
xAxisTitle: '',
yAxisTitle: this.translationService.translate('Anzahl Krankenhäuser'),
width: 55
});
barChartSpecs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec
});
}
return of(barChartSpecs)
.pipe(
mergeMap(d => d),
mergeMap(d => d.chart.data.values),
map((d: any) => d.num as number),
max(),
map(maxNum => {
barChartSpecs.forEach(spec => {
spec.chart.encoding.y.scale.domain = [0, maxNum + 1];
spec.chart.encoding.y.axis.tickCount = Math.min(maxNum + 1, 5);
});
return barChartSpecs;
})
);
}
private prepareAddressAndContactInformation() {
if (!this.isSingleHospital) {
return false;
}
if (this.singleHospital.contact.indexOf('http') > -1) {
this.contact = 'http' + this.singleHospital.contact.split('http')[1];
this.url = true;
this.contactMsg = this.singleHospital.contact.replace(this.contact, '').replace('Website', '').trim();
if (this.contactMsg === '') {
this.contactMsg = 'Webseite';
}
} else {
this.contact = this.singleHospital.contact;
this.url = false;
this.contactMsg = this.singleHospital.contact;
}
}
private existsInDataValues(date: Moment, category, dataValues){
for (let i = dataValues.length - 1; i >= 0; i--) {
if (moment(dataValues[i].Datum).isSame(date) && dataValues[i].Kategorie === category){
return true;
}
}
return false;
}
private getTemporalCharts(): Observable<any[]> {
return of(true)
.pipe(
map(() => {
const bedStati = this.glyphLegendColors;
const colors = [];
for (const bedStatus of bedStati) {
colors.push(this.getCapacityStateColor(bedStatus));
}
const specs = [];
let maxNum = 0;
let maxNumSlices = 0;
if (!this.fullData.developments) {
return null;
}
const tenDaysAgo = moment().subtract(10, 'day');
// const data = this.fullData.developments.filter(d => tenDaysAgo.isBefore(moment(d.timestamp)));
for (const bedAccessor of this.bedAccessors) {
let summedbedcounts = 0;
const dataValues = [];
if (this.firstTimestamp.isSameOrAfter(tenDaysAgo)) {
dataValues.push(
{
Kategorie: "Keine Information",
num: this.totalNumberOfHospitals,
color: this.getCapacityStateColor("Keine Information"),
Datum: tenDaysAgo
}
);
}
let counter = 0;
for ( const d of this.fullData.developments) {
let sumOfOneSlice = 0;
// fill the data object
for (const bedStatus of bedStati) {
const v = d[bedAccessor][bedStatus] || 0;
summedbedcounts++;
sumOfOneSlice += v;
if (!this.existsInDataValues(moment.max(getMoment(d.timestamp), tenDaysAgo), bedStatus, dataValues)) {
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment.max(getMoment(d.timestamp), tenDaysAgo).toDate()
}
);
if (v > maxNum) {
maxNum = v;
}
}
// add last data point once again
if (counter === this.fullData.developments.length - 1){
dataValues.push(
{
Kategorie: bedStatus,
num: v,
color: this.getCapacityStateColor(bedStatus),
Datum: moment()
}
);
}
}
if (sumOfOneSlice > maxNumSlices) {
maxNumSlices = sumOfOneSlice;
}
counter++;
}
// hack deep clone spec
const spec = JSON.parse(JSON.stringify(this.temporalChartTemplateSpec));
// inject data values
spec.data.values = dataValues;
// if (this.isSingleHospital && (new Date(this.lastUpdate).getTime() - new Date(this.firstTimestamp).getTime() < 2 * 24 * 60 * 60 * 1000)) {
// spec.encoding.x.axis.labelExpr = "[timeFormat(datum.value, '%d.%m'), false ? ' ' : timeFormat(datum.value, '(%H:%M)')]";
// }
spec.encoding.y.scale = {
domain: [0, maxNumSlices]
};
if (!this.isSingleHospital) { | else {
// is single hospital
spec.encoding.y.axis = false;
spec.height = spec.height * 0.3;
spec.width = 200;
}
// also overwrite the title
spec.encoding.x.title = '';
if (this.i18nService.getCurrentLocale() === SupportedLocales.DE_DE) {
spec.encoding.x.axis.format = '%d.%m';
} else {
spec.encoding.x.axis.format = '%m/%d';
}
if (summedbedcounts > 0) {
const bedType = bedAccessor === 'icu_low_state' ? this.eBedType.icuLow : (bedAccessor === 'icu_high_state' ? this.eBedType.icuHigh : this.eBedType.ecmo);
specs.push({
title: this.bedAccessorsMapping[bedAccessor],
chart: spec,
bedtype: bedType,
bedStatusDesc: this.getStatusDescriptionFor(bedType)
});
}
// set the max value
specs.forEach(s => {
s.chart.encoding.color.scale.domain = bedStati;
s.chart.encoding.color.scale.range = colors;
// spec.encoding.color.range = Math.min(maxNum+1, 5);
});
}
return specs;
}));
}
}
|
spec.mark.interpolate = 'step-after';
spec.encoding.y.axis.title = this.translationService.translate('Anzahl KH');
spec.encoding.x.axis.tickCount = 5;
// spec.width = 370;
} | conditional_block |
mod.rs | use md5;
use rustc_serialize::hex::ToHex;
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
enum Dir {
Up,
Down,
Left,
Right,
}
impl Dir {
fn as_char(&self) -> char {
match *self {
Dir::Up => 'U',
Dir::Down => 'D',
Dir::Left => 'L',
Dir::Right => 'R',
}
}
}
fn path_as_string(path: &Vec<Dir>) -> String {
path.iter().map(|d| d.as_char()).collect()
}
trait CanBeDoor {
fn is_door(&self) -> bool;
}
impl CanBeDoor for char {
fn is_door(&self) -> bool {
match *self {
'b' | 'c' | 'd' | 'e' | 'f' => true,
_ => false,
}
}
}
fn open_doors_here(passcode: &str, path: &Vec<Dir>) -> Vec<Dir> {
let mut hash_source = passcode.to_owned();
hash_source.push_str(&path_as_string(path));
let hash = md5::compute(hash_source.as_bytes()).to_hex();
// println!("hash computed {} from {}",
// hash.chars().take(4).collect::<String>(),
// hash_source);
let mut i = hash.chars();
let first = i.next().unwrap();
let second = i.next().unwrap();
let third = i.next().unwrap();
let fourth = i.next().unwrap();
let mut result = Vec::new();
if first.is_door() {
result.push(Dir::Up);
}
if second.is_door() {
result.push(Dir::Down);
}
if third.is_door() {
result.push(Dir::Left);
}
if fourth.is_door() {
result.push(Dir::Right);
}
result
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Point {
x: usize,
y: usize,
}
impl Point {
fn go(&self, dir: Dir) -> Point {
match dir {
Dir::Left => Point { x: self.x - 1, ..*self },
Dir::Right => Point { x: self.x + 1, ..*self },
Dir::Up => Point { y: self.y - 1, ..*self },
Dir::Down => Point { y: self.y + 1, ..*self },
}
}
}
#[derive(Debug)]
struct Maze {
width: usize,
height: usize,
destination: Point,
calculated: HashMap<Point, HashMap<Vec<Dir>, Vec<Dir>>>,
passcode: String,
}
impl Maze {
fn | (passcode: String) -> Maze {
Maze {
width: 4,
height: 4,
destination: Point { x: 3, y: 3 },
calculated: HashMap::new(),
passcode: passcode,
}
}
/// have we visited this room and had this set of doors before?
fn have_visited(&mut self, room: Point, path: &Vec<Dir>) -> bool {
let doors = self.get_doors_for(room.clone(), path.clone());
// println!("Have I been to {:?} via {:?} before? I see doors {:?}",
// room,
// path,
// doors);
let r = self.calculated.get(&room);
let result = match r {
Some(r) => {
let previous_door_sets_here =
r.iter().filter(|&(k, _)| k != path).map(|(_, v)| v).collect::<Vec<_>>();
// println!("Previous doors here {:?}", previous_door_sets_here);
previous_door_sets_here.into_iter().any(|d| d.clone() == doors)
}
None => false,
};
result
}
fn get_doors_for(&mut self, room: Point, path: Vec<Dir>) -> Vec<Dir> {
if room == self.destination {
// don't need to add doors here
let r = self.calculated.entry(room).or_insert(HashMap::new());
r.insert(path, Vec::new());
return Vec::new();
}
if let Some(r) = self.calculated.clone().get_mut(&room) {
match r.clone().get(&path) {
Some(p) => p.clone(),
None => {
let doors = open_doors_here(&self.passcode, &path);
r.insert(path.clone(), doors.clone());
doors
}
}
} else {
let doors = open_doors_here(&self.passcode, &path);
let mut newmap = HashMap::new();
newmap.insert(path.clone(), doors.clone());
self.calculated.insert(room, newmap);
doors
}
}
fn follow_all_routes(&mut self) {
// this turns out to be completely specialised for short route finding and is
// absolutely not finding all routes at all because it doesn't
// search exhaustively
let mut current_room = Point { x: 0, y: 0 };
let mut current_path = Vec::new();
// collection of (room, path taken to that room, door)
let mut doors_to_follow = Vec::new();
let mut iteration = 0;
loop {
iteration += 1;
if iteration % 100 == 0 {
println!("Iteration {} and I have {} doors to follow",
iteration,
doors_to_follow.len());
}
// println!("I am at {:?} and I came by path {:?}",
// current_room,
// current_path);
let doors_here = self.get_doors_for(current_room, current_path.clone())
.into_iter()
.filter(|d| !self.is_wall(current_room, *d))
.collect::<Vec<_>>();
// println!("These are the doors here: {:?}", doors_here);
for door in doors_here {
let mut new_path = current_path.clone();
new_path.push(door);
let new_room = current_room.go(door);
if !self.have_visited(new_room, &new_path) {
let to_add = (current_room, current_path.clone(), door);
// println!("Adding to my search list {:?}", to_add);
doors_to_follow.push(to_add);
}
}
let next_room = doors_to_follow.pop();
match next_room {
None => break, // we're done!
Some((room, path, door)) => {
// go to that room
current_room = room.go(door);
// go through that door
current_path = path;
current_path.push(door);
}
}
}
}
fn is_wall(&self, room: Point, dir: Dir) -> bool {
match dir {
Dir::Left => room.x == 0,
Dir::Right => room.x >= self.width - 1,
Dir::Up => room.y == 0,
Dir::Down => room.y >= self.height - 1,
}
}
fn get_routes_for_destination(&self) -> Vec<Vec<Dir>> {
let room = self.calculated.get(&self.destination);
match room {
None => Vec::new(),
Some(r) => r.keys().cloned().collect(),
}
}
fn find_longest_route(&self, pos: Point, path: &Vec<Dir>, steps: usize) -> usize {
// based on the nicely elegant C solution by GitHub user rhardih
let doors = open_doors_here(&self.passcode, path);
let mut longest = 0;
let can_up = doors.contains(&Dir::Up) && !self.is_wall(pos.clone(), Dir::Up);
let can_down = doors.contains(&Dir::Down) && !self.is_wall(pos.clone(), Dir::Down);
let can_left = doors.contains(&Dir::Left) && !self.is_wall(pos.clone(), Dir::Left);
let can_right = doors.contains(&Dir::Right) && !self.is_wall(pos.clone(), Dir::Right);
// can only go down and we're above the destination
if pos.x == self.destination.x && pos.y == self.destination.y - 1 && can_down &&
!can_up && !can_left && !can_right {
return steps + 1;
}
// can only go right and we're left of the destination
if pos.x == self.destination.x - 1 && pos.y == self.destination.y && can_right &&
!can_up && !can_left && !can_down {
return steps + 1;
}
// more generally
for &(can, dir) in [(can_up, Dir::Up),
(can_down && pos.go(Dir::Down) != self.destination, Dir::Down),
(can_left, Dir::Left),
(can_right && pos.go(Dir::Right) != self.destination, Dir::Right)]
.into_iter() {
if can {
let mut try_route = path.clone();
try_route.push(dir);
let r = self.find_longest_route(pos.go(dir), &try_route, steps + 1);
if r > longest {
longest = r;
}
}
}
if longest > 0 {
return longest;
}
if pos.go(Dir::Down) == self.destination && can_down {
return steps + 1;
} else if pos.go(Dir::Right) == self.destination && can_right {
return steps + 1;
}
// or there is no path at all
return 0;
}
}
fn shortest_vec(vecs: &Vec<Vec<Dir>>) -> Option<Vec<Dir>> {
let mut shortest_so_far = None;
let mut shortest_size = usize::max_value();
for v in vecs {
if v.len() < shortest_size {
shortest_so_far = Some(v);
shortest_size = v.len();
}
}
shortest_so_far.map(|v| v.clone())
}
fn find_shortest_route_with_key(key: &str) -> String {
let mut maze = Maze::new(key.to_owned());
maze.follow_all_routes();
let routes = maze.get_routes_for_destination();
let shortest = shortest_vec(&routes);
format_route(&shortest)
}
fn find_longest_route_length_with_key(key: &str) -> usize {
let maze = Maze::new(key.to_owned());
maze.find_longest_route(Point { x: 0, y: 0 }, &Vec::new(), 0)
}
fn format_route(route: &Option<Vec<Dir>>) -> String {
route.clone()
.map(|p| p.into_iter().map(|d| d.as_char()).collect::<String>())
.unwrap_or("No route found".to_owned())
}
pub fn do_day17() {
let key = "gdjjyniy";
let shortest = find_shortest_route_with_key(key);
println!("Shortest: {}", shortest);
println!("Longest: {}", find_longest_route_length_with_key(key));
}
#[test]
fn test_it_1_shortest() {
let shortest = find_shortest_route_with_key("ihgpwlah");
assert_eq!(shortest, "DDRRRD".to_owned());
// assert_eq!(longest.len(), 370);
}
#[test]
fn test_it_1_longest() {
let longest = find_longest_route_length_with_key("ihgpwlah");
assert_eq!(longest, 370);
}
#[test]
fn test_it_2_shortest() {
let shortest = find_shortest_route_with_key("kglvqrro");
assert_eq!(shortest, "DDUDRLRRUDRD".to_owned());
// assert_eq!(longest.len(), 492);
}
#[test]
fn test_it_2_longest() {
assert_eq!(find_longest_route_length_with_key("kglvqrro"), 492);
}
#[test]
fn test_it_3_shortest() {
let shortest = find_shortest_route_with_key("ulqzkmiv");
assert_eq!(shortest, "DRURDRUDDLLDLUURRDULRLDUUDDDRR".to_owned());
}
#[test]
fn test_it_3_longest() {
assert_eq!(find_longest_route_length_with_key("ulqzkmiv"), 830);
}
#[test]
fn test_open_doors_here() {
assert_eq!(open_doors_here("hijkl", &vec![]),
vec![Dir::Up, Dir::Down, Dir::Left]);
assert_eq!(open_doors_here("hijkl", &vec![Dir::Down]),
vec![Dir::Up, Dir::Left, Dir::Right]);
}
#[test]
fn test_get_doors_for() {
let mut maze = Maze::new("hijkl".to_owned());
assert_eq!(maze.get_doors_for(Point { x: 0, y: 0 }, vec![]),
vec![Dir::Up, Dir::Down, Dir::Left]);
// check it cached it
assert_eq!(*maze.calculated.get(&Point { x: 0, y: 0 }).unwrap().get(&vec![]).unwrap(),
vec![Dir::Up, Dir::Down, Dir::Left]);
}
| new | identifier_name |
mod.rs | use md5;
use rustc_serialize::hex::ToHex;
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
enum Dir {
Up,
Down,
Left,
Right,
}
impl Dir {
fn as_char(&self) -> char {
match *self {
Dir::Up => 'U',
Dir::Down => 'D',
Dir::Left => 'L',
Dir::Right => 'R',
}
}
}
fn path_as_string(path: &Vec<Dir>) -> String {
path.iter().map(|d| d.as_char()).collect()
}
trait CanBeDoor {
fn is_door(&self) -> bool;
}
impl CanBeDoor for char {
fn is_door(&self) -> bool {
match *self {
'b' | 'c' | 'd' | 'e' | 'f' => true,
_ => false,
}
}
}
fn open_doors_here(passcode: &str, path: &Vec<Dir>) -> Vec<Dir> {
let mut hash_source = passcode.to_owned();
hash_source.push_str(&path_as_string(path));
let hash = md5::compute(hash_source.as_bytes()).to_hex();
// println!("hash computed {} from {}",
// hash.chars().take(4).collect::<String>(),
// hash_source);
let mut i = hash.chars();
let first = i.next().unwrap();
let second = i.next().unwrap();
let third = i.next().unwrap();
let fourth = i.next().unwrap();
let mut result = Vec::new();
if first.is_door() {
result.push(Dir::Up);
}
if second.is_door() {
result.push(Dir::Down);
}
if third.is_door() {
result.push(Dir::Left);
}
if fourth.is_door() {
result.push(Dir::Right);
}
result
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Point {
x: usize,
y: usize,
}
impl Point {
fn go(&self, dir: Dir) -> Point {
match dir {
Dir::Left => Point { x: self.x - 1, ..*self },
Dir::Right => Point { x: self.x + 1, ..*self },
Dir::Up => Point { y: self.y - 1, ..*self },
Dir::Down => Point { y: self.y + 1, ..*self },
}
}
}
#[derive(Debug)]
struct Maze {
width: usize,
height: usize,
destination: Point,
calculated: HashMap<Point, HashMap<Vec<Dir>, Vec<Dir>>>,
passcode: String,
}
impl Maze {
fn new(passcode: String) -> Maze {
Maze {
width: 4,
height: 4,
destination: Point { x: 3, y: 3 },
calculated: HashMap::new(),
passcode: passcode,
}
}
/// have we visited this room and had this set of doors before?
fn have_visited(&mut self, room: Point, path: &Vec<Dir>) -> bool {
let doors = self.get_doors_for(room.clone(), path.clone());
// println!("Have I been to {:?} via {:?} before? I see doors {:?}",
// room,
// path,
// doors);
let r = self.calculated.get(&room);
let result = match r {
Some(r) => {
let previous_door_sets_here =
r.iter().filter(|&(k, _)| k != path).map(|(_, v)| v).collect::<Vec<_>>(); | // println!("Previous doors here {:?}", previous_door_sets_here);
previous_door_sets_here.into_iter().any(|d| d.clone() == doors)
}
None => false,
};
result
}
fn get_doors_for(&mut self, room: Point, path: Vec<Dir>) -> Vec<Dir> {
if room == self.destination {
// don't need to add doors here
let r = self.calculated.entry(room).or_insert(HashMap::new());
r.insert(path, Vec::new());
return Vec::new();
}
if let Some(r) = self.calculated.clone().get_mut(&room) {
match r.clone().get(&path) {
Some(p) => p.clone(),
None => {
let doors = open_doors_here(&self.passcode, &path);
r.insert(path.clone(), doors.clone());
doors
}
}
} else {
let doors = open_doors_here(&self.passcode, &path);
let mut newmap = HashMap::new();
newmap.insert(path.clone(), doors.clone());
self.calculated.insert(room, newmap);
doors
}
}
fn follow_all_routes(&mut self) {
// this turns out to be completely specialised for short route finding and is
// absolutely not finding all routes at all because it doesn't
// search exhaustively
let mut current_room = Point { x: 0, y: 0 };
let mut current_path = Vec::new();
// collection of (room, path taken to that room, door)
let mut doors_to_follow = Vec::new();
let mut iteration = 0;
loop {
iteration += 1;
if iteration % 100 == 0 {
println!("Iteration {} and I have {} doors to follow",
iteration,
doors_to_follow.len());
}
// println!("I am at {:?} and I came by path {:?}",
// current_room,
// current_path);
let doors_here = self.get_doors_for(current_room, current_path.clone())
.into_iter()
.filter(|d| !self.is_wall(current_room, *d))
.collect::<Vec<_>>();
// println!("These are the doors here: {:?}", doors_here);
for door in doors_here {
let mut new_path = current_path.clone();
new_path.push(door);
let new_room = current_room.go(door);
if !self.have_visited(new_room, &new_path) {
let to_add = (current_room, current_path.clone(), door);
// println!("Adding to my search list {:?}", to_add);
doors_to_follow.push(to_add);
}
}
let next_room = doors_to_follow.pop();
match next_room {
None => break, // we're done!
Some((room, path, door)) => {
// go to that room
current_room = room.go(door);
// go through that door
current_path = path;
current_path.push(door);
}
}
}
}
fn is_wall(&self, room: Point, dir: Dir) -> bool {
match dir {
Dir::Left => room.x == 0,
Dir::Right => room.x >= self.width - 1,
Dir::Up => room.y == 0,
Dir::Down => room.y >= self.height - 1,
}
}
fn get_routes_for_destination(&self) -> Vec<Vec<Dir>> {
let room = self.calculated.get(&self.destination);
match room {
None => Vec::new(),
Some(r) => r.keys().cloned().collect(),
}
}
fn find_longest_route(&self, pos: Point, path: &Vec<Dir>, steps: usize) -> usize {
// based on the nicely elegant C solution by GitHub user rhardih
let doors = open_doors_here(&self.passcode, path);
let mut longest = 0;
let can_up = doors.contains(&Dir::Up) && !self.is_wall(pos.clone(), Dir::Up);
let can_down = doors.contains(&Dir::Down) && !self.is_wall(pos.clone(), Dir::Down);
let can_left = doors.contains(&Dir::Left) && !self.is_wall(pos.clone(), Dir::Left);
let can_right = doors.contains(&Dir::Right) && !self.is_wall(pos.clone(), Dir::Right);
// can only go down and we're above the destination
if pos.x == self.destination.x && pos.y == self.destination.y - 1 && can_down &&
!can_up && !can_left && !can_right {
return steps + 1;
}
// can only go right and we're left of the destination
if pos.x == self.destination.x - 1 && pos.y == self.destination.y && can_right &&
!can_up && !can_left && !can_down {
return steps + 1;
}
// more generally
for &(can, dir) in [(can_up, Dir::Up),
(can_down && pos.go(Dir::Down) != self.destination, Dir::Down),
(can_left, Dir::Left),
(can_right && pos.go(Dir::Right) != self.destination, Dir::Right)]
.into_iter() {
if can {
let mut try_route = path.clone();
try_route.push(dir);
let r = self.find_longest_route(pos.go(dir), &try_route, steps + 1);
if r > longest {
longest = r;
}
}
}
if longest > 0 {
return longest;
}
if pos.go(Dir::Down) == self.destination && can_down {
return steps + 1;
} else if pos.go(Dir::Right) == self.destination && can_right {
return steps + 1;
}
// or there is no path at all
return 0;
}
}
fn shortest_vec(vecs: &Vec<Vec<Dir>>) -> Option<Vec<Dir>> {
let mut shortest_so_far = None;
let mut shortest_size = usize::max_value();
for v in vecs {
if v.len() < shortest_size {
shortest_so_far = Some(v);
shortest_size = v.len();
}
}
shortest_so_far.map(|v| v.clone())
}
fn find_shortest_route_with_key(key: &str) -> String {
let mut maze = Maze::new(key.to_owned());
maze.follow_all_routes();
let routes = maze.get_routes_for_destination();
let shortest = shortest_vec(&routes);
format_route(&shortest)
}
fn find_longest_route_length_with_key(key: &str) -> usize {
let maze = Maze::new(key.to_owned());
maze.find_longest_route(Point { x: 0, y: 0 }, &Vec::new(), 0)
}
fn format_route(route: &Option<Vec<Dir>>) -> String {
route.clone()
.map(|p| p.into_iter().map(|d| d.as_char()).collect::<String>())
.unwrap_or("No route found".to_owned())
}
pub fn do_day17() {
let key = "gdjjyniy";
let shortest = find_shortest_route_with_key(key);
println!("Shortest: {}", shortest);
println!("Longest: {}", find_longest_route_length_with_key(key));
}
#[test]
fn test_it_1_shortest() {
let shortest = find_shortest_route_with_key("ihgpwlah");
assert_eq!(shortest, "DDRRRD".to_owned());
// assert_eq!(longest.len(), 370);
}
#[test]
fn test_it_1_longest() {
let longest = find_longest_route_length_with_key("ihgpwlah");
assert_eq!(longest, 370);
}
#[test]
fn test_it_2_shortest() {
let shortest = find_shortest_route_with_key("kglvqrro");
assert_eq!(shortest, "DDUDRLRRUDRD".to_owned());
// assert_eq!(longest.len(), 492);
}
#[test]
fn test_it_2_longest() {
assert_eq!(find_longest_route_length_with_key("kglvqrro"), 492);
}
#[test]
fn test_it_3_shortest() {
let shortest = find_shortest_route_with_key("ulqzkmiv");
assert_eq!(shortest, "DRURDRUDDLLDLUURRDULRLDUUDDDRR".to_owned());
}
#[test]
fn test_it_3_longest() {
assert_eq!(find_longest_route_length_with_key("ulqzkmiv"), 830);
}
#[test]
fn test_open_doors_here() {
assert_eq!(open_doors_here("hijkl", &vec![]),
vec![Dir::Up, Dir::Down, Dir::Left]);
assert_eq!(open_doors_here("hijkl", &vec![Dir::Down]),
vec![Dir::Up, Dir::Left, Dir::Right]);
}
#[test]
fn test_get_doors_for() {
let mut maze = Maze::new("hijkl".to_owned());
assert_eq!(maze.get_doors_for(Point { x: 0, y: 0 }, vec![]),
vec![Dir::Up, Dir::Down, Dir::Left]);
// check it cached it
assert_eq!(*maze.calculated.get(&Point { x: 0, y: 0 }).unwrap().get(&vec![]).unwrap(),
vec![Dir::Up, Dir::Down, Dir::Left]);
} | random_line_split | |
mod.rs | use md5;
use rustc_serialize::hex::ToHex;
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
enum Dir {
Up,
Down,
Left,
Right,
}
impl Dir {
fn as_char(&self) -> char {
match *self {
Dir::Up => 'U',
Dir::Down => 'D',
Dir::Left => 'L',
Dir::Right => 'R',
}
}
}
fn path_as_string(path: &Vec<Dir>) -> String {
path.iter().map(|d| d.as_char()).collect()
}
trait CanBeDoor {
fn is_door(&self) -> bool;
}
impl CanBeDoor for char {
fn is_door(&self) -> bool {
match *self {
'b' | 'c' | 'd' | 'e' | 'f' => true,
_ => false,
}
}
}
fn open_doors_here(passcode: &str, path: &Vec<Dir>) -> Vec<Dir> {
let mut hash_source = passcode.to_owned();
hash_source.push_str(&path_as_string(path));
let hash = md5::compute(hash_source.as_bytes()).to_hex();
// println!("hash computed {} from {}",
// hash.chars().take(4).collect::<String>(),
// hash_source);
let mut i = hash.chars();
let first = i.next().unwrap();
let second = i.next().unwrap();
let third = i.next().unwrap();
let fourth = i.next().unwrap();
let mut result = Vec::new();
if first.is_door() {
result.push(Dir::Up);
}
if second.is_door() {
result.push(Dir::Down);
}
if third.is_door() {
result.push(Dir::Left);
}
if fourth.is_door() {
result.push(Dir::Right);
}
result
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Point {
x: usize,
y: usize,
}
impl Point {
fn go(&self, dir: Dir) -> Point {
match dir {
Dir::Left => Point { x: self.x - 1, ..*self },
Dir::Right => Point { x: self.x + 1, ..*self },
Dir::Up => Point { y: self.y - 1, ..*self },
Dir::Down => Point { y: self.y + 1, ..*self },
}
}
}
#[derive(Debug)]
struct Maze {
width: usize,
height: usize,
destination: Point,
calculated: HashMap<Point, HashMap<Vec<Dir>, Vec<Dir>>>,
passcode: String,
}
impl Maze {
fn new(passcode: String) -> Maze {
Maze {
width: 4,
height: 4,
destination: Point { x: 3, y: 3 },
calculated: HashMap::new(),
passcode: passcode,
}
}
/// have we visited this room and had this set of doors before?
fn have_visited(&mut self, room: Point, path: &Vec<Dir>) -> bool {
let doors = self.get_doors_for(room.clone(), path.clone());
// println!("Have I been to {:?} via {:?} before? I see doors {:?}",
// room,
// path,
// doors);
let r = self.calculated.get(&room);
let result = match r {
Some(r) => {
let previous_door_sets_here =
r.iter().filter(|&(k, _)| k != path).map(|(_, v)| v).collect::<Vec<_>>();
// println!("Previous doors here {:?}", previous_door_sets_here);
previous_door_sets_here.into_iter().any(|d| d.clone() == doors)
}
None => false,
};
result
}
fn get_doors_for(&mut self, room: Point, path: Vec<Dir>) -> Vec<Dir> {
if room == self.destination {
// don't need to add doors here
let r = self.calculated.entry(room).or_insert(HashMap::new());
r.insert(path, Vec::new());
return Vec::new();
}
if let Some(r) = self.calculated.clone().get_mut(&room) {
match r.clone().get(&path) {
Some(p) => p.clone(),
None => {
let doors = open_doors_here(&self.passcode, &path);
r.insert(path.clone(), doors.clone());
doors
}
}
} else {
let doors = open_doors_here(&self.passcode, &path);
let mut newmap = HashMap::new();
newmap.insert(path.clone(), doors.clone());
self.calculated.insert(room, newmap);
doors
}
}
fn follow_all_routes(&mut self) {
// this turns out to be completely specialised for short route finding and is
// absolutely not finding all routes at all because it doesn't
// search exhaustively
let mut current_room = Point { x: 0, y: 0 };
let mut current_path = Vec::new();
// collection of (room, path taken to that room, door)
let mut doors_to_follow = Vec::new();
let mut iteration = 0;
loop {
iteration += 1;
if iteration % 100 == 0 {
println!("Iteration {} and I have {} doors to follow",
iteration,
doors_to_follow.len());
}
// println!("I am at {:?} and I came by path {:?}",
// current_room,
// current_path);
let doors_here = self.get_doors_for(current_room, current_path.clone())
.into_iter()
.filter(|d| !self.is_wall(current_room, *d))
.collect::<Vec<_>>();
// println!("These are the doors here: {:?}", doors_here);
for door in doors_here {
let mut new_path = current_path.clone();
new_path.push(door);
let new_room = current_room.go(door);
if !self.have_visited(new_room, &new_path) {
let to_add = (current_room, current_path.clone(), door);
// println!("Adding to my search list {:?}", to_add);
doors_to_follow.push(to_add);
}
}
let next_room = doors_to_follow.pop();
match next_room {
None => break, // we're done!
Some((room, path, door)) => {
// go to that room
current_room = room.go(door);
// go through that door
current_path = path;
current_path.push(door);
}
}
}
}
fn is_wall(&self, room: Point, dir: Dir) -> bool {
match dir {
Dir::Left => room.x == 0,
Dir::Right => room.x >= self.width - 1,
Dir::Up => room.y == 0,
Dir::Down => room.y >= self.height - 1,
}
}
fn get_routes_for_destination(&self) -> Vec<Vec<Dir>> |
fn find_longest_route(&self, pos: Point, path: &Vec<Dir>, steps: usize) -> usize {
// based on the nicely elegant C solution by GitHub user rhardih
let doors = open_doors_here(&self.passcode, path);
let mut longest = 0;
let can_up = doors.contains(&Dir::Up) && !self.is_wall(pos.clone(), Dir::Up);
let can_down = doors.contains(&Dir::Down) && !self.is_wall(pos.clone(), Dir::Down);
let can_left = doors.contains(&Dir::Left) && !self.is_wall(pos.clone(), Dir::Left);
let can_right = doors.contains(&Dir::Right) && !self.is_wall(pos.clone(), Dir::Right);
// can only go down and we're above the destination
if pos.x == self.destination.x && pos.y == self.destination.y - 1 && can_down &&
!can_up && !can_left && !can_right {
return steps + 1;
}
// can only go right and we're left of the destination
if pos.x == self.destination.x - 1 && pos.y == self.destination.y && can_right &&
!can_up && !can_left && !can_down {
return steps + 1;
}
// more generally
for &(can, dir) in [(can_up, Dir::Up),
(can_down && pos.go(Dir::Down) != self.destination, Dir::Down),
(can_left, Dir::Left),
(can_right && pos.go(Dir::Right) != self.destination, Dir::Right)]
.into_iter() {
if can {
let mut try_route = path.clone();
try_route.push(dir);
let r = self.find_longest_route(pos.go(dir), &try_route, steps + 1);
if r > longest {
longest = r;
}
}
}
if longest > 0 {
return longest;
}
if pos.go(Dir::Down) == self.destination && can_down {
return steps + 1;
} else if pos.go(Dir::Right) == self.destination && can_right {
return steps + 1;
}
// or there is no path at all
return 0;
}
}
fn shortest_vec(vecs: &Vec<Vec<Dir>>) -> Option<Vec<Dir>> {
let mut shortest_so_far = None;
let mut shortest_size = usize::max_value();
for v in vecs {
if v.len() < shortest_size {
shortest_so_far = Some(v);
shortest_size = v.len();
}
}
shortest_so_far.map(|v| v.clone())
}
fn find_shortest_route_with_key(key: &str) -> String {
let mut maze = Maze::new(key.to_owned());
maze.follow_all_routes();
let routes = maze.get_routes_for_destination();
let shortest = shortest_vec(&routes);
format_route(&shortest)
}
fn find_longest_route_length_with_key(key: &str) -> usize {
let maze = Maze::new(key.to_owned());
maze.find_longest_route(Point { x: 0, y: 0 }, &Vec::new(), 0)
}
fn format_route(route: &Option<Vec<Dir>>) -> String {
route.clone()
.map(|p| p.into_iter().map(|d| d.as_char()).collect::<String>())
.unwrap_or("No route found".to_owned())
}
pub fn do_day17() {
let key = "gdjjyniy";
let shortest = find_shortest_route_with_key(key);
println!("Shortest: {}", shortest);
println!("Longest: {}", find_longest_route_length_with_key(key));
}
#[test]
fn test_it_1_shortest() {
let shortest = find_shortest_route_with_key("ihgpwlah");
assert_eq!(shortest, "DDRRRD".to_owned());
// assert_eq!(longest.len(), 370);
}
#[test]
fn test_it_1_longest() {
let longest = find_longest_route_length_with_key("ihgpwlah");
assert_eq!(longest, 370);
}
#[test]
fn test_it_2_shortest() {
let shortest = find_shortest_route_with_key("kglvqrro");
assert_eq!(shortest, "DDUDRLRRUDRD".to_owned());
// assert_eq!(longest.len(), 492);
}
#[test]
fn test_it_2_longest() {
assert_eq!(find_longest_route_length_with_key("kglvqrro"), 492);
}
#[test]
fn test_it_3_shortest() {
let shortest = find_shortest_route_with_key("ulqzkmiv");
assert_eq!(shortest, "DRURDRUDDLLDLUURRDULRLDUUDDDRR".to_owned());
}
#[test]
fn test_it_3_longest() {
assert_eq!(find_longest_route_length_with_key("ulqzkmiv"), 830);
}
#[test]
fn test_open_doors_here() {
assert_eq!(open_doors_here("hijkl", &vec![]),
vec![Dir::Up, Dir::Down, Dir::Left]);
assert_eq!(open_doors_here("hijkl", &vec![Dir::Down]),
vec![Dir::Up, Dir::Left, Dir::Right]);
}
#[test]
fn test_get_doors_for() {
let mut maze = Maze::new("hijkl".to_owned());
assert_eq!(maze.get_doors_for(Point { x: 0, y: 0 }, vec![]),
vec![Dir::Up, Dir::Down, Dir::Left]);
// check it cached it
assert_eq!(*maze.calculated.get(&Point { x: 0, y: 0 }).unwrap().get(&vec![]).unwrap(),
vec![Dir::Up, Dir::Down, Dir::Left]);
}
| {
let room = self.calculated.get(&self.destination);
match room {
None => Vec::new(),
Some(r) => r.keys().cloned().collect(),
}
} | identifier_body |
frame.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package cassandra
import (
"errors"
"fmt"
"runtime"
"sync"
"github.com/elastic/beats/libbeat/common/streambuf"
"github.com/elastic/beats/libbeat/logp"
)
var (
ErrFrameTooBig = errors.New("frame length is bigger than the maximum allowed")
debugf = logp.MakeDebug("cassandra")
)
type frameHeader struct {
Version protoVersion
Flags byte
Stream int
Op FrameOp
BodyLength int
HeadLength int
CustomPayload map[string][]byte
}
func (f frameHeader) ToMap() map[string]interface{} {
data := make(map[string]interface{})
data["version"] = fmt.Sprintf("%d", f.Version.version())
data["flags"] = getHeadFlagString(f.Flags)
data["stream"] = f.Stream
data["op"] = f.Op.String()
data["length"] = f.BodyLength
return data
}
func (f frameHeader) String() string {
return fmt.Sprintf("version:%s, flags: %s, steam: %v, OP: %v, length: %v", f.Version.String(), getHeadFlagString(f.Flags), f.Stream, f.Op.String(), f.BodyLength)
}
var framerPool = sync.Pool{
New: func() interface{} {
return &Framer{compres: nil, isCompressed: false, Header: nil, r: nil, decoder: nil}
},
}
// a framer is responsible for reading, writing and parsing frames on a single stream
type Framer struct {
proto byte
compres Compressor
isCompressed bool
// if this frame was read then the header will be here
Header *frameHeader
r *streambuf.Buffer
decoder Decoder
}
func NewFramer(r *streambuf.Buffer, compressor Compressor) *Framer {
f := framerPool.Get().(*Framer)
f.compres = compressor
f.r = r
return f
}
// read header frame from stream
func (f *Framer) ReadHeader() (head *frameHeader, err error) {
v, err := f.r.ReadByte()
if err != nil {
return nil, err
}
version := v & protoVersionMask
if version < protoVersion1 || version > protoVersion4 {
return nil, fmt.Errorf("unsupported version: %x ", v)
}
f.proto = version
head = &frameHeader{}
head.Version = protoVersion(v)
flag, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Flags = flag
if version > protoVersion2 {
stream, err := f.r.ReadNetUint16()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
} else {
stream, err := f.r.ReadNetUint8()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
}
if head.BodyLength < 0 {
return nil, fmt.Errorf("frame body length can not be less than 0: %d", head.BodyLength)
} else if head.BodyLength > maxFrameSize {
// need to free up the connection to be used again
logp.Err("head length is too large")
return nil, ErrFrameTooBig
}
headSize := f.r.BufferConsumed()
head.HeadLength = headSize
debugf("header: %v", head)
f.Header = head
return head, nil
}
// reads a frame form the wire into the framers buffer
func (f *Framer) ReadFrame() (data map[string]interface{}, err error) {
defer func() {
if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok {
panic(r)
}
err = r.(error)
}
}()
decoder := &StreamDecoder{}
decoder.r = f.r
f.decoder = decoder
data = make(map[string]interface{})
//Only QUERY, PREPARE and EXECUTE queries support tracing
//If a response frame has the tracing flag set, its body contains
//a tracing ID. The tracing ID is a [uuid] and is the first thing in
//the frame body. The rest of the body will then be the usual body
//corresponding to the response opcode.
if f.Header.Flags&flagTracing == flagTracing && (f.Header.Op&opQuery == opQuery || f.Header.Op&opExecute == opExecute || f.Header.Op&opPrepare == opPrepare) {
debugf("tracing enabled")
//seems no UUID to read, protocol incorrect?
//uid := decoder.ReadUUID()
//data["trace_id"] = uid.String()
}
if f.Header.Flags&flagWarning == flagWarning {
debugf("hit warning flags")
warnings := decoder.ReadStringList()
// dealing with warnings
data["warnings"] = warnings
}
if f.Header.Flags&flagCustomPayload == flagCustomPayload {
debugf("hit custom payload flags")
f.Header.CustomPayload = decoder.ReadBytesMap()
}
if f.Header.Flags&flagCompress == flagCompress {
//decompress data and switch to use bytearray decoder
if f.compres == nil {
logp.Err("hit compress flag, but compressor was not set")
panic(errors.New("hit compress flag, but compressor was not set"))
}
decoder := &ByteArrayDecoder{}
buf := make([]byte, f.Header.BodyLength)
f.r.Read(buf)
dec, err := f.compres.Decode(buf)
if err != nil {
return nil, err
}
decoder.Data = &dec
f.decoder = decoder
debugf("hit compress flags")
}
// assumes that the frame body has been read into rbuf
switch f.Header.Op {
//below ops are requests
case opStartup, opAuthResponse, opOptions, opPrepare, opExecute, opBatch, opRegister:
//ignored
case opQuery:
data = f.parseQueryFrame()
//below ops are responses
case opError:
data["error"] = f.parseErrorFrame()
case opResult:
data["result"] = f.parseResultFrame()
case opSupported:
data = f.parseSupportedFrame()
case opAuthenticate:
data["authentication"] = f.parseAuthenticateFrame()
case opAuthChallenge:
data["authentication"] = f.parseAuthChallengeFrame()
case opAuthSuccess:
data["authentication"] = f.parseAuthSuccessFrame()
case opEvent:
data["event"] = f.parseEventFrame()
case opReady:
// the body should be empty
default:
//ignore
debugf("unknow ops, not processed, %v", f.Header)
}
return data, nil
}
func (f *Framer) parseErrorFrame() (data map[string]interface{}) {
decoder := f.decoder
code := decoder.ReadInt()
msg := decoder.ReadString()
errT := ErrType(code)
data = make(map[string]interface{})
data["code"] = code
data["msg"] = msg
data["type"] = errT.String()
detail := map[string]interface{}{}
switch errT {
case errUnavailable:
cl := decoder.ReadConsistency()
required := decoder.ReadInt()
alive := decoder.ReadInt()
detail["read_consistency"] = cl.String()
detail["required"] = required
detail["alive"] = alive
case errWriteTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
writeType := decoder.ReadString()
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["write_type"] = writeType
case errReadTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
dataPresent, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["data_present"] = dataPresent != 0
case errAlreadyExists:
ks := decoder.ReadString()
table := decoder.ReadString()
detail["keyspace"] = ks
detail["table"] = table
case errUnprepared:
stmtID := decoder.ReadShortBytes()
detail["stmt_id"] = stmtID
case errReadFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
b, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["data_present"] = b != 0
case errWriteFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
detail["num_failures"] = decoder.ReadInt()
detail["write_type"] = decoder.ReadString()
case errFunctionFailure:
detail["keyspace"] = decoder.ReadString()
detail["function"] = decoder.ReadString()
detail["arg_types"] = decoder.ReadStringList()
case errInvalid, errBootstrapping, errConfig, errCredentials, errOverloaded,
errProtocol, errServer, errSyntax, errTruncate, errUnauthorized:
//ignored
default:
logp.Err("unknown error code: 0x%x", code)
}
if len(detail) > 0 {
data["details"] = detail
}
return data
}
func (f *Framer) parseSupportedFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["supported"] = (f.decoder).ReadStringMultiMap()
return data
}
func (f *Framer) parseResultMetadata(getPKinfo bool) map[string]interface{} {
decoder := f.decoder
meta := make(map[string]interface{})
flags := decoder.ReadInt()
meta["flags"] = getRowFlagString(flags)
colCount := decoder.ReadInt()
meta["col_count"] = colCount
if getPKinfo {
//only for prepared result
if f.proto >= protoVersion4 {
pkeyCount := decoder.ReadInt()
pkeys := make([]int, pkeyCount)
for i := 0; i < pkeyCount; i++ {
pkeys[i] = int(decoder.ReadShort())
}
meta["pkey_columns"] = pkeys
}
}
if flags&flagHasMorePages == flagHasMorePages {
meta["paging_state"] = fmt.Sprintf("%X", decoder.ReadBytes())
return meta
}
if flags&flagNoMetaData == flagNoMetaData {
return meta
}
var keyspace, table string
globalSpec := flags&flagGlobalTableSpec == flagGlobalTableSpec
if globalSpec {
keyspace = decoder.ReadString()
table = decoder.ReadString()
meta["keyspace"] = keyspace
meta["table"] = table
}
return meta
}
func (f *Framer) parseQueryFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["query"] = (f.decoder).ReadLongString()
return data
}
func (f *Framer) parseResultFrame() (data map[string]interface{}) {
kind := (f.decoder).ReadInt()
data = make(map[string]interface{})
switch kind {
case resultKindVoid:
data["type"] = "void"
case resultKindRows:
data["type"] = "rows"
data["rows"] = f.parseResultRows()
case resultKindSetKeyspace:
data["type"] = "set_keyspace"
data["keyspace"] = (f.decoder).ReadString()
case resultKindPrepared:
data["type"] = "prepared"
data["prepared"] = f.parseResultPrepared()
case resultKindSchemaChanged:
data["type"] = "schemaChanged"
data["schema_change"] = f.parseResultSchemaChange()
}
return data
}
func (f *Framer) parseResultRows() map[string]interface{} {
result := make(map[string]interface{})
result["meta"] = f.parseResultMetadata(false)
result["num_rows"] = (f.decoder).ReadInt()
return result
}
func (f *Framer) | () map[string]interface{} {
result := make(map[string]interface{})
uuid, err := UUIDFromBytes((f.decoder).ReadShortBytes())
if err != nil {
logp.Err("Error in parsing UUID")
}
result["prepared_id"] = uuid.String()
result["req_meta"] = f.parseResultMetadata(true)
if f.proto < protoVersion2 {
return result
}
result["resp_meta"] = f.parseResultMetadata(false)
return result
}
func (f *Framer) parseResultSchemaChange() (data map[string]interface{}) {
data = make(map[string]interface{})
decoder := f.decoder
if f.proto <= protoVersion2 {
change := decoder.ReadString()
keyspace := decoder.ReadString()
table := decoder.ReadString()
data["change"] = change
data["keyspace"] = keyspace
data["table"] = table
} else {
change := decoder.ReadString()
target := decoder.ReadString()
data["change"] = change
data["target"] = target
switch target {
case "KEYSPACE":
data["keyspace"] = decoder.ReadString()
case "TABLE", "TYPE":
data["keyspace"] = decoder.ReadString()
data["object"] = decoder.ReadString()
case "FUNCTION", "AGGREGATE":
data["keyspace"] = decoder.ReadString()
data["name"] = decoder.ReadString()
data["args"] = decoder.ReadStringList()
default:
logp.Warn("unknown SCHEMA_CHANGE target: %q change: %q", target, change)
}
}
return data
}
func (f *Framer) parseAuthenticateFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["class"] = (f.decoder).ReadString()
return data
}
func (f *Framer) parseAuthSuccessFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseAuthChallengeFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseEventFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
decoder := f.decoder
eventType := decoder.ReadString()
data["type"] = eventType
switch eventType {
case "TOPOLOGY_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "STATUS_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "SCHEMA_CHANGE":
// this should work for all versions
data["schema_change"] = f.parseResultSchemaChange()
default:
logp.Err("unknown event type: %q", eventType)
}
return data
}
| parseResultPrepared | identifier_name |
frame.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package cassandra
import (
"errors"
"fmt"
"runtime"
"sync"
"github.com/elastic/beats/libbeat/common/streambuf"
"github.com/elastic/beats/libbeat/logp"
)
var (
ErrFrameTooBig = errors.New("frame length is bigger than the maximum allowed")
debugf = logp.MakeDebug("cassandra")
)
type frameHeader struct {
Version protoVersion
Flags byte
Stream int
Op FrameOp
BodyLength int
HeadLength int
CustomPayload map[string][]byte
}
func (f frameHeader) ToMap() map[string]interface{} {
data := make(map[string]interface{})
data["version"] = fmt.Sprintf("%d", f.Version.version())
data["flags"] = getHeadFlagString(f.Flags)
data["stream"] = f.Stream
data["op"] = f.Op.String()
data["length"] = f.BodyLength
return data
}
func (f frameHeader) String() string {
return fmt.Sprintf("version:%s, flags: %s, steam: %v, OP: %v, length: %v", f.Version.String(), getHeadFlagString(f.Flags), f.Stream, f.Op.String(), f.BodyLength)
}
var framerPool = sync.Pool{
New: func() interface{} {
return &Framer{compres: nil, isCompressed: false, Header: nil, r: nil, decoder: nil}
},
}
// a framer is responsible for reading, writing and parsing frames on a single stream
type Framer struct {
proto byte
compres Compressor
isCompressed bool
// if this frame was read then the header will be here
Header *frameHeader
r *streambuf.Buffer
decoder Decoder
}
func NewFramer(r *streambuf.Buffer, compressor Compressor) *Framer {
f := framerPool.Get().(*Framer)
f.compres = compressor
f.r = r
return f
}
// read header frame from stream
func (f *Framer) ReadHeader() (head *frameHeader, err error) {
v, err := f.r.ReadByte()
if err != nil {
return nil, err
}
version := v & protoVersionMask
if version < protoVersion1 || version > protoVersion4 {
return nil, fmt.Errorf("unsupported version: %x ", v)
}
f.proto = version
head = &frameHeader{}
head.Version = protoVersion(v)
flag, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Flags = flag
if version > protoVersion2 {
stream, err := f.r.ReadNetUint16()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
} else {
stream, err := f.r.ReadNetUint8()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
}
if head.BodyLength < 0 {
return nil, fmt.Errorf("frame body length can not be less than 0: %d", head.BodyLength)
} else if head.BodyLength > maxFrameSize {
// need to free up the connection to be used again
logp.Err("head length is too large")
return nil, ErrFrameTooBig
}
headSize := f.r.BufferConsumed()
head.HeadLength = headSize
debugf("header: %v", head)
f.Header = head
return head, nil
}
// reads a frame form the wire into the framers buffer
func (f *Framer) ReadFrame() (data map[string]interface{}, err error) {
defer func() {
if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok {
panic(r)
}
err = r.(error)
}
}()
decoder := &StreamDecoder{}
decoder.r = f.r
f.decoder = decoder
data = make(map[string]interface{})
//Only QUERY, PREPARE and EXECUTE queries support tracing
//If a response frame has the tracing flag set, its body contains
//a tracing ID. The tracing ID is a [uuid] and is the first thing in
//the frame body. The rest of the body will then be the usual body
//corresponding to the response opcode.
if f.Header.Flags&flagTracing == flagTracing && (f.Header.Op&opQuery == opQuery || f.Header.Op&opExecute == opExecute || f.Header.Op&opPrepare == opPrepare) {
debugf("tracing enabled")
//seems no UUID to read, protocol incorrect?
//uid := decoder.ReadUUID()
//data["trace_id"] = uid.String()
}
if f.Header.Flags&flagWarning == flagWarning {
debugf("hit warning flags")
warnings := decoder.ReadStringList()
// dealing with warnings
data["warnings"] = warnings
}
if f.Header.Flags&flagCustomPayload == flagCustomPayload {
debugf("hit custom payload flags")
f.Header.CustomPayload = decoder.ReadBytesMap()
}
if f.Header.Flags&flagCompress == flagCompress {
//decompress data and switch to use bytearray decoder
if f.compres == nil {
logp.Err("hit compress flag, but compressor was not set")
panic(errors.New("hit compress flag, but compressor was not set"))
}
decoder := &ByteArrayDecoder{}
buf := make([]byte, f.Header.BodyLength)
f.r.Read(buf)
dec, err := f.compres.Decode(buf)
if err != nil {
return nil, err
}
decoder.Data = &dec
f.decoder = decoder
debugf("hit compress flags")
}
// assumes that the frame body has been read into rbuf
switch f.Header.Op {
//below ops are requests
case opStartup, opAuthResponse, opOptions, opPrepare, opExecute, opBatch, opRegister:
//ignored
case opQuery:
data = f.parseQueryFrame()
//below ops are responses
case opError:
data["error"] = f.parseErrorFrame()
case opResult:
data["result"] = f.parseResultFrame()
case opSupported:
data = f.parseSupportedFrame()
case opAuthenticate:
data["authentication"] = f.parseAuthenticateFrame()
case opAuthChallenge:
data["authentication"] = f.parseAuthChallengeFrame()
case opAuthSuccess:
data["authentication"] = f.parseAuthSuccessFrame()
case opEvent:
data["event"] = f.parseEventFrame()
case opReady:
// the body should be empty
default:
//ignore
debugf("unknow ops, not processed, %v", f.Header)
}
return data, nil
}
func (f *Framer) parseErrorFrame() (data map[string]interface{}) |
func (f *Framer) parseSupportedFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["supported"] = (f.decoder).ReadStringMultiMap()
return data
}
func (f *Framer) parseResultMetadata(getPKinfo bool) map[string]interface{} {
decoder := f.decoder
meta := make(map[string]interface{})
flags := decoder.ReadInt()
meta["flags"] = getRowFlagString(flags)
colCount := decoder.ReadInt()
meta["col_count"] = colCount
if getPKinfo {
//only for prepared result
if f.proto >= protoVersion4 {
pkeyCount := decoder.ReadInt()
pkeys := make([]int, pkeyCount)
for i := 0; i < pkeyCount; i++ {
pkeys[i] = int(decoder.ReadShort())
}
meta["pkey_columns"] = pkeys
}
}
if flags&flagHasMorePages == flagHasMorePages {
meta["paging_state"] = fmt.Sprintf("%X", decoder.ReadBytes())
return meta
}
if flags&flagNoMetaData == flagNoMetaData {
return meta
}
var keyspace, table string
globalSpec := flags&flagGlobalTableSpec == flagGlobalTableSpec
if globalSpec {
keyspace = decoder.ReadString()
table = decoder.ReadString()
meta["keyspace"] = keyspace
meta["table"] = table
}
return meta
}
func (f *Framer) parseQueryFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["query"] = (f.decoder).ReadLongString()
return data
}
func (f *Framer) parseResultFrame() (data map[string]interface{}) {
kind := (f.decoder).ReadInt()
data = make(map[string]interface{})
switch kind {
case resultKindVoid:
data["type"] = "void"
case resultKindRows:
data["type"] = "rows"
data["rows"] = f.parseResultRows()
case resultKindSetKeyspace:
data["type"] = "set_keyspace"
data["keyspace"] = (f.decoder).ReadString()
case resultKindPrepared:
data["type"] = "prepared"
data["prepared"] = f.parseResultPrepared()
case resultKindSchemaChanged:
data["type"] = "schemaChanged"
data["schema_change"] = f.parseResultSchemaChange()
}
return data
}
func (f *Framer) parseResultRows() map[string]interface{} {
result := make(map[string]interface{})
result["meta"] = f.parseResultMetadata(false)
result["num_rows"] = (f.decoder).ReadInt()
return result
}
func (f *Framer) parseResultPrepared() map[string]interface{} {
result := make(map[string]interface{})
uuid, err := UUIDFromBytes((f.decoder).ReadShortBytes())
if err != nil {
logp.Err("Error in parsing UUID")
}
result["prepared_id"] = uuid.String()
result["req_meta"] = f.parseResultMetadata(true)
if f.proto < protoVersion2 {
return result
}
result["resp_meta"] = f.parseResultMetadata(false)
return result
}
func (f *Framer) parseResultSchemaChange() (data map[string]interface{}) {
data = make(map[string]interface{})
decoder := f.decoder
if f.proto <= protoVersion2 {
change := decoder.ReadString()
keyspace := decoder.ReadString()
table := decoder.ReadString()
data["change"] = change
data["keyspace"] = keyspace
data["table"] = table
} else {
change := decoder.ReadString()
target := decoder.ReadString()
data["change"] = change
data["target"] = target
switch target {
case "KEYSPACE":
data["keyspace"] = decoder.ReadString()
case "TABLE", "TYPE":
data["keyspace"] = decoder.ReadString()
data["object"] = decoder.ReadString()
case "FUNCTION", "AGGREGATE":
data["keyspace"] = decoder.ReadString()
data["name"] = decoder.ReadString()
data["args"] = decoder.ReadStringList()
default:
logp.Warn("unknown SCHEMA_CHANGE target: %q change: %q", target, change)
}
}
return data
}
func (f *Framer) parseAuthenticateFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["class"] = (f.decoder).ReadString()
return data
}
func (f *Framer) parseAuthSuccessFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseAuthChallengeFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseEventFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
decoder := f.decoder
eventType := decoder.ReadString()
data["type"] = eventType
switch eventType {
case "TOPOLOGY_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "STATUS_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "SCHEMA_CHANGE":
// this should work for all versions
data["schema_change"] = f.parseResultSchemaChange()
default:
logp.Err("unknown event type: %q", eventType)
}
return data
}
| {
decoder := f.decoder
code := decoder.ReadInt()
msg := decoder.ReadString()
errT := ErrType(code)
data = make(map[string]interface{})
data["code"] = code
data["msg"] = msg
data["type"] = errT.String()
detail := map[string]interface{}{}
switch errT {
case errUnavailable:
cl := decoder.ReadConsistency()
required := decoder.ReadInt()
alive := decoder.ReadInt()
detail["read_consistency"] = cl.String()
detail["required"] = required
detail["alive"] = alive
case errWriteTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
writeType := decoder.ReadString()
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["write_type"] = writeType
case errReadTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
dataPresent, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["data_present"] = dataPresent != 0
case errAlreadyExists:
ks := decoder.ReadString()
table := decoder.ReadString()
detail["keyspace"] = ks
detail["table"] = table
case errUnprepared:
stmtID := decoder.ReadShortBytes()
detail["stmt_id"] = stmtID
case errReadFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
b, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["data_present"] = b != 0
case errWriteFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
detail["num_failures"] = decoder.ReadInt()
detail["write_type"] = decoder.ReadString()
case errFunctionFailure:
detail["keyspace"] = decoder.ReadString()
detail["function"] = decoder.ReadString()
detail["arg_types"] = decoder.ReadStringList()
case errInvalid, errBootstrapping, errConfig, errCredentials, errOverloaded,
errProtocol, errServer, errSyntax, errTruncate, errUnauthorized:
//ignored
default:
logp.Err("unknown error code: 0x%x", code)
}
if len(detail) > 0 {
data["details"] = detail
}
return data
} | identifier_body |
frame.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package cassandra
import (
"errors"
"fmt"
"runtime"
"sync"
"github.com/elastic/beats/libbeat/common/streambuf"
"github.com/elastic/beats/libbeat/logp"
)
var (
ErrFrameTooBig = errors.New("frame length is bigger than the maximum allowed")
debugf = logp.MakeDebug("cassandra")
)
type frameHeader struct {
Version protoVersion
Flags byte
Stream int
Op FrameOp
BodyLength int
HeadLength int
CustomPayload map[string][]byte
}
func (f frameHeader) ToMap() map[string]interface{} {
data := make(map[string]interface{})
data["version"] = fmt.Sprintf("%d", f.Version.version())
data["flags"] = getHeadFlagString(f.Flags)
data["stream"] = f.Stream
data["op"] = f.Op.String()
data["length"] = f.BodyLength
return data
}
func (f frameHeader) String() string {
return fmt.Sprintf("version:%s, flags: %s, steam: %v, OP: %v, length: %v", f.Version.String(), getHeadFlagString(f.Flags), f.Stream, f.Op.String(), f.BodyLength)
}
var framerPool = sync.Pool{
New: func() interface{} {
return &Framer{compres: nil, isCompressed: false, Header: nil, r: nil, decoder: nil}
},
}
// a framer is responsible for reading, writing and parsing frames on a single stream
type Framer struct {
proto byte
compres Compressor
isCompressed bool
// if this frame was read then the header will be here
Header *frameHeader
r *streambuf.Buffer
decoder Decoder
}
func NewFramer(r *streambuf.Buffer, compressor Compressor) *Framer {
f := framerPool.Get().(*Framer)
f.compres = compressor
f.r = r
return f
}
// read header frame from stream
func (f *Framer) ReadHeader() (head *frameHeader, err error) {
v, err := f.r.ReadByte()
if err != nil {
return nil, err
}
version := v & protoVersionMask
if version < protoVersion1 || version > protoVersion4 {
return nil, fmt.Errorf("unsupported version: %x ", v)
}
f.proto = version
head = &frameHeader{}
head.Version = protoVersion(v)
flag, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Flags = flag
if version > protoVersion2 {
stream, err := f.r.ReadNetUint16()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
} else {
stream, err := f.r.ReadNetUint8()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
}
if head.BodyLength < 0 {
return nil, fmt.Errorf("frame body length can not be less than 0: %d", head.BodyLength)
} else if head.BodyLength > maxFrameSize {
// need to free up the connection to be used again
logp.Err("head length is too large")
return nil, ErrFrameTooBig
}
headSize := f.r.BufferConsumed()
head.HeadLength = headSize
debugf("header: %v", head)
f.Header = head
return head, nil
}
// reads a frame form the wire into the framers buffer
func (f *Framer) ReadFrame() (data map[string]interface{}, err error) {
defer func() {
if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok {
panic(r)
}
err = r.(error)
}
}()
decoder := &StreamDecoder{}
decoder.r = f.r
f.decoder = decoder
data = make(map[string]interface{})
//Only QUERY, PREPARE and EXECUTE queries support tracing
//If a response frame has the tracing flag set, its body contains
//a tracing ID. The tracing ID is a [uuid] and is the first thing in
//the frame body. The rest of the body will then be the usual body
//corresponding to the response opcode.
if f.Header.Flags&flagTracing == flagTracing && (f.Header.Op&opQuery == opQuery || f.Header.Op&opExecute == opExecute || f.Header.Op&opPrepare == opPrepare) {
debugf("tracing enabled")
//seems no UUID to read, protocol incorrect?
//uid := decoder.ReadUUID()
//data["trace_id"] = uid.String()
}
if f.Header.Flags&flagWarning == flagWarning |
if f.Header.Flags&flagCustomPayload == flagCustomPayload {
debugf("hit custom payload flags")
f.Header.CustomPayload = decoder.ReadBytesMap()
}
if f.Header.Flags&flagCompress == flagCompress {
//decompress data and switch to use bytearray decoder
if f.compres == nil {
logp.Err("hit compress flag, but compressor was not set")
panic(errors.New("hit compress flag, but compressor was not set"))
}
decoder := &ByteArrayDecoder{}
buf := make([]byte, f.Header.BodyLength)
f.r.Read(buf)
dec, err := f.compres.Decode(buf)
if err != nil {
return nil, err
}
decoder.Data = &dec
f.decoder = decoder
debugf("hit compress flags")
}
// assumes that the frame body has been read into rbuf
switch f.Header.Op {
//below ops are requests
case opStartup, opAuthResponse, opOptions, opPrepare, opExecute, opBatch, opRegister:
//ignored
case opQuery:
data = f.parseQueryFrame()
//below ops are responses
case opError:
data["error"] = f.parseErrorFrame()
case opResult:
data["result"] = f.parseResultFrame()
case opSupported:
data = f.parseSupportedFrame()
case opAuthenticate:
data["authentication"] = f.parseAuthenticateFrame()
case opAuthChallenge:
data["authentication"] = f.parseAuthChallengeFrame()
case opAuthSuccess:
data["authentication"] = f.parseAuthSuccessFrame()
case opEvent:
data["event"] = f.parseEventFrame()
case opReady:
// the body should be empty
default:
//ignore
debugf("unknow ops, not processed, %v", f.Header)
}
return data, nil
}
func (f *Framer) parseErrorFrame() (data map[string]interface{}) {
decoder := f.decoder
code := decoder.ReadInt()
msg := decoder.ReadString()
errT := ErrType(code)
data = make(map[string]interface{})
data["code"] = code
data["msg"] = msg
data["type"] = errT.String()
detail := map[string]interface{}{}
switch errT {
case errUnavailable:
cl := decoder.ReadConsistency()
required := decoder.ReadInt()
alive := decoder.ReadInt()
detail["read_consistency"] = cl.String()
detail["required"] = required
detail["alive"] = alive
case errWriteTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
writeType := decoder.ReadString()
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["write_type"] = writeType
case errReadTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
dataPresent, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["data_present"] = dataPresent != 0
case errAlreadyExists:
ks := decoder.ReadString()
table := decoder.ReadString()
detail["keyspace"] = ks
detail["table"] = table
case errUnprepared:
stmtID := decoder.ReadShortBytes()
detail["stmt_id"] = stmtID
case errReadFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
b, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["data_present"] = b != 0
case errWriteFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
detail["num_failures"] = decoder.ReadInt()
detail["write_type"] = decoder.ReadString()
case errFunctionFailure:
detail["keyspace"] = decoder.ReadString()
detail["function"] = decoder.ReadString()
detail["arg_types"] = decoder.ReadStringList()
case errInvalid, errBootstrapping, errConfig, errCredentials, errOverloaded,
errProtocol, errServer, errSyntax, errTruncate, errUnauthorized:
//ignored
default:
logp.Err("unknown error code: 0x%x", code)
}
if len(detail) > 0 {
data["details"] = detail
}
return data
}
func (f *Framer) parseSupportedFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["supported"] = (f.decoder).ReadStringMultiMap()
return data
}
func (f *Framer) parseResultMetadata(getPKinfo bool) map[string]interface{} {
decoder := f.decoder
meta := make(map[string]interface{})
flags := decoder.ReadInt()
meta["flags"] = getRowFlagString(flags)
colCount := decoder.ReadInt()
meta["col_count"] = colCount
if getPKinfo {
//only for prepared result
if f.proto >= protoVersion4 {
pkeyCount := decoder.ReadInt()
pkeys := make([]int, pkeyCount)
for i := 0; i < pkeyCount; i++ {
pkeys[i] = int(decoder.ReadShort())
}
meta["pkey_columns"] = pkeys
}
}
if flags&flagHasMorePages == flagHasMorePages {
meta["paging_state"] = fmt.Sprintf("%X", decoder.ReadBytes())
return meta
}
if flags&flagNoMetaData == flagNoMetaData {
return meta
}
var keyspace, table string
globalSpec := flags&flagGlobalTableSpec == flagGlobalTableSpec
if globalSpec {
keyspace = decoder.ReadString()
table = decoder.ReadString()
meta["keyspace"] = keyspace
meta["table"] = table
}
return meta
}
func (f *Framer) parseQueryFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["query"] = (f.decoder).ReadLongString()
return data
}
func (f *Framer) parseResultFrame() (data map[string]interface{}) {
kind := (f.decoder).ReadInt()
data = make(map[string]interface{})
switch kind {
case resultKindVoid:
data["type"] = "void"
case resultKindRows:
data["type"] = "rows"
data["rows"] = f.parseResultRows()
case resultKindSetKeyspace:
data["type"] = "set_keyspace"
data["keyspace"] = (f.decoder).ReadString()
case resultKindPrepared:
data["type"] = "prepared"
data["prepared"] = f.parseResultPrepared()
case resultKindSchemaChanged:
data["type"] = "schemaChanged"
data["schema_change"] = f.parseResultSchemaChange()
}
return data
}
func (f *Framer) parseResultRows() map[string]interface{} {
result := make(map[string]interface{})
result["meta"] = f.parseResultMetadata(false)
result["num_rows"] = (f.decoder).ReadInt()
return result
}
func (f *Framer) parseResultPrepared() map[string]interface{} {
result := make(map[string]interface{})
uuid, err := UUIDFromBytes((f.decoder).ReadShortBytes())
if err != nil {
logp.Err("Error in parsing UUID")
}
result["prepared_id"] = uuid.String()
result["req_meta"] = f.parseResultMetadata(true)
if f.proto < protoVersion2 {
return result
}
result["resp_meta"] = f.parseResultMetadata(false)
return result
}
func (f *Framer) parseResultSchemaChange() (data map[string]interface{}) {
data = make(map[string]interface{})
decoder := f.decoder
if f.proto <= protoVersion2 {
change := decoder.ReadString()
keyspace := decoder.ReadString()
table := decoder.ReadString()
data["change"] = change
data["keyspace"] = keyspace
data["table"] = table
} else {
change := decoder.ReadString()
target := decoder.ReadString()
data["change"] = change
data["target"] = target
switch target {
case "KEYSPACE":
data["keyspace"] = decoder.ReadString()
case "TABLE", "TYPE":
data["keyspace"] = decoder.ReadString()
data["object"] = decoder.ReadString()
case "FUNCTION", "AGGREGATE":
data["keyspace"] = decoder.ReadString()
data["name"] = decoder.ReadString()
data["args"] = decoder.ReadStringList()
default:
logp.Warn("unknown SCHEMA_CHANGE target: %q change: %q", target, change)
}
}
return data
}
func (f *Framer) parseAuthenticateFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["class"] = (f.decoder).ReadString()
return data
}
func (f *Framer) parseAuthSuccessFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseAuthChallengeFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseEventFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
decoder := f.decoder
eventType := decoder.ReadString()
data["type"] = eventType
switch eventType {
case "TOPOLOGY_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "STATUS_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "SCHEMA_CHANGE":
// this should work for all versions
data["schema_change"] = f.parseResultSchemaChange()
default:
logp.Err("unknown event type: %q", eventType)
}
return data
}
| {
debugf("hit warning flags")
warnings := decoder.ReadStringList()
// dealing with warnings
data["warnings"] = warnings
} | conditional_block |
frame.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package cassandra
import (
"errors"
"fmt"
"runtime"
"sync"
"github.com/elastic/beats/libbeat/common/streambuf"
"github.com/elastic/beats/libbeat/logp"
)
var (
ErrFrameTooBig = errors.New("frame length is bigger than the maximum allowed")
debugf = logp.MakeDebug("cassandra")
)
type frameHeader struct {
Version protoVersion
Flags byte
Stream int
Op FrameOp
BodyLength int
HeadLength int
CustomPayload map[string][]byte
}
func (f frameHeader) ToMap() map[string]interface{} {
data := make(map[string]interface{})
data["version"] = fmt.Sprintf("%d", f.Version.version())
data["flags"] = getHeadFlagString(f.Flags)
data["stream"] = f.Stream
data["op"] = f.Op.String()
data["length"] = f.BodyLength
return data
}
func (f frameHeader) String() string {
return fmt.Sprintf("version:%s, flags: %s, steam: %v, OP: %v, length: %v", f.Version.String(), getHeadFlagString(f.Flags), f.Stream, f.Op.String(), f.BodyLength)
}
var framerPool = sync.Pool{
New: func() interface{} {
return &Framer{compres: nil, isCompressed: false, Header: nil, r: nil, decoder: nil}
},
}
// a framer is responsible for reading, writing and parsing frames on a single stream
type Framer struct {
proto byte
compres Compressor
isCompressed bool
// if this frame was read then the header will be here
Header *frameHeader
r *streambuf.Buffer
decoder Decoder
}
func NewFramer(r *streambuf.Buffer, compressor Compressor) *Framer {
f := framerPool.Get().(*Framer)
f.compres = compressor
f.r = r
return f
}
// read header frame from stream
func (f *Framer) ReadHeader() (head *frameHeader, err error) {
v, err := f.r.ReadByte()
if err != nil {
return nil, err
}
version := v & protoVersionMask
if version < protoVersion1 || version > protoVersion4 {
return nil, fmt.Errorf("unsupported version: %x ", v)
}
f.proto = version
head = &frameHeader{}
head.Version = protoVersion(v)
flag, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Flags = flag
if version > protoVersion2 {
stream, err := f.r.ReadNetUint16()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
} else {
stream, err := f.r.ReadNetUint8()
if err != nil {
return nil, err
}
head.Stream = int(stream)
b, err := f.r.ReadByte()
if err != nil {
return nil, err
}
head.Op = FrameOp(b)
l, err := f.r.ReadNetUint32()
if err != nil {
return nil, err
}
head.BodyLength = int(l)
}
if head.BodyLength < 0 {
return nil, fmt.Errorf("frame body length can not be less than 0: %d", head.BodyLength)
} else if head.BodyLength > maxFrameSize {
// need to free up the connection to be used again
logp.Err("head length is too large")
return nil, ErrFrameTooBig
}
headSize := f.r.BufferConsumed()
head.HeadLength = headSize
debugf("header: %v", head)
f.Header = head
return head, nil
}
// reads a frame form the wire into the framers buffer
func (f *Framer) ReadFrame() (data map[string]interface{}, err error) {
defer func() {
if r := recover(); r != nil {
if _, ok := r.(runtime.Error); ok {
panic(r)
}
err = r.(error)
}
}()
decoder := &StreamDecoder{}
decoder.r = f.r
f.decoder = decoder
data = make(map[string]interface{})
//Only QUERY, PREPARE and EXECUTE queries support tracing
//If a response frame has the tracing flag set, its body contains
//a tracing ID. The tracing ID is a [uuid] and is the first thing in
//the frame body. The rest of the body will then be the usual body
//corresponding to the response opcode.
if f.Header.Flags&flagTracing == flagTracing && (f.Header.Op&opQuery == opQuery || f.Header.Op&opExecute == opExecute || f.Header.Op&opPrepare == opPrepare) {
debugf("tracing enabled")
//seems no UUID to read, protocol incorrect?
//uid := decoder.ReadUUID()
//data["trace_id"] = uid.String()
}
if f.Header.Flags&flagWarning == flagWarning {
debugf("hit warning flags")
warnings := decoder.ReadStringList()
// dealing with warnings
data["warnings"] = warnings
}
if f.Header.Flags&flagCustomPayload == flagCustomPayload {
debugf("hit custom payload flags")
f.Header.CustomPayload = decoder.ReadBytesMap()
}
if f.Header.Flags&flagCompress == flagCompress {
//decompress data and switch to use bytearray decoder
if f.compres == nil {
logp.Err("hit compress flag, but compressor was not set")
panic(errors.New("hit compress flag, but compressor was not set"))
}
decoder := &ByteArrayDecoder{}
buf := make([]byte, f.Header.BodyLength)
f.r.Read(buf)
dec, err := f.compres.Decode(buf)
if err != nil {
return nil, err
}
decoder.Data = &dec
f.decoder = decoder
debugf("hit compress flags")
}
// assumes that the frame body has been read into rbuf
switch f.Header.Op {
//below ops are requests
case opStartup, opAuthResponse, opOptions, opPrepare, opExecute, opBatch, opRegister:
//ignored
case opQuery:
data = f.parseQueryFrame()
//below ops are responses
case opError:
data["error"] = f.parseErrorFrame()
case opResult:
data["result"] = f.parseResultFrame()
case opSupported:
data = f.parseSupportedFrame()
case opAuthenticate:
data["authentication"] = f.parseAuthenticateFrame()
case opAuthChallenge:
data["authentication"] = f.parseAuthChallengeFrame()
case opAuthSuccess:
data["authentication"] = f.parseAuthSuccessFrame()
case opEvent:
data["event"] = f.parseEventFrame()
case opReady:
// the body should be empty
default:
//ignore
debugf("unknow ops, not processed, %v", f.Header)
}
return data, nil
}
func (f *Framer) parseErrorFrame() (data map[string]interface{}) {
decoder := f.decoder
code := decoder.ReadInt()
msg := decoder.ReadString()
errT := ErrType(code)
data = make(map[string]interface{})
data["code"] = code
data["msg"] = msg
data["type"] = errT.String()
detail := map[string]interface{}{}
switch errT {
case errUnavailable:
cl := decoder.ReadConsistency()
required := decoder.ReadInt()
alive := decoder.ReadInt()
detail["read_consistency"] = cl.String()
detail["required"] = required
detail["alive"] = alive
case errWriteTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
writeType := decoder.ReadString()
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["write_type"] = writeType
case errReadTimeout:
cl := decoder.ReadConsistency()
received := decoder.ReadInt()
blockfor := decoder.ReadInt()
dataPresent, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["read_consistency"] = cl.String()
detail["received"] = received
detail["blockfor"] = blockfor
detail["data_present"] = dataPresent != 0
case errAlreadyExists:
ks := decoder.ReadString()
table := decoder.ReadString()
detail["keyspace"] = ks
detail["table"] = table
case errUnprepared:
stmtID := decoder.ReadShortBytes()
detail["stmt_id"] = stmtID
case errReadFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
b, err := decoder.ReadByte()
if err != nil {
panic(err)
}
detail["data_present"] = b != 0
case errWriteFailure:
detail["read_consistency"] = decoder.ReadConsistency().String()
detail["received"] = decoder.ReadInt()
detail["blockfor"] = decoder.ReadInt()
detail["num_failures"] = decoder.ReadInt()
detail["write_type"] = decoder.ReadString()
case errFunctionFailure:
detail["keyspace"] = decoder.ReadString()
detail["function"] = decoder.ReadString()
detail["arg_types"] = decoder.ReadStringList()
case errInvalid, errBootstrapping, errConfig, errCredentials, errOverloaded,
errProtocol, errServer, errSyntax, errTruncate, errUnauthorized:
//ignored
default:
logp.Err("unknown error code: 0x%x", code)
}
if len(detail) > 0 {
data["details"] = detail
}
return data
}
func (f *Framer) parseSupportedFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["supported"] = (f.decoder).ReadStringMultiMap()
return data
}
func (f *Framer) parseResultMetadata(getPKinfo bool) map[string]interface{} {
decoder := f.decoder
meta := make(map[string]interface{})
flags := decoder.ReadInt()
meta["flags"] = getRowFlagString(flags)
colCount := decoder.ReadInt()
meta["col_count"] = colCount
if getPKinfo {
//only for prepared result
if f.proto >= protoVersion4 {
pkeyCount := decoder.ReadInt()
pkeys := make([]int, pkeyCount)
for i := 0; i < pkeyCount; i++ {
pkeys[i] = int(decoder.ReadShort())
}
meta["pkey_columns"] = pkeys
}
}
if flags&flagHasMorePages == flagHasMorePages {
meta["paging_state"] = fmt.Sprintf("%X", decoder.ReadBytes())
return meta
}
if flags&flagNoMetaData == flagNoMetaData {
return meta
}
var keyspace, table string
globalSpec := flags&flagGlobalTableSpec == flagGlobalTableSpec
if globalSpec {
keyspace = decoder.ReadString()
table = decoder.ReadString()
meta["keyspace"] = keyspace
meta["table"] = table
}
return meta
}
func (f *Framer) parseQueryFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["query"] = (f.decoder).ReadLongString()
return data
}
func (f *Framer) parseResultFrame() (data map[string]interface{}) {
kind := (f.decoder).ReadInt()
data = make(map[string]interface{})
switch kind {
case resultKindVoid:
data["type"] = "void"
case resultKindRows:
data["type"] = "rows"
data["rows"] = f.parseResultRows()
case resultKindSetKeyspace:
data["type"] = "set_keyspace"
data["keyspace"] = (f.decoder).ReadString()
case resultKindPrepared:
data["type"] = "prepared"
data["prepared"] = f.parseResultPrepared()
case resultKindSchemaChanged:
data["type"] = "schemaChanged"
data["schema_change"] = f.parseResultSchemaChange()
}
return data
}
func (f *Framer) parseResultRows() map[string]interface{} {
result := make(map[string]interface{})
result["meta"] = f.parseResultMetadata(false)
result["num_rows"] = (f.decoder).ReadInt()
return result
}
func (f *Framer) parseResultPrepared() map[string]interface{} {
result := make(map[string]interface{})
uuid, err := UUIDFromBytes((f.decoder).ReadShortBytes())
if err != nil {
logp.Err("Error in parsing UUID")
}
result["prepared_id"] = uuid.String()
result["req_meta"] = f.parseResultMetadata(true)
if f.proto < protoVersion2 {
return result | return result
}
func (f *Framer) parseResultSchemaChange() (data map[string]interface{}) {
data = make(map[string]interface{})
decoder := f.decoder
if f.proto <= protoVersion2 {
change := decoder.ReadString()
keyspace := decoder.ReadString()
table := decoder.ReadString()
data["change"] = change
data["keyspace"] = keyspace
data["table"] = table
} else {
change := decoder.ReadString()
target := decoder.ReadString()
data["change"] = change
data["target"] = target
switch target {
case "KEYSPACE":
data["keyspace"] = decoder.ReadString()
case "TABLE", "TYPE":
data["keyspace"] = decoder.ReadString()
data["object"] = decoder.ReadString()
case "FUNCTION", "AGGREGATE":
data["keyspace"] = decoder.ReadString()
data["name"] = decoder.ReadString()
data["args"] = decoder.ReadStringList()
default:
logp.Warn("unknown SCHEMA_CHANGE target: %q change: %q", target, change)
}
}
return data
}
func (f *Framer) parseAuthenticateFrame() (data map[string]interface{}) {
data = make(map[string]interface{})
data["class"] = (f.decoder).ReadString()
return data
}
func (f *Framer) parseAuthSuccessFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseAuthChallengeFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
data["data"] = fmt.Sprintf("%q", (f.decoder).ReadBytes())
return data
}
func (f *Framer) parseEventFrame() (data map[string]interface{}) {
data = make((map[string]interface{}))
decoder := f.decoder
eventType := decoder.ReadString()
data["type"] = eventType
switch eventType {
case "TOPOLOGY_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "STATUS_CHANGE":
data["change"] = decoder.ReadString()
host, port := decoder.ReadInet()
data["host"] = host
data["port"] = port
case "SCHEMA_CHANGE":
// this should work for all versions
data["schema_change"] = f.parseResultSchemaChange()
default:
logp.Err("unknown event type: %q", eventType)
}
return data
} | }
result["resp_meta"] = f.parseResultMetadata(false)
| random_line_split |
gameentity.py | from pygame.sprite import Sprite
from pygame import Surface
import pygame as pg
from math import sqrt
from shared import GLOBAL
from default import *
import random as rd
"""
Generic Game entity, which may be displayed on the map (town, player, trap, object...)
Most of the object base code will go there
"""
class GameEntity(Sprite):
COUNTER = 0
def __init__(self,
name=None,
pos=None,
image_ref=None,
z_level=2,
blocking_tile_list=None,
blocking_view_list=None,
vision=1,
blocks=False,
actionable=None,
ai=None):
Sprite.__init__(self)
if not pos:
pos = (-1, -1)
(self.x, self.y) = pos
self.name = name
if name is None:
GameEntity.COUNTER += 1
self.name = "Entity ".format(GameEntity.COUNTER)
# Image settings
self.z_level = z_level # The depth. Default is 2, min is 0.
self.image_ref = image_ref
self.image = None
self.animated = False
self.current_region_name = None
self._current_region = None
self.init_graphics()
# Blocking: what the object can go over, what it can see over, and if the object prevents movement upon itself
self.blocking_tile_list = blocking_tile_list
self.blocking_view_list = blocking_view_list
self.blocks = blocks
self.base_vision_radius = vision
# Components
self.actionable = actionable
if self.actionable:
self.actionable.owner = self
self.ai = ai
if self.ai:
self.ai.owner = self
@property
def pos(self):
return self.x, self.y
@property
def region(self):
assert self._current_region is not None, "Tried accessing a region that is not yet set {}".format(self.name)
return self._current_region
# GRAPHICAL RELATED FUNCTIONS
def init_graphics(self):
"""
Initiate all graphical objects | # This is the case for the special visual effect
self.image = self.image_ref
else:
image = GLOBAL.img(self.image_ref)
if type(image) is tuple:
# for decode purpose
self.image = Surface(TILESIZE_SCREEN)
self.image.fill(image)
elif type(image) is list or type(image) is dict:
self.animated = True
self.current_frame = 0
self.last_update = 0
if type(image) is list:
self.list_image = image
self.image = self.list_image[self.current_frame]
else:
self.last_direction = (1, 0)
self.dict_image = image
self.image = self.dict_image['E'][self.current_frame]
else:
self.image = image
self._reposition_rect()
def update_graphics(self, new_image_ref):
"""
Update the graphical reference, and force a reset of the graphical function
:param new_image_ref: the new image reference
:return: nothing
"""
self.image_ref = new_image_ref
self.init_graphics()
def clean_before_save(self, image_only=False):
"""
Clean all graphical objects, remove from sprite dictionary and remove the game reference
:return:
"""
self.image = None
self.animated = False
if hasattr(self, "dict_image"):
# self.dict_image = None
delattr(self, "dict_image")
if hasattr(self, "list_image"):
self.list_image = None
delattr(self, "list_image")
def _reposition_rect(self):
self.rect = self.image.get_rect()
self.rect.centerx = self.x * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2) # initial position for the camera
self.rect.centery = self.y * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2)
def assign_entity_to_region(self, region):
self.add(region.all_groups[self.z_level])
self.current_region_name = region.name
self._current_region = region
region.region_entities.add(self)
if self.ai is not None:
region.ticker.schedule_turn(self.ai.speed, self.ai)
def remove_entity_from_region(self, region):
self.remove(region.all_groups[self.z_level])
self.current_region_name = None
self._current_region = None
region.region_entities.remove(self)
region.ticker.unregister(self.ai)
def animate(self):
now = pg.time.get_ticks()
delta = 200
if hasattr(self, "ai") and self.ai is not None:
if hasattr(self.ai, "speed"):
delta = self.ai.speed * 30
elif hasattr(self, "speed"):
delta = self.speed * 30
if now - self.last_update > delta:
self.last_update = now
reference = 'E'
if hasattr(self, "dict_image"):
if self.last_direction[0] < 0:
reference = 'W'
if self.last_direction[0] > 0:
reference = 'E'
if self.last_direction[1] < 0:
reference = 'N'
if self.last_direction[1] > 0:
reference = 'S'
if "NW" in self.dict_image:
if self.last_direction == (-1, -1):
reference = "NW"
elif self.last_direction == (1, 1):
reference = "SE"
elif self.last_direction == (-1, 1):
reference = "SW"
elif self.last_direction == (1, -1):
reference = "NE"
self.current_frame = (self.current_frame + 1) % len(self.dict_image[reference])
self.image = self.dict_image[reference][self.current_frame]
else:
self.current_frame = (self.current_frame + 1) % len(self.list_image)
self.image = self.list_image[self.current_frame]
def update(self):
if self.animated:
self.animate()
self._reposition_rect()
def move(self, dx=0, dy=0):
"""
Try to move the entity.
Return True if an action was done (either move or attack)
"""
# Test if we enter the actionable zone of an entity
# Note: this can be a door to open, or a fight!
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and hasattr(entity, "actionable") and entity.actionable is not None and \
(self.x + dx, self.y + dy) in entity.actionable.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.actionable.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We triggered an object, and it prevented the move (like a door not opening)
return False
if entity != self and hasattr(entity, "fighter") and entity.fighter is not None and \
(self.x + dx, self.y + dy) in entity.fighter.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.fighter.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We came in a fight...
return False
# Test if we collide with the terrain, and terrain only
destination_tile = GLOBAL.game.current_region.tiles[self.x + dx][self.y + dy]
if not destination_tile.block_for(self):
# now test the list of objects
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and entity.blocks and entity.x == self.x + dx and entity.y == self.y + dy:
return False
# success
self.x += dx
self.y += dy
if self.animated and (dx != 0 or dy != 0):
self.last_direction = (dx, dy)
GLOBAL.game.invalidate_fog_of_war = True
# self.game.ticker.ticks_to_advance += self.speed_cost_for(c.AC_ENV_MOVE)
return True
return False
class ActionableEntity:
"""
An actionable entity is an object which is triggered when something (player, monster...) is around (or directly in).
This is typically a door, a trap, a town...
"""
def __init__(self, radius=0, actionable_by_player_only=True, function=None):
self.radius = radius # the radius for triggering the function
self.owner = None
self.actionable_by_player_only = actionable_by_player_only
self._action_field = None
self.function = function
@property
def action_field(self):
if self._action_field is not None:
return self._action_field
else:
if self.owner is not None:
self._action_field = [self.owner.pos]
for i in range(-self.radius, self.radius):
if (self.owner.pos[0] + i, self.owner.pos[1]) not in self._action_field:
self._action_field.append((self.owner.pos[0] + i, self.owner.pos[1]))
if (self.owner.pos[0], self.owner.pos[1] + i) not in self._action_field:
self._action_field.append((self.owner.pos[0], self.owner.pos[1] + i))
return self._action_field
else:
return []
def action(self, entity_that_actioned):
if self.function is None:
print("No function created")
else:
if self.actionable_by_player_only:
if entity_that_actioned == GLOBAL.game.player:
return self.function(self.owner, entity_that_actioned)
else:
return self.function(self.owner, entity_that_actioned)
class AIEntity:
def __init__(self, speed=1):
self.owner = None
self.speed = speed # the speed represents the time between two turns
def move_towards_position(self, pos):
# vector from this object to the target, and distance
dx = pos[0] - self.owner.x
dy = pos[1] - self.owner.y
distance = sqrt(dx ** 2 + dy ** 2)
# normalize it to length 1 (preserving direction), then round it and
# convert to integer so the movement is restricted to the map grid
if distance != 0:
dx = int(round(dx / distance))
dy = int(round(dy / distance))
else:
dx = dy = 0
return self.owner.move(dx, dy)
def move_towards_entity(self, other_entity):
self.move_towards_position(other_entity.pos)
def move_randomly(self, with_fight=False):
"""
Move by 1 around the current position. The destination should be non blocking.
If no tiles match, then no move is taken.
:return:
"""
delta = [(-1, -1), (-1, 0), (-1, 1), (0, 1), (0, -1), (1, -1), (1, 0), (1, 1)]
rd.shuffle(delta)
x, y = self.owner.pos
while len(delta) > 0:
dx, dy = delta.pop()
if self.move_towards_position((x + dx, y + dy)):
return
def take_turn(self):
assert True, "Entity has not redefined the take turn"
class WanderingAIEntity(AIEntity):
def __init__(self, speed):
AIEntity.__init__(self, speed=speed)
def take_turn(self):
self.move_randomly(with_fight=False)
print("{} moves to {}".format(self.owner.name, self.owner.pos))
GLOBAL.game.world[self.owner.current_region_name].ticker.schedule_turn(self.speed, self) | :return: Nothing
"""
if type(self.image_ref) is Surface: | random_line_split |
gameentity.py | from pygame.sprite import Sprite
from pygame import Surface
import pygame as pg
from math import sqrt
from shared import GLOBAL
from default import *
import random as rd
"""
Generic Game entity, which may be displayed on the map (town, player, trap, object...)
Most of the object base code will go there
"""
class GameEntity(Sprite):
COUNTER = 0
def __init__(self,
name=None,
pos=None,
image_ref=None,
z_level=2,
blocking_tile_list=None,
blocking_view_list=None,
vision=1,
blocks=False,
actionable=None,
ai=None):
Sprite.__init__(self)
if not pos:
pos = (-1, -1)
(self.x, self.y) = pos
self.name = name
if name is None:
GameEntity.COUNTER += 1
self.name = "Entity ".format(GameEntity.COUNTER)
# Image settings
self.z_level = z_level # The depth. Default is 2, min is 0.
self.image_ref = image_ref
self.image = None
self.animated = False
self.current_region_name = None
self._current_region = None
self.init_graphics()
# Blocking: what the object can go over, what it can see over, and if the object prevents movement upon itself
self.blocking_tile_list = blocking_tile_list
self.blocking_view_list = blocking_view_list
self.blocks = blocks
self.base_vision_radius = vision
# Components
self.actionable = actionable
if self.actionable:
self.actionable.owner = self
self.ai = ai
if self.ai:
self.ai.owner = self
@property
def pos(self):
return self.x, self.y
@property
def region(self):
assert self._current_region is not None, "Tried accessing a region that is not yet set {}".format(self.name)
return self._current_region
# GRAPHICAL RELATED FUNCTIONS
def init_graphics(self):
"""
Initiate all graphical objects
:return: Nothing
"""
if type(self.image_ref) is Surface:
# This is the case for the special visual effect
self.image = self.image_ref
else:
image = GLOBAL.img(self.image_ref)
if type(image) is tuple:
# for decode purpose
self.image = Surface(TILESIZE_SCREEN)
self.image.fill(image)
elif type(image) is list or type(image) is dict:
self.animated = True
self.current_frame = 0
self.last_update = 0
if type(image) is list:
self.list_image = image
self.image = self.list_image[self.current_frame]
else:
self.last_direction = (1, 0)
self.dict_image = image
self.image = self.dict_image['E'][self.current_frame]
else:
self.image = image
self._reposition_rect()
def update_graphics(self, new_image_ref):
"""
Update the graphical reference, and force a reset of the graphical function
:param new_image_ref: the new image reference
:return: nothing
"""
self.image_ref = new_image_ref
self.init_graphics()
def clean_before_save(self, image_only=False):
"""
Clean all graphical objects, remove from sprite dictionary and remove the game reference
:return:
"""
self.image = None
self.animated = False
if hasattr(self, "dict_image"):
# self.dict_image = None
delattr(self, "dict_image")
if hasattr(self, "list_image"):
self.list_image = None
delattr(self, "list_image")
def _reposition_rect(self):
self.rect = self.image.get_rect()
self.rect.centerx = self.x * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2) # initial position for the camera
self.rect.centery = self.y * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2)
def assign_entity_to_region(self, region):
self.add(region.all_groups[self.z_level])
self.current_region_name = region.name
self._current_region = region
region.region_entities.add(self)
if self.ai is not None:
region.ticker.schedule_turn(self.ai.speed, self.ai)
def remove_entity_from_region(self, region):
self.remove(region.all_groups[self.z_level])
self.current_region_name = None
self._current_region = None
region.region_entities.remove(self)
region.ticker.unregister(self.ai)
def animate(self):
now = pg.time.get_ticks()
delta = 200
if hasattr(self, "ai") and self.ai is not None:
if hasattr(self.ai, "speed"):
delta = self.ai.speed * 30
elif hasattr(self, "speed"):
delta = self.speed * 30
if now - self.last_update > delta:
self.last_update = now
reference = 'E'
if hasattr(self, "dict_image"):
if self.last_direction[0] < 0:
reference = 'W'
if self.last_direction[0] > 0:
reference = 'E'
if self.last_direction[1] < 0:
reference = 'N'
if self.last_direction[1] > 0:
reference = 'S'
if "NW" in self.dict_image:
if self.last_direction == (-1, -1):
reference = "NW"
elif self.last_direction == (1, 1):
reference = "SE"
elif self.last_direction == (-1, 1):
reference = "SW"
elif self.last_direction == (1, -1):
reference = "NE"
self.current_frame = (self.current_frame + 1) % len(self.dict_image[reference])
self.image = self.dict_image[reference][self.current_frame]
else:
self.current_frame = (self.current_frame + 1) % len(self.list_image)
self.image = self.list_image[self.current_frame]
def update(self):
if self.animated:
self.animate()
self._reposition_rect()
def move(self, dx=0, dy=0):
"""
Try to move the entity.
Return True if an action was done (either move or attack)
"""
# Test if we enter the actionable zone of an entity
# Note: this can be a door to open, or a fight!
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and hasattr(entity, "actionable") and entity.actionable is not None and \
(self.x + dx, self.y + dy) in entity.actionable.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.actionable.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We triggered an object, and it prevented the move (like a door not opening)
return False
if entity != self and hasattr(entity, "fighter") and entity.fighter is not None and \
(self.x + dx, self.y + dy) in entity.fighter.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.fighter.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We came in a fight...
return False
# Test if we collide with the terrain, and terrain only
destination_tile = GLOBAL.game.current_region.tiles[self.x + dx][self.y + dy]
if not destination_tile.block_for(self):
# now test the list of objects
for entity in GLOBAL.game.current_region.region_entities:
|
# success
self.x += dx
self.y += dy
if self.animated and (dx != 0 or dy != 0):
self.last_direction = (dx, dy)
GLOBAL.game.invalidate_fog_of_war = True
# self.game.ticker.ticks_to_advance += self.speed_cost_for(c.AC_ENV_MOVE)
return True
return False
class ActionableEntity:
"""
An actionable entity is an object which is triggered when something (player, monster...) is around (or directly in).
This is typically a door, a trap, a town...
"""
def __init__(self, radius=0, actionable_by_player_only=True, function=None):
self.radius = radius # the radius for triggering the function
self.owner = None
self.actionable_by_player_only = actionable_by_player_only
self._action_field = None
self.function = function
@property
def action_field(self):
if self._action_field is not None:
return self._action_field
else:
if self.owner is not None:
self._action_field = [self.owner.pos]
for i in range(-self.radius, self.radius):
if (self.owner.pos[0] + i, self.owner.pos[1]) not in self._action_field:
self._action_field.append((self.owner.pos[0] + i, self.owner.pos[1]))
if (self.owner.pos[0], self.owner.pos[1] + i) not in self._action_field:
self._action_field.append((self.owner.pos[0], self.owner.pos[1] + i))
return self._action_field
else:
return []
def action(self, entity_that_actioned):
if self.function is None:
print("No function created")
else:
if self.actionable_by_player_only:
if entity_that_actioned == GLOBAL.game.player:
return self.function(self.owner, entity_that_actioned)
else:
return self.function(self.owner, entity_that_actioned)
class AIEntity:
def __init__(self, speed=1):
self.owner = None
self.speed = speed # the speed represents the time between two turns
def move_towards_position(self, pos):
# vector from this object to the target, and distance
dx = pos[0] - self.owner.x
dy = pos[1] - self.owner.y
distance = sqrt(dx ** 2 + dy ** 2)
# normalize it to length 1 (preserving direction), then round it and
# convert to integer so the movement is restricted to the map grid
if distance != 0:
dx = int(round(dx / distance))
dy = int(round(dy / distance))
else:
dx = dy = 0
return self.owner.move(dx, dy)
def move_towards_entity(self, other_entity):
self.move_towards_position(other_entity.pos)
def move_randomly(self, with_fight=False):
"""
Move by 1 around the current position. The destination should be non blocking.
If no tiles match, then no move is taken.
:return:
"""
delta = [(-1, -1), (-1, 0), (-1, 1), (0, 1), (0, -1), (1, -1), (1, 0), (1, 1)]
rd.shuffle(delta)
x, y = self.owner.pos
while len(delta) > 0:
dx, dy = delta.pop()
if self.move_towards_position((x + dx, y + dy)):
return
def take_turn(self):
assert True, "Entity has not redefined the take turn"
class WanderingAIEntity(AIEntity):
def __init__(self, speed):
AIEntity.__init__(self, speed=speed)
def take_turn(self):
self.move_randomly(with_fight=False)
print("{} moves to {}".format(self.owner.name, self.owner.pos))
GLOBAL.game.world[self.owner.current_region_name].ticker.schedule_turn(self.speed, self)
| if entity != self and entity.blocks and entity.x == self.x + dx and entity.y == self.y + dy:
return False | conditional_block |
gameentity.py | from pygame.sprite import Sprite
from pygame import Surface
import pygame as pg
from math import sqrt
from shared import GLOBAL
from default import *
import random as rd
"""
Generic Game entity, which may be displayed on the map (town, player, trap, object...)
Most of the object base code will go there
"""
class GameEntity(Sprite):
COUNTER = 0
def __init__(self,
name=None,
pos=None,
image_ref=None,
z_level=2,
blocking_tile_list=None,
blocking_view_list=None,
vision=1,
blocks=False,
actionable=None,
ai=None):
Sprite.__init__(self)
if not pos:
pos = (-1, -1)
(self.x, self.y) = pos
self.name = name
if name is None:
GameEntity.COUNTER += 1
self.name = "Entity ".format(GameEntity.COUNTER)
# Image settings
self.z_level = z_level # The depth. Default is 2, min is 0.
self.image_ref = image_ref
self.image = None
self.animated = False
self.current_region_name = None
self._current_region = None
self.init_graphics()
# Blocking: what the object can go over, what it can see over, and if the object prevents movement upon itself
self.blocking_tile_list = blocking_tile_list
self.blocking_view_list = blocking_view_list
self.blocks = blocks
self.base_vision_radius = vision
# Components
self.actionable = actionable
if self.actionable:
self.actionable.owner = self
self.ai = ai
if self.ai:
self.ai.owner = self
@property
def pos(self):
return self.x, self.y
@property
def region(self):
assert self._current_region is not None, "Tried accessing a region that is not yet set {}".format(self.name)
return self._current_region
# GRAPHICAL RELATED FUNCTIONS
def init_graphics(self):
"""
Initiate all graphical objects
:return: Nothing
"""
if type(self.image_ref) is Surface:
# This is the case for the special visual effect
self.image = self.image_ref
else:
image = GLOBAL.img(self.image_ref)
if type(image) is tuple:
# for decode purpose
self.image = Surface(TILESIZE_SCREEN)
self.image.fill(image)
elif type(image) is list or type(image) is dict:
self.animated = True
self.current_frame = 0
self.last_update = 0
if type(image) is list:
self.list_image = image
self.image = self.list_image[self.current_frame]
else:
self.last_direction = (1, 0)
self.dict_image = image
self.image = self.dict_image['E'][self.current_frame]
else:
self.image = image
self._reposition_rect()
def update_graphics(self, new_image_ref):
"""
Update the graphical reference, and force a reset of the graphical function
:param new_image_ref: the new image reference
:return: nothing
"""
self.image_ref = new_image_ref
self.init_graphics()
def clean_before_save(self, image_only=False):
"""
Clean all graphical objects, remove from sprite dictionary and remove the game reference
:return:
"""
self.image = None
self.animated = False
if hasattr(self, "dict_image"):
# self.dict_image = None
delattr(self, "dict_image")
if hasattr(self, "list_image"):
self.list_image = None
delattr(self, "list_image")
def _reposition_rect(self):
self.rect = self.image.get_rect()
self.rect.centerx = self.x * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2) # initial position for the camera
self.rect.centery = self.y * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2)
def assign_entity_to_region(self, region):
self.add(region.all_groups[self.z_level])
self.current_region_name = region.name
self._current_region = region
region.region_entities.add(self)
if self.ai is not None:
region.ticker.schedule_turn(self.ai.speed, self.ai)
def remove_entity_from_region(self, region):
self.remove(region.all_groups[self.z_level])
self.current_region_name = None
self._current_region = None
region.region_entities.remove(self)
region.ticker.unregister(self.ai)
def animate(self):
now = pg.time.get_ticks()
delta = 200
if hasattr(self, "ai") and self.ai is not None:
if hasattr(self.ai, "speed"):
delta = self.ai.speed * 30
elif hasattr(self, "speed"):
delta = self.speed * 30
if now - self.last_update > delta:
self.last_update = now
reference = 'E'
if hasattr(self, "dict_image"):
if self.last_direction[0] < 0:
reference = 'W'
if self.last_direction[0] > 0:
reference = 'E'
if self.last_direction[1] < 0:
reference = 'N'
if self.last_direction[1] > 0:
reference = 'S'
if "NW" in self.dict_image:
if self.last_direction == (-1, -1):
reference = "NW"
elif self.last_direction == (1, 1):
reference = "SE"
elif self.last_direction == (-1, 1):
reference = "SW"
elif self.last_direction == (1, -1):
reference = "NE"
self.current_frame = (self.current_frame + 1) % len(self.dict_image[reference])
self.image = self.dict_image[reference][self.current_frame]
else:
self.current_frame = (self.current_frame + 1) % len(self.list_image)
self.image = self.list_image[self.current_frame]
def update(self):
if self.animated:
self.animate()
self._reposition_rect()
def move(self, dx=0, dy=0):
"""
Try to move the entity.
Return True if an action was done (either move or attack)
"""
# Test if we enter the actionable zone of an entity
# Note: this can be a door to open, or a fight!
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and hasattr(entity, "actionable") and entity.actionable is not None and \
(self.x + dx, self.y + dy) in entity.actionable.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.actionable.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We triggered an object, and it prevented the move (like a door not opening)
return False
if entity != self and hasattr(entity, "fighter") and entity.fighter is not None and \
(self.x + dx, self.y + dy) in entity.fighter.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.fighter.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We came in a fight...
return False
# Test if we collide with the terrain, and terrain only
destination_tile = GLOBAL.game.current_region.tiles[self.x + dx][self.y + dy]
if not destination_tile.block_for(self):
# now test the list of objects
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and entity.blocks and entity.x == self.x + dx and entity.y == self.y + dy:
return False
# success
self.x += dx
self.y += dy
if self.animated and (dx != 0 or dy != 0):
self.last_direction = (dx, dy)
GLOBAL.game.invalidate_fog_of_war = True
# self.game.ticker.ticks_to_advance += self.speed_cost_for(c.AC_ENV_MOVE)
return True
return False
class ActionableEntity:
"""
An actionable entity is an object which is triggered when something (player, monster...) is around (or directly in).
This is typically a door, a trap, a town...
"""
def __init__(self, radius=0, actionable_by_player_only=True, function=None):
self.radius = radius # the radius for triggering the function
self.owner = None
self.actionable_by_player_only = actionable_by_player_only
self._action_field = None
self.function = function
@property
def action_field(self):
if self._action_field is not None:
return self._action_field
else:
if self.owner is not None:
self._action_field = [self.owner.pos]
for i in range(-self.radius, self.radius):
if (self.owner.pos[0] + i, self.owner.pos[1]) not in self._action_field:
self._action_field.append((self.owner.pos[0] + i, self.owner.pos[1]))
if (self.owner.pos[0], self.owner.pos[1] + i) not in self._action_field:
self._action_field.append((self.owner.pos[0], self.owner.pos[1] + i))
return self._action_field
else:
return []
def action(self, entity_that_actioned):
if self.function is None:
print("No function created")
else:
if self.actionable_by_player_only:
if entity_that_actioned == GLOBAL.game.player:
return self.function(self.owner, entity_that_actioned)
else:
return self.function(self.owner, entity_that_actioned)
class AIEntity:
def __init__(self, speed=1):
self.owner = None
self.speed = speed # the speed represents the time between two turns
def move_towards_position(self, pos):
# vector from this object to the target, and distance
dx = pos[0] - self.owner.x
dy = pos[1] - self.owner.y
distance = sqrt(dx ** 2 + dy ** 2)
# normalize it to length 1 (preserving direction), then round it and
# convert to integer so the movement is restricted to the map grid
if distance != 0:
dx = int(round(dx / distance))
dy = int(round(dy / distance))
else:
dx = dy = 0
return self.owner.move(dx, dy)
def move_towards_entity(self, other_entity):
self.move_towards_position(other_entity.pos)
def move_randomly(self, with_fight=False):
"""
Move by 1 around the current position. The destination should be non blocking.
If no tiles match, then no move is taken.
:return:
"""
delta = [(-1, -1), (-1, 0), (-1, 1), (0, 1), (0, -1), (1, -1), (1, 0), (1, 1)]
rd.shuffle(delta)
x, y = self.owner.pos
while len(delta) > 0:
dx, dy = delta.pop()
if self.move_towards_position((x + dx, y + dy)):
return
def take_turn(self):
assert True, "Entity has not redefined the take turn"
class | (AIEntity):
def __init__(self, speed):
AIEntity.__init__(self, speed=speed)
def take_turn(self):
self.move_randomly(with_fight=False)
print("{} moves to {}".format(self.owner.name, self.owner.pos))
GLOBAL.game.world[self.owner.current_region_name].ticker.schedule_turn(self.speed, self)
| WanderingAIEntity | identifier_name |
gameentity.py | from pygame.sprite import Sprite
from pygame import Surface
import pygame as pg
from math import sqrt
from shared import GLOBAL
from default import *
import random as rd
"""
Generic Game entity, which may be displayed on the map (town, player, trap, object...)
Most of the object base code will go there
"""
class GameEntity(Sprite):
COUNTER = 0
def __init__(self,
name=None,
pos=None,
image_ref=None,
z_level=2,
blocking_tile_list=None,
blocking_view_list=None,
vision=1,
blocks=False,
actionable=None,
ai=None):
Sprite.__init__(self)
if not pos:
pos = (-1, -1)
(self.x, self.y) = pos
self.name = name
if name is None:
GameEntity.COUNTER += 1
self.name = "Entity ".format(GameEntity.COUNTER)
# Image settings
self.z_level = z_level # The depth. Default is 2, min is 0.
self.image_ref = image_ref
self.image = None
self.animated = False
self.current_region_name = None
self._current_region = None
self.init_graphics()
# Blocking: what the object can go over, what it can see over, and if the object prevents movement upon itself
self.blocking_tile_list = blocking_tile_list
self.blocking_view_list = blocking_view_list
self.blocks = blocks
self.base_vision_radius = vision
# Components
self.actionable = actionable
if self.actionable:
self.actionable.owner = self
self.ai = ai
if self.ai:
self.ai.owner = self
@property
def pos(self):
return self.x, self.y
@property
def region(self):
assert self._current_region is not None, "Tried accessing a region that is not yet set {}".format(self.name)
return self._current_region
# GRAPHICAL RELATED FUNCTIONS
def init_graphics(self):
"""
Initiate all graphical objects
:return: Nothing
"""
if type(self.image_ref) is Surface:
# This is the case for the special visual effect
self.image = self.image_ref
else:
image = GLOBAL.img(self.image_ref)
if type(image) is tuple:
# for decode purpose
self.image = Surface(TILESIZE_SCREEN)
self.image.fill(image)
elif type(image) is list or type(image) is dict:
self.animated = True
self.current_frame = 0
self.last_update = 0
if type(image) is list:
self.list_image = image
self.image = self.list_image[self.current_frame]
else:
self.last_direction = (1, 0)
self.dict_image = image
self.image = self.dict_image['E'][self.current_frame]
else:
self.image = image
self._reposition_rect()
def update_graphics(self, new_image_ref):
"""
Update the graphical reference, and force a reset of the graphical function
:param new_image_ref: the new image reference
:return: nothing
"""
self.image_ref = new_image_ref
self.init_graphics()
def clean_before_save(self, image_only=False):
"""
Clean all graphical objects, remove from sprite dictionary and remove the game reference
:return:
"""
self.image = None
self.animated = False
if hasattr(self, "dict_image"):
# self.dict_image = None
delattr(self, "dict_image")
if hasattr(self, "list_image"):
self.list_image = None
delattr(self, "list_image")
def _reposition_rect(self):
|
def assign_entity_to_region(self, region):
self.add(region.all_groups[self.z_level])
self.current_region_name = region.name
self._current_region = region
region.region_entities.add(self)
if self.ai is not None:
region.ticker.schedule_turn(self.ai.speed, self.ai)
def remove_entity_from_region(self, region):
self.remove(region.all_groups[self.z_level])
self.current_region_name = None
self._current_region = None
region.region_entities.remove(self)
region.ticker.unregister(self.ai)
def animate(self):
now = pg.time.get_ticks()
delta = 200
if hasattr(self, "ai") and self.ai is not None:
if hasattr(self.ai, "speed"):
delta = self.ai.speed * 30
elif hasattr(self, "speed"):
delta = self.speed * 30
if now - self.last_update > delta:
self.last_update = now
reference = 'E'
if hasattr(self, "dict_image"):
if self.last_direction[0] < 0:
reference = 'W'
if self.last_direction[0] > 0:
reference = 'E'
if self.last_direction[1] < 0:
reference = 'N'
if self.last_direction[1] > 0:
reference = 'S'
if "NW" in self.dict_image:
if self.last_direction == (-1, -1):
reference = "NW"
elif self.last_direction == (1, 1):
reference = "SE"
elif self.last_direction == (-1, 1):
reference = "SW"
elif self.last_direction == (1, -1):
reference = "NE"
self.current_frame = (self.current_frame + 1) % len(self.dict_image[reference])
self.image = self.dict_image[reference][self.current_frame]
else:
self.current_frame = (self.current_frame + 1) % len(self.list_image)
self.image = self.list_image[self.current_frame]
def update(self):
if self.animated:
self.animate()
self._reposition_rect()
def move(self, dx=0, dy=0):
"""
Try to move the entity.
Return True if an action was done (either move or attack)
"""
# Test if we enter the actionable zone of an entity
# Note: this can be a door to open, or a fight!
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and hasattr(entity, "actionable") and entity.actionable is not None and \
(self.x + dx, self.y + dy) in entity.actionable.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.actionable.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We triggered an object, and it prevented the move (like a door not opening)
return False
if entity != self and hasattr(entity, "fighter") and entity.fighter is not None and \
(self.x + dx, self.y + dy) in entity.fighter.action_field:
self.x += dx
self.y += dy
ok_to_move = entity.fighter.action(self)
self.x -= dx
self.y -= dy
if ok_to_move is not None and not ok_to_move:
# We came in a fight...
return False
# Test if we collide with the terrain, and terrain only
destination_tile = GLOBAL.game.current_region.tiles[self.x + dx][self.y + dy]
if not destination_tile.block_for(self):
# now test the list of objects
for entity in GLOBAL.game.current_region.region_entities:
if entity != self and entity.blocks and entity.x == self.x + dx and entity.y == self.y + dy:
return False
# success
self.x += dx
self.y += dy
if self.animated and (dx != 0 or dy != 0):
self.last_direction = (dx, dy)
GLOBAL.game.invalidate_fog_of_war = True
# self.game.ticker.ticks_to_advance += self.speed_cost_for(c.AC_ENV_MOVE)
return True
return False
class ActionableEntity:
"""
An actionable entity is an object which is triggered when something (player, monster...) is around (or directly in).
This is typically a door, a trap, a town...
"""
def __init__(self, radius=0, actionable_by_player_only=True, function=None):
self.radius = radius # the radius for triggering the function
self.owner = None
self.actionable_by_player_only = actionable_by_player_only
self._action_field = None
self.function = function
@property
def action_field(self):
if self._action_field is not None:
return self._action_field
else:
if self.owner is not None:
self._action_field = [self.owner.pos]
for i in range(-self.radius, self.radius):
if (self.owner.pos[0] + i, self.owner.pos[1]) not in self._action_field:
self._action_field.append((self.owner.pos[0] + i, self.owner.pos[1]))
if (self.owner.pos[0], self.owner.pos[1] + i) not in self._action_field:
self._action_field.append((self.owner.pos[0], self.owner.pos[1] + i))
return self._action_field
else:
return []
def action(self, entity_that_actioned):
if self.function is None:
print("No function created")
else:
if self.actionable_by_player_only:
if entity_that_actioned == GLOBAL.game.player:
return self.function(self.owner, entity_that_actioned)
else:
return self.function(self.owner, entity_that_actioned)
class AIEntity:
def __init__(self, speed=1):
self.owner = None
self.speed = speed # the speed represents the time between two turns
def move_towards_position(self, pos):
# vector from this object to the target, and distance
dx = pos[0] - self.owner.x
dy = pos[1] - self.owner.y
distance = sqrt(dx ** 2 + dy ** 2)
# normalize it to length 1 (preserving direction), then round it and
# convert to integer so the movement is restricted to the map grid
if distance != 0:
dx = int(round(dx / distance))
dy = int(round(dy / distance))
else:
dx = dy = 0
return self.owner.move(dx, dy)
def move_towards_entity(self, other_entity):
self.move_towards_position(other_entity.pos)
def move_randomly(self, with_fight=False):
"""
Move by 1 around the current position. The destination should be non blocking.
If no tiles match, then no move is taken.
:return:
"""
delta = [(-1, -1), (-1, 0), (-1, 1), (0, 1), (0, -1), (1, -1), (1, 0), (1, 1)]
rd.shuffle(delta)
x, y = self.owner.pos
while len(delta) > 0:
dx, dy = delta.pop()
if self.move_towards_position((x + dx, y + dy)):
return
def take_turn(self):
assert True, "Entity has not redefined the take turn"
class WanderingAIEntity(AIEntity):
def __init__(self, speed):
AIEntity.__init__(self, speed=speed)
def take_turn(self):
self.move_randomly(with_fight=False)
print("{} moves to {}".format(self.owner.name, self.owner.pos))
GLOBAL.game.world[self.owner.current_region_name].ticker.schedule_turn(self.speed, self)
| self.rect = self.image.get_rect()
self.rect.centerx = self.x * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2) # initial position for the camera
self.rect.centery = self.y * TILESIZE_SCREEN[0] + int(TILESIZE_SCREEN[1] / 2) | identifier_body |
engine.math2d.js | /**
* The Render Engine
* Math
*
* @fileoverview A 2D math library with static methods, plus objects to represent
* points, rectangles and circles.
*
* @author: Brett Fattori (brettf@renderengine.com)
* @author: $Author$
* @version: $Revision$
*
* Copyright (c) 2010 Brett Fattori (brettf@renderengine.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
Engine.include("/engine/engine.mathprimitives.js");
Engine.initObject("Math2D", null, function() {
/**
* @class A static class with methods and fields that are helpful
* when dealing with two dimensional mathematics.
*
* @static
*/
var Math2D = Base.extend(/** @scope Math2D.prototype */{
constructor: null,
/**
* An approximation of PI for speedier calculations. (3.14159)
* @type {Number}
* @const
*/
PI: 3.14159,
/**
* An approximation of the inverse of PI so we can
* avoid divisions. (0.31831)
* @type {Number}
* @const
*/
INV_PI: 0.31831,
/**
* Convert degrees to radians.
* @param degrees {Number} An angle in degrees
* @return {Number} The degrees value converted to radians
*/
degToRad: function(degrees) {
return (0.01745 * degrees);
},
/**
* Convert radians to degrees.
* @param radians {Number} An angle in radians
* @return {Number} The radians value converted to degrees
*/
radToDeg: function(radians) {
return (radians * 180 / Math2D.PI);
},
/**
* Perform AAB (axis-aligned box) to AAB collision testing, returning <tt>true</tt>
* if the two boxes overlap.
*
* @param box1 {Rectangle2D} The collision box of object 1
* @param box2 {Rectangle2D} The collision box of object 2
* @return {Boolean} <tt>true</tt> if the rectangles overlap
*/
boxBoxCollision: function(box1, box2) {
return box1.isIntersecting(box2);
},
/**
* Perform point to AAB collision, returning <code>true</code>
* if a collision occurs.
*
* @param box {Rectangle2D} The collision box of the object
* @param point {Point2D} The point to test, in world coordinates
* @return {Boolean} <tt>true</tt> if the point is within the rectangle
*/
boxPointCollision: function(box, point) {
return box.containsPoint(point);
},
/**
* Check to see if a line intersects another
*
* @param p1 {Point2D} Start of line 1
* @param p2 {Point2D} End of line 1
* @param p3 {Point2D} Start of line 2
* @param p4 {Point2D} End of line 2
* @return {Boolean} <tt>true</tt> if the lines intersect
*/
lineLineCollision: function(p1, p2, p3, p4) {
var d = ((p4.y - p3.y) * (p2.x - p1.x)) - ((p4.x - p3.x) * (p2.y - p1.y));
var n1 = ((p4.x - p3.x) * (p1.y - p3.y)) - ((p4.y - p3.y) * (p1.x - p3.x));
var n2 = ((p2.x - p1.x) * (p1.y - p3.y)) - ((p2.y - p1.y) * (p1.x - p3.x));
if ( d == 0.0 )
{
if ( n1 == 0.0 && n2 == 0.0 )
{
return false; //COINCIDENT;
}
return false; // PARALLEL;
}
var ua = n1 / d;
var ub = n2 / d;
return (ua >= 0.0 && ua <= 1.0 && ub >= 0.0 && ub <= 1.0);
},
/**
* Test to see if a line intersects a Rectangle.
*
* @param p1 {Point2D} The start of the line
* @param p2 {Point2D} The end of the line
* @param rect {Rectangle} The box to test against
* @return {Boolean} <tt>true</tt> if the line intersects the box
*/
lineBoxCollision: function(p1, p2, rect) {
// Convert the line to a box itself and do a quick box box test
var lRect = Rectangle2D.create(p1.x, p1.y, p2.x - p1.x, p2.y - p1.y);
var coll = Math2D.boxBoxCollision(lRect, rect);
lRect.destroy();
return coll;
},
/*
* Test to see if a line intersects a Rectangle.
*
* @param p1 {Point2D} The start of the line
* @param p2 {Point2D} The end of the line
* @param rect {Rectangle} The box to test against
* @return <tt>true</tt> if the line intersects the box
* @type Boolean
lineBoxCollision: function(p1, p2, rect)
{
if (Math2D.boxPointCollision(rect, p1) &&
Math2D.boxPointCollision(rect, p2))
{
// line inside
return true;
}
// check each line for intersection
var topLeft = rect.getTopLeft();
var bottomRight = rect.getBottomRight();
var topRight = new Point2D(rect.x, rect.y).add(new Point2D(rect.width, 0));
var bottomLeft = new Point2D(rect.x, rect.y).add(new Point2D(0, rect.height));
if (Math2D.lineLineCollision(p1, p2, topLeft, topRight)) return true;
if (Math2D.lineLineCollision(p1, p2, topRight, bottomRight)) return true;
if (Math2D.lineLineCollision(p1, p2, bottomRight, bottomLeft)) return true;
if (Math2D.lineLineCollision(p1, p2, bottomLeft, topLeft)) return true;
return false;
},
*/
/**
* A static method used to calculate a direction vector
* from a heading angle.
*
* @param origin {Point2D} The origin of the shape
* @param baseVec {Vector2D} The base vector
* @param angle {Number} The rotation in degrees
* @return {Vector2D} The direction vector
*/
getDirectionVector: function(origin, baseVec, angle) {
var r = Math2D.degToRad(angle);
var x = Math.cos(r) * baseVec.x - Math.sin(r) * baseVec.y;
var y = Math.sin(r) * baseVec.x + Math.cos(r) * baseVec.y;
var v = Vector2D.create(x, y).sub(origin);
return v.normalize();
},
/**
* Given a {@link Rectangle2D}, generate a random point within it.
*
* @param rect {Rectangle2D} The rectangle
* @return {Point2D} A random point within the rectangle
*/
randomPoint: function(rect) {
var r = rect.get();
return Point2D.create(Math.floor(r.x + Math2.random() * r.w),
Math.floor(r.y + Math2.random() * r.h));
},
/**
* Calculate an approximate 2D convex hull for the given array of points.
* <p/>
* Copyright 2001, softSurfer (www.softsurfer.com)
* This code may be freely used and modified for any purpose
* providing that this copyright notice is included with it.
* SoftSurfer makes no warranty for this code, and cannot be held
* liable for any real or imagined damage resulting from its use.
* Users of this code must verify correctness for their application.
*
* @param points {Array} An array of {@link Point2D} instances
* @param k {Number} The approximation accuracy (larger = more accurate)
* @return {Array} An array of {@link Point2D} which contains the
* approximate hull of the given points
*/
convexHull: function(points, k) {
// Tests if a point is Left|On|Right of an infinite line.
function | (point0, point1, point2) {
var p0 = point0.get(), p1 = point1.get(), p2 = point2.get();
return (p1.x - p0.x)*(p2.y - p0.y) - (p2.x - p0.x)*(p1.y - p0.y);
}
var Bin = Base.extend({
B: null,
constructor: function(size) {
this.B = [];
for (var i = 0; i < size; i++) {
this.B.push({
min: 0,
max: 0
});
}
}
});
var NONE = -1;
var minmin=0, minmax=0,
maxmin=0, maxmax=0,
xmin = points[0].get().x, xmax = points[0].get().x,
cP, bot=0, top=(-1), n = points.length, // indices for bottom and top of the stack
hull = [];
// Get the points with (1) min-max x-coord, and (2) min-max y-coord
for ( i=1; i < n; i++) {
cP = points[i].get();
if (cP.x <= xmin) {
if (cP.x < xmin) { // new xmin
xmin = cP.x;
minmin = minmax = i;
} else { // another xmin
if (cP.y < points[minmin].get().y)
minmin = i;
else if (cP.y > points[minmax].get().y)
minmax = i;
}
}
if (cP.x >= xmax) {
if (cP.x > xmax) { // new xmax
xmax = cP.x;
maxmin = maxmax = i;
} else { // another xmax
if (cP.y < points[maxmin].get().y)
maxmin = i;
else if (cP.y > points[maxmax].get().y)
maxmax = i;
}
}
}
if (xmin == xmax) { // degenerate case: all x-coords == xmin
hull[++top] = points[minmin]; // a point, or
if (minmax != minmin) // a nontrivial segment
hull[++top] = points[minmax];
return hull; // one or two points
}
// Next, get the max and min points in the k range bins
var bin = new Bin(k+2); // first allocate the bins
bin.B[0].min = minmin; bin.B[0].max = minmax; // set bin 0
bin.B[k+1].min = maxmin; bin.B[k+1].max = maxmax; // set bin k+1
for (var b = 1; b <= k; b++) { // initially nothing is in the other bins
bin.B[b].min = bin.B[b].max = NONE;
}
for (var b, i=0; i < n; i++) {
var cPP = points[i];
cP = cPP.get();
if (cP.x == xmin || cP.x == xmax) // already have bins 0 and k+1
continue;
// check if a lower or upper point
if (isLeft(points[minmin], points[maxmin], cPP) < 0) { // below lower line
b = (k * (cP.x - xmin) / (xmax - xmin) ) + 1; // bin #
if (bin.B[b].min == NONE) // no min point in this range
bin.B[b].min = i; // first min
else if (cP.y < points[bin.B[b].min].get().y)
bin.B[b].min = i; // new min
continue;
}
if (isLeft(points[minmax], points[maxmax], cPP) > 0) { // above upper line
b = (k * (cP.x - xmin) / (xmax - xmin) ) + 1; // bin #
if (bin.B[b].max == NONE) // no max point in this range
bin.B[b].max = i; // first max
else if (cP.y > points[bin.B[b].max].get().y)
bin.B[b].max = i; // new max
continue;
}
}
// Now, use the chain algorithm to get the lower and upper hulls
// the output array hull[] will be used as the stack
// First, compute the lower hull on the stack hull[]
for (var i = 0; i <= k+1; ++i) {
if (bin.B[i].min == NONE) // no min point in this range
continue;
var cPP = points[bin.B[i].min]; // select the current min point
cP = cPP.get();
while (top > 0) { // there are at least 2 points on the stack
// test if current point is left of the line at the stack top
if (isLeft(hull[top-1], hull[top], cPP) > 0)
break; // cP is a new hull vertex
else
top--; // pop top point off stack
}
hull[++top] = cPP; // push current point onto stack
}
// Next, compute the upper hull on the stack H above the bottom hull
if (maxmax != maxmin) // if distinct xmax points
hull[++top] = points[maxmax]; // push maxmax point onto stack
bot = top; // the bottom point of the upper hull stack
for (var i = k; i >= 0; --i) {
if (bin.B[i].max == NONE) // no max point in this range
continue;
var cPP = points[bin.B[i].max]; // select the current max point
cP = cPP.get();
while (top > bot) { // at least 2 points on the upper stack
// test if current point is left of the line at the stack top
if (isLeft(hull[top-1], hull[top], cPP) > 0)
break; // current point is a new hull vertex
else
top--; // pop top point off stack
}
hull[++top] = cPP; // push current point onto stack
}
if (minmax != minmin)
hull[++top] = points[minmin]; // push joining endpoint onto stack
bin = null; // free bins before returning
return hull; // # of points on the stack
},
/**
* Determine the Minkowski Difference of two convex hulls. Useful for
* calculating collision response.
*
* @param hullA {Array} An array of {@link Point2D}
* @param hullB {Array} An array of {@link Point2D}
* @return {Array} An array of {@link Point2D} which are the Minkowski Difference of
* the two hulls.
*/
minkDiff: function(hullA, hullB) {
var cP = 0, minkDiff = new Array(hullA.length * hullB.length);
for (var a in hullA) {
for (var b in hullB) {
var ha = hullA[a].get(), hb = hullB[b].get(),
pt = Point2D.create(hb.x - ha.x, hb.y - ha.y);
minkDiff[cP++] = pt;
}
}
return minkDiff;
},
ISOMETRIC_PROJECTION: 0,
DIMETRIC_SIDE_PROJECTION: 1,
DIMETRIC_TOP_PROJECTION: 2
});
return Math2D;
});
| isLeft | identifier_name |
engine.math2d.js | /**
* The Render Engine
* Math
*
* @fileoverview A 2D math library with static methods, plus objects to represent
* points, rectangles and circles.
*
* @author: Brett Fattori (brettf@renderengine.com)
* @author: $Author$
* @version: $Revision$
*
* Copyright (c) 2010 Brett Fattori (brettf@renderengine.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
Engine.include("/engine/engine.mathprimitives.js");
Engine.initObject("Math2D", null, function() {
/**
* @class A static class with methods and fields that are helpful
* when dealing with two dimensional mathematics.
*
* @static
*/
var Math2D = Base.extend(/** @scope Math2D.prototype */{
constructor: null,
/**
* An approximation of PI for speedier calculations. (3.14159)
* @type {Number}
* @const
*/
PI: 3.14159,
/**
* An approximation of the inverse of PI so we can
* avoid divisions. (0.31831)
* @type {Number}
* @const
*/
INV_PI: 0.31831,
/**
* Convert degrees to radians.
* @param degrees {Number} An angle in degrees
* @return {Number} The degrees value converted to radians
*/
degToRad: function(degrees) {
return (0.01745 * degrees);
},
/**
* Convert radians to degrees.
* @param radians {Number} An angle in radians
* @return {Number} The radians value converted to degrees
*/
radToDeg: function(radians) {
return (radians * 180 / Math2D.PI);
},
/**
* Perform AAB (axis-aligned box) to AAB collision testing, returning <tt>true</tt>
* if the two boxes overlap.
*
* @param box1 {Rectangle2D} The collision box of object 1
* @param box2 {Rectangle2D} The collision box of object 2
* @return {Boolean} <tt>true</tt> if the rectangles overlap
*/
boxBoxCollision: function(box1, box2) {
return box1.isIntersecting(box2);
},
/**
* Perform point to AAB collision, returning <code>true</code>
* if a collision occurs.
*
* @param box {Rectangle2D} The collision box of the object
* @param point {Point2D} The point to test, in world coordinates
* @return {Boolean} <tt>true</tt> if the point is within the rectangle
*/
boxPointCollision: function(box, point) {
return box.containsPoint(point);
},
/**
* Check to see if a line intersects another
*
* @param p1 {Point2D} Start of line 1
* @param p2 {Point2D} End of line 1
* @param p3 {Point2D} Start of line 2
* @param p4 {Point2D} End of line 2
* @return {Boolean} <tt>true</tt> if the lines intersect
*/
lineLineCollision: function(p1, p2, p3, p4) {
var d = ((p4.y - p3.y) * (p2.x - p1.x)) - ((p4.x - p3.x) * (p2.y - p1.y));
var n1 = ((p4.x - p3.x) * (p1.y - p3.y)) - ((p4.y - p3.y) * (p1.x - p3.x));
var n2 = ((p2.x - p1.x) * (p1.y - p3.y)) - ((p2.y - p1.y) * (p1.x - p3.x));
if ( d == 0.0 )
{
if ( n1 == 0.0 && n2 == 0.0 )
{
return false; //COINCIDENT;
}
return false; // PARALLEL;
}
var ua = n1 / d;
var ub = n2 / d;
return (ua >= 0.0 && ua <= 1.0 && ub >= 0.0 && ub <= 1.0);
},
/**
* Test to see if a line intersects a Rectangle.
*
* @param p1 {Point2D} The start of the line
* @param p2 {Point2D} The end of the line
* @param rect {Rectangle} The box to test against
* @return {Boolean} <tt>true</tt> if the line intersects the box
*/
lineBoxCollision: function(p1, p2, rect) {
// Convert the line to a box itself and do a quick box box test
var lRect = Rectangle2D.create(p1.x, p1.y, p2.x - p1.x, p2.y - p1.y);
var coll = Math2D.boxBoxCollision(lRect, rect);
lRect.destroy();
return coll;
},
/*
* Test to see if a line intersects a Rectangle.
*
* @param p1 {Point2D} The start of the line
* @param p2 {Point2D} The end of the line
* @param rect {Rectangle} The box to test against
* @return <tt>true</tt> if the line intersects the box
* @type Boolean
lineBoxCollision: function(p1, p2, rect)
{
if (Math2D.boxPointCollision(rect, p1) &&
Math2D.boxPointCollision(rect, p2))
{
// line inside
return true;
}
// check each line for intersection
var topLeft = rect.getTopLeft();
var bottomRight = rect.getBottomRight();
var topRight = new Point2D(rect.x, rect.y).add(new Point2D(rect.width, 0));
var bottomLeft = new Point2D(rect.x, rect.y).add(new Point2D(0, rect.height));
if (Math2D.lineLineCollision(p1, p2, topLeft, topRight)) return true;
if (Math2D.lineLineCollision(p1, p2, topRight, bottomRight)) return true;
if (Math2D.lineLineCollision(p1, p2, bottomRight, bottomLeft)) return true;
if (Math2D.lineLineCollision(p1, p2, bottomLeft, topLeft)) return true;
return false;
},
*/
/**
* A static method used to calculate a direction vector
* from a heading angle.
*
* @param origin {Point2D} The origin of the shape
* @param baseVec {Vector2D} The base vector
* @param angle {Number} The rotation in degrees
* @return {Vector2D} The direction vector
*/
getDirectionVector: function(origin, baseVec, angle) {
var r = Math2D.degToRad(angle);
var x = Math.cos(r) * baseVec.x - Math.sin(r) * baseVec.y;
var y = Math.sin(r) * baseVec.x + Math.cos(r) * baseVec.y;
var v = Vector2D.create(x, y).sub(origin);
return v.normalize();
},
/**
* Given a {@link Rectangle2D}, generate a random point within it.
*
* @param rect {Rectangle2D} The rectangle
* @return {Point2D} A random point within the rectangle
*/
randomPoint: function(rect) {
var r = rect.get();
return Point2D.create(Math.floor(r.x + Math2.random() * r.w),
Math.floor(r.y + Math2.random() * r.h));
},
/**
* Calculate an approximate 2D convex hull for the given array of points.
* <p/>
* Copyright 2001, softSurfer (www.softsurfer.com)
* This code may be freely used and modified for any purpose
* providing that this copyright notice is included with it.
* SoftSurfer makes no warranty for this code, and cannot be held
* liable for any real or imagined damage resulting from its use.
* Users of this code must verify correctness for their application.
*
* @param points {Array} An array of {@link Point2D} instances
* @param k {Number} The approximation accuracy (larger = more accurate)
* @return {Array} An array of {@link Point2D} which contains the
* approximate hull of the given points
*/
convexHull: function(points, k) {
// Tests if a point is Left|On|Right of an infinite line.
function isLeft(point0, point1, point2) {
var p0 = point0.get(), p1 = point1.get(), p2 = point2.get();
return (p1.x - p0.x)*(p2.y - p0.y) - (p2.x - p0.x)*(p1.y - p0.y);
}
var Bin = Base.extend({
B: null,
constructor: function(size) {
this.B = [];
for (var i = 0; i < size; i++) {
this.B.push({
min: 0,
max: 0
});
}
}
});
var NONE = -1;
var minmin=0, minmax=0,
maxmin=0, maxmax=0,
xmin = points[0].get().x, xmax = points[0].get().x,
cP, bot=0, top=(-1), n = points.length, // indices for bottom and top of the stack
hull = [];
// Get the points with (1) min-max x-coord, and (2) min-max y-coord
for ( i=1; i < n; i++) {
cP = points[i].get();
if (cP.x <= xmin) {
if (cP.x < xmin) { // new xmin
xmin = cP.x;
minmin = minmax = i;
} else { // another xmin
if (cP.y < points[minmin].get().y)
minmin = i;
else if (cP.y > points[minmax].get().y)
minmax = i;
}
}
if (cP.x >= xmax) {
if (cP.x > xmax) { // new xmax
| xmax = cP.x;
maxmin = maxmax = i;
} else { // another xmax
if (cP.y < points[maxmin].get().y)
maxmin = i;
else if (cP.y > points[maxmax].get().y)
maxmax = i;
}
}
}
if (xmin == xmax) { // degenerate case: all x-coords == xmin
hull[++top] = points[minmin]; // a point, or
if (minmax != minmin) // a nontrivial segment
hull[++top] = points[minmax];
return hull; // one or two points
}
// Next, get the max and min points in the k range bins
var bin = new Bin(k+2); // first allocate the bins
bin.B[0].min = minmin; bin.B[0].max = minmax; // set bin 0
bin.B[k+1].min = maxmin; bin.B[k+1].max = maxmax; // set bin k+1
for (var b = 1; b <= k; b++) { // initially nothing is in the other bins
bin.B[b].min = bin.B[b].max = NONE;
}
for (var b, i=0; i < n; i++) {
var cPP = points[i];
cP = cPP.get();
if (cP.x == xmin || cP.x == xmax) // already have bins 0 and k+1
continue;
// check if a lower or upper point
if (isLeft(points[minmin], points[maxmin], cPP) < 0) { // below lower line
b = (k * (cP.x - xmin) / (xmax - xmin) ) + 1; // bin #
if (bin.B[b].min == NONE) // no min point in this range
bin.B[b].min = i; // first min
else if (cP.y < points[bin.B[b].min].get().y)
bin.B[b].min = i; // new min
continue;
}
if (isLeft(points[minmax], points[maxmax], cPP) > 0) { // above upper line
b = (k * (cP.x - xmin) / (xmax - xmin) ) + 1; // bin #
if (bin.B[b].max == NONE) // no max point in this range
bin.B[b].max = i; // first max
else if (cP.y > points[bin.B[b].max].get().y)
bin.B[b].max = i; // new max
continue;
}
}
// Now, use the chain algorithm to get the lower and upper hulls
// the output array hull[] will be used as the stack
// First, compute the lower hull on the stack hull[]
for (var i = 0; i <= k+1; ++i) {
if (bin.B[i].min == NONE) // no min point in this range
continue;
var cPP = points[bin.B[i].min]; // select the current min point
cP = cPP.get();
while (top > 0) { // there are at least 2 points on the stack
// test if current point is left of the line at the stack top
if (isLeft(hull[top-1], hull[top], cPP) > 0)
break; // cP is a new hull vertex
else
top--; // pop top point off stack
}
hull[++top] = cPP; // push current point onto stack
}
// Next, compute the upper hull on the stack H above the bottom hull
if (maxmax != maxmin) // if distinct xmax points
hull[++top] = points[maxmax]; // push maxmax point onto stack
bot = top; // the bottom point of the upper hull stack
for (var i = k; i >= 0; --i) {
if (bin.B[i].max == NONE) // no max point in this range
continue;
var cPP = points[bin.B[i].max]; // select the current max point
cP = cPP.get();
while (top > bot) { // at least 2 points on the upper stack
// test if current point is left of the line at the stack top
if (isLeft(hull[top-1], hull[top], cPP) > 0)
break; // current point is a new hull vertex
else
top--; // pop top point off stack
}
hull[++top] = cPP; // push current point onto stack
}
if (minmax != minmin)
hull[++top] = points[minmin]; // push joining endpoint onto stack
bin = null; // free bins before returning
return hull; // # of points on the stack
},
/**
* Determine the Minkowski Difference of two convex hulls. Useful for
* calculating collision response.
*
* @param hullA {Array} An array of {@link Point2D}
* @param hullB {Array} An array of {@link Point2D}
* @return {Array} An array of {@link Point2D} which are the Minkowski Difference of
* the two hulls.
*/
minkDiff: function(hullA, hullB) {
var cP = 0, minkDiff = new Array(hullA.length * hullB.length);
for (var a in hullA) {
for (var b in hullB) {
var ha = hullA[a].get(), hb = hullB[b].get(),
pt = Point2D.create(hb.x - ha.x, hb.y - ha.y);
minkDiff[cP++] = pt;
}
}
return minkDiff;
},
ISOMETRIC_PROJECTION: 0,
DIMETRIC_SIDE_PROJECTION: 1,
DIMETRIC_TOP_PROJECTION: 2
});
return Math2D;
}); | random_line_split | |
engine.math2d.js | /**
* The Render Engine
* Math
*
* @fileoverview A 2D math library with static methods, plus objects to represent
* points, rectangles and circles.
*
* @author: Brett Fattori (brettf@renderengine.com)
* @author: $Author$
* @version: $Revision$
*
* Copyright (c) 2010 Brett Fattori (brettf@renderengine.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
Engine.include("/engine/engine.mathprimitives.js");
Engine.initObject("Math2D", null, function() {
/**
* @class A static class with methods and fields that are helpful
* when dealing with two dimensional mathematics.
*
* @static
*/
var Math2D = Base.extend(/** @scope Math2D.prototype */{
constructor: null,
/**
* An approximation of PI for speedier calculations. (3.14159)
* @type {Number}
* @const
*/
PI: 3.14159,
/**
* An approximation of the inverse of PI so we can
* avoid divisions. (0.31831)
* @type {Number}
* @const
*/
INV_PI: 0.31831,
/**
* Convert degrees to radians.
* @param degrees {Number} An angle in degrees
* @return {Number} The degrees value converted to radians
*/
degToRad: function(degrees) {
return (0.01745 * degrees);
},
/**
* Convert radians to degrees.
* @param radians {Number} An angle in radians
* @return {Number} The radians value converted to degrees
*/
radToDeg: function(radians) {
return (radians * 180 / Math2D.PI);
},
/**
* Perform AAB (axis-aligned box) to AAB collision testing, returning <tt>true</tt>
* if the two boxes overlap.
*
* @param box1 {Rectangle2D} The collision box of object 1
* @param box2 {Rectangle2D} The collision box of object 2
* @return {Boolean} <tt>true</tt> if the rectangles overlap
*/
boxBoxCollision: function(box1, box2) {
return box1.isIntersecting(box2);
},
/**
* Perform point to AAB collision, returning <code>true</code>
* if a collision occurs.
*
* @param box {Rectangle2D} The collision box of the object
* @param point {Point2D} The point to test, in world coordinates
* @return {Boolean} <tt>true</tt> if the point is within the rectangle
*/
boxPointCollision: function(box, point) {
return box.containsPoint(point);
},
/**
* Check to see if a line intersects another
*
* @param p1 {Point2D} Start of line 1
* @param p2 {Point2D} End of line 1
* @param p3 {Point2D} Start of line 2
* @param p4 {Point2D} End of line 2
* @return {Boolean} <tt>true</tt> if the lines intersect
*/
lineLineCollision: function(p1, p2, p3, p4) {
var d = ((p4.y - p3.y) * (p2.x - p1.x)) - ((p4.x - p3.x) * (p2.y - p1.y));
var n1 = ((p4.x - p3.x) * (p1.y - p3.y)) - ((p4.y - p3.y) * (p1.x - p3.x));
var n2 = ((p2.x - p1.x) * (p1.y - p3.y)) - ((p2.y - p1.y) * (p1.x - p3.x));
if ( d == 0.0 )
{
if ( n1 == 0.0 && n2 == 0.0 )
{
return false; //COINCIDENT;
}
return false; // PARALLEL;
}
var ua = n1 / d;
var ub = n2 / d;
return (ua >= 0.0 && ua <= 1.0 && ub >= 0.0 && ub <= 1.0);
},
/**
* Test to see if a line intersects a Rectangle.
*
* @param p1 {Point2D} The start of the line
* @param p2 {Point2D} The end of the line
* @param rect {Rectangle} The box to test against
* @return {Boolean} <tt>true</tt> if the line intersects the box
*/
lineBoxCollision: function(p1, p2, rect) {
// Convert the line to a box itself and do a quick box box test
var lRect = Rectangle2D.create(p1.x, p1.y, p2.x - p1.x, p2.y - p1.y);
var coll = Math2D.boxBoxCollision(lRect, rect);
lRect.destroy();
return coll;
},
/*
* Test to see if a line intersects a Rectangle.
*
* @param p1 {Point2D} The start of the line
* @param p2 {Point2D} The end of the line
* @param rect {Rectangle} The box to test against
* @return <tt>true</tt> if the line intersects the box
* @type Boolean
lineBoxCollision: function(p1, p2, rect)
{
if (Math2D.boxPointCollision(rect, p1) &&
Math2D.boxPointCollision(rect, p2))
{
// line inside
return true;
}
// check each line for intersection
var topLeft = rect.getTopLeft();
var bottomRight = rect.getBottomRight();
var topRight = new Point2D(rect.x, rect.y).add(new Point2D(rect.width, 0));
var bottomLeft = new Point2D(rect.x, rect.y).add(new Point2D(0, rect.height));
if (Math2D.lineLineCollision(p1, p2, topLeft, topRight)) return true;
if (Math2D.lineLineCollision(p1, p2, topRight, bottomRight)) return true;
if (Math2D.lineLineCollision(p1, p2, bottomRight, bottomLeft)) return true;
if (Math2D.lineLineCollision(p1, p2, bottomLeft, topLeft)) return true;
return false;
},
*/
/**
* A static method used to calculate a direction vector
* from a heading angle.
*
* @param origin {Point2D} The origin of the shape
* @param baseVec {Vector2D} The base vector
* @param angle {Number} The rotation in degrees
* @return {Vector2D} The direction vector
*/
getDirectionVector: function(origin, baseVec, angle) {
var r = Math2D.degToRad(angle);
var x = Math.cos(r) * baseVec.x - Math.sin(r) * baseVec.y;
var y = Math.sin(r) * baseVec.x + Math.cos(r) * baseVec.y;
var v = Vector2D.create(x, y).sub(origin);
return v.normalize();
},
/**
* Given a {@link Rectangle2D}, generate a random point within it.
*
* @param rect {Rectangle2D} The rectangle
* @return {Point2D} A random point within the rectangle
*/
randomPoint: function(rect) {
var r = rect.get();
return Point2D.create(Math.floor(r.x + Math2.random() * r.w),
Math.floor(r.y + Math2.random() * r.h));
},
/**
* Calculate an approximate 2D convex hull for the given array of points.
* <p/>
* Copyright 2001, softSurfer (www.softsurfer.com)
* This code may be freely used and modified for any purpose
* providing that this copyright notice is included with it.
* SoftSurfer makes no warranty for this code, and cannot be held
* liable for any real or imagined damage resulting from its use.
* Users of this code must verify correctness for their application.
*
* @param points {Array} An array of {@link Point2D} instances
* @param k {Number} The approximation accuracy (larger = more accurate)
* @return {Array} An array of {@link Point2D} which contains the
* approximate hull of the given points
*/
convexHull: function(points, k) {
// Tests if a point is Left|On|Right of an infinite line.
function isLeft(point0, point1, point2) |
var Bin = Base.extend({
B: null,
constructor: function(size) {
this.B = [];
for (var i = 0; i < size; i++) {
this.B.push({
min: 0,
max: 0
});
}
}
});
var NONE = -1;
var minmin=0, minmax=0,
maxmin=0, maxmax=0,
xmin = points[0].get().x, xmax = points[0].get().x,
cP, bot=0, top=(-1), n = points.length, // indices for bottom and top of the stack
hull = [];
// Get the points with (1) min-max x-coord, and (2) min-max y-coord
for ( i=1; i < n; i++) {
cP = points[i].get();
if (cP.x <= xmin) {
if (cP.x < xmin) { // new xmin
xmin = cP.x;
minmin = minmax = i;
} else { // another xmin
if (cP.y < points[minmin].get().y)
minmin = i;
else if (cP.y > points[minmax].get().y)
minmax = i;
}
}
if (cP.x >= xmax) {
if (cP.x > xmax) { // new xmax
xmax = cP.x;
maxmin = maxmax = i;
} else { // another xmax
if (cP.y < points[maxmin].get().y)
maxmin = i;
else if (cP.y > points[maxmax].get().y)
maxmax = i;
}
}
}
if (xmin == xmax) { // degenerate case: all x-coords == xmin
hull[++top] = points[minmin]; // a point, or
if (minmax != minmin) // a nontrivial segment
hull[++top] = points[minmax];
return hull; // one or two points
}
// Next, get the max and min points in the k range bins
var bin = new Bin(k+2); // first allocate the bins
bin.B[0].min = minmin; bin.B[0].max = minmax; // set bin 0
bin.B[k+1].min = maxmin; bin.B[k+1].max = maxmax; // set bin k+1
for (var b = 1; b <= k; b++) { // initially nothing is in the other bins
bin.B[b].min = bin.B[b].max = NONE;
}
for (var b, i=0; i < n; i++) {
var cPP = points[i];
cP = cPP.get();
if (cP.x == xmin || cP.x == xmax) // already have bins 0 and k+1
continue;
// check if a lower or upper point
if (isLeft(points[minmin], points[maxmin], cPP) < 0) { // below lower line
b = (k * (cP.x - xmin) / (xmax - xmin) ) + 1; // bin #
if (bin.B[b].min == NONE) // no min point in this range
bin.B[b].min = i; // first min
else if (cP.y < points[bin.B[b].min].get().y)
bin.B[b].min = i; // new min
continue;
}
if (isLeft(points[minmax], points[maxmax], cPP) > 0) { // above upper line
b = (k * (cP.x - xmin) / (xmax - xmin) ) + 1; // bin #
if (bin.B[b].max == NONE) // no max point in this range
bin.B[b].max = i; // first max
else if (cP.y > points[bin.B[b].max].get().y)
bin.B[b].max = i; // new max
continue;
}
}
// Now, use the chain algorithm to get the lower and upper hulls
// the output array hull[] will be used as the stack
// First, compute the lower hull on the stack hull[]
for (var i = 0; i <= k+1; ++i) {
if (bin.B[i].min == NONE) // no min point in this range
continue;
var cPP = points[bin.B[i].min]; // select the current min point
cP = cPP.get();
while (top > 0) { // there are at least 2 points on the stack
// test if current point is left of the line at the stack top
if (isLeft(hull[top-1], hull[top], cPP) > 0)
break; // cP is a new hull vertex
else
top--; // pop top point off stack
}
hull[++top] = cPP; // push current point onto stack
}
// Next, compute the upper hull on the stack H above the bottom hull
if (maxmax != maxmin) // if distinct xmax points
hull[++top] = points[maxmax]; // push maxmax point onto stack
bot = top; // the bottom point of the upper hull stack
for (var i = k; i >= 0; --i) {
if (bin.B[i].max == NONE) // no max point in this range
continue;
var cPP = points[bin.B[i].max]; // select the current max point
cP = cPP.get();
while (top > bot) { // at least 2 points on the upper stack
// test if current point is left of the line at the stack top
if (isLeft(hull[top-1], hull[top], cPP) > 0)
break; // current point is a new hull vertex
else
top--; // pop top point off stack
}
hull[++top] = cPP; // push current point onto stack
}
if (minmax != minmin)
hull[++top] = points[minmin]; // push joining endpoint onto stack
bin = null; // free bins before returning
return hull; // # of points on the stack
},
/**
* Determine the Minkowski Difference of two convex hulls. Useful for
* calculating collision response.
*
* @param hullA {Array} An array of {@link Point2D}
* @param hullB {Array} An array of {@link Point2D}
* @return {Array} An array of {@link Point2D} which are the Minkowski Difference of
* the two hulls.
*/
minkDiff: function(hullA, hullB) {
var cP = 0, minkDiff = new Array(hullA.length * hullB.length);
for (var a in hullA) {
for (var b in hullB) {
var ha = hullA[a].get(), hb = hullB[b].get(),
pt = Point2D.create(hb.x - ha.x, hb.y - ha.y);
minkDiff[cP++] = pt;
}
}
return minkDiff;
},
ISOMETRIC_PROJECTION: 0,
DIMETRIC_SIDE_PROJECTION: 1,
DIMETRIC_TOP_PROJECTION: 2
});
return Math2D;
});
| {
var p0 = point0.get(), p1 = point1.get(), p2 = point2.get();
return (p1.x - p0.x)*(p2.y - p0.y) - (p2.x - p0.x)*(p1.y - p0.y);
} | identifier_body |
client.go | package ocppj
import (
"fmt"
"gopkg.in/go-playground/validator.v9"
"github.com/lorenzodonini/ocpp-go/ocpp"
"github.com/lorenzodonini/ocpp-go/ws"
)
// The endpoint initiating the connection to an OCPP server, in an OCPP-J topology.
// During message exchange, the two roles may be reversed (depending on the message direction), but a client struct remains associated to a charge point/charging station.
type Client struct {
Endpoint
client ws.WsClient
Id string
requestHandler func(request ocpp.Request, requestId string, action string)
responseHandler func(response ocpp.Response, requestId string)
errorHandler func(err *ocpp.Error, details interface{})
onDisconnectedHandler func(err error)
onReconnectedHandler func()
invalidMessageHook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error
dispatcher ClientDispatcher
RequestState ClientState
}
// Creates a new Client endpoint.
// Requires a unique client ID, a websocket client, a struct for queueing/dispatching requests,
// a state handler and a list of supported profiles (optional).
//
// You may create a simple new server by using these default values:
//
// s := ocppj.NewClient(ws.NewClient(), nil, nil)
//
// The wsClient parameter cannot be nil. Refer to the ws package for information on how to create and
// customize a websocket client.
func NewClient(id string, wsClient ws.WsClient, dispatcher ClientDispatcher, stateHandler ClientState, profiles ...*ocpp.Profile) *Client {
endpoint := Endpoint{}
if wsClient == nil {
panic("wsClient parameter cannot be nil")
}
for _, profile := range profiles {
endpoint.AddProfile(profile)
}
if dispatcher == nil {
dispatcher = NewDefaultClientDispatcher(NewFIFOClientQueue(10))
}
if stateHandler == nil {
stateHandler = NewClientState()
}
dispatcher.SetNetworkClient(wsClient)
dispatcher.SetPendingRequestState(stateHandler)
return &Client{Endpoint: endpoint, client: wsClient, Id: id, dispatcher: dispatcher, RequestState: stateHandler}
}
// Registers a handler for incoming requests.
func (c *Client) SetRequestHandler(handler func(request ocpp.Request, requestId string, action string)) {
c.requestHandler = handler
}
// Registers a handler for incoming responses.
func (c *Client) SetResponseHandler(handler func(response ocpp.Response, requestId string)) {
c.responseHandler = handler
}
// Registers a handler for incoming error messages.
func (c *Client) SetErrorHandler(handler func(err *ocpp.Error, details interface{})) {
c.errorHandler = handler
}
// SetInvalidMessageHook registers an optional hook for incoming messages that couldn't be parsed.
// This hook is called when a message is received but cannot be parsed to the target OCPP message struct.
//
// The application is notified synchronously of the error.
// The callback provides the raw JSON string, along with the parsed fields.
// The application MUST return as soon as possible, since the hook is called synchronously and awaits a return value.
//
// While the hook does not allow responding to the message directly,
// the return value will be used to send an OCPP error to the other endpoint.
//
// If no handler is registered (or no error is returned by the hook),
// the internal error message is sent to the client without further processing.
//
// Note: Failing to return from the hook will cause the client to block indefinitely.
func (c *Client) SetInvalidMessageHook(hook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error) {
c.invalidMessageHook = hook
}
func (c *Client) SetOnDisconnectedHandler(handler func(err error)) {
c.onDisconnectedHandler = handler
}
func (c *Client) SetOnReconnectedHandler(handler func()) {
c.onReconnectedHandler = handler
}
// Registers the handler to be called on timeout.
func (c *Client) SetOnRequestCanceled(handler func(requestId string, request ocpp.Request, err *ocpp.Error)) {
c.dispatcher.SetOnRequestCanceled(handler)
}
// Connects to the given serverURL and starts running the I/O loop for the underlying connection. | // The read/write routines are run on dedicated goroutines, so the main thread can perform other operations.
//
// In case of disconnection, the client handles re-connection automatically.
// The client will attempt to re-connect to the server forever, until it is stopped by invoking the Stop method.
//
// An error may be returned, if establishing the connection failed.
func (c *Client) Start(serverURL string) error {
// Set internal message handler
c.client.SetMessageHandler(c.ocppMessageHandler)
c.client.SetDisconnectedHandler(c.onDisconnected)
c.client.SetReconnectedHandler(c.onReconnected)
// Connect & run
fullUrl := fmt.Sprintf("%v/%v", serverURL, c.Id)
err := c.client.Start(fullUrl)
if err == nil {
c.dispatcher.Start()
}
return err
}
// Stops the client.
// The underlying I/O loop is stopped and all pending requests are cleared.
func (c *Client) Stop() {
// Overwrite handler to intercept disconnected signal
cleanupC := make(chan struct{}, 1)
if c.IsConnected() {
c.client.SetDisconnectedHandler(func(err error) {
cleanupC <- struct{}{}
})
} else {
close(cleanupC)
}
c.client.Stop()
if c.dispatcher.IsRunning() {
c.dispatcher.Stop()
}
// Wait for websocket to be cleaned up
<-cleanupC
}
func (c *Client) IsConnected() bool {
return c.client.IsConnected()
}
// Sends an OCPP Request to the server.
// The protocol is based on request-response and cannot send multiple messages concurrently.
// To guarantee this, outgoing messages are added to a queue and processed sequentially.
//
// Returns an error in the following cases:
//
// - the client wasn't started
//
// - message validation fails (request is malformed)
//
// - the endpoint doesn't support the feature
//
// - the output queue is full
func (c *Client) SendRequest(request ocpp.Request) error {
if !c.dispatcher.IsRunning() {
return fmt.Errorf("ocppj client is not started, couldn't send request")
}
call, err := c.CreateCall(request)
if err != nil {
return err
}
jsonMessage, err := call.MarshalJSON()
if err != nil {
return err
}
// Message will be processed by dispatcher. A dedicated mechanism allows to delegate the message queue handling.
if err = c.dispatcher.SendRequest(RequestBundle{Call: call, Data: jsonMessage}); err != nil {
log.Errorf("error dispatching request [%s, %s]: %v", call.UniqueId, call.Action, err)
return err
}
log.Debugf("enqueued CALL [%s, %s]", call.UniqueId, call.Action)
return nil
}
// Sends an OCPP Response to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (response is malformed)
//
// - the endpoint doesn't support the feature
//
// - a network error occurred
func (c *Client) SendResponse(requestId string, response ocpp.Response) error {
callResult, err := c.CreateCallResult(response, requestId)
if err != nil {
return err
}
jsonMessage, err := callResult.MarshalJSON()
if err != nil {
return ocpp.NewError(GenericError, err.Error(), requestId)
}
if err = c.client.Write(jsonMessage); err != nil {
log.Errorf("error sending response [%s]: %v", callResult.GetUniqueId(), err)
return ocpp.NewError(GenericError, err.Error(), requestId)
}
log.Debugf("sent CALL RESULT [%s]", callResult.GetUniqueId())
log.Debugf("sent JSON message to server: %s", string(jsonMessage))
return nil
}
// Sends an OCPP Error to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (error is malformed)
//
// - a network error occurred
func (c *Client) SendError(requestId string, errorCode ocpp.ErrorCode, description string, details interface{}) error {
callError, err := c.CreateCallError(requestId, errorCode, description, details)
if err != nil {
return err
}
jsonMessage, err := callError.MarshalJSON()
if err != nil {
return ocpp.NewError(GenericError, err.Error(), requestId)
}
if err = c.client.Write(jsonMessage); err != nil {
log.Errorf("error sending response error [%s]: %v", callError.UniqueId, err)
return ocpp.NewError(GenericError, err.Error(), requestId)
}
log.Debugf("sent CALL ERROR [%s]", callError.UniqueId)
log.Debugf("sent JSON message to server: %s", string(jsonMessage))
return nil
}
func (c *Client) ocppMessageHandler(data []byte) error {
parsedJson, err := ParseRawJsonMessage(data)
if err != nil {
log.Error(err)
return err
}
log.Debugf("received JSON message from server: %s", string(data))
message, err := c.ParseMessage(parsedJson, c.RequestState)
if err != nil {
ocppErr := err.(*ocpp.Error)
messageID := ocppErr.MessageId
// Support ad-hoc callback for invalid message handling
if c.invalidMessageHook != nil {
err2 := c.invalidMessageHook(ocppErr, string(data), parsedJson)
// If the hook returns an error, use it as output error. If not, use the original error.
if err2 != nil {
ocppErr = err2
ocppErr.MessageId = messageID
}
}
err = ocppErr
// Send error to other endpoint if a message ID is available
if ocppErr.MessageId != "" {
err2 := c.SendError(ocppErr.MessageId, ocppErr.Code, ocppErr.Description, nil)
if err2 != nil {
return err2
}
}
log.Error(err)
return err
}
if message != nil {
switch message.GetMessageTypeId() {
case CALL:
call := message.(*Call)
log.Debugf("handling incoming CALL [%s, %s]", call.UniqueId, call.Action)
c.requestHandler(call.Payload, call.UniqueId, call.Action)
case CALL_RESULT:
callResult := message.(*CallResult)
log.Debugf("handling incoming CALL RESULT [%s]", callResult.UniqueId)
c.dispatcher.CompleteRequest(callResult.GetUniqueId()) // Remove current request from queue and send next one
if c.responseHandler != nil {
c.responseHandler(callResult.Payload, callResult.UniqueId)
}
case CALL_ERROR:
callError := message.(*CallError)
log.Debugf("handling incoming CALL ERROR [%s]", callError.UniqueId)
c.dispatcher.CompleteRequest(callError.GetUniqueId()) // Remove current request from queue and send next one
if c.errorHandler != nil {
c.errorHandler(ocpp.NewError(callError.ErrorCode, callError.ErrorDescription, callError.UniqueId), callError.ErrorDetails)
}
}
}
return nil
}
// HandleFailedResponseError allows to handle failures while sending responses (either CALL_RESULT or CALL_ERROR).
// It internally analyzes and creates an ocpp.Error based on the given error.
// It will the attempt to send it to the server.
//
// The function helps to prevent starvation on the other endpoint, which is caused by a response never reaching it.
// The method will, however, only attempt to send a default error once.
// If this operation fails, the other endpoint may still starve.
func (c *Client) HandleFailedResponseError(requestID string, err error, featureName string) {
log.Debugf("handling error for failed response [%s]", requestID)
var responseErr *ocpp.Error
// There's several possible errors: invalid profile, invalid payload or send error
switch err.(type) {
case validator.ValidationErrors:
// Validation error
validationErr := err.(validator.ValidationErrors)
responseErr = errorFromValidation(validationErr, requestID, featureName)
case *ocpp.Error:
// Internal OCPP error
responseErr = err.(*ocpp.Error)
case error:
// Unknown error
responseErr = ocpp.NewError(GenericError, err.Error(), requestID)
}
// Send an OCPP error to the target, since no regular response could be sent
_ = c.SendError(requestID, responseErr.Code, responseErr.Description, nil)
}
func (c *Client) onDisconnected(err error) {
log.Error("disconnected from server", err)
c.dispatcher.Pause()
if c.onDisconnectedHandler != nil {
c.onDisconnectedHandler(err)
}
}
func (c *Client) onReconnected() {
if c.onReconnectedHandler != nil {
c.onReconnectedHandler()
}
c.dispatcher.Resume()
} | //
// If the connection is established successfully, the function returns control to the caller immediately. | random_line_split |
client.go | package ocppj
import (
"fmt"
"gopkg.in/go-playground/validator.v9"
"github.com/lorenzodonini/ocpp-go/ocpp"
"github.com/lorenzodonini/ocpp-go/ws"
)
// The endpoint initiating the connection to an OCPP server, in an OCPP-J topology.
// During message exchange, the two roles may be reversed (depending on the message direction), but a client struct remains associated to a charge point/charging station.
type Client struct {
Endpoint
client ws.WsClient
Id string
requestHandler func(request ocpp.Request, requestId string, action string)
responseHandler func(response ocpp.Response, requestId string)
errorHandler func(err *ocpp.Error, details interface{})
onDisconnectedHandler func(err error)
onReconnectedHandler func()
invalidMessageHook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error
dispatcher ClientDispatcher
RequestState ClientState
}
// Creates a new Client endpoint.
// Requires a unique client ID, a websocket client, a struct for queueing/dispatching requests,
// a state handler and a list of supported profiles (optional).
//
// You may create a simple new client by using these default values:
//
//	s := ocppj.NewClient(ws.NewClient(), nil, nil)
//
// The wsClient parameter cannot be nil. Refer to the ws package for information on how to create and
// customize a websocket client.
func NewClient(id string, wsClient ws.WsClient, dispatcher ClientDispatcher, stateHandler ClientState, profiles ...*ocpp.Profile) *Client {
	if wsClient == nil {
		panic("wsClient parameter cannot be nil")
	}
	endpoint := Endpoint{}
	for _, p := range profiles {
		endpoint.AddProfile(p)
	}
	// Fall back to sensible defaults for the optional collaborators.
	if dispatcher == nil {
		dispatcher = NewDefaultClientDispatcher(NewFIFOClientQueue(10))
	}
	if stateHandler == nil {
		stateHandler = NewClientState()
	}
	dispatcher.SetNetworkClient(wsClient)
	dispatcher.SetPendingRequestState(stateHandler)
	return &Client{
		Endpoint:     endpoint,
		client:       wsClient,
		Id:           id,
		dispatcher:   dispatcher,
		RequestState: stateHandler,
	}
}
// SetRequestHandler registers a handler for incoming requests (CALL messages).
// The handler receives the decoded request payload, the unique message ID and
// the OCPP action name.
func (c *Client) SetRequestHandler(handler func(request ocpp.Request, requestId string, action string)) {
	c.requestHandler = handler
}
// SetResponseHandler registers a handler for incoming responses (CALL_RESULT
// messages). The handler receives the decoded response payload and the ID of
// the request it answers.
func (c *Client) SetResponseHandler(handler func(response ocpp.Response, requestId string)) {
	c.responseHandler = handler
}
// SetErrorHandler registers a handler for incoming error messages (CALL_ERROR),
// i.e. errors the server returns in response to a previously sent request.
func (c *Client) SetErrorHandler(handler func(err *ocpp.Error, details interface{})) {
	c.errorHandler = handler
}
// SetInvalidMessageHook registers an optional hook for incoming messages that couldn't be parsed.
// This hook is called when a message is received but cannot be parsed to the target OCPP message struct.
//
// The application is notified synchronously of the error.
// The callback provides the raw JSON string, along with the parsed fields.
// The application MUST return as soon as possible, since the hook is called synchronously and awaits a return value.
//
// While the hook does not allow responding to the message directly,
// the return value will be used to send an OCPP error to the other endpoint.
//
// If no handler is registered (or no error is returned by the hook),
// the internal error message is sent to the client without further processing.
//
// Passing a nil hook disables a previously registered one.
//
// Note: Failing to return from the hook will cause the client to block indefinitely.
func (c *Client) SetInvalidMessageHook(hook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error) {
	c.invalidMessageHook = hook
}
// SetOnDisconnectedHandler registers a callback invoked whenever the underlying
// websocket connection to the server is lost.
func (c *Client) SetOnDisconnectedHandler(handler func(err error)) {
	c.onDisconnectedHandler = handler
}
// SetOnReconnectedHandler registers a callback invoked whenever the underlying
// websocket connection to the server is re-established after a disconnection.
func (c *Client) SetOnReconnectedHandler(handler func()) {
	c.onReconnectedHandler = handler
}
// SetOnRequestCanceled registers the handler to be called on timeout of a
// pending request; the registration is delegated to the dispatcher.
func (c *Client) SetOnRequestCanceled(handler func(requestId string, request ocpp.Request, err *ocpp.Error)) {
	c.dispatcher.SetOnRequestCanceled(handler)
}
// Connects to the given serverURL and starts running the I/O loop for the underlying connection.
//
// If the connection is established successfully, the function returns control to the caller immediately.
// The read/write routines are run on dedicated goroutines, so the main thread can perform other operations.
//
// In case of disconnection, the client handles re-connection automatically.
// The client will attempt to re-connect to the server forever, until it is stopped by invoking the Stop method.
//
// An error may be returned, if establishing the connection failed.
func (c *Client) Start(serverURL string) error {
	// Wire up the internal websocket callbacks before connecting.
	c.client.SetMessageHandler(c.ocppMessageHandler)
	c.client.SetDisconnectedHandler(c.onDisconnected)
	c.client.SetReconnectedHandler(c.onReconnected)
	// The endpoint URL is the server URL with the client ID appended as a path segment.
	endpointURL := fmt.Sprintf("%v/%v", serverURL, c.Id)
	if err := c.client.Start(endpointURL); err != nil {
		return err
	}
	// Only begin dispatching queued requests once the connection is up.
	c.dispatcher.Start()
	return nil
}
// Stops the client.
// The underlying I/O loop is stopped and all pending requests are cleared.
func (c *Client) Stop() {
	// Overwrite handler to intercept disconnected signal
	// Buffered (capacity 1) so the disconnect callback never blocks on send.
	cleanupC := make(chan struct{}, 1)
	if c.IsConnected() {
		c.client.SetDisconnectedHandler(func(err error) {
			cleanupC <- struct{}{}
		})
	} else {
		// Not connected: close the channel so the receive below returns immediately.
		close(cleanupC)
	}
	c.client.Stop()
	if c.dispatcher.IsRunning() {
		c.dispatcher.Stop()
	}
	// Wait for websocket to be cleaned up
	<-cleanupC
}
// IsConnected reports whether the underlying websocket client currently has an
// active connection to the server.
func (c *Client) IsConnected() bool {
	return c.client.IsConnected()
}
// Sends an OCPP Request to the server.
// The protocol is based on request-response and cannot send multiple messages concurrently.
// To guarantee this, outgoing messages are added to a queue and processed sequentially.
//
// Returns an error in the following cases:
//
// - the client wasn't started
//
// - message validation fails (request is malformed)
//
// - the endpoint doesn't support the feature
//
// - the output queue is full
func (c *Client) SendRequest(request ocpp.Request) error {
	if !c.dispatcher.IsRunning() {
		return fmt.Errorf("ocppj client is not started, couldn't send request")
	}
	// Build the CALL message for this request.
	call, err := c.CreateCall(request)
	if err != nil {
		return err
	}
	rawJson, err := call.MarshalJSON()
	if err != nil {
		return err
	}
	// Message will be processed by dispatcher. A dedicated mechanism allows to delegate the message queue handling.
	bundle := RequestBundle{Call: call, Data: rawJson}
	if err = c.dispatcher.SendRequest(bundle); err != nil {
		log.Errorf("error dispatching request [%s, %s]: %v", call.UniqueId, call.Action, err)
		return err
	}
	log.Debugf("enqueued CALL [%s, %s]", call.UniqueId, call.Action)
	return nil
}
// Sends an OCPP Response to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (response is malformed)
//
// - the endpoint doesn't support the feature
//
// - a network error occurred
func (c *Client) SendResponse(requestId string, response ocpp.Response) error {
	// Build and validate the CALL_RESULT message.
	callResult, err := c.CreateCallResult(response, requestId)
	if err != nil {
		return err
	}
	rawJson, marshalErr := callResult.MarshalJSON()
	if marshalErr != nil {
		return ocpp.NewError(GenericError, marshalErr.Error(), requestId)
	}
	// Responses bypass the dispatcher queue and are written directly.
	if writeErr := c.client.Write(rawJson); writeErr != nil {
		log.Errorf("error sending response [%s]: %v", callResult.GetUniqueId(), writeErr)
		return ocpp.NewError(GenericError, writeErr.Error(), requestId)
	}
	log.Debugf("sent CALL RESULT [%s]", callResult.GetUniqueId())
	log.Debugf("sent JSON message to server: %s", string(rawJson))
	return nil
}
// Sends an OCPP Error to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (error is malformed)
//
// - a network error occurred
func (c *Client) SendError(requestId string, errorCode ocpp.ErrorCode, description string, details interface{}) error {
	// Build and validate the CALL_ERROR message.
	callError, err := c.CreateCallError(requestId, errorCode, description, details)
	if err != nil {
		return err
	}
	rawJson, marshalErr := callError.MarshalJSON()
	if marshalErr != nil {
		return ocpp.NewError(GenericError, marshalErr.Error(), requestId)
	}
	// Errors bypass the dispatcher queue and are written directly.
	if writeErr := c.client.Write(rawJson); writeErr != nil {
		log.Errorf("error sending response error [%s]: %v", callError.UniqueId, writeErr)
		return ocpp.NewError(GenericError, writeErr.Error(), requestId)
	}
	log.Debugf("sent CALL ERROR [%s]", callError.UniqueId)
	log.Debugf("sent JSON message to server: %s", string(rawJson))
	return nil
}
func (c *Client) ocppMessageHandler(data []byte) error |
// HandleFailedResponseError allows to handle failures while sending responses (either CALL_RESULT or CALL_ERROR).
// It internally analyzes and creates an ocpp.Error based on the given error.
// It will then attempt to send it to the server.
//
// The function helps to prevent starvation on the other endpoint, which is caused by a response never reaching it.
// The method will, however, only attempt to send a default error once.
// If this operation fails, the other endpoint may still starve.
func (c *Client) HandleFailedResponseError(requestID string, err error, featureName string) {
	log.Debugf("handling error for failed response [%s]", requestID)
	var responseErr *ocpp.Error
	// There's several possible errors: invalid profile, invalid payload or send error.
	// Bind the typed value in the switch instead of re-asserting in each case.
	switch e := err.(type) {
	case validator.ValidationErrors:
		// Validation error
		responseErr = errorFromValidation(e, requestID, featureName)
	case *ocpp.Error:
		// Internal OCPP error
		responseErr = e
	case error:
		// Unknown error
		responseErr = ocpp.NewError(GenericError, e.Error(), requestID)
	default:
		// err was nil: still notify the other endpoint with a generic error
		// instead of dereferencing a nil *ocpp.Error below.
		responseErr = ocpp.NewError(GenericError, "unknown error", requestID)
	}
	// Send an OCPP error to the target, since no regular response could be sent
	_ = c.SendError(requestID, responseErr.Code, responseErr.Description, nil)
}
// onDisconnected is the internal websocket disconnect callback: it pauses the
// request dispatcher and then notifies the application handler, if registered.
func (c *Client) onDisconnected(err error) {
	log.Error("disconnected from server", err)
	c.dispatcher.Pause()
	if c.onDisconnectedHandler != nil {
		c.onDisconnectedHandler(err)
	}
}
// onReconnected is the internal websocket reconnect callback: it notifies the
// application handler (if registered) and then resumes the request dispatcher.
func (c *Client) onReconnected() {
	if c.onReconnectedHandler != nil {
		c.onReconnectedHandler()
	}
	c.dispatcher.Resume()
}
| {
parsedJson, err := ParseRawJsonMessage(data)
if err != nil {
log.Error(err)
return err
}
log.Debugf("received JSON message from server: %s", string(data))
message, err := c.ParseMessage(parsedJson, c.RequestState)
if err != nil {
ocppErr := err.(*ocpp.Error)
messageID := ocppErr.MessageId
// Support ad-hoc callback for invalid message handling
if c.invalidMessageHook != nil {
err2 := c.invalidMessageHook(ocppErr, string(data), parsedJson)
// If the hook returns an error, use it as output error. If not, use the original error.
if err2 != nil {
ocppErr = err2
ocppErr.MessageId = messageID
}
}
err = ocppErr
// Send error to other endpoint if a message ID is available
if ocppErr.MessageId != "" {
err2 := c.SendError(ocppErr.MessageId, ocppErr.Code, ocppErr.Description, nil)
if err2 != nil {
return err2
}
}
log.Error(err)
return err
}
if message != nil {
switch message.GetMessageTypeId() {
case CALL:
call := message.(*Call)
log.Debugf("handling incoming CALL [%s, %s]", call.UniqueId, call.Action)
c.requestHandler(call.Payload, call.UniqueId, call.Action)
case CALL_RESULT:
callResult := message.(*CallResult)
log.Debugf("handling incoming CALL RESULT [%s]", callResult.UniqueId)
c.dispatcher.CompleteRequest(callResult.GetUniqueId()) // Remove current request from queue and send next one
if c.responseHandler != nil {
c.responseHandler(callResult.Payload, callResult.UniqueId)
}
case CALL_ERROR:
callError := message.(*CallError)
log.Debugf("handling incoming CALL ERROR [%s]", callError.UniqueId)
c.dispatcher.CompleteRequest(callError.GetUniqueId()) // Remove current request from queue and send next one
if c.errorHandler != nil {
c.errorHandler(ocpp.NewError(callError.ErrorCode, callError.ErrorDescription, callError.UniqueId), callError.ErrorDetails)
}
}
}
return nil
} | identifier_body |
client.go | package ocppj
import (
"fmt"
"gopkg.in/go-playground/validator.v9"
"github.com/lorenzodonini/ocpp-go/ocpp"
"github.com/lorenzodonini/ocpp-go/ws"
)
// The endpoint initiating the connection to an OCPP server, in an OCPP-J topology.
// During message exchange, the two roles may be reversed (depending on the message direction), but a client struct remains associated to a charge point/charging station.
type Client struct {
Endpoint
client ws.WsClient
Id string
requestHandler func(request ocpp.Request, requestId string, action string)
responseHandler func(response ocpp.Response, requestId string)
errorHandler func(err *ocpp.Error, details interface{})
onDisconnectedHandler func(err error)
onReconnectedHandler func()
invalidMessageHook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error
dispatcher ClientDispatcher
RequestState ClientState
}
// Creates a new Client endpoint.
// Requires a unique client ID, a websocket client, a struct for queueing/dispatching requests,
// a state handler and a list of supported profiles (optional).
//
// You may create a simple new server by using these default values:
//
// s := ocppj.NewClient(ws.NewClient(), nil, nil)
//
// The wsClient parameter cannot be nil. Refer to the ws package for information on how to create and
// customize a websocket client.
func NewClient(id string, wsClient ws.WsClient, dispatcher ClientDispatcher, stateHandler ClientState, profiles ...*ocpp.Profile) *Client {
endpoint := Endpoint{}
if wsClient == nil {
panic("wsClient parameter cannot be nil")
}
for _, profile := range profiles {
endpoint.AddProfile(profile)
}
if dispatcher == nil {
dispatcher = NewDefaultClientDispatcher(NewFIFOClientQueue(10))
}
if stateHandler == nil {
stateHandler = NewClientState()
}
dispatcher.SetNetworkClient(wsClient)
dispatcher.SetPendingRequestState(stateHandler)
return &Client{Endpoint: endpoint, client: wsClient, Id: id, dispatcher: dispatcher, RequestState: stateHandler}
}
// Registers a handler for incoming requests.
func (c *Client) SetRequestHandler(handler func(request ocpp.Request, requestId string, action string)) {
c.requestHandler = handler
}
// Registers a handler for incoming responses.
func (c *Client) SetResponseHandler(handler func(response ocpp.Response, requestId string)) {
c.responseHandler = handler
}
// Registers a handler for incoming error messages.
func (c *Client) SetErrorHandler(handler func(err *ocpp.Error, details interface{})) {
c.errorHandler = handler
}
// SetInvalidMessageHook registers an optional hook for incoming messages that couldn't be parsed.
// This hook is called when a message is received but cannot be parsed to the target OCPP message struct.
//
// The application is notified synchronously of the error.
// The callback provides the raw JSON string, along with the parsed fields.
// The application MUST return as soon as possible, since the hook is called synchronously and awaits a return value.
//
// While the hook does not allow responding to the message directly,
// the return value will be used to send an OCPP error to the other endpoint.
//
// If no handler is registered (or no error is returned by the hook),
// the internal error message is sent to the client without further processing.
//
// Note: Failing to return from the hook will cause the client to block indefinitely.
func (c *Client) SetInvalidMessageHook(hook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error) {
c.invalidMessageHook = hook
}
func (c *Client) SetOnDisconnectedHandler(handler func(err error)) {
c.onDisconnectedHandler = handler
}
func (c *Client) SetOnReconnectedHandler(handler func()) {
c.onReconnectedHandler = handler
}
// Registers the handler to be called on timeout.
func (c *Client) SetOnRequestCanceled(handler func(requestId string, request ocpp.Request, err *ocpp.Error)) {
c.dispatcher.SetOnRequestCanceled(handler)
}
// Connects to the given serverURL and starts running the I/O loop for the underlying connection.
//
// If the connection is established successfully, the function returns control to the caller immediately.
// The read/write routines are run on dedicated goroutines, so the main thread can perform other operations.
//
// In case of disconnection, the client handles re-connection automatically.
// The client will attempt to re-connect to the server forever, until it is stopped by invoking the Stop method.
//
// An error may be returned, if establishing the connection failed.
func (c *Client) Start(serverURL string) error {
// Set internal message handler
c.client.SetMessageHandler(c.ocppMessageHandler)
c.client.SetDisconnectedHandler(c.onDisconnected)
c.client.SetReconnectedHandler(c.onReconnected)
// Connect & run
fullUrl := fmt.Sprintf("%v/%v", serverURL, c.Id)
err := c.client.Start(fullUrl)
if err == nil {
c.dispatcher.Start()
}
return err
}
// Stops the client.
// The underlying I/O loop is stopped and all pending requests are cleared.
func (c *Client) Stop() {
// Overwrite handler to intercept disconnected signal
cleanupC := make(chan struct{}, 1)
if c.IsConnected() {
c.client.SetDisconnectedHandler(func(err error) {
cleanupC <- struct{}{}
})
} else {
close(cleanupC)
}
c.client.Stop()
if c.dispatcher.IsRunning() {
c.dispatcher.Stop()
}
// Wait for websocket to be cleaned up
<-cleanupC
}
func (c *Client) IsConnected() bool {
return c.client.IsConnected()
}
// Sends an OCPP Request to the server.
// The protocol is based on request-response and cannot send multiple messages concurrently.
// To guarantee this, outgoing messages are added to a queue and processed sequentially.
//
// Returns an error in the following cases:
//
// - the client wasn't started
//
// - message validation fails (request is malformed)
//
// - the endpoint doesn't support the feature
//
// - the output queue is full
func (c *Client) SendRequest(request ocpp.Request) error {
if !c.dispatcher.IsRunning() {
return fmt.Errorf("ocppj client is not started, couldn't send request")
}
call, err := c.CreateCall(request)
if err != nil {
return err
}
jsonMessage, err := call.MarshalJSON()
if err != nil {
return err
}
// Message will be processed by dispatcher. A dedicated mechanism allows to delegate the message queue handling.
if err = c.dispatcher.SendRequest(RequestBundle{Call: call, Data: jsonMessage}); err != nil {
log.Errorf("error dispatching request [%s, %s]: %v", call.UniqueId, call.Action, err)
return err
}
log.Debugf("enqueued CALL [%s, %s]", call.UniqueId, call.Action)
return nil
}
// Sends an OCPP Response to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (response is malformed)
//
// - the endpoint doesn't support the feature
//
// - a network error occurred
func (c *Client) SendResponse(requestId string, response ocpp.Response) error {
callResult, err := c.CreateCallResult(response, requestId)
if err != nil {
return err
}
jsonMessage, err := callResult.MarshalJSON()
if err != nil {
return ocpp.NewError(GenericError, err.Error(), requestId)
}
if err = c.client.Write(jsonMessage); err != nil {
log.Errorf("error sending response [%s]: %v", callResult.GetUniqueId(), err)
return ocpp.NewError(GenericError, err.Error(), requestId)
}
log.Debugf("sent CALL RESULT [%s]", callResult.GetUniqueId())
log.Debugf("sent JSON message to server: %s", string(jsonMessage))
return nil
}
// Sends an OCPP Error to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (error is malformed)
//
// - a network error occurred
func (c *Client) SendError(requestId string, errorCode ocpp.ErrorCode, description string, details interface{}) error {
callError, err := c.CreateCallError(requestId, errorCode, description, details)
if err != nil |
jsonMessage, err := callError.MarshalJSON()
if err != nil {
return ocpp.NewError(GenericError, err.Error(), requestId)
}
if err = c.client.Write(jsonMessage); err != nil {
log.Errorf("error sending response error [%s]: %v", callError.UniqueId, err)
return ocpp.NewError(GenericError, err.Error(), requestId)
}
log.Debugf("sent CALL ERROR [%s]", callError.UniqueId)
log.Debugf("sent JSON message to server: %s", string(jsonMessage))
return nil
}
func (c *Client) ocppMessageHandler(data []byte) error {
parsedJson, err := ParseRawJsonMessage(data)
if err != nil {
log.Error(err)
return err
}
log.Debugf("received JSON message from server: %s", string(data))
message, err := c.ParseMessage(parsedJson, c.RequestState)
if err != nil {
ocppErr := err.(*ocpp.Error)
messageID := ocppErr.MessageId
// Support ad-hoc callback for invalid message handling
if c.invalidMessageHook != nil {
err2 := c.invalidMessageHook(ocppErr, string(data), parsedJson)
// If the hook returns an error, use it as output error. If not, use the original error.
if err2 != nil {
ocppErr = err2
ocppErr.MessageId = messageID
}
}
err = ocppErr
// Send error to other endpoint if a message ID is available
if ocppErr.MessageId != "" {
err2 := c.SendError(ocppErr.MessageId, ocppErr.Code, ocppErr.Description, nil)
if err2 != nil {
return err2
}
}
log.Error(err)
return err
}
if message != nil {
switch message.GetMessageTypeId() {
case CALL:
call := message.(*Call)
log.Debugf("handling incoming CALL [%s, %s]", call.UniqueId, call.Action)
c.requestHandler(call.Payload, call.UniqueId, call.Action)
case CALL_RESULT:
callResult := message.(*CallResult)
log.Debugf("handling incoming CALL RESULT [%s]", callResult.UniqueId)
c.dispatcher.CompleteRequest(callResult.GetUniqueId()) // Remove current request from queue and send next one
if c.responseHandler != nil {
c.responseHandler(callResult.Payload, callResult.UniqueId)
}
case CALL_ERROR:
callError := message.(*CallError)
log.Debugf("handling incoming CALL ERROR [%s]", callError.UniqueId)
c.dispatcher.CompleteRequest(callError.GetUniqueId()) // Remove current request from queue and send next one
if c.errorHandler != nil {
c.errorHandler(ocpp.NewError(callError.ErrorCode, callError.ErrorDescription, callError.UniqueId), callError.ErrorDetails)
}
}
}
return nil
}
// HandleFailedResponseError allows to handle failures while sending responses (either CALL_RESULT or CALL_ERROR).
// It internally analyzes and creates an ocpp.Error based on the given error.
// It will the attempt to send it to the server.
//
// The function helps to prevent starvation on the other endpoint, which is caused by a response never reaching it.
// The method will, however, only attempt to send a default error once.
// If this operation fails, the other endpoint may still starve.
func (c *Client) HandleFailedResponseError(requestID string, err error, featureName string) {
log.Debugf("handling error for failed response [%s]", requestID)
var responseErr *ocpp.Error
// There's several possible errors: invalid profile, invalid payload or send error
switch err.(type) {
case validator.ValidationErrors:
// Validation error
validationErr := err.(validator.ValidationErrors)
responseErr = errorFromValidation(validationErr, requestID, featureName)
case *ocpp.Error:
// Internal OCPP error
responseErr = err.(*ocpp.Error)
case error:
// Unknown error
responseErr = ocpp.NewError(GenericError, err.Error(), requestID)
}
// Send an OCPP error to the target, since no regular response could be sent
_ = c.SendError(requestID, responseErr.Code, responseErr.Description, nil)
}
func (c *Client) onDisconnected(err error) {
log.Error("disconnected from server", err)
c.dispatcher.Pause()
if c.onDisconnectedHandler != nil {
c.onDisconnectedHandler(err)
}
}
func (c *Client) onReconnected() {
if c.onReconnectedHandler != nil {
c.onReconnectedHandler()
}
c.dispatcher.Resume()
}
| {
return err
} | conditional_block |
client.go | package ocppj
import (
"fmt"
"gopkg.in/go-playground/validator.v9"
"github.com/lorenzodonini/ocpp-go/ocpp"
"github.com/lorenzodonini/ocpp-go/ws"
)
// The endpoint initiating the connection to an OCPP server, in an OCPP-J topology.
// During message exchange, the two roles may be reversed (depending on the message direction), but a client struct remains associated to a charge point/charging station.
type Client struct {
Endpoint
client ws.WsClient
Id string
requestHandler func(request ocpp.Request, requestId string, action string)
responseHandler func(response ocpp.Response, requestId string)
errorHandler func(err *ocpp.Error, details interface{})
onDisconnectedHandler func(err error)
onReconnectedHandler func()
invalidMessageHook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error
dispatcher ClientDispatcher
RequestState ClientState
}
// Creates a new Client endpoint.
// Requires a unique client ID, a websocket client, a struct for queueing/dispatching requests,
// a state handler and a list of supported profiles (optional).
//
// You may create a simple new server by using these default values:
//
// s := ocppj.NewClient(ws.NewClient(), nil, nil)
//
// The wsClient parameter cannot be nil. Refer to the ws package for information on how to create and
// customize a websocket client.
func NewClient(id string, wsClient ws.WsClient, dispatcher ClientDispatcher, stateHandler ClientState, profiles ...*ocpp.Profile) *Client {
endpoint := Endpoint{}
if wsClient == nil {
panic("wsClient parameter cannot be nil")
}
for _, profile := range profiles {
endpoint.AddProfile(profile)
}
if dispatcher == nil {
dispatcher = NewDefaultClientDispatcher(NewFIFOClientQueue(10))
}
if stateHandler == nil {
stateHandler = NewClientState()
}
dispatcher.SetNetworkClient(wsClient)
dispatcher.SetPendingRequestState(stateHandler)
return &Client{Endpoint: endpoint, client: wsClient, Id: id, dispatcher: dispatcher, RequestState: stateHandler}
}
// Registers a handler for incoming requests.
func (c *Client) SetRequestHandler(handler func(request ocpp.Request, requestId string, action string)) {
c.requestHandler = handler
}
// Registers a handler for incoming responses.
func (c *Client) | (handler func(response ocpp.Response, requestId string)) {
c.responseHandler = handler
}
// Registers a handler for incoming error messages.
func (c *Client) SetErrorHandler(handler func(err *ocpp.Error, details interface{})) {
c.errorHandler = handler
}
// SetInvalidMessageHook registers an optional hook for incoming messages that couldn't be parsed.
// This hook is called when a message is received but cannot be parsed to the target OCPP message struct.
//
// The application is notified synchronously of the error.
// The callback provides the raw JSON string, along with the parsed fields.
// The application MUST return as soon as possible, since the hook is called synchronously and awaits a return value.
//
// While the hook does not allow responding to the message directly,
// the return value will be used to send an OCPP error to the other endpoint.
//
// If no handler is registered (or no error is returned by the hook),
// the internal error message is sent to the client without further processing.
//
// Note: Failing to return from the hook will cause the client to block indefinitely.
func (c *Client) SetInvalidMessageHook(hook func(err *ocpp.Error, rawMessage string, parsedFields []interface{}) *ocpp.Error) {
c.invalidMessageHook = hook
}
func (c *Client) SetOnDisconnectedHandler(handler func(err error)) {
c.onDisconnectedHandler = handler
}
func (c *Client) SetOnReconnectedHandler(handler func()) {
c.onReconnectedHandler = handler
}
// Registers the handler to be called on timeout.
func (c *Client) SetOnRequestCanceled(handler func(requestId string, request ocpp.Request, err *ocpp.Error)) {
c.dispatcher.SetOnRequestCanceled(handler)
}
// Connects to the given serverURL and starts running the I/O loop for the underlying connection.
//
// If the connection is established successfully, the function returns control to the caller immediately.
// The read/write routines are run on dedicated goroutines, so the main thread can perform other operations.
//
// In case of disconnection, the client handles re-connection automatically.
// The client will attempt to re-connect to the server forever, until it is stopped by invoking the Stop method.
//
// An error may be returned, if establishing the connection failed.
func (c *Client) Start(serverURL string) error {
// Set internal message handler
c.client.SetMessageHandler(c.ocppMessageHandler)
c.client.SetDisconnectedHandler(c.onDisconnected)
c.client.SetReconnectedHandler(c.onReconnected)
// Connect & run
fullUrl := fmt.Sprintf("%v/%v", serverURL, c.Id)
err := c.client.Start(fullUrl)
if err == nil {
c.dispatcher.Start()
}
return err
}
// Stops the client.
// The underlying I/O loop is stopped and all pending requests are cleared.
func (c *Client) Stop() {
// Overwrite handler to intercept disconnected signal
cleanupC := make(chan struct{}, 1)
if c.IsConnected() {
c.client.SetDisconnectedHandler(func(err error) {
cleanupC <- struct{}{}
})
} else {
close(cleanupC)
}
c.client.Stop()
if c.dispatcher.IsRunning() {
c.dispatcher.Stop()
}
// Wait for websocket to be cleaned up
<-cleanupC
}
func (c *Client) IsConnected() bool {
return c.client.IsConnected()
}
// Sends an OCPP Request to the server.
// The protocol is based on request-response and cannot send multiple messages concurrently.
// To guarantee this, outgoing messages are added to a queue and processed sequentially.
//
// Returns an error in the following cases:
//
// - the client wasn't started
//
// - message validation fails (request is malformed)
//
// - the endpoint doesn't support the feature
//
// - the output queue is full
func (c *Client) SendRequest(request ocpp.Request) error {
if !c.dispatcher.IsRunning() {
return fmt.Errorf("ocppj client is not started, couldn't send request")
}
call, err := c.CreateCall(request)
if err != nil {
return err
}
jsonMessage, err := call.MarshalJSON()
if err != nil {
return err
}
// Message will be processed by dispatcher. A dedicated mechanism allows to delegate the message queue handling.
if err = c.dispatcher.SendRequest(RequestBundle{Call: call, Data: jsonMessage}); err != nil {
log.Errorf("error dispatching request [%s, %s]: %v", call.UniqueId, call.Action, err)
return err
}
log.Debugf("enqueued CALL [%s, %s]", call.UniqueId, call.Action)
return nil
}
// Sends an OCPP Response to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (response is malformed)
//
// - the endpoint doesn't support the feature
//
// - a network error occurred
func (c *Client) SendResponse(requestId string, response ocpp.Response) error {
callResult, err := c.CreateCallResult(response, requestId)
if err != nil {
return err
}
jsonMessage, err := callResult.MarshalJSON()
if err != nil {
return ocpp.NewError(GenericError, err.Error(), requestId)
}
if err = c.client.Write(jsonMessage); err != nil {
log.Errorf("error sending response [%s]: %v", callResult.GetUniqueId(), err)
return ocpp.NewError(GenericError, err.Error(), requestId)
}
log.Debugf("sent CALL RESULT [%s]", callResult.GetUniqueId())
log.Debugf("sent JSON message to server: %s", string(jsonMessage))
return nil
}
// Sends an OCPP Error to the server.
// The requestID parameter is required and identifies the previously received request.
//
// Returns an error in the following cases:
//
// - message validation fails (error is malformed)
//
// - a network error occurred
func (c *Client) SendError(requestId string, errorCode ocpp.ErrorCode, description string, details interface{}) error {
callError, err := c.CreateCallError(requestId, errorCode, description, details)
if err != nil {
return err
}
jsonMessage, err := callError.MarshalJSON()
if err != nil {
return ocpp.NewError(GenericError, err.Error(), requestId)
}
if err = c.client.Write(jsonMessage); err != nil {
log.Errorf("error sending response error [%s]: %v", callError.UniqueId, err)
return ocpp.NewError(GenericError, err.Error(), requestId)
}
log.Debugf("sent CALL ERROR [%s]", callError.UniqueId)
log.Debugf("sent JSON message to server: %s", string(jsonMessage))
return nil
}
func (c *Client) ocppMessageHandler(data []byte) error {
parsedJson, err := ParseRawJsonMessage(data)
if err != nil {
log.Error(err)
return err
}
log.Debugf("received JSON message from server: %s", string(data))
message, err := c.ParseMessage(parsedJson, c.RequestState)
if err != nil {
ocppErr := err.(*ocpp.Error)
messageID := ocppErr.MessageId
// Support ad-hoc callback for invalid message handling
if c.invalidMessageHook != nil {
err2 := c.invalidMessageHook(ocppErr, string(data), parsedJson)
// If the hook returns an error, use it as output error. If not, use the original error.
if err2 != nil {
ocppErr = err2
ocppErr.MessageId = messageID
}
}
err = ocppErr
// Send error to other endpoint if a message ID is available
if ocppErr.MessageId != "" {
err2 := c.SendError(ocppErr.MessageId, ocppErr.Code, ocppErr.Description, nil)
if err2 != nil {
return err2
}
}
log.Error(err)
return err
}
if message != nil {
switch message.GetMessageTypeId() {
case CALL:
call := message.(*Call)
log.Debugf("handling incoming CALL [%s, %s]", call.UniqueId, call.Action)
c.requestHandler(call.Payload, call.UniqueId, call.Action)
case CALL_RESULT:
callResult := message.(*CallResult)
log.Debugf("handling incoming CALL RESULT [%s]", callResult.UniqueId)
c.dispatcher.CompleteRequest(callResult.GetUniqueId()) // Remove current request from queue and send next one
if c.responseHandler != nil {
c.responseHandler(callResult.Payload, callResult.UniqueId)
}
case CALL_ERROR:
callError := message.(*CallError)
log.Debugf("handling incoming CALL ERROR [%s]", callError.UniqueId)
c.dispatcher.CompleteRequest(callError.GetUniqueId()) // Remove current request from queue and send next one
if c.errorHandler != nil {
c.errorHandler(ocpp.NewError(callError.ErrorCode, callError.ErrorDescription, callError.UniqueId), callError.ErrorDetails)
}
}
}
return nil
}
// HandleFailedResponseError allows to handle failures while sending responses (either CALL_RESULT or CALL_ERROR).
// It internally analyzes and creates an ocpp.Error based on the given error.
// It will the attempt to send it to the server.
//
// The function helps to prevent starvation on the other endpoint, which is caused by a response never reaching it.
// The method will, however, only attempt to send a default error once.
// If this operation fails, the other endpoint may still starve.
func (c *Client) HandleFailedResponseError(requestID string, err error, featureName string) {
log.Debugf("handling error for failed response [%s]", requestID)
var responseErr *ocpp.Error
// There's several possible errors: invalid profile, invalid payload or send error
switch err.(type) {
case validator.ValidationErrors:
// Validation error
validationErr := err.(validator.ValidationErrors)
responseErr = errorFromValidation(validationErr, requestID, featureName)
case *ocpp.Error:
// Internal OCPP error
responseErr = err.(*ocpp.Error)
case error:
// Unknown error
responseErr = ocpp.NewError(GenericError, err.Error(), requestID)
}
// Send an OCPP error to the target, since no regular response could be sent
_ = c.SendError(requestID, responseErr.Code, responseErr.Description, nil)
}
func (c *Client) onDisconnected(err error) {
log.Error("disconnected from server", err)
c.dispatcher.Pause()
if c.onDisconnectedHandler != nil {
c.onDisconnectedHandler(err)
}
}
func (c *Client) onReconnected() {
if c.onReconnectedHandler != nil {
c.onReconnectedHandler()
}
c.dispatcher.Resume()
}
| SetResponseHandler | identifier_name |
sourcefit.py |
import astropy.wcs as wcs
import astropy.units as u
import numpy as np
from astropy.coordinates import SkyCoord
import matplotlib.pyplot as plt
import numpy as np
import os
from reproject import reproject_interp
from tqdm import tqdm
import h5py
from astropy.modeling import models, fitting
import emcee
import corner
from scipy.optimize import minimize
import multiprocessing as mp
mp.set_start_method('fork')
import time
from mapext.core.processClass import Process
from mapext.core.mapClass import astroMap
from mapext.core.srcClass import astroSrc
class single_source(Process):
def | (self,map,src_lst,burnin=2500,runtime=1000,fit_radius=15*u.arcmin,Nwalker=50):
# load map in Jy/pix units
MAP, WCS = map.convert_unit(units=u.Jy/(u.pixel**2),update=False)
# ensure src_lst is a list of sources and retrieve coordinates of each one
if type(src_lst) is astroSrc:
src_lst = [src_lst]
map_skyCoord = map.rtn_coords()
map_noise = 0.01
for _s,s in enumerate(src_lst):
x_pc,y_pc = wcs.utils.skycoord_to_pixel(s.COORD,map.WCS)
x_pc,y_pc = int(x_pc),int(y_pc)
D=int(abs(fit_radius.to(u.degree).value/WCS.wcs.cdelt[0])+3)
map_cutout = MAP[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1].value
coords_cutout = map.rtn_coords()[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1]
wcs_cutout = wcs.WCS(naxis=2)
wcs_cutout.wcs.cdelt = WCS.wcs.cdelt
wcs_cutout.wcs.crval = WCS.wcs.crval
wcs_cutout.wcs.ctype = WCS.wcs.ctype
wcs_cutout.wcs.crpix = [WCS.wcs.crpix[0]-(x_pc-D),WCS.wcs.crpix[1]-(y_pc-D)]
x_cutout = coords_cutout.l.degree
y_cutout = coords_cutout.b.degree
dx_cutout = x_cutout-s.COORD.l.degree
dy_cutout = y_cutout-s.COORD.b.degree
dr_cutout = np.sqrt(dx_cutout**2 + dy_cutout**2)*u.degree
fitting_area = dr_cutout<fit_radius
p0 = [np.nanmedian(map_cutout), # intercept
0.0, # slope_x
0.0, # slope_y
1, # Amplitude
0.0, # x_mean
0.0, # y_mean
5/(60*np.sqrt(8*np.log(2))), # x_stddev
5/(60*np.sqrt(8*np.log(2))), # ratio
0] # epsilon
nll = lambda *args: -lnlike(*args)
soln = minimize(nll, p0, args=(dx_cutout, dy_cutout, map_cutout, map_noise, fitting_area),
bounds = ((0,np.inf),(None,None),(None,None),
(1e-2,np.inf),
(-2/60,2/60),(-2/60,2/60),
(1.6/60,5./60),(1.6/60,5./60),
(-np.pi/2,np.pi/2)))
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# plt.show()
Ndim = len(p0)
P0 = [soln.x + 1e-11*np.random.randn(Ndim) for i in range(Nwalker)]
pool = mp.Pool()
sampler = emcee.EnsembleSampler(Nwalker, Ndim, lnprob,
args=(dx_cutout,
dy_cutout,
map_cutout,
map_noise,
fitting_area),
pool=pool)
state = sampler.run_mcmc(P0,burnin+runtime,skip_initial_state_check=True,progress=True)
pool.close()
pars = [r'$c$',r'$m_x$',r'$m_y$',r'$A$',r'$x_c$',r'$y_c$',r'$a$',r'$b$',r'$\theta$']
chains = sampler.chain[:]
# sort a>b
mask = chains[:,:,6]<chains[:,:,7] #filter where a<b
lower = chains[:,:,6][mask]
chains[:,:,6][mask] = chains[:,:,7][mask]
chains[:,:,7][mask] = lower
chains[:,:,8][mask] -= np.pi/2
# collapse angles into mode±pi/2 range
chains[:,:,8] -= chains[:,:,8]//np.pi * np.pi
theta_hist, edge = np.histogram(chains[:,:,8], bins=20, range=(0, np.pi))
cen = (edge[1:]+edge[:-1])/2
max_index = np.argmax(theta_hist)
cval = cen[max_index]
chains[:,:,8][chains[:,:,8]<cval-np.pi/2] += np.pi
chains[:,:,8][chains[:,:,8]>cval+np.pi/2] -= np.pi
def create_filter(chain,n,thresh=1.5):
final_amps = chain[:,-1,n]
q1 = np.percentile(final_amps, 25, interpolation='midpoint')
q3 = np.percentile(final_amps, 75, interpolation='midpoint')
iqr = q3-q1
lower, upper = q1-(thresh*iqr), q3+(thresh*iqr)
walkermask = np.all([final_amps>=lower, final_amps<=upper],axis=0)
return walkermask
walkermask = np.all([create_filter(chains,4),
create_filter(chains,4)],
axis=0)
print('discard ',np.sum(walkermask==False),' walkers of ',Nwalker)
chains_collM = np.nanmean(chains[walkermask],axis=0)
chains_coll = np.nanmedian(chains[walkermask],axis=0)
soln2 = np.nanmean(chains_coll[burnin:],axis=0)
errs2 = np.nanstd(chains_coll[burnin:],axis=0)
# OUTPUT MODEL INFO
src_model_entry = [(map.SURV,map.NAME,
(map.FREQ/u.Hz).value,
soln2[3:],errs2[3:])]
s.add_src_model(src_model_entry)
Sv = 2*np.pi*soln2[3]*soln2[6]*soln2[7]*(60**2)*u.Jy
Sv_e = Sv*np.sqrt((errs2[3]/soln2[3])**2 + ((errs2[6]/soln2[6])**2) + (errs2[7]/soln2[7])**2)
print(_s,' : Sv = ',Sv.value,'±',Sv_e.value)
# OUTPUT FLUX INFO
flux_info = [('2DGaussFit',map.SURV,map.NAME,
(map.FREQ/u.Hz).value,Sv.value,Sv_e.value)]
s.add_flux(flux_info)
# # BACKUP TXT FILE
# with open('{}_{}_sfit.txt'.format(s.NAME,map.NAME), 'a') as the_file:
# the_file.write('Sv = {} ± {}'.format(Sv.value,Sv_e.value))
# GRAPHS AND output
samples_flat = sampler.chain[walkermask, burnin:, :].reshape((-1, Ndim))
fig = corner.corner(samples_flat, labels=pars, truths=soln2, quantiles=[0.25,0.5,0.75])
plt.savefig('{}_{}_cornerplot.pdf'.format(s.NAME,map.NAME))
plt.close()
fig, axs = plt.subplots(ncols=3,nrows=4)
for n in range(Ndim):
ax = axs[n//3,n%3]
ax.plot(chains[:,:,n].T, c='black', alpha = 0.2)
ax.plot(chains_collM[:,n].T, c='blue')
ax.plot(chains_coll[:,n].T, c='red')
ax.set_title(pars[n])
ax.set_ylim(np.nanmin(chains_coll[:,n]),np.nanmax(chains_coll[:,n]))
axs[-1,0].scatter(chains_collM[:,4],chains_collM[:,5],c=np.arange(burnin+runtime),s=1)
axs[-1,0].set_title('center')
axs[-1,1].scatter(chains_collM[:,6],chains_collM[:,7],c=np.arange(burnin+runtime),s=1)
plt.savefig('{}_{}_chains.pdf'.format(s.NAME,map.NAME))
plt.close()
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,0].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[1])
fig,axs = plt.subplots(nrows=2,ncols=2,subplot_kw={'projection':wcs_cutout})
im1 = axs[0,0].imshow(map_cutout,origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im1,ax=axs[0,0])
im2 = axs[0,1].imshow(gaussian_model1(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im2,ax=axs[0,1])
im3 = axs[1,0].imshow(map_cutout-gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im3,ax=axs[1,0])
im4 = axs[1,1].imshow(gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im4,ax=axs[1,1])
plt.savefig('{}_{}_model.pdf'.format(s.NAME,map.NAME))
plt.close()
def lnprior(P):
c,mx,my,A,xc,yc,a,b,t = P
if np.all([A>1e-4,
abs(xc)<=2.5/60.,
abs(yc)<=2.5/60.,
a>1.5/60., a<7./60.,
b>1.5/60., b<7./60.],axis=0):
return 0.
return -np.inf
def lnlike(P,x,y,map,map_noise,fitting_area):
model = gaussian_model1(P,x,y)
denom = np.power(map_noise,2)
lp = -0.5*np.nansum(np.power(map[fitting_area]-model[fitting_area],2)/denom + np.log(denom) + np.log(2*np.pi))
# corr = (mask.shape[0]*mask.shape[1])/np.nansum(mask)
# lp = lp*corr
return lp
def lnprob(P,x,y,map,map_noise,fitting_area):
lp = lnprior(P)
if not np.isfinite(lp):
return -np.inf
ll = lnlike(P,x,y,map,map_noise,fitting_area)
return lp+ll
def gaussian_model1(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
BGD = models.Planar2D(intercept=i,
slope_x=sx,
slope_y=sy)
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = BGD(x,y) + SRC(x,y)
return MOD#, MASK
def gaussian_model1_s(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = SRC(x,y)
return MOD#, MASK
| run | identifier_name |
sourcefit.py |
import astropy.wcs as wcs
import astropy.units as u
import numpy as np
from astropy.coordinates import SkyCoord
import matplotlib.pyplot as plt
import numpy as np
import os
from reproject import reproject_interp
from tqdm import tqdm
import h5py
from astropy.modeling import models, fitting
import emcee
import corner
from scipy.optimize import minimize
import multiprocessing as mp
mp.set_start_method('fork')
import time
from mapext.core.processClass import Process
from mapext.core.mapClass import astroMap
from mapext.core.srcClass import astroSrc
class single_source(Process):
def run(self,map,src_lst,burnin=2500,runtime=1000,fit_radius=15*u.arcmin,Nwalker=50):
# load map in Jy/pix units
| ef lnprior(P):
c,mx,my,A,xc,yc,a,b,t = P
if np.all([A>1e-4,
abs(xc)<=2.5/60.,
abs(yc)<=2.5/60.,
a>1.5/60., a<7./60.,
b>1.5/60., b<7./60.],axis=0):
return 0.
return -np.inf
def lnlike(P,x,y,map,map_noise,fitting_area):
model = gaussian_model1(P,x,y)
denom = np.power(map_noise,2)
lp = -0.5*np.nansum(np.power(map[fitting_area]-model[fitting_area],2)/denom + np.log(denom) + np.log(2*np.pi))
# corr = (mask.shape[0]*mask.shape[1])/np.nansum(mask)
# lp = lp*corr
return lp
def lnprob(P,x,y,map,map_noise,fitting_area):
lp = lnprior(P)
if not np.isfinite(lp):
return -np.inf
ll = lnlike(P,x,y,map,map_noise,fitting_area)
return lp+ll
def gaussian_model1(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
BGD = models.Planar2D(intercept=i,
slope_x=sx,
slope_y=sy)
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = BGD(x,y) + SRC(x,y)
return MOD#, MASK
def gaussian_model1_s(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = SRC(x,y)
return MOD#, MASK
| MAP, WCS = map.convert_unit(units=u.Jy/(u.pixel**2),update=False)
# ensure src_lst is a list of sources and retrieve coordinates of each one
if type(src_lst) is astroSrc:
src_lst = [src_lst]
map_skyCoord = map.rtn_coords()
map_noise = 0.01
for _s,s in enumerate(src_lst):
x_pc,y_pc = wcs.utils.skycoord_to_pixel(s.COORD,map.WCS)
x_pc,y_pc = int(x_pc),int(y_pc)
D=int(abs(fit_radius.to(u.degree).value/WCS.wcs.cdelt[0])+3)
map_cutout = MAP[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1].value
coords_cutout = map.rtn_coords()[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1]
wcs_cutout = wcs.WCS(naxis=2)
wcs_cutout.wcs.cdelt = WCS.wcs.cdelt
wcs_cutout.wcs.crval = WCS.wcs.crval
wcs_cutout.wcs.ctype = WCS.wcs.ctype
wcs_cutout.wcs.crpix = [WCS.wcs.crpix[0]-(x_pc-D),WCS.wcs.crpix[1]-(y_pc-D)]
x_cutout = coords_cutout.l.degree
y_cutout = coords_cutout.b.degree
dx_cutout = x_cutout-s.COORD.l.degree
dy_cutout = y_cutout-s.COORD.b.degree
dr_cutout = np.sqrt(dx_cutout**2 + dy_cutout**2)*u.degree
fitting_area = dr_cutout<fit_radius
p0 = [np.nanmedian(map_cutout), # intercept
0.0, # slope_x
0.0, # slope_y
1, # Amplitude
0.0, # x_mean
0.0, # y_mean
5/(60*np.sqrt(8*np.log(2))), # x_stddev
5/(60*np.sqrt(8*np.log(2))), # ratio
0] # epsilon
nll = lambda *args: -lnlike(*args)
soln = minimize(nll, p0, args=(dx_cutout, dy_cutout, map_cutout, map_noise, fitting_area),
bounds = ((0,np.inf),(None,None),(None,None),
(1e-2,np.inf),
(-2/60,2/60),(-2/60,2/60),
(1.6/60,5./60),(1.6/60,5./60),
(-np.pi/2,np.pi/2)))
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# plt.show()
Ndim = len(p0)
P0 = [soln.x + 1e-11*np.random.randn(Ndim) for i in range(Nwalker)]
pool = mp.Pool()
sampler = emcee.EnsembleSampler(Nwalker, Ndim, lnprob,
args=(dx_cutout,
dy_cutout,
map_cutout,
map_noise,
fitting_area),
pool=pool)
state = sampler.run_mcmc(P0,burnin+runtime,skip_initial_state_check=True,progress=True)
pool.close()
pars = [r'$c$',r'$m_x$',r'$m_y$',r'$A$',r'$x_c$',r'$y_c$',r'$a$',r'$b$',r'$\theta$']
chains = sampler.chain[:]
# sort a>b
mask = chains[:,:,6]<chains[:,:,7] #filter where a<b
lower = chains[:,:,6][mask]
chains[:,:,6][mask] = chains[:,:,7][mask]
chains[:,:,7][mask] = lower
chains[:,:,8][mask] -= np.pi/2
# collapse angles into mode±pi/2 range
chains[:,:,8] -= chains[:,:,8]//np.pi * np.pi
theta_hist, edge = np.histogram(chains[:,:,8], bins=20, range=(0, np.pi))
cen = (edge[1:]+edge[:-1])/2
max_index = np.argmax(theta_hist)
cval = cen[max_index]
chains[:,:,8][chains[:,:,8]<cval-np.pi/2] += np.pi
chains[:,:,8][chains[:,:,8]>cval+np.pi/2] -= np.pi
def create_filter(chain,n,thresh=1.5):
final_amps = chain[:,-1,n]
q1 = np.percentile(final_amps, 25, interpolation='midpoint')
q3 = np.percentile(final_amps, 75, interpolation='midpoint')
iqr = q3-q1
lower, upper = q1-(thresh*iqr), q3+(thresh*iqr)
walkermask = np.all([final_amps>=lower, final_amps<=upper],axis=0)
return walkermask
walkermask = np.all([create_filter(chains,4),
create_filter(chains,4)],
axis=0)
print('discard ',np.sum(walkermask==False),' walkers of ',Nwalker)
chains_collM = np.nanmean(chains[walkermask],axis=0)
chains_coll = np.nanmedian(chains[walkermask],axis=0)
soln2 = np.nanmean(chains_coll[burnin:],axis=0)
errs2 = np.nanstd(chains_coll[burnin:],axis=0)
# OUTPUT MODEL INFO
src_model_entry = [(map.SURV,map.NAME,
(map.FREQ/u.Hz).value,
soln2[3:],errs2[3:])]
s.add_src_model(src_model_entry)
Sv = 2*np.pi*soln2[3]*soln2[6]*soln2[7]*(60**2)*u.Jy
Sv_e = Sv*np.sqrt((errs2[3]/soln2[3])**2 + ((errs2[6]/soln2[6])**2) + (errs2[7]/soln2[7])**2)
print(_s,' : Sv = ',Sv.value,'±',Sv_e.value)
# OUTPUT FLUX INFO
flux_info = [('2DGaussFit',map.SURV,map.NAME,
(map.FREQ/u.Hz).value,Sv.value,Sv_e.value)]
s.add_flux(flux_info)
# # BACKUP TXT FILE
# with open('{}_{}_sfit.txt'.format(s.NAME,map.NAME), 'a') as the_file:
# the_file.write('Sv = {} ± {}'.format(Sv.value,Sv_e.value))
# GRAPHS AND output
samples_flat = sampler.chain[walkermask, burnin:, :].reshape((-1, Ndim))
fig = corner.corner(samples_flat, labels=pars, truths=soln2, quantiles=[0.25,0.5,0.75])
plt.savefig('{}_{}_cornerplot.pdf'.format(s.NAME,map.NAME))
plt.close()
fig, axs = plt.subplots(ncols=3,nrows=4)
for n in range(Ndim):
ax = axs[n//3,n%3]
ax.plot(chains[:,:,n].T, c='black', alpha = 0.2)
ax.plot(chains_collM[:,n].T, c='blue')
ax.plot(chains_coll[:,n].T, c='red')
ax.set_title(pars[n])
ax.set_ylim(np.nanmin(chains_coll[:,n]),np.nanmax(chains_coll[:,n]))
axs[-1,0].scatter(chains_collM[:,4],chains_collM[:,5],c=np.arange(burnin+runtime),s=1)
axs[-1,0].set_title('center')
axs[-1,1].scatter(chains_collM[:,6],chains_collM[:,7],c=np.arange(burnin+runtime),s=1)
plt.savefig('{}_{}_chains.pdf'.format(s.NAME,map.NAME))
plt.close()
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,0].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[1])
fig,axs = plt.subplots(nrows=2,ncols=2,subplot_kw={'projection':wcs_cutout})
im1 = axs[0,0].imshow(map_cutout,origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im1,ax=axs[0,0])
im2 = axs[0,1].imshow(gaussian_model1(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im2,ax=axs[0,1])
im3 = axs[1,0].imshow(map_cutout-gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im3,ax=axs[1,0])
im4 = axs[1,1].imshow(gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im4,ax=axs[1,1])
plt.savefig('{}_{}_model.pdf'.format(s.NAME,map.NAME))
plt.close()
d | identifier_body |
sourcefit.py |
import astropy.wcs as wcs
import astropy.units as u
import numpy as np
from astropy.coordinates import SkyCoord
import matplotlib.pyplot as plt
import numpy as np
import os
from reproject import reproject_interp
from tqdm import tqdm
import h5py
from astropy.modeling import models, fitting
import emcee
import corner
from scipy.optimize import minimize
import multiprocessing as mp
mp.set_start_method('fork')
import time
from mapext.core.processClass import Process
from mapext.core.mapClass import astroMap
from mapext.core.srcClass import astroSrc
class single_source(Process):
def run(self,map,src_lst,burnin=2500,runtime=1000,fit_radius=15*u.arcmin,Nwalker=50):
# load map in Jy/pix units
MAP, WCS = map.convert_unit(units=u.Jy/(u.pixel**2),update=False)
# ensure src_lst is a list of sources and retrieve coordinates of each one
if type(src_lst) is astroSrc:
src_lst = [src_lst]
map_skyCoord = map.rtn_coords()
map_noise = 0.01
for _s,s in enumerate(src_lst):
| ef lnprior(P):
c,mx,my,A,xc,yc,a,b,t = P
if np.all([A>1e-4,
abs(xc)<=2.5/60.,
abs(yc)<=2.5/60.,
a>1.5/60., a<7./60.,
b>1.5/60., b<7./60.],axis=0):
return 0.
return -np.inf
def lnlike(P,x,y,map,map_noise,fitting_area):
model = gaussian_model1(P,x,y)
denom = np.power(map_noise,2)
lp = -0.5*np.nansum(np.power(map[fitting_area]-model[fitting_area],2)/denom + np.log(denom) + np.log(2*np.pi))
# corr = (mask.shape[0]*mask.shape[1])/np.nansum(mask)
# lp = lp*corr
return lp
def lnprob(P,x,y,map,map_noise,fitting_area):
lp = lnprior(P)
if not np.isfinite(lp):
return -np.inf
ll = lnlike(P,x,y,map,map_noise,fitting_area)
return lp+ll
def gaussian_model1(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
BGD = models.Planar2D(intercept=i,
slope_x=sx,
slope_y=sy)
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = BGD(x,y) + SRC(x,y)
return MOD#, MASK
def gaussian_model1_s(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = SRC(x,y)
return MOD#, MASK
| x_pc,y_pc = wcs.utils.skycoord_to_pixel(s.COORD,map.WCS)
x_pc,y_pc = int(x_pc),int(y_pc)
D=int(abs(fit_radius.to(u.degree).value/WCS.wcs.cdelt[0])+3)
map_cutout = MAP[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1].value
coords_cutout = map.rtn_coords()[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1]
wcs_cutout = wcs.WCS(naxis=2)
wcs_cutout.wcs.cdelt = WCS.wcs.cdelt
wcs_cutout.wcs.crval = WCS.wcs.crval
wcs_cutout.wcs.ctype = WCS.wcs.ctype
wcs_cutout.wcs.crpix = [WCS.wcs.crpix[0]-(x_pc-D),WCS.wcs.crpix[1]-(y_pc-D)]
x_cutout = coords_cutout.l.degree
y_cutout = coords_cutout.b.degree
dx_cutout = x_cutout-s.COORD.l.degree
dy_cutout = y_cutout-s.COORD.b.degree
dr_cutout = np.sqrt(dx_cutout**2 + dy_cutout**2)*u.degree
fitting_area = dr_cutout<fit_radius
p0 = [np.nanmedian(map_cutout), # intercept
0.0, # slope_x
0.0, # slope_y
1, # Amplitude
0.0, # x_mean
0.0, # y_mean
5/(60*np.sqrt(8*np.log(2))), # x_stddev
5/(60*np.sqrt(8*np.log(2))), # ratio
0] # epsilon
nll = lambda *args: -lnlike(*args)
soln = minimize(nll, p0, args=(dx_cutout, dy_cutout, map_cutout, map_noise, fitting_area),
bounds = ((0,np.inf),(None,None),(None,None),
(1e-2,np.inf),
(-2/60,2/60),(-2/60,2/60),
(1.6/60,5./60),(1.6/60,5./60),
(-np.pi/2,np.pi/2)))
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# plt.show()
Ndim = len(p0)
P0 = [soln.x + 1e-11*np.random.randn(Ndim) for i in range(Nwalker)]
pool = mp.Pool()
sampler = emcee.EnsembleSampler(Nwalker, Ndim, lnprob,
args=(dx_cutout,
dy_cutout,
map_cutout,
map_noise,
fitting_area),
pool=pool)
state = sampler.run_mcmc(P0,burnin+runtime,skip_initial_state_check=True,progress=True)
pool.close()
pars = [r'$c$',r'$m_x$',r'$m_y$',r'$A$',r'$x_c$',r'$y_c$',r'$a$',r'$b$',r'$\theta$']
chains = sampler.chain[:]
# sort a>b
mask = chains[:,:,6]<chains[:,:,7] #filter where a<b
lower = chains[:,:,6][mask]
chains[:,:,6][mask] = chains[:,:,7][mask]
chains[:,:,7][mask] = lower
chains[:,:,8][mask] -= np.pi/2
# collapse angles into mode±pi/2 range
chains[:,:,8] -= chains[:,:,8]//np.pi * np.pi
theta_hist, edge = np.histogram(chains[:,:,8], bins=20, range=(0, np.pi))
cen = (edge[1:]+edge[:-1])/2
max_index = np.argmax(theta_hist)
cval = cen[max_index]
chains[:,:,8][chains[:,:,8]<cval-np.pi/2] += np.pi
chains[:,:,8][chains[:,:,8]>cval+np.pi/2] -= np.pi
def create_filter(chain,n,thresh=1.5):
final_amps = chain[:,-1,n]
q1 = np.percentile(final_amps, 25, interpolation='midpoint')
q3 = np.percentile(final_amps, 75, interpolation='midpoint')
iqr = q3-q1
lower, upper = q1-(thresh*iqr), q3+(thresh*iqr)
walkermask = np.all([final_amps>=lower, final_amps<=upper],axis=0)
return walkermask
walkermask = np.all([create_filter(chains,4),
create_filter(chains,4)],
axis=0)
print('discard ',np.sum(walkermask==False),' walkers of ',Nwalker)
chains_collM = np.nanmean(chains[walkermask],axis=0)
chains_coll = np.nanmedian(chains[walkermask],axis=0)
soln2 = np.nanmean(chains_coll[burnin:],axis=0)
errs2 = np.nanstd(chains_coll[burnin:],axis=0)
# OUTPUT MODEL INFO
src_model_entry = [(map.SURV,map.NAME,
(map.FREQ/u.Hz).value,
soln2[3:],errs2[3:])]
s.add_src_model(src_model_entry)
Sv = 2*np.pi*soln2[3]*soln2[6]*soln2[7]*(60**2)*u.Jy
Sv_e = Sv*np.sqrt((errs2[3]/soln2[3])**2 + ((errs2[6]/soln2[6])**2) + (errs2[7]/soln2[7])**2)
print(_s,' : Sv = ',Sv.value,'±',Sv_e.value)
# OUTPUT FLUX INFO
flux_info = [('2DGaussFit',map.SURV,map.NAME,
(map.FREQ/u.Hz).value,Sv.value,Sv_e.value)]
s.add_flux(flux_info)
# # BACKUP TXT FILE
# with open('{}_{}_sfit.txt'.format(s.NAME,map.NAME), 'a') as the_file:
# the_file.write('Sv = {} ± {}'.format(Sv.value,Sv_e.value))
# GRAPHS AND output
samples_flat = sampler.chain[walkermask, burnin:, :].reshape((-1, Ndim))
fig = corner.corner(samples_flat, labels=pars, truths=soln2, quantiles=[0.25,0.5,0.75])
plt.savefig('{}_{}_cornerplot.pdf'.format(s.NAME,map.NAME))
plt.close()
fig, axs = plt.subplots(ncols=3,nrows=4)
for n in range(Ndim):
ax = axs[n//3,n%3]
ax.plot(chains[:,:,n].T, c='black', alpha = 0.2)
ax.plot(chains_collM[:,n].T, c='blue')
ax.plot(chains_coll[:,n].T, c='red')
ax.set_title(pars[n])
ax.set_ylim(np.nanmin(chains_coll[:,n]),np.nanmax(chains_coll[:,n]))
axs[-1,0].scatter(chains_collM[:,4],chains_collM[:,5],c=np.arange(burnin+runtime),s=1)
axs[-1,0].set_title('center')
axs[-1,1].scatter(chains_collM[:,6],chains_collM[:,7],c=np.arange(burnin+runtime),s=1)
plt.savefig('{}_{}_chains.pdf'.format(s.NAME,map.NAME))
plt.close()
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,0].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[1])
fig,axs = plt.subplots(nrows=2,ncols=2,subplot_kw={'projection':wcs_cutout})
im1 = axs[0,0].imshow(map_cutout,origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im1,ax=axs[0,0])
im2 = axs[0,1].imshow(gaussian_model1(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im2,ax=axs[0,1])
im3 = axs[1,0].imshow(map_cutout-gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im3,ax=axs[1,0])
im4 = axs[1,1].imshow(gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im4,ax=axs[1,1])
plt.savefig('{}_{}_model.pdf'.format(s.NAME,map.NAME))
plt.close()
d | conditional_block |
sourcefit.py | import astropy.wcs as wcs
import astropy.units as u
import numpy as np
from astropy.coordinates import SkyCoord
import matplotlib.pyplot as plt
import numpy as np
import os
from reproject import reproject_interp
from tqdm import tqdm
import h5py
from astropy.modeling import models, fitting
import emcee
import corner
from scipy.optimize import minimize
import multiprocessing as mp
mp.set_start_method('fork')
import time
from mapext.core.processClass import Process
from mapext.core.mapClass import astroMap
from mapext.core.srcClass import astroSrc
class single_source(Process):
def run(self,map,src_lst,burnin=2500,runtime=1000,fit_radius=15*u.arcmin,Nwalker=50):
# load map in Jy/pix units
MAP, WCS = map.convert_unit(units=u.Jy/(u.pixel**2),update=False)
# ensure src_lst is a list of sources and retrieve coordinates of each one
if type(src_lst) is astroSrc:
src_lst = [src_lst]
map_skyCoord = map.rtn_coords()
map_noise = 0.01
for _s,s in enumerate(src_lst):
x_pc,y_pc = wcs.utils.skycoord_to_pixel(s.COORD,map.WCS)
x_pc,y_pc = int(x_pc),int(y_pc)
D=int(abs(fit_radius.to(u.degree).value/WCS.wcs.cdelt[0])+3)
map_cutout = MAP[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1].value
coords_cutout = map.rtn_coords()[y_pc-D:y_pc+D+1,x_pc-D:x_pc+D+1]
wcs_cutout = wcs.WCS(naxis=2)
wcs_cutout.wcs.cdelt = WCS.wcs.cdelt
wcs_cutout.wcs.crval = WCS.wcs.crval
wcs_cutout.wcs.ctype = WCS.wcs.ctype
wcs_cutout.wcs.crpix = [WCS.wcs.crpix[0]-(x_pc-D),WCS.wcs.crpix[1]-(y_pc-D)]
x_cutout = coords_cutout.l.degree
y_cutout = coords_cutout.b.degree
dx_cutout = x_cutout-s.COORD.l.degree
dy_cutout = y_cutout-s.COORD.b.degree
dr_cutout = np.sqrt(dx_cutout**2 + dy_cutout**2)*u.degree
fitting_area = dr_cutout<fit_radius
p0 = [np.nanmedian(map_cutout), # intercept
0.0, # slope_x
0.0, # slope_y
1, # Amplitude
0.0, # x_mean
0.0, # y_mean
5/(60*np.sqrt(8*np.log(2))), # x_stddev
5/(60*np.sqrt(8*np.log(2))), # ratio
0] # epsilon
nll = lambda *args: -lnlike(*args)
soln = minimize(nll, p0, args=(dx_cutout, dy_cutout, map_cutout, map_noise, fitting_area),
bounds = ((0,np.inf),(None,None),(None,None),
(1e-2,np.inf),
(-2/60,2/60),(-2/60,2/60),
(1.6/60,5./60),(1.6/60,5./60),
(-np.pi/2,np.pi/2)))
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# plt.show()
Ndim = len(p0)
P0 = [soln.x + 1e-11*np.random.randn(Ndim) for i in range(Nwalker)]
pool = mp.Pool()
sampler = emcee.EnsembleSampler(Nwalker, Ndim, lnprob,
args=(dx_cutout,
dy_cutout,
map_cutout,
map_noise,
fitting_area),
pool=pool)
state = sampler.run_mcmc(P0,burnin+runtime,skip_initial_state_check=True,progress=True)
pool.close()
pars = [r'$c$',r'$m_x$',r'$m_y$',r'$A$',r'$x_c$',r'$y_c$',r'$a$',r'$b$',r'$\theta$']
chains = sampler.chain[:]
# sort a>b
mask = chains[:,:,6]<chains[:,:,7] #filter where a<b
lower = chains[:,:,6][mask]
chains[:,:,6][mask] = chains[:,:,7][mask]
chains[:,:,7][mask] = lower
chains[:,:,8][mask] -= np.pi/2
# collapse angles into mode±pi/2 range
chains[:,:,8] -= chains[:,:,8]//np.pi * np.pi
theta_hist, edge = np.histogram(chains[:,:,8], bins=20, range=(0, np.pi))
cen = (edge[1:]+edge[:-1])/2
max_index = np.argmax(theta_hist)
cval = cen[max_index]
chains[:,:,8][chains[:,:,8]<cval-np.pi/2] += np.pi | chains[:,:,8][chains[:,:,8]>cval+np.pi/2] -= np.pi
def create_filter(chain,n,thresh=1.5):
final_amps = chain[:,-1,n]
q1 = np.percentile(final_amps, 25, interpolation='midpoint')
q3 = np.percentile(final_amps, 75, interpolation='midpoint')
iqr = q3-q1
lower, upper = q1-(thresh*iqr), q3+(thresh*iqr)
walkermask = np.all([final_amps>=lower, final_amps<=upper],axis=0)
return walkermask
walkermask = np.all([create_filter(chains,4),
create_filter(chains,4)],
axis=0)
print('discard ',np.sum(walkermask==False),' walkers of ',Nwalker)
chains_collM = np.nanmean(chains[walkermask],axis=0)
chains_coll = np.nanmedian(chains[walkermask],axis=0)
soln2 = np.nanmean(chains_coll[burnin:],axis=0)
errs2 = np.nanstd(chains_coll[burnin:],axis=0)
# OUTPUT MODEL INFO
src_model_entry = [(map.SURV,map.NAME,
(map.FREQ/u.Hz).value,
soln2[3:],errs2[3:])]
s.add_src_model(src_model_entry)
Sv = 2*np.pi*soln2[3]*soln2[6]*soln2[7]*(60**2)*u.Jy
Sv_e = Sv*np.sqrt((errs2[3]/soln2[3])**2 + ((errs2[6]/soln2[6])**2) + (errs2[7]/soln2[7])**2)
print(_s,' : Sv = ',Sv.value,'±',Sv_e.value)
# OUTPUT FLUX INFO
flux_info = [('2DGaussFit',map.SURV,map.NAME,
(map.FREQ/u.Hz).value,Sv.value,Sv_e.value)]
s.add_flux(flux_info)
# # BACKUP TXT FILE
# with open('{}_{}_sfit.txt'.format(s.NAME,map.NAME), 'a') as the_file:
# the_file.write('Sv = {} ± {}'.format(Sv.value,Sv_e.value))
# GRAPHS AND output
samples_flat = sampler.chain[walkermask, burnin:, :].reshape((-1, Ndim))
fig = corner.corner(samples_flat, labels=pars, truths=soln2, quantiles=[0.25,0.5,0.75])
plt.savefig('{}_{}_cornerplot.pdf'.format(s.NAME,map.NAME))
plt.close()
fig, axs = plt.subplots(ncols=3,nrows=4)
for n in range(Ndim):
ax = axs[n//3,n%3]
ax.plot(chains[:,:,n].T, c='black', alpha = 0.2)
ax.plot(chains_collM[:,n].T, c='blue')
ax.plot(chains_coll[:,n].T, c='red')
ax.set_title(pars[n])
ax.set_ylim(np.nanmin(chains_coll[:,n]),np.nanmax(chains_coll[:,n]))
axs[-1,0].scatter(chains_collM[:,4],chains_collM[:,5],c=np.arange(burnin+runtime),s=1)
axs[-1,0].set_title('center')
axs[-1,1].scatter(chains_collM[:,6],chains_collM[:,7],c=np.arange(burnin+runtime),s=1)
plt.savefig('{}_{}_chains.pdf'.format(s.NAME,map.NAME))
plt.close()
# fig,axs = plt.subplots(nrows=2,ncols=2)
# axs[0,0].imshow(map_cutout)
# axs[0,1].imshow(gaussian_model1(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,0].imshow(map_cutout-gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[0])
# axs[1,1].imshow(gaussian_model1_s(soln.x[:-1],dx_cutout,dy_cutout)[1])
fig,axs = plt.subplots(nrows=2,ncols=2,subplot_kw={'projection':wcs_cutout})
im1 = axs[0,0].imshow(map_cutout,origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im1,ax=axs[0,0])
im2 = axs[0,1].imshow(gaussian_model1(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im2,ax=axs[0,1])
im3 = axs[1,0].imshow(map_cutout-gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im3,ax=axs[1,0])
im4 = axs[1,1].imshow(gaussian_model1_s(soln2,dx_cutout,dy_cutout),origin='lower')
plt.xlabel(r'$\ell$')
plt.ylabel(r'$b$')
plt.colorbar(im4,ax=axs[1,1])
plt.savefig('{}_{}_model.pdf'.format(s.NAME,map.NAME))
plt.close()
def lnprior(P):
c,mx,my,A,xc,yc,a,b,t = P
if np.all([A>1e-4,
abs(xc)<=2.5/60.,
abs(yc)<=2.5/60.,
a>1.5/60., a<7./60.,
b>1.5/60., b<7./60.],axis=0):
return 0.
return -np.inf
def lnlike(P,x,y,map,map_noise,fitting_area):
model = gaussian_model1(P,x,y)
denom = np.power(map_noise,2)
lp = -0.5*np.nansum(np.power(map[fitting_area]-model[fitting_area],2)/denom + np.log(denom) + np.log(2*np.pi))
# corr = (mask.shape[0]*mask.shape[1])/np.nansum(mask)
# lp = lp*corr
return lp
def lnprob(P,x,y,map,map_noise,fitting_area):
lp = lnprior(P)
if not np.isfinite(lp):
return -np.inf
ll = lnlike(P,x,y,map,map_noise,fitting_area)
return lp+ll
def gaussian_model1(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
BGD = models.Planar2D(intercept=i,
slope_x=sx,
slope_y=sy)
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = BGD(x,y) + SRC(x,y)
return MOD#, MASK
def gaussian_model1_s(P,x,y):
i,sx,sy,A,x0,y0,xs,ys,t = P
SRC = models.Gaussian2D(amplitude=A,
x_mean=x0,
y_mean=y0,
x_stddev=xs,
y_stddev=ys,
theta=t)
# MASK = SRC(x,y) >= A*0.1
MOD = SRC(x,y)
return MOD#, MASK | random_line_split | |
main.rs | <<<<<<< HEAD
/*
* word correction
*
* Reads text from the corpus text and
* Count the frequency of each word, then correct the candidate word in input text, output with the most frequetly
* used one
*
* Background
*
* The purpose of correct is to find possible corrections for misspelled words. It consists of two phases:
* The first phase is a training module, which consumes a corpus of correctly spelled words and counts the
* number of occurrences of each word. The second phase uses the results of the first to check individual words.
* Specifically, it checks whether each word is spelled correctly according to the training module and, if not,
* whether “small edits” can reach a variant that is correctly spelled.
*
* Given a word, an edit action is one of the following:
* the deletion of one letter;
*
* the transposition of two neighboring letters;
*
* the replacement of one letter with another letter; and
*
* the insertion of a letter at any position.
*
* In this context, Norvig suggests that “small edits” means the application of one edit action possibly
* followed by the application of a second one to the result of the first.
* Once the second part has generated all possible candidate for a potentially misspelled word,
* it picks the most frequently used one from the training corpus. If none of the candidates is a correct word,
* correct reports a failure.
*
* INPUT
*
* The input format is using two standard input consuming text. It could contain anything like words, numbers or some marks.
* writtten in ASCII.
*
* Hello world! Where are you now?
* www333
* github.com/rust
* !!!!!@@@@@@@
*
* Any non-alphabetic will be regarded as noise and will not be counted:
*
* 23232
* ++--!!!@@
* ...???''''""""
*
*
* The input terminates with end-of-file.
*
*
* OUTPUT
*
* The correct program consumes a training file on the command line and then reads words—one per line—from
* standard input. For each word from standard in, correct prints one line. The line consists of just the word
* if it is spelled correctly. If the word is not correctly spelled, correct prints the word and the best
* improvement or “-” if there aren’t any improvements found.
*
* hello
*
* hell, hello
*
* word
*
* wordl, world
*
* wor, world
*
* wo, word
*
* w, -
*
* ASSUMPTIONS
*
* - Words are reading according to the language's reading routines,
*
* - A word contained numbers will be count only the alphabetic character
* and ignore the numbers.
*
* - All the symbol, space and numbers will be considered as noise and ignored.
*
* - The input only terminate in the end-of-file, which is kind of unconvenient
* if you want to use console to input your data.
*
* - Once the word has been edited, we would pick the most frequently used one after
* two editions.
*
* - Except fot the normal edition, we add the corner case handler to accelerate the algorithm
*
*
*/
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
//#![allow(dead_code)]
//#![allow(unused_variables)]
use std::io::{stdin};
use std::env;
use std::collections::HashMap;
use trie::Trie;
use trie::Result;
use std::fs::File;
mod readinput;
mod trie;
mod counter;
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() != 2 {
panic!("Missing input file"); //check training file
}
let f = File::open(&args[1]).expect("Error opening training file!");
//calculate the freq. of training file and stored in a hashmap
let dict: HashMap<String, usize> = counter::word_count(&counter::read_input(f));
//read input.txt and store in a vector
let check_words: Vec<String> = readinput::read_input(stdin());
//initialize the Trie
let mut t:Trie = Trie::new();
for (key,value) in &dict {
t.insert(&mut key.to_string(), *value);
}
//check each word in input.txt
for word in check_words {
let mut changeword = word.clone();
let mut changeword2 = word.clone();
let temp = find(&t, &mut changeword,&mut changeword2,&mut "".to_string(), 2).key;
if temp.is_empty(){
println!("{}, -", word);
}
else if temp == word{
println!("{}" , word);
}
else {
println!("{}, {}", word, temp);
}
}
}
/*
* This is the main search function for this program. We implement the DFS(Depth-First-Search)+Regression
* Algorithm to travel the whole trie and find all the candidate word, then pick the most frequently one.
*
* Input : trie: The trie made from corpus.txt, contains all the word
* path: The misspelled word
* pathclone: The remained string need to match
* cur: The trie path
* op: The edit times left for current matching
*
* The stop condition is that current trie path consist a word and match the input/edited word.
*
* We separate each character in the string and find the matched node in current trie. In the meantime,
* we also edit the word in case we can't find the original word in tries. But we set the original search
* with the highest priority.
*
* Output is a Struct with key: String the most frequently used word
* value: maximum frequency.
*/
fn find(trie: & Trie, path: & String,pathclone: & mut String,cur: & mut String, op: i64)-> Result{
if pathclone.len()==0 && trie.value>0 {
return Result{
value: trie.value,
key: cur.clone(),
}
}
else{
let mut max= Result::new();
let mut temp: Result;
let mut temppath =pathclone.clone();
let mut currtrie :&Trie;
if pathclone.len()>0{
let mut curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar) {
cur.push(curchar);
max = find(currtrie,path,& mut temppath, cur, op);
if op==2 && max.key == *path{
return max;
}
cur.pop();
}
//deletion
//if we can get a word after deleting current character
if op > 0{
if pathclone.len()==1 && trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()==2 &&op==2&& trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()>1{
| /transpose
if pathclone.len()>1{
temppath = pathclone.clone();
curchar = temppath.remove(0);
temppath.insert(1,curchar);
curchar = temppath.remove(0);
cur.push(curchar);
if let Some(currtrie) = trie.children.get(&curchar) {
let counter = op-1;
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
}
cur.pop();
}
// replace
for key in trie.children.keys(){
temppath = pathclone.clone();
if temppath.len()>1{
temppath.remove(0);
}
else{temppath="".to_string();}
currtrie = trie.children.get(&key).unwrap();
cur.push(*key);
let counter = op-1;
temp = find(&currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
if op> 0{
//insertion
for key in trie.children.keys(){
cur.push(*key);
currtrie = trie.children.get(&key).unwrap();
let counter = op-1;
temp = find(&currtrie,path,pathclone,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
return max;
}
}
#[test]
fn test_find_edit_value(){
//use super::{trie,Result};
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bce".to_string(),&mut "bce".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).value, 4);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).key, "acd");
assert_eq!(find(&t, &mut "".to_string(),&mut "".to_string(),&mut "".to_string(), 2).key, "gg");
assert_eq!(find(&t, &mut "cbdca".to_string(),&mut "cbdca".to_string(),&mut "".to_string(), 2).value, 3);
}
#[test]
fn test_find_replace_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bed".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "b".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
}
#[test]
fn test_find_delete_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bcdea".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "ggag".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
} | temppath = pathclone.clone();
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = op-1;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
if pathclone.len()>2 &&op==2{
temppath = pathclone.clone();
temppath.remove(0);
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = 0;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
}
/ | conditional_block |
main.rs | <<<<<<< HEAD
/*
* word correction
*
* Reads text from the corpus text and
* Count the frequency of each word, then correct the candidate word in input text, output with the most frequetly
* used one
*
* Background
*
* The purpose of correct is to find possible corrections for misspelled words. It consists of two phases:
* The first phase is a training module, which consumes a corpus of correctly spelled words and counts the
* number of occurrences of each word. The second phase uses the results of the first to check individual words.
* Specifically, it checks whether each word is spelled correctly according to the training module and, if not,
* whether “small edits” can reach a variant that is correctly spelled.
*
* Given a word, an edit action is one of the following:
* the deletion of one letter;
*
* the transposition of two neighboring letters;
*
* the replacement of one letter with another letter; and
*
* the insertion of a letter at any position.
*
* In this context, Norvig suggests that “small edits” means the application of one edit action possibly
* followed by the application of a second one to the result of the first.
* Once the second part has generated all possible candidate for a potentially misspelled word,
* it picks the most frequently used one from the training corpus. If none of the candidates is a correct word,
* correct reports a failure.
*
* INPUT
*
* The input format is using two standard input consuming text. It could contain anything like words, numbers or some marks.
* writtten in ASCII.
*
* Hello world! Where are you now?
* www333
* github.com/rust
* !!!!!@@@@@@@
*
* Any non-alphabetic will be regarded as noise and will not be counted:
*
* 23232
* ++--!!!@@
* ...???''''""""
*
*
* The input terminates with end-of-file.
*
*
* OUTPUT
*
* The correct program consumes a training file on the command line and then reads words—one per line—from
* standard input. For each word from standard in, correct prints one line. The line consists of just the word
* if it is spelled correctly. If the word is not correctly spelled, correct prints the word and the best
* improvement or “-” if there aren’t any improvements found.
*
* hello
*
* hell, hello
*
* word
*
* wordl, world
*
* wor, world
*
* wo, word
*
* w, -
*
* ASSUMPTIONS
*
* - Words are reading according to the language's reading routines,
*
* - A word contained numbers will be count only the alphabetic character
* and ignore the numbers.
*
* - All the symbol, space and numbers will be considered as noise and ignored.
*
* - The input only terminate in the end-of-file, which is kind of unconvenient
* if you want to use console to input your data.
*
* - Once the word has been edited, we would pick the most frequently used one after
* two editions.
*
* - Except fot the normal edition, we add the corner case handler to accelerate the algorithm
*
*
*/
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
//#![allow(dead_code)]
//#![allow(unused_variables)]
use std::io::{stdin};
use std::env;
use std::collections::HashMap;
use trie::Trie;
use trie::Result;
use std::fs::File;
mod readinput;
mod trie;
mod counter;
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() != 2 {
panic!("Missing input file"); //check training file
}
let f = File::open(&args[1]).expect("Error opening training file!");
//calculate the freq. of training file and stored in a hashmap
let dict: HashMap<String, usize> = counter::word_count(&counter::read_input(f));
//read input.txt and store in a vector
let check_words: Vec<String> = readinput::read_input(stdin());
//initialize the Trie
let mut t:Trie = Trie::new();
for (key,value) in &dict {
t.insert(&mut key.to_string(), *value);
}
//check each word in input.txt
for word in check_words {
let mut changeword = word.clone();
let mut changeword2 = word.clone();
let temp = find(&t, &mut changeword,&mut changeword2,&mut "".to_string(), 2).key;
if temp.is_empty(){
println!("{}, -", word);
}
else if temp == word{
println!("{}" , word);
}
else {
println!("{}, {}", word, temp);
}
}
}
/* | *
* Input : trie: The trie made from corpus.txt, contains all the word
* path: The misspelled word
* pathclone: The remained string need to match
* cur: The trie path
* op: The edit times left for current matching
*
* The stop condition is that current trie path consist a word and match the input/edited word.
*
* We separate each character in the string and find the matched node in current trie. In the meantime,
* we also edit the word in case we can't find the original word in tries. But we set the original search
* with the highest priority.
*
* Output is a Struct with key: String the most frequently used word
* value: maximum frequency.
*/
fn find(trie: & Trie, path: & String,pathclone: & mut String,cur: & mut String, op: i64)-> Result{
if pathclone.len()==0 && trie.value>0 {
return Result{
value: trie.value,
key: cur.clone(),
}
}
else{
let mut max= Result::new();
let mut temp: Result;
let mut temppath =pathclone.clone();
let mut currtrie :&Trie;
if pathclone.len()>0{
let mut curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar) {
cur.push(curchar);
max = find(currtrie,path,& mut temppath, cur, op);
if op==2 && max.key == *path{
return max;
}
cur.pop();
}
//deletion
//if we can get a word after deleting current character
if op > 0{
if pathclone.len()==1 && trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()==2 &&op==2&& trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()>1{
temppath = pathclone.clone();
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = op-1;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
if pathclone.len()>2 &&op==2{
temppath = pathclone.clone();
temppath.remove(0);
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = 0;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
}
//transpose
if pathclone.len()>1{
temppath = pathclone.clone();
curchar = temppath.remove(0);
temppath.insert(1,curchar);
curchar = temppath.remove(0);
cur.push(curchar);
if let Some(currtrie) = trie.children.get(&curchar) {
let counter = op-1;
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
}
cur.pop();
}
// replace
for key in trie.children.keys(){
temppath = pathclone.clone();
if temppath.len()>1{
temppath.remove(0);
}
else{temppath="".to_string();}
currtrie = trie.children.get(&key).unwrap();
cur.push(*key);
let counter = op-1;
temp = find(&currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
if op> 0{
//insertion
for key in trie.children.keys(){
cur.push(*key);
currtrie = trie.children.get(&key).unwrap();
let counter = op-1;
temp = find(&currtrie,path,pathclone,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
return max;
}
}
#[test]
fn test_find_edit_value(){
//use super::{trie,Result};
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bce".to_string(),&mut "bce".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).value, 4);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).key, "acd");
assert_eq!(find(&t, &mut "".to_string(),&mut "".to_string(),&mut "".to_string(), 2).key, "gg");
assert_eq!(find(&t, &mut "cbdca".to_string(),&mut "cbdca".to_string(),&mut "".to_string(), 2).value, 3);
}
#[test]
fn test_find_replace_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bed".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "b".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
}
#[test]
fn test_find_delete_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bcdea".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "ggag".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
} | * This is the main search function for this program. We implement the DFS(Depth-First-Search)+Regression
* Algorithm to travel the whole trie and find all the candidate word, then pick the most frequently one. | random_line_split |
main.rs | <<<<<<< HEAD
/*
* word correction
*
* Reads text from the corpus text and
* Count the frequency of each word, then correct the candidate word in input text, output with the most frequetly
* used one
*
* Background
*
* The purpose of correct is to find possible corrections for misspelled words. It consists of two phases:
* The first phase is a training module, which consumes a corpus of correctly spelled words and counts the
* number of occurrences of each word. The second phase uses the results of the first to check individual words.
* Specifically, it checks whether each word is spelled correctly according to the training module and, if not,
* whether “small edits” can reach a variant that is correctly spelled.
*
* Given a word, an edit action is one of the following:
* the deletion of one letter;
*
* the transposition of two neighboring letters;
*
* the replacement of one letter with another letter; and
*
* the insertion of a letter at any position.
*
* In this context, Norvig suggests that “small edits” means the application of one edit action possibly
* followed by the application of a second one to the result of the first.
* Once the second part has generated all possible candidate for a potentially misspelled word,
* it picks the most frequently used one from the training corpus. If none of the candidates is a correct word,
* correct reports a failure.
*
* INPUT
*
* The input format is using two standard input consuming text. It could contain anything like words, numbers or some marks.
* writtten in ASCII.
*
* Hello world! Where are you now?
* www333
* github.com/rust
* !!!!!@@@@@@@
*
* Any non-alphabetic will be regarded as noise and will not be counted:
*
* 23232
* ++--!!!@@
* ...???''''""""
*
*
* The input terminates with end-of-file.
*
*
* OUTPUT
*
* The correct program consumes a training file on the command line and then reads words—one per line—from
* standard input. For each word from standard in, correct prints one line. The line consists of just the word
* if it is spelled correctly. If the word is not correctly spelled, correct prints the word and the best
* improvement or “-” if there aren’t any improvements found.
*
* hello
*
* hell, hello
*
* word
*
* wordl, world
*
* wor, world
*
* wo, word
*
* w, -
*
* ASSUMPTIONS
*
* - Words are reading according to the language's reading routines,
*
* - A word contained numbers will be count only the alphabetic character
* and ignore the numbers.
*
* - All the symbol, space and numbers will be considered as noise and ignored.
*
* - The input only terminate in the end-of-file, which is kind of unconvenient
* if you want to use console to input your data.
*
* - Once the word has been edited, we would pick the most frequently used one after
* two editions.
*
* - Except fot the normal edition, we add the corner case handler to accelerate the algorithm
*
*
*/
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
//#![allow(dead_code)]
//#![allow(unused_variables)]
use std::io::{stdin};
use std::env;
use std::collections::HashMap;
use trie::Trie;
use trie::Result;
use std::fs::File;
mod readinput;
mod trie;
mod counter;
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() != 2 {
panic!("Missing input file"); //check training file
}
let f = File::open(&args[1]).expect("Error opening training file!");
//calculate the freq. of training file and stored in a hashmap
let dict: HashMap<String, usize> = counter::word_count(&counter::read_input(f));
//read input.txt and store in a vector
let check_words: Vec<String> = readinput::read_input(stdin());
//initialize the Trie
let mut t:Trie = Trie::new();
for (key,value) in &dict {
t.insert(&mut key.to_string(), *value);
}
//check each word in input.txt
for word in check_words {
let mut changeword = word.clone();
let mut changeword2 = word.clone();
let temp = find(&t, &mut changeword,&mut changeword2,&mut "".to_string(), 2).key;
if temp.is_empty(){
println!("{}, -", word);
}
else if temp == word{
println!("{}" , word);
}
else {
println!("{}, {}", word, temp);
}
}
}
/*
* This is the main search function for this program. We implement the DFS(Depth-First-Search)+Regression
* Algorithm to travel the whole trie and find all the candidate word, then pick the most frequently one.
*
* Input : trie: The trie made from corpus.txt, contains all the word
* path: The misspelled word
* pathclone: The remained string need to match
* cur: The trie path
* op: The edit times left for current matching
*
* The stop condition is that current trie path consist a word and match the input/edited word.
*
* We separate each character in the string and find the matched node in current trie. In the meantime,
* we also edit the word in case we can't find the original word in tries. But we set the original search
* with the highest priority.
*
* Output is a Struct with key: String the most frequently used word
* value: maximum frequency.
*/
fn find(trie: & Trie, path: & String,pathclone: & mut String,cur: & mut String, op: i64)-> Result{
if pathclone.le | #[test]
fn test_find_edit_value(){
//use super::{trie,Result};
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bce".to_string(),&mut "bce".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).value, 4);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).key, "acd");
assert_eq!(find(&t, &mut "".to_string(),&mut "".to_string(),&mut "".to_string(), 2).key, "gg");
assert_eq!(find(&t, &mut "cbdca".to_string(),&mut "cbdca".to_string(),&mut "".to_string(), 2).value, 3);
}
#[test]
fn test_find_replace_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bed".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "b".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
}
#[test]
fn test_find_delete_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bcdea".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "ggag".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
} | n()==0 && trie.value>0 {
return Result{
value: trie.value,
key: cur.clone(),
}
}
else{
let mut max= Result::new();
let mut temp: Result;
let mut temppath =pathclone.clone();
let mut currtrie :&Trie;
if pathclone.len()>0{
let mut curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar) {
cur.push(curchar);
max = find(currtrie,path,& mut temppath, cur, op);
if op==2 && max.key == *path{
return max;
}
cur.pop();
}
//deletion
//if we can get a word after deleting current character
if op > 0{
if pathclone.len()==1 && trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()==2 &&op==2&& trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()>1{
temppath = pathclone.clone();
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = op-1;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
if pathclone.len()>2 &&op==2{
temppath = pathclone.clone();
temppath.remove(0);
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = 0;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
}
//transpose
if pathclone.len()>1{
temppath = pathclone.clone();
curchar = temppath.remove(0);
temppath.insert(1,curchar);
curchar = temppath.remove(0);
cur.push(curchar);
if let Some(currtrie) = trie.children.get(&curchar) {
let counter = op-1;
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
}
cur.pop();
}
// replace
for key in trie.children.keys(){
temppath = pathclone.clone();
if temppath.len()>1{
temppath.remove(0);
}
else{temppath="".to_string();}
currtrie = trie.children.get(&key).unwrap();
cur.push(*key);
let counter = op-1;
temp = find(&currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
if op> 0{
//insertion
for key in trie.children.keys(){
cur.push(*key);
currtrie = trie.children.get(&key).unwrap();
let counter = op-1;
temp = find(&currtrie,path,pathclone,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
return max;
}
}
| identifier_body |
main.rs | <<<<<<< HEAD
/*
* word correction
*
* Reads text from the corpus text and
* Count the frequency of each word, then correct the candidate word in input text, output with the most frequetly
* used one
*
* Background
*
* The purpose of correct is to find possible corrections for misspelled words. It consists of two phases:
* The first phase is a training module, which consumes a corpus of correctly spelled words and counts the
* number of occurrences of each word. The second phase uses the results of the first to check individual words.
* Specifically, it checks whether each word is spelled correctly according to the training module and, if not,
* whether “small edits” can reach a variant that is correctly spelled.
*
* Given a word, an edit action is one of the following:
* the deletion of one letter;
*
* the transposition of two neighboring letters;
*
* the replacement of one letter with another letter; and
*
* the insertion of a letter at any position.
*
* In this context, Norvig suggests that “small edits” means the application of one edit action possibly
* followed by the application of a second one to the result of the first.
* Once the second part has generated all possible candidate for a potentially misspelled word,
* it picks the most frequently used one from the training corpus. If none of the candidates is a correct word,
* correct reports a failure.
*
* INPUT
*
* The input format is using two standard input consuming text. It could contain anything like words, numbers or some marks.
* writtten in ASCII.
*
* Hello world! Where are you now?
* www333
* github.com/rust
* !!!!!@@@@@@@
*
* Any non-alphabetic will be regarded as noise and will not be counted:
*
* 23232
* ++--!!!@@
* ...???''''""""
*
*
* The input terminates with end-of-file.
*
*
* OUTPUT
*
* The correct program consumes a training file on the command line and then reads words—one per line—from
* standard input. For each word from standard in, correct prints one line. The line consists of just the word
* if it is spelled correctly. If the word is not correctly spelled, correct prints the word and the best
* improvement or “-” if there aren’t any improvements found.
*
* hello
*
* hell, hello
*
* word
*
* wordl, world
*
* wor, world
*
* wo, word
*
* w, -
*
* ASSUMPTIONS
*
* - Words are reading according to the language's reading routines,
*
* - A word contained numbers will be count only the alphabetic character
* and ignore the numbers.
*
* - All the symbol, space and numbers will be considered as noise and ignored.
*
* - The input only terminate in the end-of-file, which is kind of unconvenient
* if you want to use console to input your data.
*
* - Once the word has been edited, we would pick the most frequently used one after
* two editions.
*
* - Except fot the normal edition, we add the corner case handler to accelerate the algorithm
*
*
*/
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
//#![allow(dead_code)]
//#![allow(unused_variables)]
use std::io::{stdin};
use std::env;
use std::collections::HashMap;
use trie::Trie;
use trie::Result;
use std::fs::File;
mod readinput;
mod trie;
mod counter;
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() != 2 {
panic!("Missing input file"); //check training file
}
let f = File::open(&args[1]).expect("Error opening training file!");
//calculate the freq. of training file and stored in a hashmap
let dict: HashMap<String, usize> = counter::word_count(&counter::read_input(f));
//read input.txt and store in a vector
let check_words: Vec<String> = readinput::read_input(stdin());
//initialize the Trie
let mut t:Trie = Trie::new();
for (key,value) in &dict {
t.insert(&mut key.to_string(), *value);
}
//check each word in input.txt
for word in check_words {
let mut changeword = word.clone();
let mut changeword2 = word.clone();
let temp = find(&t, &mut changeword,&mut changeword2,&mut "".to_string(), 2).key;
if temp.is_empty(){
println!("{}, -", word);
}
else if temp == word{
println!("{}" , word);
}
else {
println!("{}, {}", word, temp);
}
}
}
/*
* This is the main search function for this program. We implement the DFS(Depth-First-Search)+Regression
* Algorithm to travel the whole trie and find all the candidate word, then pick the most frequently one.
*
* Input : trie: The trie made from corpus.txt, contains all the word
* path: The misspelled word
* pathclone: The remained string need to match
* cur: The trie path
* op: The edit times left for current matching
*
* The stop condition is that current trie path consist a word and match the input/edited word.
*
* We separate each character in the string and find the matched node in current trie. In the meantime,
* we also edit the word in case we can't find the original word in tries. But we set the original search
* with the highest priority.
*
* Output is a Struct with key: String the most frequently used word
* value: maximum frequency.
*/
fn find(trie: & Trie, | h: & String,pathclone: & mut String,cur: & mut String, op: i64)-> Result{
if pathclone.len()==0 && trie.value>0 {
return Result{
value: trie.value,
key: cur.clone(),
}
}
else{
let mut max= Result::new();
let mut temp: Result;
let mut temppath =pathclone.clone();
let mut currtrie :&Trie;
if pathclone.len()>0{
let mut curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar) {
cur.push(curchar);
max = find(currtrie,path,& mut temppath, cur, op);
if op==2 && max.key == *path{
return max;
}
cur.pop();
}
//deletion
//if we can get a word after deleting current character
if op > 0{
if pathclone.len()==1 && trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()==2 &&op==2&& trie.value>0{
temp= Result{
value: trie.value,
key: cur.clone(),
};
if temp.value>max.value{
max = temp;
}
}
if pathclone.len()>1{
temppath = pathclone.clone();
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = op-1;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
if pathclone.len()>2 &&op==2{
temppath = pathclone.clone();
temppath.remove(0);
temppath.remove(0);
curchar = temppath.remove(0);
if let Some(currtrie) = trie.children.get(&curchar){
let counter = 0;
cur.push(curchar);
<<<<<<< HEAD
=======
>>>>>>> 7a6cdabd1d54f8ce2e0980b7aae2d0728eca04f0
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
}
//transpose
if pathclone.len()>1{
temppath = pathclone.clone();
curchar = temppath.remove(0);
temppath.insert(1,curchar);
curchar = temppath.remove(0);
cur.push(curchar);
if let Some(currtrie) = trie.children.get(&curchar) {
let counter = op-1;
temp = find(currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
}
cur.pop();
}
// replace
for key in trie.children.keys(){
temppath = pathclone.clone();
if temppath.len()>1{
temppath.remove(0);
}
else{temppath="".to_string();}
currtrie = trie.children.get(&key).unwrap();
cur.push(*key);
let counter = op-1;
temp = find(&currtrie,path,&mut temppath,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
}
if op> 0{
//insertion
for key in trie.children.keys(){
cur.push(*key);
currtrie = trie.children.get(&key).unwrap();
let counter = op-1;
temp = find(&currtrie,path,pathclone,cur,counter);
if temp.value>max.value{
max = temp;
}
cur.pop();
}
}
return max;
}
}
#[test]
fn test_find_edit_value(){
//use super::{trie,Result};
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bce".to_string(),&mut "bce".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).value, 4);
assert_eq!(find(&t, &mut "acd".to_string(),&mut "acd".to_string(),&mut "".to_string(), 2).key, "acd");
assert_eq!(find(&t, &mut "".to_string(),&mut "".to_string(),&mut "".to_string(), 2).key, "gg");
assert_eq!(find(&t, &mut "cbdca".to_string(),&mut "cbdca".to_string(),&mut "".to_string(), 2).value, 3);
}
#[test]
fn test_find_replace_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bed".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "b".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
}
#[test]
fn test_find_delete_value(){
let mut t = Trie::new();
t.insert(&mut "acd".to_string(), 4);
t.insert(&mut "bce".to_string(), 5);
t.insert(&mut "cbdca".to_string(),3);
t.insert(&mut "gg".to_string(),100);
assert_eq!(find(&t, &mut "bcdea".to_string(),&mut "bed".to_string(),&mut "".to_string(), 2).value, 5);
assert_eq!(find(&t, &mut "ggag".to_string(),&mut "b".to_string(),&mut "".to_string(), 2).value, 100);
} | pat | identifier_name |
sheet.py | import logging
from datetime import datetime
from typing import (
Optional,
Dict,
List,
ClassVar,
Generic,
Type,
TypeVar,
Tuple,
Any,
Union,
cast,
TYPE_CHECKING,
)
import attr
from marshmallow import fields, pre_load
from simple_smartsheet import config
from simple_smartsheet import exceptions
from simple_smartsheet import utils
from simple_smartsheet.types import IndexKeysDict, IndexesType
from simple_smartsheet.models.base import Schema, CoreSchema, Object, CoreObject
from simple_smartsheet.models.cell import Cell
from simple_smartsheet.models.column import Column, ColumnSchema, ColumnType
from simple_smartsheet.models.row import Row, RowSchema, _RowBase
if TYPE_CHECKING:
try:
import pandas as pd
except ImportError:
pass
logger = logging.getLogger(__name__)
class UserSettingsSchema(Schema):
critical_path_enabled = fields.Bool(data_key="criticalPathEnabled")
display_summary_tasks = fields.Bool(data_key="displaySummaryTasks")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserSettings(Object):
critical_path_enabled: bool
display_summary_tasks: bool
class UserPermissionsSchema(Schema):
summary_permissions = fields.Str(data_key="summaryPermissions")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserPermissions(Object):
summary_permissions: str
class WorkspaceSchema(Schema):
id = fields.Int()
name = fields.Str()
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Workspace(Object):
id: int
name: str
class SheetSchema(CoreSchema):
"""Marshmallow Schema for Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
"""
id = fields.Int()
name = fields.Str()
access_level = fields.Str(data_key="accessLevel")
permalink = fields.Str()
favorite = fields.Bool()
created_at = fields.DateTime(data_key="createdAt")
modified_at = fields.DateTime(data_key="modifiedAt")
version = fields.Int()
total_row_count = fields.Int(data_key="totalRowCount")
effective_attachment_options = fields.List(
fields.Str(), data_key="effectiveAttachmentOptions"
)
gantt_enabled = fields.Bool(data_key="ganttEnabled")
read_only = fields.Bool(data_key="readOnly")
dependencies_enabled = fields.Bool(data_key="dependenciesEnabled") | cell_image_upload_enabled = fields.Bool(data_key="cellImageUploadEnabled")
user_settings = fields.Nested(UserSettingsSchema, data_key="userSettings")
user_permissions = fields.Nested(UserPermissionsSchema, data_key="userPermissions")
has_summary_fields = fields.Bool(data_key="hasSummaryFields")
is_multi_picklist_enabled = fields.Bool(data_key="isMultiPicklistEnabled")
columns = fields.List(fields.Nested(ColumnSchema))
rows = fields.List(fields.Nested(RowSchema))
workspace = fields.Nested(WorkspaceSchema)
class Meta:
unknown = utils.get_unknown_field_handling(config.STRICT_VALIDATION)
ordered = True
@pre_load
def update_context(self, data, many: bool, **kwargs):
self.context["column_id_to_type"] = {}
return data
RowT = TypeVar("RowT", bound=_RowBase[Any])
ColumnT = TypeVar("ColumnT", bound=Column)
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class _SheetBase(CoreObject, Generic[RowT, ColumnT]):
"""Represents Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
Extra attributes:
indexes: contains all built indices
"""
name: str
id: Optional[int] = None
access_level: Optional[str] = None
permalink: Optional[str] = None
favorite: Optional[bool] = None
created_at: Optional[datetime] = None
modified_at: Optional[datetime] = None
version: Optional[int] = None
total_row_count: Optional[int] = None
effective_attachment_options: List[str] = attr.Factory(list)
gantt_enabled: Optional[bool] = None
read_only: Optional[bool] = None
dependencies_enabled: Optional[bool] = None
resource_management_enabled: Optional[bool] = None
cell_image_upload_enabled: Optional[bool] = None
user_settings: Optional[UserSettings] = None
user_permissions: Optional[UserPermissions] = None
has_summary_fields: Optional[bool] = None
is_multi_picklist_enabled: Optional[bool] = None
columns: List[ColumnT] = attr.Factory(list)
rows: List[RowT] = attr.Factory(list)
workspace: Optional[Workspace] = None
_row_num_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_row_id_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_column_title_to_column: Dict[str, ColumnT] = attr.ib(
attr.Factory(dict), init=False
)
_column_id_to_column: Dict[int, ColumnT] = attr.ib(attr.Factory(dict), init=False)
indexes: IndexesType = attr.ib(attr.Factory(dict), init=False)
_schema: ClassVar[Type[SheetSchema]] = SheetSchema
def __attrs_post_init__(self) -> None:
self._update_column_lookup()
self._update_row_cell_lookup()
def _update_column_lookup(self) -> None:
self._column_title_to_column.clear()
self._column_id_to_column.clear()
for column in self.columns:
column_id = column._id
if column_id is None:
continue
self._column_id_to_column[column_id] = column
column_title = column.title
if column_title is None:
continue
if column_title in self._column_title_to_column:
logger.info(
"Column with the title %s is already present in the index",
column_title,
)
self._column_title_to_column[column_title] = column
def _update_row_cell_lookup(self) -> None:
self._row_num_to_row.clear()
self._row_id_to_row.clear()
for row in self.rows:
if row.num:
self._row_num_to_row[row.num] = row
if row.id:
self._row_id_to_row[row.id] = row
row._update_cell_lookup(self)
def build_index(self, indexes: List[IndexKeysDict]) -> None:
for index in indexes:
columns = index["columns"]
unique = index["unique"]
self.indexes[columns] = {"index": {}, "unique": unique}
for row in self.rows:
row._update_index(self)
def get_row(
self,
row_num: Optional[int] = None,
row_id: Optional[int] = None,
filter: Optional[Dict[str, Any]] = None,
) -> Optional[RowT]:
"""Returns Row object by row number or ID
Either row_num or row_id must be provided
Args:
row_num: row number
row_id: row id
filter: a dictionary with column title to value
mappings in the same order as index was built. Index must be unique.
Returns:
Row object
"""
if row_num is not None:
return self._row_num_to_row.get(row_num)
elif row_id is not None:
return self._row_id_to_row.get(row_id)
elif filter is not None:
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if not unique:
raise exceptions.SmartsheetIndexNotUnique(
f"Index {columns} is non-unique and lookup will potentially "
"return multiple rows, use get_rows method instead"
)
index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
return index[query]
else:
raise ValueError("Either row_num or row_id argument should be provided")
def get_rows(self, filter: Dict[str, Any]) -> List[RowT]:
"""Returns Row objects by index query
Args:
filter: a dictionary or ordered dictionary with column title to value
mappings in the same order as index was built. Index must be non-unique.
Returns:
Row object
"""
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if unique:
unique_index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
result = unique_index.get(query)
if result is not None:
return [result]
else:
return []
else:
non_unique_index = cast(
Dict[Tuple[Any, ...], List[RowT]], index_dict["index"]
)
return non_unique_index.get(query, [])
def get_column(
self, column_title: Optional[str] = None, column_id: Optional[int] = None
) -> ColumnT:
"""Returns Column object by column title or ID
Either column_title or column_id must be provided
Args:
column_title: column title (case-sensitive)
column_id: column id
Returns:
Column object
"""
if column_title is not None:
return self._column_title_to_column[column_title]
elif column_id is not None:
return self._column_id_to_column[column_id]
else:
raise ValueError(
"Either column_title or column_id argument should be provided"
)
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Sheet(_SheetBase[Row, Column]):
columns: List[Column] = cast(List[Column], attr.Factory(list))
rows: List[Row] = attr.Factory(list)
def make_cell(self, column_title: str, field_value: Any) -> Cell:
"""Creates a Cell object for an existing column
Args:
column_title: title of an existing column
field_value: value of the cell
Returns:
Cell object
"""
column = self.get_column(column_title)
if column is None:
raise ValueError(
"A column with the title %s does not exist in this sheet", column_title
)
if column.type == ColumnType.MULTI_PICKLIST:
if not column.id:
raise ValueError(f"Column {column!r} does not have ID")
cell = Cell.create_multi_picklist(column_id=column.id, values=field_value)
else:
cell = Cell(column_id=column.id, value=field_value)
return cell
def make_cells(self, fields: Dict[str, Any]) -> List[Cell]:
"""Create a list of Cell objects from dictionary
Args:
fields: dictionary where key is a column title and value is a cell value
Returns:
list of Cell objects
"""
result: List[Cell] = []
for column_title, field_value in fields.items():
result.append(self.make_cell(column_title, field_value))
return result
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
def as_dataframe(self) -> "pd.DataFrame":
"""Return the sheet as pandas DataFrame
Columns will includes row id, row number and all columns from the sheet
Pandas must be installed either separately or as extras:
`pip install simple-smartsheet[pandas]`
"""
import pandas as pd
df = pd.DataFrame([row.as_series() for row in self.rows])
return df | resource_management_enabled = fields.Bool(data_key="resourceManagementEnabled") | random_line_split |
sheet.py | import logging
from datetime import datetime
from typing import (
Optional,
Dict,
List,
ClassVar,
Generic,
Type,
TypeVar,
Tuple,
Any,
Union,
cast,
TYPE_CHECKING,
)
import attr
from marshmallow import fields, pre_load
from simple_smartsheet import config
from simple_smartsheet import exceptions
from simple_smartsheet import utils
from simple_smartsheet.types import IndexKeysDict, IndexesType
from simple_smartsheet.models.base import Schema, CoreSchema, Object, CoreObject
from simple_smartsheet.models.cell import Cell
from simple_smartsheet.models.column import Column, ColumnSchema, ColumnType
from simple_smartsheet.models.row import Row, RowSchema, _RowBase
if TYPE_CHECKING:
try:
import pandas as pd
except ImportError:
pass
logger = logging.getLogger(__name__)
class UserSettingsSchema(Schema):
critical_path_enabled = fields.Bool(data_key="criticalPathEnabled")
display_summary_tasks = fields.Bool(data_key="displaySummaryTasks")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserSettings(Object):
critical_path_enabled: bool
display_summary_tasks: bool
class UserPermissionsSchema(Schema):
summary_permissions = fields.Str(data_key="summaryPermissions")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserPermissions(Object):
summary_permissions: str
class WorkspaceSchema(Schema):
id = fields.Int()
name = fields.Str()
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Workspace(Object):
id: int
name: str
class SheetSchema(CoreSchema):
"""Marshmallow Schema for Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
"""
id = fields.Int()
name = fields.Str()
access_level = fields.Str(data_key="accessLevel")
permalink = fields.Str()
favorite = fields.Bool()
created_at = fields.DateTime(data_key="createdAt")
modified_at = fields.DateTime(data_key="modifiedAt")
version = fields.Int()
total_row_count = fields.Int(data_key="totalRowCount")
effective_attachment_options = fields.List(
fields.Str(), data_key="effectiveAttachmentOptions"
)
gantt_enabled = fields.Bool(data_key="ganttEnabled")
read_only = fields.Bool(data_key="readOnly")
dependencies_enabled = fields.Bool(data_key="dependenciesEnabled")
resource_management_enabled = fields.Bool(data_key="resourceManagementEnabled")
cell_image_upload_enabled = fields.Bool(data_key="cellImageUploadEnabled")
user_settings = fields.Nested(UserSettingsSchema, data_key="userSettings")
user_permissions = fields.Nested(UserPermissionsSchema, data_key="userPermissions")
has_summary_fields = fields.Bool(data_key="hasSummaryFields")
is_multi_picklist_enabled = fields.Bool(data_key="isMultiPicklistEnabled")
columns = fields.List(fields.Nested(ColumnSchema))
rows = fields.List(fields.Nested(RowSchema))
workspace = fields.Nested(WorkspaceSchema)
class Meta:
unknown = utils.get_unknown_field_handling(config.STRICT_VALIDATION)
ordered = True
@pre_load
def update_context(self, data, many: bool, **kwargs):
self.context["column_id_to_type"] = {}
return data
RowT = TypeVar("RowT", bound=_RowBase[Any])
ColumnT = TypeVar("ColumnT", bound=Column)
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class _SheetBase(CoreObject, Generic[RowT, ColumnT]):
"""Represents Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
Extra attributes:
indexes: contains all built indices
"""
name: str
id: Optional[int] = None
access_level: Optional[str] = None
permalink: Optional[str] = None
favorite: Optional[bool] = None
created_at: Optional[datetime] = None
modified_at: Optional[datetime] = None
version: Optional[int] = None
total_row_count: Optional[int] = None
effective_attachment_options: List[str] = attr.Factory(list)
gantt_enabled: Optional[bool] = None
read_only: Optional[bool] = None
dependencies_enabled: Optional[bool] = None
resource_management_enabled: Optional[bool] = None
cell_image_upload_enabled: Optional[bool] = None
user_settings: Optional[UserSettings] = None
user_permissions: Optional[UserPermissions] = None
has_summary_fields: Optional[bool] = None
is_multi_picklist_enabled: Optional[bool] = None
columns: List[ColumnT] = attr.Factory(list)
rows: List[RowT] = attr.Factory(list)
workspace: Optional[Workspace] = None
_row_num_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_row_id_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_column_title_to_column: Dict[str, ColumnT] = attr.ib(
attr.Factory(dict), init=False
)
_column_id_to_column: Dict[int, ColumnT] = attr.ib(attr.Factory(dict), init=False)
indexes: IndexesType = attr.ib(attr.Factory(dict), init=False)
_schema: ClassVar[Type[SheetSchema]] = SheetSchema
def __attrs_post_init__(self) -> None:
self._update_column_lookup()
self._update_row_cell_lookup()
def _update_column_lookup(self) -> None:
self._column_title_to_column.clear()
self._column_id_to_column.clear()
for column in self.columns:
column_id = column._id
if column_id is None:
continue
self._column_id_to_column[column_id] = column
column_title = column.title
if column_title is None:
continue
if column_title in self._column_title_to_column:
logger.info(
"Column with the title %s is already present in the index",
column_title,
)
self._column_title_to_column[column_title] = column
def _update_row_cell_lookup(self) -> None:
self._row_num_to_row.clear()
self._row_id_to_row.clear()
for row in self.rows:
if row.num:
self._row_num_to_row[row.num] = row
if row.id:
self._row_id_to_row[row.id] = row
row._update_cell_lookup(self)
def build_index(self, indexes: List[IndexKeysDict]) -> None:
for index in indexes:
|
for row in self.rows:
row._update_index(self)
def get_row(
self,
row_num: Optional[int] = None,
row_id: Optional[int] = None,
filter: Optional[Dict[str, Any]] = None,
) -> Optional[RowT]:
"""Returns Row object by row number or ID
Either row_num or row_id must be provided
Args:
row_num: row number
row_id: row id
filter: a dictionary with column title to value
mappings in the same order as index was built. Index must be unique.
Returns:
Row object
"""
if row_num is not None:
return self._row_num_to_row.get(row_num)
elif row_id is not None:
return self._row_id_to_row.get(row_id)
elif filter is not None:
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if not unique:
raise exceptions.SmartsheetIndexNotUnique(
f"Index {columns} is non-unique and lookup will potentially "
"return multiple rows, use get_rows method instead"
)
index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
return index[query]
else:
raise ValueError("Either row_num or row_id argument should be provided")
def get_rows(self, filter: Dict[str, Any]) -> List[RowT]:
"""Returns Row objects by index query
Args:
filter: a dictionary or ordered dictionary with column title to value
mappings in the same order as index was built. Index must be non-unique.
Returns:
Row object
"""
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if unique:
unique_index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
result = unique_index.get(query)
if result is not None:
return [result]
else:
return []
else:
non_unique_index = cast(
Dict[Tuple[Any, ...], List[RowT]], index_dict["index"]
)
return non_unique_index.get(query, [])
def get_column(
self, column_title: Optional[str] = None, column_id: Optional[int] = None
) -> ColumnT:
"""Returns Column object by column title or ID
Either column_title or column_id must be provided
Args:
column_title: column title (case-sensitive)
column_id: column id
Returns:
Column object
"""
if column_title is not None:
return self._column_title_to_column[column_title]
elif column_id is not None:
return self._column_id_to_column[column_id]
else:
raise ValueError(
"Either column_title or column_id argument should be provided"
)
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Sheet(_SheetBase[Row, Column]):
columns: List[Column] = cast(List[Column], attr.Factory(list))
rows: List[Row] = attr.Factory(list)
def make_cell(self, column_title: str, field_value: Any) -> Cell:
"""Creates a Cell object for an existing column
Args:
column_title: title of an existing column
field_value: value of the cell
Returns:
Cell object
"""
column = self.get_column(column_title)
if column is None:
raise ValueError(
"A column with the title %s does not exist in this sheet", column_title
)
if column.type == ColumnType.MULTI_PICKLIST:
if not column.id:
raise ValueError(f"Column {column!r} does not have ID")
cell = Cell.create_multi_picklist(column_id=column.id, values=field_value)
else:
cell = Cell(column_id=column.id, value=field_value)
return cell
def make_cells(self, fields: Dict[str, Any]) -> List[Cell]:
"""Create a list of Cell objects from dictionary
Args:
fields: dictionary where key is a column title and value is a cell value
Returns:
list of Cell objects
"""
result: List[Cell] = []
for column_title, field_value in fields.items():
result.append(self.make_cell(column_title, field_value))
return result
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
def as_dataframe(self) -> "pd.DataFrame":
"""Return the sheet as pandas DataFrame
Columns will includes row id, row number and all columns from the sheet
Pandas must be installed either separately or as extras:
`pip install simple-smartsheet[pandas]`
"""
import pandas as pd
df = pd.DataFrame([row.as_series() for row in self.rows])
return df
| columns = index["columns"]
unique = index["unique"]
self.indexes[columns] = {"index": {}, "unique": unique} | conditional_block |
sheet.py | import logging
from datetime import datetime
from typing import (
Optional,
Dict,
List,
ClassVar,
Generic,
Type,
TypeVar,
Tuple,
Any,
Union,
cast,
TYPE_CHECKING,
)
import attr
from marshmallow import fields, pre_load
from simple_smartsheet import config
from simple_smartsheet import exceptions
from simple_smartsheet import utils
from simple_smartsheet.types import IndexKeysDict, IndexesType
from simple_smartsheet.models.base import Schema, CoreSchema, Object, CoreObject
from simple_smartsheet.models.cell import Cell
from simple_smartsheet.models.column import Column, ColumnSchema, ColumnType
from simple_smartsheet.models.row import Row, RowSchema, _RowBase
if TYPE_CHECKING:
try:
import pandas as pd
except ImportError:
pass
logger = logging.getLogger(__name__)
class UserSettingsSchema(Schema):
critical_path_enabled = fields.Bool(data_key="criticalPathEnabled")
display_summary_tasks = fields.Bool(data_key="displaySummaryTasks")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserSettings(Object):
critical_path_enabled: bool
display_summary_tasks: bool
class UserPermissionsSchema(Schema):
summary_permissions = fields.Str(data_key="summaryPermissions")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserPermissions(Object):
summary_permissions: str
class WorkspaceSchema(Schema):
id = fields.Int()
name = fields.Str()
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Workspace(Object):
id: int
name: str
class SheetSchema(CoreSchema):
"""Marshmallow Schema for Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
"""
id = fields.Int()
name = fields.Str()
access_level = fields.Str(data_key="accessLevel")
permalink = fields.Str()
favorite = fields.Bool()
created_at = fields.DateTime(data_key="createdAt")
modified_at = fields.DateTime(data_key="modifiedAt")
version = fields.Int()
total_row_count = fields.Int(data_key="totalRowCount")
effective_attachment_options = fields.List(
fields.Str(), data_key="effectiveAttachmentOptions"
)
gantt_enabled = fields.Bool(data_key="ganttEnabled")
read_only = fields.Bool(data_key="readOnly")
dependencies_enabled = fields.Bool(data_key="dependenciesEnabled")
resource_management_enabled = fields.Bool(data_key="resourceManagementEnabled")
cell_image_upload_enabled = fields.Bool(data_key="cellImageUploadEnabled")
user_settings = fields.Nested(UserSettingsSchema, data_key="userSettings")
user_permissions = fields.Nested(UserPermissionsSchema, data_key="userPermissions")
has_summary_fields = fields.Bool(data_key="hasSummaryFields")
is_multi_picklist_enabled = fields.Bool(data_key="isMultiPicklistEnabled")
columns = fields.List(fields.Nested(ColumnSchema))
rows = fields.List(fields.Nested(RowSchema))
workspace = fields.Nested(WorkspaceSchema)
class Meta:
unknown = utils.get_unknown_field_handling(config.STRICT_VALIDATION)
ordered = True
@pre_load
def update_context(self, data, many: bool, **kwargs):
self.context["column_id_to_type"] = {}
return data
RowT = TypeVar("RowT", bound=_RowBase[Any])
ColumnT = TypeVar("ColumnT", bound=Column)
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class _SheetBase(CoreObject, Generic[RowT, ColumnT]):
"""Represents Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
Extra attributes:
indexes: contains all built indices
"""
name: str
id: Optional[int] = None
access_level: Optional[str] = None
permalink: Optional[str] = None
favorite: Optional[bool] = None
created_at: Optional[datetime] = None
modified_at: Optional[datetime] = None
version: Optional[int] = None
total_row_count: Optional[int] = None
effective_attachment_options: List[str] = attr.Factory(list)
gantt_enabled: Optional[bool] = None
read_only: Optional[bool] = None
dependencies_enabled: Optional[bool] = None
resource_management_enabled: Optional[bool] = None
cell_image_upload_enabled: Optional[bool] = None
user_settings: Optional[UserSettings] = None
user_permissions: Optional[UserPermissions] = None
has_summary_fields: Optional[bool] = None
is_multi_picklist_enabled: Optional[bool] = None
columns: List[ColumnT] = attr.Factory(list)
rows: List[RowT] = attr.Factory(list)
workspace: Optional[Workspace] = None
_row_num_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_row_id_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_column_title_to_column: Dict[str, ColumnT] = attr.ib(
attr.Factory(dict), init=False
)
_column_id_to_column: Dict[int, ColumnT] = attr.ib(attr.Factory(dict), init=False)
indexes: IndexesType = attr.ib(attr.Factory(dict), init=False)
_schema: ClassVar[Type[SheetSchema]] = SheetSchema
def __attrs_post_init__(self) -> None:
self._update_column_lookup()
self._update_row_cell_lookup()
def _update_column_lookup(self) -> None:
self._column_title_to_column.clear()
self._column_id_to_column.clear()
for column in self.columns:
column_id = column._id
if column_id is None:
continue
self._column_id_to_column[column_id] = column
column_title = column.title
if column_title is None:
continue
if column_title in self._column_title_to_column:
logger.info(
"Column with the title %s is already present in the index",
column_title,
)
self._column_title_to_column[column_title] = column
def _update_row_cell_lookup(self) -> None:
self._row_num_to_row.clear()
self._row_id_to_row.clear()
for row in self.rows:
if row.num:
self._row_num_to_row[row.num] = row
if row.id:
self._row_id_to_row[row.id] = row
row._update_cell_lookup(self)
def build_index(self, indexes: List[IndexKeysDict]) -> None:
for index in indexes:
columns = index["columns"]
unique = index["unique"]
self.indexes[columns] = {"index": {}, "unique": unique}
for row in self.rows:
row._update_index(self)
def get_row(
self,
row_num: Optional[int] = None,
row_id: Optional[int] = None,
filter: Optional[Dict[str, Any]] = None,
) -> Optional[RowT]:
"""Returns Row object by row number or ID
Either row_num or row_id must be provided
Args:
row_num: row number
row_id: row id
filter: a dictionary with column title to value
mappings in the same order as index was built. Index must be unique.
Returns:
Row object
"""
if row_num is not None:
return self._row_num_to_row.get(row_num)
elif row_id is not None:
return self._row_id_to_row.get(row_id)
elif filter is not None:
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if not unique:
raise exceptions.SmartsheetIndexNotUnique(
f"Index {columns} is non-unique and lookup will potentially "
"return multiple rows, use get_rows method instead"
)
index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
return index[query]
else:
raise ValueError("Either row_num or row_id argument should be provided")
def get_rows(self, filter: Dict[str, Any]) -> List[RowT]:
"""Returns Row objects by index query
Args:
filter: a dictionary or ordered dictionary with column title to value
mappings in the same order as index was built. Index must be non-unique.
Returns:
Row object
"""
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if unique:
unique_index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
result = unique_index.get(query)
if result is not None:
return [result]
else:
return []
else:
non_unique_index = cast(
Dict[Tuple[Any, ...], List[RowT]], index_dict["index"]
)
return non_unique_index.get(query, [])
def get_column(
self, column_title: Optional[str] = None, column_id: Optional[int] = None
) -> ColumnT:
"""Returns Column object by column title or ID
Either column_title or column_id must be provided
Args:
column_title: column title (case-sensitive)
column_id: column id
Returns:
Column object
"""
if column_title is not None:
return self._column_title_to_column[column_title]
elif column_id is not None:
return self._column_id_to_column[column_id]
else:
raise ValueError(
"Either column_title or column_id argument should be provided"
)
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Sheet(_SheetBase[Row, Column]):
columns: List[Column] = cast(List[Column], attr.Factory(list))
rows: List[Row] = attr.Factory(list)
def make_cell(self, column_title: str, field_value: Any) -> Cell:
"""Creates a Cell object for an existing column
Args:
column_title: title of an existing column
field_value: value of the cell
Returns:
Cell object
"""
column = self.get_column(column_title)
if column is None:
raise ValueError(
"A column with the title %s does not exist in this sheet", column_title
)
if column.type == ColumnType.MULTI_PICKLIST:
if not column.id:
raise ValueError(f"Column {column!r} does not have ID")
cell = Cell.create_multi_picklist(column_id=column.id, values=field_value)
else:
cell = Cell(column_id=column.id, value=field_value)
return cell
def make_cells(self, fields: Dict[str, Any]) -> List[Cell]:
"""Create a list of Cell objects from dictionary
Args:
fields: dictionary where key is a column title and value is a cell value
Returns:
list of Cell objects
"""
result: List[Cell] = []
for column_title, field_value in fields.items():
result.append(self.make_cell(column_title, field_value))
return result
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
def | (self) -> "pd.DataFrame":
"""Return the sheet as pandas DataFrame
Columns will includes row id, row number and all columns from the sheet
Pandas must be installed either separately or as extras:
`pip install simple-smartsheet[pandas]`
"""
import pandas as pd
df = pd.DataFrame([row.as_series() for row in self.rows])
return df
| as_dataframe | identifier_name |
sheet.py | import logging
from datetime import datetime
from typing import (
Optional,
Dict,
List,
ClassVar,
Generic,
Type,
TypeVar,
Tuple,
Any,
Union,
cast,
TYPE_CHECKING,
)
import attr
from marshmallow import fields, pre_load
from simple_smartsheet import config
from simple_smartsheet import exceptions
from simple_smartsheet import utils
from simple_smartsheet.types import IndexKeysDict, IndexesType
from simple_smartsheet.models.base import Schema, CoreSchema, Object, CoreObject
from simple_smartsheet.models.cell import Cell
from simple_smartsheet.models.column import Column, ColumnSchema, ColumnType
from simple_smartsheet.models.row import Row, RowSchema, _RowBase
if TYPE_CHECKING:
try:
import pandas as pd
except ImportError:
pass
logger = logging.getLogger(__name__)
class UserSettingsSchema(Schema):
critical_path_enabled = fields.Bool(data_key="criticalPathEnabled")
display_summary_tasks = fields.Bool(data_key="displaySummaryTasks")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserSettings(Object):
critical_path_enabled: bool
display_summary_tasks: bool
class UserPermissionsSchema(Schema):
summary_permissions = fields.Str(data_key="summaryPermissions")
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class UserPermissions(Object):
summary_permissions: str
class WorkspaceSchema(Schema):
id = fields.Int()
name = fields.Str()
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Workspace(Object):
id: int
name: str
class SheetSchema(CoreSchema):
"""Marshmallow Schema for Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
"""
id = fields.Int()
name = fields.Str()
access_level = fields.Str(data_key="accessLevel")
permalink = fields.Str()
favorite = fields.Bool()
created_at = fields.DateTime(data_key="createdAt")
modified_at = fields.DateTime(data_key="modifiedAt")
version = fields.Int()
total_row_count = fields.Int(data_key="totalRowCount")
effective_attachment_options = fields.List(
fields.Str(), data_key="effectiveAttachmentOptions"
)
gantt_enabled = fields.Bool(data_key="ganttEnabled")
read_only = fields.Bool(data_key="readOnly")
dependencies_enabled = fields.Bool(data_key="dependenciesEnabled")
resource_management_enabled = fields.Bool(data_key="resourceManagementEnabled")
cell_image_upload_enabled = fields.Bool(data_key="cellImageUploadEnabled")
user_settings = fields.Nested(UserSettingsSchema, data_key="userSettings")
user_permissions = fields.Nested(UserPermissionsSchema, data_key="userPermissions")
has_summary_fields = fields.Bool(data_key="hasSummaryFields")
is_multi_picklist_enabled = fields.Bool(data_key="isMultiPicklistEnabled")
columns = fields.List(fields.Nested(ColumnSchema))
rows = fields.List(fields.Nested(RowSchema))
workspace = fields.Nested(WorkspaceSchema)
class Meta:
unknown = utils.get_unknown_field_handling(config.STRICT_VALIDATION)
ordered = True
@pre_load
def update_context(self, data, many: bool, **kwargs):
self.context["column_id_to_type"] = {}
return data
RowT = TypeVar("RowT", bound=_RowBase[Any])
ColumnT = TypeVar("ColumnT", bound=Column)
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class _SheetBase(CoreObject, Generic[RowT, ColumnT]):
"""Represents Smartsheet Sheet object
Additional details about fields can be found here:
http://smartsheet-platform.github.io/api-docs/#sheets
Extra attributes:
indexes: contains all built indices
"""
name: str
id: Optional[int] = None
access_level: Optional[str] = None
permalink: Optional[str] = None
favorite: Optional[bool] = None
created_at: Optional[datetime] = None
modified_at: Optional[datetime] = None
version: Optional[int] = None
total_row_count: Optional[int] = None
effective_attachment_options: List[str] = attr.Factory(list)
gantt_enabled: Optional[bool] = None
read_only: Optional[bool] = None
dependencies_enabled: Optional[bool] = None
resource_management_enabled: Optional[bool] = None
cell_image_upload_enabled: Optional[bool] = None
user_settings: Optional[UserSettings] = None
user_permissions: Optional[UserPermissions] = None
has_summary_fields: Optional[bool] = None
is_multi_picklist_enabled: Optional[bool] = None
columns: List[ColumnT] = attr.Factory(list)
rows: List[RowT] = attr.Factory(list)
workspace: Optional[Workspace] = None
_row_num_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_row_id_to_row: Dict[int, RowT] = attr.ib(attr.Factory(dict), init=False)
_column_title_to_column: Dict[str, ColumnT] = attr.ib(
attr.Factory(dict), init=False
)
_column_id_to_column: Dict[int, ColumnT] = attr.ib(attr.Factory(dict), init=False)
indexes: IndexesType = attr.ib(attr.Factory(dict), init=False)
_schema: ClassVar[Type[SheetSchema]] = SheetSchema
def __attrs_post_init__(self) -> None:
self._update_column_lookup()
self._update_row_cell_lookup()
def _update_column_lookup(self) -> None:
self._column_title_to_column.clear()
self._column_id_to_column.clear()
for column in self.columns:
column_id = column._id
if column_id is None:
continue
self._column_id_to_column[column_id] = column
column_title = column.title
if column_title is None:
continue
if column_title in self._column_title_to_column:
logger.info(
"Column with the title %s is already present in the index",
column_title,
)
self._column_title_to_column[column_title] = column
def _update_row_cell_lookup(self) -> None:
self._row_num_to_row.clear()
self._row_id_to_row.clear()
for row in self.rows:
if row.num:
self._row_num_to_row[row.num] = row
if row.id:
self._row_id_to_row[row.id] = row
row._update_cell_lookup(self)
def build_index(self, indexes: List[IndexKeysDict]) -> None:
for index in indexes:
columns = index["columns"]
unique = index["unique"]
self.indexes[columns] = {"index": {}, "unique": unique}
for row in self.rows:
row._update_index(self)
def get_row(
self,
row_num: Optional[int] = None,
row_id: Optional[int] = None,
filter: Optional[Dict[str, Any]] = None,
) -> Optional[RowT]:
"""Returns Row object by row number or ID
Either row_num or row_id must be provided
Args:
row_num: row number
row_id: row id
filter: a dictionary with column title to value
mappings in the same order as index was built. Index must be unique.
Returns:
Row object
"""
if row_num is not None:
return self._row_num_to_row.get(row_num)
elif row_id is not None:
return self._row_id_to_row.get(row_id)
elif filter is not None:
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if not unique:
raise exceptions.SmartsheetIndexNotUnique(
f"Index {columns} is non-unique and lookup will potentially "
"return multiple rows, use get_rows method instead"
)
index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
return index[query]
else:
raise ValueError("Either row_num or row_id argument should be provided")
def get_rows(self, filter: Dict[str, Any]) -> List[RowT]:
"""Returns Row objects by index query
Args:
filter: a dictionary or ordered dictionary with column title to value
mappings in the same order as index was built. Index must be non-unique.
Returns:
Row object
"""
columns, query = zip(*sorted(filter.items()))
index_dict = self.indexes.get(columns)
if index_dict is None:
raise exceptions.SmartsheetIndexNotFound(
f"Index {columns} is not found, "
f"build it first with build_index method"
)
unique = index_dict["unique"]
if unique:
unique_index = cast(Dict[Tuple[Any, ...], RowT], index_dict["index"])
result = unique_index.get(query)
if result is not None:
return [result]
else:
return []
else:
non_unique_index = cast(
Dict[Tuple[Any, ...], List[RowT]], index_dict["index"]
)
return non_unique_index.get(query, [])
def get_column(
self, column_title: Optional[str] = None, column_id: Optional[int] = None
) -> ColumnT:
"""Returns Column object by column title or ID
Either column_title or column_id must be provided
Args:
column_title: column title (case-sensitive)
column_id: column id
Returns:
Column object
"""
if column_title is not None:
return self._column_title_to_column[column_title]
elif column_id is not None:
return self._column_id_to_column[column_id]
else:
raise ValueError(
"Either column_title or column_id argument should be provided"
)
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
@attr.s(auto_attribs=True, repr=False, kw_only=True)
class Sheet(_SheetBase[Row, Column]):
columns: List[Column] = cast(List[Column], attr.Factory(list))
rows: List[Row] = attr.Factory(list)
def make_cell(self, column_title: str, field_value: Any) -> Cell:
|
def make_cells(self, fields: Dict[str, Any]) -> List[Cell]:
"""Create a list of Cell objects from dictionary
Args:
fields: dictionary where key is a column title and value is a cell value
Returns:
list of Cell objects
"""
result: List[Cell] = []
for column_title, field_value in fields.items():
result.append(self.make_cell(column_title, field_value))
return result
def as_list(self) -> List[Dict[str, Union[float, str, datetime, None]]]:
"""Returns a list of dictionaries with column titles and cell values"""
return [row.as_dict() for row in self.rows]
def as_dataframe(self) -> "pd.DataFrame":
"""Return the sheet as pandas DataFrame
Columns will includes row id, row number and all columns from the sheet
Pandas must be installed either separately or as extras:
`pip install simple-smartsheet[pandas]`
"""
import pandas as pd
df = pd.DataFrame([row.as_series() for row in self.rows])
return df
| """Creates a Cell object for an existing column
Args:
column_title: title of an existing column
field_value: value of the cell
Returns:
Cell object
"""
column = self.get_column(column_title)
if column is None:
raise ValueError(
"A column with the title %s does not exist in this sheet", column_title
)
if column.type == ColumnType.MULTI_PICKLIST:
if not column.id:
raise ValueError(f"Column {column!r} does not have ID")
cell = Cell.create_multi_picklist(column_id=column.id, values=field_value)
else:
cell = Cell(column_id=column.id, value=field_value)
return cell | identifier_body |
lisplike.rs | extern mod std;
use std::hashmap::HashMap;
use std::to_str::ToStr;
use std::rc::Rc;
use std::io::stdio::{print, println};
use sexpr;
// A very simple LISP-like language
// Globally scoped, no closures
/// Our value types
#[deriving(Clone)]
pub enum LispValue {
List(~[LispValue]),
Atom(~str),
Str(~str),
Num(f64),
Fn(~[~str], ~sexpr::Value), // args, body
BIF(~str, int, ~[~str], fn(Rc<HashMap<~str, ~LispValue>>, ~[~LispValue])->~LispValue) // built-in function (args, closure)
}
// XXX: this is ugly but it won't automatically derive Eq because of the extern fn
impl Eq for LispValue {
fn eq(&self, other: &LispValue) -> bool {
match (self.clone(), other.clone()) {
(BIF(ref x, _, _, _), BIF(ref y, _, _, _)) if *x == *y => true,
(Str(ref x), Str(ref y)) if *x == *y => true,
(Num(ref x), Num(ref y)) if *x == *y => true,
(Atom(ref x), Atom(ref y)) if *x == *y => true,
(List(ref x), List(ref y)) if *x == *y => true,
(Fn(ref x, ref x2), Fn(ref y, ref y2)) if *x == *y && *x2 == *y2 => true,
_ => false
}
}
}
impl LispValue {
/// Coerces this Lisp value to a native boolean. Empty lists (nil) are falsey,
/// everything else is truthy.
fn as_bool(&self) -> bool {
match *self {
List([]) => false, // nil
_ => true
}
}
}
impl ToStr for LispValue {
fn to_str(&self) -> ~str {
match *self {
Atom(ref s) => s.clone(),
Str(ref s) => s.clone(),
Num(ref f) => f.to_str(),
Fn(ref args, _) => format!("<fn({:u})>", args.len()),
BIF(ref name, ref arity, _, _) => format!("<fn {:s}({:i})>", name.clone(), *arity),
List(ref v) => {
let values: ~[~str] = v.iter().map(|x: &LispValue| x.to_str()).collect();
format!("({:s})", values.connect(" "))
}
}
}
}
fn from_sexpr(sexpr: &sexpr::Value) -> ~LispValue {
match *sexpr {
sexpr::List(ref v) => ~List(v.map(|x| *from_sexpr(x))),
sexpr::Num(v) => ~Num(v),
sexpr::Str(ref v) => ~Str(v.clone()),
sexpr::Atom(ref v) => ~Atom(v.clone())
}
}
fn to_sexpr(value: &LispValue) -> sexpr::Value {
match *value {
Num(ref v) => sexpr::Num(v.clone()),
Str(ref s) => sexpr::Str(s.clone()),
Atom(ref s) => sexpr::Atom(s.clone()),
List(ref v) => sexpr::List(v.iter().map(to_sexpr).to_owned_vec()),
Fn(..) => fail!("can't convert fn to an s-expression"),
BIF(..) => fail!("can't convert BIF to an s-expression"),
}
}
/// The type of the global symbol table (string to a value mapping).
type SymbolTable = HashMap<~str, ~LispValue>;
/// Returns a value representing the empty list
#[inline]
pub fn nil() -> ~LispValue {
~List(~[])
}
/// Creates a new symbol table and returns it
pub fn new_symt() -> SymbolTable {
HashMap::new()
}
/// Binds a symbol in the symbol table. Replaces if it already exists.
pub fn bind(symt: Rc<SymbolTable>, name: ~str, value: ~LispValue) {
symt.borrow().insert(name, value);
}
/// Look up a symbol in the symbol table. Fails if not found.
pub fn lookup(symt: Rc<SymbolTable>, name: ~str) -> ~LispValue {
match symt.borrow().find(&name) {
Some(v) => v.clone(),
None => fail!("couldn't find symbol: {}", name)
}
}
/// Identity function
fn id_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue { v[0] }
fn cons_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v {
[~a, ~b, ..] => ~List(~[a, b]),
_ => fail!("cons: requires two arguments")
}
}
fn car_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~List(v_) => ~v_[0],
_ => fail!("car: need a list")
}
}
fn cdr_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~List(v_) => ~v_[1],
_ => fail!("cdr: need a list")
}
}
// Print function
fn print_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~Str(s) => println(s),
_ => fail!("print takes an str")
}
nil()
}
// There are several bugs in the macroing system (#8853, or, #8852 and #8851)
// that prevent us from making these functions macros. In addition, we can't
// return a closure because the type needs to be `extern "Rust" fn`, which
// closures aren't. So we have a little bit of code duplication.
fn plus_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("+ needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let add = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() + y.clone()),
(Str(ref x), &~Str(ref y)) => ~Str(x.clone() + y.clone()),
_ => fail!("invalid operands to +")
}
};
v.iter().skip(1).fold(v[0].clone(), add)
}
fn minus_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("- needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let sub = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() - y.clone()),
_ => fail!("invalid operands to -")
}
};
v.iter().skip(1).fold(v[0].clone(), sub)
}
fn mul_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("* needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let mul = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() * y.clone()),
_ => fail!("invalid operands to *")
}
};
v.iter().skip(1).fold(v[0].clone(), mul)
}
fn div_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("/ needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let div = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() / y.clone()),
_ => fail!("invalid operands to /")
}
};
v.iter().skip(1).fold(v[0].clone(), div)
}
fn equals_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v {
[a, b] => {
if a == b { ~Num(1.0) }
else { nil() }
}
_ => fail!("invalid operands to =")
}
}
/// Initializes standard library functions
pub fn init_std(symt: Rc<SymbolTable>) {
bind(symt, ~"id", ~BIF(~"id", 1, ~[~"x"], id_));
bind(symt, ~"print", ~BIF(~"print", 1, ~[~"msg"], print_));
bind(symt, ~"cons", ~BIF(~"cons", 2, ~[~"x", ~"y"], cons_));
bind(symt, ~"car", ~BIF(~"car", 1, ~[~"x"], car_));
bind(symt, ~"cdr", ~BIF(~"cdr", 1, ~[~"x"], cdr_));
bind(symt, ~"+", ~BIF(~"+", -1, ~[], plus_));
bind(symt, ~"*", ~BIF(~"*", -1, ~[], mul_));
bind(symt, ~"-", ~BIF(~"-", -1, ~[], minus_));
bind(symt, ~"/", ~BIF(~"/", -1, ~[], div_));
bind(symt, ~"=", ~BIF(~"=", 2, ~[~"x", ~"y"], equals_));
bind(symt, ~"true", ~Num(1.0));
bind(symt, ~"nil", nil());
}
fn apply(symt: Rc<SymbolTable>, f: ~LispValue, args: ~[~LispValue]) -> ~LispValue {
match *f {
BIF(name, arity, fnargs, bif) => {
// apply built-in function
if arity > 0 && fnargs.len() as int != arity {
fail!("function '{:s}' requires {:d} arguments, but it received {:u} arguments",
name, arity, args.len())
}
bif(symt, args)
}
Fn(fnargs, body) => {
// apply a defined function
if args.len() != fnargs.len() {
fail!("function requires {:u} arguments, but it received {:u} arguments",
fnargs.len(), args.len())
}
// bind its arguments in the environemnt and evaluate its body
for (name,value) in fnargs.iter().zip(args.iter()) {
bind(symt, name.clone(), value.clone());
}
eval(symt, *body)
}
v => fail!("") //fail!("apply: need function, received {}", v)
}
}
/// Evaluates an s-expression and returns a value.
pub fn eval(symt: Rc<SymbolTable>, input: sexpr::Value) -> ~LispValue {
match input {
sexpr::List(v) => {
if(v.len() == 0) {
fail!("eval given empty list")
}
// evaluate a list as a function call
match v {
[sexpr::Atom(~"quote"), arg] => from_sexpr(&arg),
[sexpr::Atom(~"def"), name, value] => {
// bind a value to an identifier
let ident = match name {
sexpr::Atom(s) => s,
sexpr::Str(s) => s,
_ => fail!("def requires an atom or a string")
};
bind(symt, ident, eval(symt, value));
nil()
},
[sexpr::Atom(~"cond"), ..conds] => {
//let conds = conds.iter().map(|x: &sexpr::Value| from_sexpr(x));
for cond in conds.iter() {
match *cond {
sexpr::List([ref c, ref e]) => {
if eval(symt, c.clone()).as_bool() {
return eval(symt, e.clone())
}
}
_ => fail!("cond: need list of (condition expression)")
}
}
nil()
}
[sexpr::Atom(~"eval"), ..args] => {
// takes an argument, evaluates it (like a function does)
// and then uses that as an argument to eval().
// e.g. (= (eval (quote (+ 1 2))) 3)
assert!(args.len() == 1);
eval(symt, to_sexpr(eval(symt, args[0].clone())))
}
[sexpr::Atom(~"fn"), sexpr::List(args), body] => {
// construct a function
let args_ = args.iter().map(|x| {
match x {
&sexpr::Atom(ref s) => s.clone(),
_ => fail!("fn: arguments need to be atoms")
}
}).collect();
~Fn(args_, ~body)
}
[ref fnval, ..args] => {
let f = eval(symt, fnval.clone());
let xargs = args.map(|x| eval(symt, x.clone())); // eval'd args
apply(symt, f, xargs)
}
_ => fail!("eval: requires a variable or an application"),
}
}
sexpr::Atom(v) => {
// variable
lookup(symt, v)
}
_ => from_sexpr(&input) // return non-list values as they are
}
}
#[cfg(test)]
mod test {
use super::{SymbolTable, eval, init_std, new_symt, nil, Num, Str, Fn, List, Atom};
use sexpr;
use sexpr::from_str;
use std::rc::Rc;
fn read(input: &str) -> sexpr::Value {
from_str(input).unwrap()
}
#[test]
fn test_eval() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("123")), ~Num(123.0));
assert_eq!(eval(symt, read("(id 123)")), ~Num(123.0));
assert_eq!(eval(symt, read("(id (id (id 123)))")), ~Num(123.0));
// should fail: assert_eq!(eval(&mut symt, read("(1 2 3)")), ~List(~[Num(1.0), Num(2.0), Num(3.0)]));
}
#[test]
fn test_str() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(id \"hi\")")), ~Str(~"hi"));
assert_eq!(eval(symt, read("(car (cons \"a\" \"b\"))")), ~Str(~"a"));
// string concatenation
assert_eq!(eval(symt, read("(+ \"hi\" \" there\")")), ~Str(~"hi there"));
assert_eq!(eval(symt, read("(+ \"hi\" \" there\" \" variadic\")")), ~Str(~"hi there variadic"));
}
#[test]
fn test_cons() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cons 1 2)")), ~List(~[Num(1.0), Num(2.0)]));
assert_eq!(eval(symt, read("(cons 1 (cons 2 3))")), ~List(~[Num(1.0),
List(~[Num(2.0), Num(3.0)])]));
}
#[test]
fn test_car() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(car (cons 1 2))")), ~Num(1.0));
}
#[test]
fn test_cdr() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cdr (cons 1 2))")), ~Num(2.0));
assert_eq!(eval(symt, read("(cdr (cons 1 (cons 2 3)))")), ~List(~[Num(2.0), Num(3.0)]));
}
#[test]
fn test_arithmetic() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(+ 1 3)")), ~Num(4.0));
assert_eq!(eval(symt, read("(+ 1.5 3)")), ~Num(4.5));
assert_eq!(eval(symt, read("(+ 5 -3)")), ~Num(2.0));
assert_eq!(eval(symt, read("(- 5 3)")), ~Num(2.0));
assert_eq!(eval(symt, read("(- 3 5)")), ~Num(-2.0));
assert_eq!(eval(symt, read("(- 5 -3)")), ~Num(8.0));
assert_eq!(eval(symt, read("(* 2 5)")), ~Num(10.0));
assert_eq!(eval(symt, read("(* 2 -5)")), ~Num(-10.0));
assert_eq!(eval(symt, read("(/ 10 2)")), ~Num(5.0));
assert_eq!(eval(symt, read("(/ 10 -2)")), ~Num(-5.0));
assert_eq!(eval(symt, read("(+ 6 (+ 1 3))")), ~Num(10.0));
assert_eq!(eval(symt, read("(- 6 (- 3 2))")), ~Num(5.0));
assert_eq!(eval(symt, read("(+ 1 (+ 2 3) 4)")), ~Num(10.0));
assert_eq!(eval(symt, read("(+ 5)")), ~Num(5.0));
assert_eq!(eval(symt, read("(+ -5)")), ~Num(-5.0));
}
#[test]
fn test_quote() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(quote 5)")), ~Num(5.0));
assert_eq!(eval(symt, read("(quote x)")), ~Atom(~"x"));
assert_eq!(eval(symt, read("(quote (1 2 3))")), ~List(~[Num(1.0), Num(2.0), Num(3.0)]));
assert_eq!(eval(symt, read("(quote (x y z))")), ~List(~[Atom(~"x"), Atom(~"y"), Atom(~"z")]))
assert_eq!(eval(symt, read("(quote (quote x))")), ~List(~[Atom(~"quote"), Atom(~"x")]));
assert_eq!(eval(symt, read("(+ (quote 1) 2)")), ~Num(3.0));
//assert_eq!(eval(symt, read("(quote 1 2 3 4 5)")), ~Num(5.0));
}
#[test]
fn test_def() {
let mut symt = Rc::new(new_symt());
init_std(symt);
eval(symt, read("(def x 5)"));
eval(symt, read("(def y 10)"));
assert_eq!(eval(symt, read("x")), ~Num(5.0));
assert_eq!(eval(symt, read("y")), ~Num(10.0));
assert_eq!(eval(symt, read("(+ x y)")), ~Num(15.0));
}
#[test]
fn test_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(fn () ())")), ~Fn(~[], ~sexpr::List(~[])));
assert_eq!(eval(symt, read("(fn (x) (x))")), ~Fn(~[~"x"], ~sexpr::List(~[sexpr::Atom(~"x")])));
eval(symt, read("(def f (fn (x) (+ 1 x)))"));
assert_eq!(eval(symt, read("f")), ~Fn(~[~"x"],
~sexpr::List(~[sexpr::Atom(~"+"), sexpr::Num(1.0), sexpr::Atom(~"x")])));
assert_eq!(eval(symt, read("(f 5)")), ~Num(6.0));
}
#[test]
fn test_apply_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("((fn () 0))")), ~Num(0.0));
assert_eq!(eval(symt, read("((fn (x) x) 5)")), ~Num(5.0));
}
#[test]
fn test_cond() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cond (true 2) (nil 3))")), ~Num(2.0));
assert_eq!(eval(symt, read("(cond (nil 2) (true 3))")), ~Num(3.0));
assert_eq!(eval(symt, read("(cond (nil 2) (true 3) (true 4))")), ~Num(3.0));
}
#[test]
fn test_equals() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(= 1 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= 1.0 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= 1 2)")), nil());
assert_eq!(eval(symt, read("(= true 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil (quote ()))")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil nil)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil true)")), nil());
assert_eq!(eval(symt, read("(= \"a\" \"a\")")), ~Num(1.0));
assert_eq!(eval(symt, read("(= \"a\" \"b\")")), nil());
assert_eq!(eval(symt, read("(= (quote (1 2 3)) (quote (1 2 3)))")), ~Num(1.0));
}
#[test]
fn test_factorial() |
#[test]
fn test_eval_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(eval 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(eval \"hi\")")), ~Str(~"hi"));
assert_eq!(eval(symt, read("(eval (quote (+ 1 2)))")), ~Num(3.0));
assert_eq!(eval(symt, read("(eval (quote ( (fn () 0) )))")), ~Num(0.0));
}
}
#[ignore(test)]
#[main]
fn main() {
// A simple REPL
let mut stdinReader = std::io::buffered::BufferedReader::new(std::io::stdin());
let mut symt = Rc::new(new_symt());
init_std(symt);
loop {
print("> ");
let line = stdinReader.read_line();
match line {
Some(~".q") => break,
Some(~".newsym") => {
// use a fresh symbol table
symt = Rc::new(new_symt());
init_std(symt);
println("ok");
}
Some(line) => {
match sexpr::from_str(line) {
Some(sexpr) => println(eval(symt, sexpr).to_str()),
None => println("syntax error")
}
}
None => ()
}
}
} | {
let mut symt = Rc::new(new_symt());
init_std(symt);
eval(symt, read("(def fac (fn (n) (cond ((= n 0) 1) (true (* n (fac (- n 1)))))))"));
assert_eq!(eval(symt, read("(fac 10)")), ~Num(3628800.0));
} | identifier_body |
lisplike.rs | extern mod std;
use std::hashmap::HashMap;
use std::to_str::ToStr;
use std::rc::Rc;
use std::io::stdio::{print, println};
use sexpr;
// A very simple LISP-like language
// Globally scoped, no closures
/// Our value types
#[deriving(Clone)]
pub enum LispValue {
List(~[LispValue]),
Atom(~str),
Str(~str),
Num(f64),
Fn(~[~str], ~sexpr::Value), // args, body
BIF(~str, int, ~[~str], fn(Rc<HashMap<~str, ~LispValue>>, ~[~LispValue])->~LispValue) // built-in function (args, closure)
}
// XXX: this is ugly but it won't automatically derive Eq because of the extern fn
impl Eq for LispValue {
fn eq(&self, other: &LispValue) -> bool {
match (self.clone(), other.clone()) {
(BIF(ref x, _, _, _), BIF(ref y, _, _, _)) if *x == *y => true,
(Str(ref x), Str(ref y)) if *x == *y => true,
(Num(ref x), Num(ref y)) if *x == *y => true,
(Atom(ref x), Atom(ref y)) if *x == *y => true,
(List(ref x), List(ref y)) if *x == *y => true,
(Fn(ref x, ref x2), Fn(ref y, ref y2)) if *x == *y && *x2 == *y2 => true,
_ => false
}
}
}
impl LispValue {
/// Coerces this Lisp value to a native boolean. Empty lists (nil) are falsey,
/// everything else is truthy.
fn as_bool(&self) -> bool {
match *self {
List([]) => false, // nil
_ => true
}
}
}
impl ToStr for LispValue {
fn to_str(&self) -> ~str {
match *self {
Atom(ref s) => s.clone(),
Str(ref s) => s.clone(),
Num(ref f) => f.to_str(),
Fn(ref args, _) => format!("<fn({:u})>", args.len()),
BIF(ref name, ref arity, _, _) => format!("<fn {:s}({:i})>", name.clone(), *arity),
List(ref v) => {
let values: ~[~str] = v.iter().map(|x: &LispValue| x.to_str()).collect();
format!("({:s})", values.connect(" "))
}
}
}
}
fn from_sexpr(sexpr: &sexpr::Value) -> ~LispValue {
match *sexpr {
sexpr::List(ref v) => ~List(v.map(|x| *from_sexpr(x))),
sexpr::Num(v) => ~Num(v),
sexpr::Str(ref v) => ~Str(v.clone()),
sexpr::Atom(ref v) => ~Atom(v.clone())
}
}
fn to_sexpr(value: &LispValue) -> sexpr::Value {
match *value {
Num(ref v) => sexpr::Num(v.clone()),
Str(ref s) => sexpr::Str(s.clone()),
Atom(ref s) => sexpr::Atom(s.clone()),
List(ref v) => sexpr::List(v.iter().map(to_sexpr).to_owned_vec()),
Fn(..) => fail!("can't convert fn to an s-expression"),
BIF(..) => fail!("can't convert BIF to an s-expression"),
}
}
/// The type of the global symbol table (string to a value mapping).
type SymbolTable = HashMap<~str, ~LispValue>;
/// Returns a value representing the empty list
#[inline]
pub fn nil() -> ~LispValue {
~List(~[])
}
/// Creates a new symbol table and returns it
pub fn | () -> SymbolTable {
HashMap::new()
}
/// Binds a symbol in the symbol table. Replaces if it already exists.
pub fn bind(symt: Rc<SymbolTable>, name: ~str, value: ~LispValue) {
symt.borrow().insert(name, value);
}
/// Look up a symbol in the symbol table. Fails if not found.
pub fn lookup(symt: Rc<SymbolTable>, name: ~str) -> ~LispValue {
match symt.borrow().find(&name) {
Some(v) => v.clone(),
None => fail!("couldn't find symbol: {}", name)
}
}
/// Identity function
fn id_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue { v[0] }
fn cons_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v {
[~a, ~b, ..] => ~List(~[a, b]),
_ => fail!("cons: requires two arguments")
}
}
fn car_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~List(v_) => ~v_[0],
_ => fail!("car: need a list")
}
}
fn cdr_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~List(v_) => ~v_[1],
_ => fail!("cdr: need a list")
}
}
// Print function
fn print_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~Str(s) => println(s),
_ => fail!("print takes an str")
}
nil()
}
// There are several bugs in the macroing system (#8853, or, #8852 and #8851)
// that prevent us from making these functions macros. In addition, we can't
// return a closure because the type needs to be `extern "Rust" fn`, which
// closures aren't. So we have a little bit of code duplication.
fn plus_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("+ needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let add = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() + y.clone()),
(Str(ref x), &~Str(ref y)) => ~Str(x.clone() + y.clone()),
_ => fail!("invalid operands to +")
}
};
v.iter().skip(1).fold(v[0].clone(), add)
}
fn minus_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("- needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let sub = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() - y.clone()),
_ => fail!("invalid operands to -")
}
};
v.iter().skip(1).fold(v[0].clone(), sub)
}
fn mul_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("* needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let mul = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() * y.clone()),
_ => fail!("invalid operands to *")
}
};
v.iter().skip(1).fold(v[0].clone(), mul)
}
fn div_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("/ needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let div = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() / y.clone()),
_ => fail!("invalid operands to /")
}
};
v.iter().skip(1).fold(v[0].clone(), div)
}
fn equals_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v {
[a, b] => {
if a == b { ~Num(1.0) }
else { nil() }
}
_ => fail!("invalid operands to =")
}
}
/// Initializes standard library functions
pub fn init_std(symt: Rc<SymbolTable>) {
bind(symt, ~"id", ~BIF(~"id", 1, ~[~"x"], id_));
bind(symt, ~"print", ~BIF(~"print", 1, ~[~"msg"], print_));
bind(symt, ~"cons", ~BIF(~"cons", 2, ~[~"x", ~"y"], cons_));
bind(symt, ~"car", ~BIF(~"car", 1, ~[~"x"], car_));
bind(symt, ~"cdr", ~BIF(~"cdr", 1, ~[~"x"], cdr_));
bind(symt, ~"+", ~BIF(~"+", -1, ~[], plus_));
bind(symt, ~"*", ~BIF(~"*", -1, ~[], mul_));
bind(symt, ~"-", ~BIF(~"-", -1, ~[], minus_));
bind(symt, ~"/", ~BIF(~"/", -1, ~[], div_));
bind(symt, ~"=", ~BIF(~"=", 2, ~[~"x", ~"y"], equals_));
bind(symt, ~"true", ~Num(1.0));
bind(symt, ~"nil", nil());
}
fn apply(symt: Rc<SymbolTable>, f: ~LispValue, args: ~[~LispValue]) -> ~LispValue {
match *f {
BIF(name, arity, fnargs, bif) => {
// apply built-in function
if arity > 0 && fnargs.len() as int != arity {
fail!("function '{:s}' requires {:d} arguments, but it received {:u} arguments",
name, arity, args.len())
}
bif(symt, args)
}
Fn(fnargs, body) => {
// apply a defined function
if args.len() != fnargs.len() {
fail!("function requires {:u} arguments, but it received {:u} arguments",
fnargs.len(), args.len())
}
// bind its arguments in the environemnt and evaluate its body
for (name,value) in fnargs.iter().zip(args.iter()) {
bind(symt, name.clone(), value.clone());
}
eval(symt, *body)
}
v => fail!("") //fail!("apply: need function, received {}", v)
}
}
/// Evaluates an s-expression and returns a value.
pub fn eval(symt: Rc<SymbolTable>, input: sexpr::Value) -> ~LispValue {
match input {
sexpr::List(v) => {
if(v.len() == 0) {
fail!("eval given empty list")
}
// evaluate a list as a function call
match v {
[sexpr::Atom(~"quote"), arg] => from_sexpr(&arg),
[sexpr::Atom(~"def"), name, value] => {
// bind a value to an identifier
let ident = match name {
sexpr::Atom(s) => s,
sexpr::Str(s) => s,
_ => fail!("def requires an atom or a string")
};
bind(symt, ident, eval(symt, value));
nil()
},
[sexpr::Atom(~"cond"), ..conds] => {
//let conds = conds.iter().map(|x: &sexpr::Value| from_sexpr(x));
for cond in conds.iter() {
match *cond {
sexpr::List([ref c, ref e]) => {
if eval(symt, c.clone()).as_bool() {
return eval(symt, e.clone())
}
}
_ => fail!("cond: need list of (condition expression)")
}
}
nil()
}
[sexpr::Atom(~"eval"), ..args] => {
// takes an argument, evaluates it (like a function does)
// and then uses that as an argument to eval().
// e.g. (= (eval (quote (+ 1 2))) 3)
assert!(args.len() == 1);
eval(symt, to_sexpr(eval(symt, args[0].clone())))
}
[sexpr::Atom(~"fn"), sexpr::List(args), body] => {
// construct a function
let args_ = args.iter().map(|x| {
match x {
&sexpr::Atom(ref s) => s.clone(),
_ => fail!("fn: arguments need to be atoms")
}
}).collect();
~Fn(args_, ~body)
}
[ref fnval, ..args] => {
let f = eval(symt, fnval.clone());
let xargs = args.map(|x| eval(symt, x.clone())); // eval'd args
apply(symt, f, xargs)
}
_ => fail!("eval: requires a variable or an application"),
}
}
sexpr::Atom(v) => {
// variable
lookup(symt, v)
}
_ => from_sexpr(&input) // return non-list values as they are
}
}
#[cfg(test)]
mod test {
use super::{SymbolTable, eval, init_std, new_symt, nil, Num, Str, Fn, List, Atom};
use sexpr;
use sexpr::from_str;
use std::rc::Rc;
fn read(input: &str) -> sexpr::Value {
from_str(input).unwrap()
}
#[test]
fn test_eval() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("123")), ~Num(123.0));
assert_eq!(eval(symt, read("(id 123)")), ~Num(123.0));
assert_eq!(eval(symt, read("(id (id (id 123)))")), ~Num(123.0));
// should fail: assert_eq!(eval(&mut symt, read("(1 2 3)")), ~List(~[Num(1.0), Num(2.0), Num(3.0)]));
}
#[test]
fn test_str() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(id \"hi\")")), ~Str(~"hi"));
assert_eq!(eval(symt, read("(car (cons \"a\" \"b\"))")), ~Str(~"a"));
// string concatenation
assert_eq!(eval(symt, read("(+ \"hi\" \" there\")")), ~Str(~"hi there"));
assert_eq!(eval(symt, read("(+ \"hi\" \" there\" \" variadic\")")), ~Str(~"hi there variadic"));
}
#[test]
fn test_cons() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cons 1 2)")), ~List(~[Num(1.0), Num(2.0)]));
assert_eq!(eval(symt, read("(cons 1 (cons 2 3))")), ~List(~[Num(1.0),
List(~[Num(2.0), Num(3.0)])]));
}
#[test]
fn test_car() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(car (cons 1 2))")), ~Num(1.0));
}
#[test]
fn test_cdr() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cdr (cons 1 2))")), ~Num(2.0));
assert_eq!(eval(symt, read("(cdr (cons 1 (cons 2 3)))")), ~List(~[Num(2.0), Num(3.0)]));
}
#[test]
fn test_arithmetic() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(+ 1 3)")), ~Num(4.0));
assert_eq!(eval(symt, read("(+ 1.5 3)")), ~Num(4.5));
assert_eq!(eval(symt, read("(+ 5 -3)")), ~Num(2.0));
assert_eq!(eval(symt, read("(- 5 3)")), ~Num(2.0));
assert_eq!(eval(symt, read("(- 3 5)")), ~Num(-2.0));
assert_eq!(eval(symt, read("(- 5 -3)")), ~Num(8.0));
assert_eq!(eval(symt, read("(* 2 5)")), ~Num(10.0));
assert_eq!(eval(symt, read("(* 2 -5)")), ~Num(-10.0));
assert_eq!(eval(symt, read("(/ 10 2)")), ~Num(5.0));
assert_eq!(eval(symt, read("(/ 10 -2)")), ~Num(-5.0));
assert_eq!(eval(symt, read("(+ 6 (+ 1 3))")), ~Num(10.0));
assert_eq!(eval(symt, read("(- 6 (- 3 2))")), ~Num(5.0));
assert_eq!(eval(symt, read("(+ 1 (+ 2 3) 4)")), ~Num(10.0));
assert_eq!(eval(symt, read("(+ 5)")), ~Num(5.0));
assert_eq!(eval(symt, read("(+ -5)")), ~Num(-5.0));
}
#[test]
fn test_quote() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(quote 5)")), ~Num(5.0));
assert_eq!(eval(symt, read("(quote x)")), ~Atom(~"x"));
assert_eq!(eval(symt, read("(quote (1 2 3))")), ~List(~[Num(1.0), Num(2.0), Num(3.0)]));
assert_eq!(eval(symt, read("(quote (x y z))")), ~List(~[Atom(~"x"), Atom(~"y"), Atom(~"z")]))
assert_eq!(eval(symt, read("(quote (quote x))")), ~List(~[Atom(~"quote"), Atom(~"x")]));
assert_eq!(eval(symt, read("(+ (quote 1) 2)")), ~Num(3.0));
//assert_eq!(eval(symt, read("(quote 1 2 3 4 5)")), ~Num(5.0));
}
#[test]
fn test_def() {
let mut symt = Rc::new(new_symt());
init_std(symt);
eval(symt, read("(def x 5)"));
eval(symt, read("(def y 10)"));
assert_eq!(eval(symt, read("x")), ~Num(5.0));
assert_eq!(eval(symt, read("y")), ~Num(10.0));
assert_eq!(eval(symt, read("(+ x y)")), ~Num(15.0));
}
#[test]
fn test_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(fn () ())")), ~Fn(~[], ~sexpr::List(~[])));
assert_eq!(eval(symt, read("(fn (x) (x))")), ~Fn(~[~"x"], ~sexpr::List(~[sexpr::Atom(~"x")])));
eval(symt, read("(def f (fn (x) (+ 1 x)))"));
assert_eq!(eval(symt, read("f")), ~Fn(~[~"x"],
~sexpr::List(~[sexpr::Atom(~"+"), sexpr::Num(1.0), sexpr::Atom(~"x")])));
assert_eq!(eval(symt, read("(f 5)")), ~Num(6.0));
}
#[test]
fn test_apply_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("((fn () 0))")), ~Num(0.0));
assert_eq!(eval(symt, read("((fn (x) x) 5)")), ~Num(5.0));
}
#[test]
fn test_cond() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cond (true 2) (nil 3))")), ~Num(2.0));
assert_eq!(eval(symt, read("(cond (nil 2) (true 3))")), ~Num(3.0));
assert_eq!(eval(symt, read("(cond (nil 2) (true 3) (true 4))")), ~Num(3.0));
}
#[test]
fn test_equals() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(= 1 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= 1.0 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= 1 2)")), nil());
assert_eq!(eval(symt, read("(= true 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil (quote ()))")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil nil)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil true)")), nil());
assert_eq!(eval(symt, read("(= \"a\" \"a\")")), ~Num(1.0));
assert_eq!(eval(symt, read("(= \"a\" \"b\")")), nil());
assert_eq!(eval(symt, read("(= (quote (1 2 3)) (quote (1 2 3)))")), ~Num(1.0));
}
#[test]
fn test_factorial() {
let mut symt = Rc::new(new_symt());
init_std(symt);
eval(symt, read("(def fac (fn (n) (cond ((= n 0) 1) (true (* n (fac (- n 1)))))))"));
assert_eq!(eval(symt, read("(fac 10)")), ~Num(3628800.0));
}
#[test]
fn test_eval_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(eval 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(eval \"hi\")")), ~Str(~"hi"));
assert_eq!(eval(symt, read("(eval (quote (+ 1 2)))")), ~Num(3.0));
assert_eq!(eval(symt, read("(eval (quote ( (fn () 0) )))")), ~Num(0.0));
}
}
#[ignore(test)]
#[main]
fn main() {
// A simple REPL
let mut stdinReader = std::io::buffered::BufferedReader::new(std::io::stdin());
let mut symt = Rc::new(new_symt());
init_std(symt);
loop {
print("> ");
let line = stdinReader.read_line();
match line {
Some(~".q") => break,
Some(~".newsym") => {
// use a fresh symbol table
symt = Rc::new(new_symt());
init_std(symt);
println("ok");
}
Some(line) => {
match sexpr::from_str(line) {
Some(sexpr) => println(eval(symt, sexpr).to_str()),
None => println("syntax error")
}
}
None => ()
}
}
} | new_symt | identifier_name |
lisplike.rs | extern mod std;
use std::hashmap::HashMap;
use std::to_str::ToStr;
use std::rc::Rc;
use std::io::stdio::{print, println};
use sexpr;
// A very simple LISP-like language
// Globally scoped, no closures
/// Our value types
#[deriving(Clone)]
pub enum LispValue {
List(~[LispValue]),
Atom(~str),
Str(~str),
Num(f64),
Fn(~[~str], ~sexpr::Value), // args, body
BIF(~str, int, ~[~str], fn(Rc<HashMap<~str, ~LispValue>>, ~[~LispValue])->~LispValue) // built-in function (args, closure) |
// XXX: this is ugly but it won't automatically derive Eq because of the extern fn
impl Eq for LispValue {
fn eq(&self, other: &LispValue) -> bool {
match (self.clone(), other.clone()) {
(BIF(ref x, _, _, _), BIF(ref y, _, _, _)) if *x == *y => true,
(Str(ref x), Str(ref y)) if *x == *y => true,
(Num(ref x), Num(ref y)) if *x == *y => true,
(Atom(ref x), Atom(ref y)) if *x == *y => true,
(List(ref x), List(ref y)) if *x == *y => true,
(Fn(ref x, ref x2), Fn(ref y, ref y2)) if *x == *y && *x2 == *y2 => true,
_ => false
}
}
}
impl LispValue {
/// Coerces this Lisp value to a native boolean. Empty lists (nil) are falsey,
/// everything else is truthy.
fn as_bool(&self) -> bool {
match *self {
List([]) => false, // nil
_ => true
}
}
}
impl ToStr for LispValue {
fn to_str(&self) -> ~str {
match *self {
Atom(ref s) => s.clone(),
Str(ref s) => s.clone(),
Num(ref f) => f.to_str(),
Fn(ref args, _) => format!("<fn({:u})>", args.len()),
BIF(ref name, ref arity, _, _) => format!("<fn {:s}({:i})>", name.clone(), *arity),
List(ref v) => {
let values: ~[~str] = v.iter().map(|x: &LispValue| x.to_str()).collect();
format!("({:s})", values.connect(" "))
}
}
}
}
fn from_sexpr(sexpr: &sexpr::Value) -> ~LispValue {
match *sexpr {
sexpr::List(ref v) => ~List(v.map(|x| *from_sexpr(x))),
sexpr::Num(v) => ~Num(v),
sexpr::Str(ref v) => ~Str(v.clone()),
sexpr::Atom(ref v) => ~Atom(v.clone())
}
}
fn to_sexpr(value: &LispValue) -> sexpr::Value {
match *value {
Num(ref v) => sexpr::Num(v.clone()),
Str(ref s) => sexpr::Str(s.clone()),
Atom(ref s) => sexpr::Atom(s.clone()),
List(ref v) => sexpr::List(v.iter().map(to_sexpr).to_owned_vec()),
Fn(..) => fail!("can't convert fn to an s-expression"),
BIF(..) => fail!("can't convert BIF to an s-expression"),
}
}
/// The type of the global symbol table (string to a value mapping).
type SymbolTable = HashMap<~str, ~LispValue>;
/// Returns a value representing the empty list
#[inline]
pub fn nil() -> ~LispValue {
~List(~[])
}
/// Creates a new symbol table and returns it
pub fn new_symt() -> SymbolTable {
HashMap::new()
}
/// Binds a symbol in the symbol table. Replaces if it already exists.
pub fn bind(symt: Rc<SymbolTable>, name: ~str, value: ~LispValue) {
symt.borrow().insert(name, value);
}
/// Look up a symbol in the symbol table. Fails if not found.
pub fn lookup(symt: Rc<SymbolTable>, name: ~str) -> ~LispValue {
match symt.borrow().find(&name) {
Some(v) => v.clone(),
None => fail!("couldn't find symbol: {}", name)
}
}
/// Identity function
fn id_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue { v[0] }
fn cons_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v {
[~a, ~b, ..] => ~List(~[a, b]),
_ => fail!("cons: requires two arguments")
}
}
fn car_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~List(v_) => ~v_[0],
_ => fail!("car: need a list")
}
}
fn cdr_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~List(v_) => ~v_[1],
_ => fail!("cdr: need a list")
}
}
// Print function
fn print_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v[0] {
~Str(s) => println(s),
_ => fail!("print takes an str")
}
nil()
}
// There are several bugs in the macroing system (#8853, or, #8852 and #8851)
// that prevent us from making these functions macros. In addition, we can't
// return a closure because the type needs to be `extern "Rust" fn`, which
// closures aren't. So we have a little bit of code duplication.
fn plus_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("+ needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let add = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() + y.clone()),
(Str(ref x), &~Str(ref y)) => ~Str(x.clone() + y.clone()),
_ => fail!("invalid operands to +")
}
};
v.iter().skip(1).fold(v[0].clone(), add)
}
fn minus_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("- needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let sub = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() - y.clone()),
_ => fail!("invalid operands to -")
}
};
v.iter().skip(1).fold(v[0].clone(), sub)
}
fn mul_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("* needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let mul = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() * y.clone()),
_ => fail!("invalid operands to *")
}
};
v.iter().skip(1).fold(v[0].clone(), mul)
}
fn div_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
if(v.len() == 0) { fail!("/ needs operands"); }
else if(v.len() == 1) {
return v[0];
}
let div = |acc: ~LispValue, b: &~LispValue| {
match (*acc, b) {
(Num(ref x), &~Num(ref y)) => ~Num(x.clone() / y.clone()),
_ => fail!("invalid operands to /")
}
};
v.iter().skip(1).fold(v[0].clone(), div)
}
fn equals_(_symt: Rc<SymbolTable>, v: ~[~LispValue]) -> ~LispValue {
match v {
[a, b] => {
if a == b { ~Num(1.0) }
else { nil() }
}
_ => fail!("invalid operands to =")
}
}
/// Initializes standard library functions
pub fn init_std(symt: Rc<SymbolTable>) {
bind(symt, ~"id", ~BIF(~"id", 1, ~[~"x"], id_));
bind(symt, ~"print", ~BIF(~"print", 1, ~[~"msg"], print_));
bind(symt, ~"cons", ~BIF(~"cons", 2, ~[~"x", ~"y"], cons_));
bind(symt, ~"car", ~BIF(~"car", 1, ~[~"x"], car_));
bind(symt, ~"cdr", ~BIF(~"cdr", 1, ~[~"x"], cdr_));
bind(symt, ~"+", ~BIF(~"+", -1, ~[], plus_));
bind(symt, ~"*", ~BIF(~"*", -1, ~[], mul_));
bind(symt, ~"-", ~BIF(~"-", -1, ~[], minus_));
bind(symt, ~"/", ~BIF(~"/", -1, ~[], div_));
bind(symt, ~"=", ~BIF(~"=", 2, ~[~"x", ~"y"], equals_));
bind(symt, ~"true", ~Num(1.0));
bind(symt, ~"nil", nil());
}
fn apply(symt: Rc<SymbolTable>, f: ~LispValue, args: ~[~LispValue]) -> ~LispValue {
match *f {
BIF(name, arity, fnargs, bif) => {
// apply built-in function
if arity > 0 && fnargs.len() as int != arity {
fail!("function '{:s}' requires {:d} arguments, but it received {:u} arguments",
name, arity, args.len())
}
bif(symt, args)
}
Fn(fnargs, body) => {
// apply a defined function
if args.len() != fnargs.len() {
fail!("function requires {:u} arguments, but it received {:u} arguments",
fnargs.len(), args.len())
}
// bind its arguments in the environemnt and evaluate its body
for (name,value) in fnargs.iter().zip(args.iter()) {
bind(symt, name.clone(), value.clone());
}
eval(symt, *body)
}
v => fail!("") //fail!("apply: need function, received {}", v)
}
}
/// Evaluates an s-expression and returns a value.
pub fn eval(symt: Rc<SymbolTable>, input: sexpr::Value) -> ~LispValue {
match input {
sexpr::List(v) => {
if(v.len() == 0) {
fail!("eval given empty list")
}
// evaluate a list as a function call
match v {
[sexpr::Atom(~"quote"), arg] => from_sexpr(&arg),
[sexpr::Atom(~"def"), name, value] => {
// bind a value to an identifier
let ident = match name {
sexpr::Atom(s) => s,
sexpr::Str(s) => s,
_ => fail!("def requires an atom or a string")
};
bind(symt, ident, eval(symt, value));
nil()
},
[sexpr::Atom(~"cond"), ..conds] => {
//let conds = conds.iter().map(|x: &sexpr::Value| from_sexpr(x));
for cond in conds.iter() {
match *cond {
sexpr::List([ref c, ref e]) => {
if eval(symt, c.clone()).as_bool() {
return eval(symt, e.clone())
}
}
_ => fail!("cond: need list of (condition expression)")
}
}
nil()
}
[sexpr::Atom(~"eval"), ..args] => {
// takes an argument, evaluates it (like a function does)
// and then uses that as an argument to eval().
// e.g. (= (eval (quote (+ 1 2))) 3)
assert!(args.len() == 1);
eval(symt, to_sexpr(eval(symt, args[0].clone())))
}
[sexpr::Atom(~"fn"), sexpr::List(args), body] => {
// construct a function
let args_ = args.iter().map(|x| {
match x {
&sexpr::Atom(ref s) => s.clone(),
_ => fail!("fn: arguments need to be atoms")
}
}).collect();
~Fn(args_, ~body)
}
[ref fnval, ..args] => {
let f = eval(symt, fnval.clone());
let xargs = args.map(|x| eval(symt, x.clone())); // eval'd args
apply(symt, f, xargs)
}
_ => fail!("eval: requires a variable or an application"),
}
}
sexpr::Atom(v) => {
// variable
lookup(symt, v)
}
_ => from_sexpr(&input) // return non-list values as they are
}
}
#[cfg(test)]
mod test {
use super::{SymbolTable, eval, init_std, new_symt, nil, Num, Str, Fn, List, Atom};
use sexpr;
use sexpr::from_str;
use std::rc::Rc;
fn read(input: &str) -> sexpr::Value {
from_str(input).unwrap()
}
#[test]
fn test_eval() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("123")), ~Num(123.0));
assert_eq!(eval(symt, read("(id 123)")), ~Num(123.0));
assert_eq!(eval(symt, read("(id (id (id 123)))")), ~Num(123.0));
// should fail: assert_eq!(eval(&mut symt, read("(1 2 3)")), ~List(~[Num(1.0), Num(2.0), Num(3.0)]));
}
#[test]
fn test_str() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(id \"hi\")")), ~Str(~"hi"));
assert_eq!(eval(symt, read("(car (cons \"a\" \"b\"))")), ~Str(~"a"));
// string concatenation
assert_eq!(eval(symt, read("(+ \"hi\" \" there\")")), ~Str(~"hi there"));
assert_eq!(eval(symt, read("(+ \"hi\" \" there\" \" variadic\")")), ~Str(~"hi there variadic"));
}
#[test]
fn test_cons() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cons 1 2)")), ~List(~[Num(1.0), Num(2.0)]));
assert_eq!(eval(symt, read("(cons 1 (cons 2 3))")), ~List(~[Num(1.0),
List(~[Num(2.0), Num(3.0)])]));
}
#[test]
fn test_car() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(car (cons 1 2))")), ~Num(1.0));
}
#[test]
fn test_cdr() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cdr (cons 1 2))")), ~Num(2.0));
assert_eq!(eval(symt, read("(cdr (cons 1 (cons 2 3)))")), ~List(~[Num(2.0), Num(3.0)]));
}
#[test]
fn test_arithmetic() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(+ 1 3)")), ~Num(4.0));
assert_eq!(eval(symt, read("(+ 1.5 3)")), ~Num(4.5));
assert_eq!(eval(symt, read("(+ 5 -3)")), ~Num(2.0));
assert_eq!(eval(symt, read("(- 5 3)")), ~Num(2.0));
assert_eq!(eval(symt, read("(- 3 5)")), ~Num(-2.0));
assert_eq!(eval(symt, read("(- 5 -3)")), ~Num(8.0));
assert_eq!(eval(symt, read("(* 2 5)")), ~Num(10.0));
assert_eq!(eval(symt, read("(* 2 -5)")), ~Num(-10.0));
assert_eq!(eval(symt, read("(/ 10 2)")), ~Num(5.0));
assert_eq!(eval(symt, read("(/ 10 -2)")), ~Num(-5.0));
assert_eq!(eval(symt, read("(+ 6 (+ 1 3))")), ~Num(10.0));
assert_eq!(eval(symt, read("(- 6 (- 3 2))")), ~Num(5.0));
assert_eq!(eval(symt, read("(+ 1 (+ 2 3) 4)")), ~Num(10.0));
assert_eq!(eval(symt, read("(+ 5)")), ~Num(5.0));
assert_eq!(eval(symt, read("(+ -5)")), ~Num(-5.0));
}
#[test]
fn test_quote() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(quote 5)")), ~Num(5.0));
assert_eq!(eval(symt, read("(quote x)")), ~Atom(~"x"));
assert_eq!(eval(symt, read("(quote (1 2 3))")), ~List(~[Num(1.0), Num(2.0), Num(3.0)]));
assert_eq!(eval(symt, read("(quote (x y z))")), ~List(~[Atom(~"x"), Atom(~"y"), Atom(~"z")]))
assert_eq!(eval(symt, read("(quote (quote x))")), ~List(~[Atom(~"quote"), Atom(~"x")]));
assert_eq!(eval(symt, read("(+ (quote 1) 2)")), ~Num(3.0));
//assert_eq!(eval(symt, read("(quote 1 2 3 4 5)")), ~Num(5.0));
}
#[test]
fn test_def() {
let mut symt = Rc::new(new_symt());
init_std(symt);
eval(symt, read("(def x 5)"));
eval(symt, read("(def y 10)"));
assert_eq!(eval(symt, read("x")), ~Num(5.0));
assert_eq!(eval(symt, read("y")), ~Num(10.0));
assert_eq!(eval(symt, read("(+ x y)")), ~Num(15.0));
}
#[test]
fn test_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(fn () ())")), ~Fn(~[], ~sexpr::List(~[])));
assert_eq!(eval(symt, read("(fn (x) (x))")), ~Fn(~[~"x"], ~sexpr::List(~[sexpr::Atom(~"x")])));
eval(symt, read("(def f (fn (x) (+ 1 x)))"));
assert_eq!(eval(symt, read("f")), ~Fn(~[~"x"],
~sexpr::List(~[sexpr::Atom(~"+"), sexpr::Num(1.0), sexpr::Atom(~"x")])));
assert_eq!(eval(symt, read("(f 5)")), ~Num(6.0));
}
#[test]
fn test_apply_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("((fn () 0))")), ~Num(0.0));
assert_eq!(eval(symt, read("((fn (x) x) 5)")), ~Num(5.0));
}
#[test]
fn test_cond() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(cond (true 2) (nil 3))")), ~Num(2.0));
assert_eq!(eval(symt, read("(cond (nil 2) (true 3))")), ~Num(3.0));
assert_eq!(eval(symt, read("(cond (nil 2) (true 3) (true 4))")), ~Num(3.0));
}
#[test]
fn test_equals() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(= 1 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= 1.0 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= 1 2)")), nil());
assert_eq!(eval(symt, read("(= true 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil (quote ()))")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil nil)")), ~Num(1.0));
assert_eq!(eval(symt, read("(= nil true)")), nil());
assert_eq!(eval(symt, read("(= \"a\" \"a\")")), ~Num(1.0));
assert_eq!(eval(symt, read("(= \"a\" \"b\")")), nil());
assert_eq!(eval(symt, read("(= (quote (1 2 3)) (quote (1 2 3)))")), ~Num(1.0));
}
#[test]
fn test_factorial() {
let mut symt = Rc::new(new_symt());
init_std(symt);
eval(symt, read("(def fac (fn (n) (cond ((= n 0) 1) (true (* n (fac (- n 1)))))))"));
assert_eq!(eval(symt, read("(fac 10)")), ~Num(3628800.0));
}
#[test]
fn test_eval_fn() {
let mut symt = Rc::new(new_symt());
init_std(symt);
assert_eq!(eval(symt, read("(eval 1)")), ~Num(1.0));
assert_eq!(eval(symt, read("(eval \"hi\")")), ~Str(~"hi"));
assert_eq!(eval(symt, read("(eval (quote (+ 1 2)))")), ~Num(3.0));
assert_eq!(eval(symt, read("(eval (quote ( (fn () 0) )))")), ~Num(0.0));
}
}
#[ignore(test)]
#[main]
fn main() {
// A simple REPL
let mut stdinReader = std::io::buffered::BufferedReader::new(std::io::stdin());
let mut symt = Rc::new(new_symt());
init_std(symt);
loop {
print("> ");
let line = stdinReader.read_line();
match line {
Some(~".q") => break,
Some(~".newsym") => {
// use a fresh symbol table
symt = Rc::new(new_symt());
init_std(symt);
println("ok");
}
Some(line) => {
match sexpr::from_str(line) {
Some(sexpr) => println(eval(symt, sexpr).to_str()),
None => println("syntax error")
}
}
None => ()
}
}
} | } | random_line_split |
client.go | package apic
import (
"encoding/json"
"fmt"
"net/http"
"strings"
coreapi "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/api"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/apic/apiserver/models/management/v1alpha1"
corecfg "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/config"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/errors"
hc "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/healthcheck"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/log"
"git.ecd.axway.org/apigov/service-mesh-agent/pkg/apicauth"
)
// constants for auth policy types
const (
Apikey = "verify-api-key"
Passthrough = "pass-through"
Oauth = "verify-oauth-token"
)
const serverName = "AMPLIFY Central"
// ValidPolicies - list of valid auth policies supported by Central. Add to this list as more policies are supported.
var ValidPolicies = []string{Apikey, Passthrough, Oauth}
// SubscriptionProcessor - callback method type to process subscriptions
type SubscriptionProcessor func(subscription Subscription)
// SubscriptionValidator - callback method type to validate subscription for processing
type SubscriptionValidator func(subscription Subscription) bool
// Client - interface
type Client interface {
PublishService(serviceBody ServiceBody) (*v1alpha1.APIService, error)
RegisterSubscriptionWebhook() error
RegisterSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
UpdateSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
GetSubscriptionManager() SubscriptionManager
GetCatalogItemIDForConsumerInstance(instanceID string) (string, error)
DeleteConsumerInstance(instanceName string) error
GetConsumerInstanceByID(consumerInstanceID string) (*v1alpha1.ConsumerInstance, error)
GetUserEmailAddress(ID string) (string, error)
GetSubscriptionsForCatalogItem(states []string, catalogItemID string) ([]CentralSubscription, error)
GetSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string) (SubscriptionSchema, error)
UpdateSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string, subscriptionSchema SubscriptionSchema) error
GetCatalogItemName(ID string) (string, error)
ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error)
}
type tokenGetter interface {
GetToken() (string, error)
}
type platformTokenGetter struct {
requester *apicauth.PlatformTokenGetter
}
func (p *platformTokenGetter) GetToken() (string, error) {
return p.requester.GetToken()
}
// New -
func New(cfg corecfg.CentralConfig) Client {
tokenURL := cfg.GetAuthConfig().GetTokenURL()
aud := cfg.GetAuthConfig().GetAudience()
priKey := cfg.GetAuthConfig().GetPrivateKey()
pubKey := cfg.GetAuthConfig().GetPublicKey()
keyPwd := cfg.GetAuthConfig().GetKeyPassword()
clientID := cfg.GetAuthConfig().GetClientID()
authTimeout := cfg.GetAuthConfig().GetTimeout()
platformTokenGetter := &platformTokenGetter{
requester: apicauth.NewPlatformTokenGetter(priKey, pubKey, keyPwd, tokenURL, aud, clientID, authTimeout),
}
serviceClient := &ServiceClient{
cfg: cfg,
tokenRequester: platformTokenGetter,
apiClient: coreapi.NewClient(cfg.GetTLSConfig(), cfg.GetProxyURL()),
DefaultSubscriptionSchema: NewSubscriptionSchema(cfg.GetEnvironmentName() + SubscriptionSchemaNameSuffix),
}
// set the default webhook if one has been configured
webCfg := cfg.GetSubscriptionConfig().GetSubscriptionApprovalWebhookConfig()
if webCfg != nil && webCfg.IsConfigured() {
serviceClient.DefaultSubscriptionApprovalWebhook = webCfg
}
serviceClient.subscriptionMgr = newSubscriptionManager(serviceClient)
hc.RegisterHealthcheck(serverName, "central", serviceClient.healthcheck)
return serviceClient
}
// mapToTagsArray -
func (c *ServiceClient) mapToTagsArray(m map[string]interface{}) []string {
strArr := []string{}
for key, val := range m {
var value string
v, ok := val.(*string)
if ok {
value = *v
} else {
v, ok := val.(string)
if ok {
value = v
}
}
if value == "" {
strArr = append(strArr, key)
} else {
strArr = append(strArr, key+"_"+value)
}
}
// Add any tags from config
additionalTags := c.cfg.GetTagsToPublish()
if additionalTags != "" {
additionalTagsArray := strings.Split(additionalTags, ",")
for _, tag := range additionalTagsArray {
strArr = append(strArr, strings.TrimSpace(tag))
}
}
return strArr
}
func logResponseErrors(body []byte) {
detail := make(map[string]*json.RawMessage)
json.Unmarshal(body, &detail)
for k, v := range detail {
buffer, _ := v.MarshalJSON()
log.Debugf("HTTP response %v: %v", k, string(buffer))
}
}
func (c *ServiceClient) createHeader() (map[string]string, error) {
token, err := c.tokenRequester.GetToken()
if err != nil {
return nil, err
}
headers := make(map[string]string)
headers["X-Axway-Tenant-Id"] = c.cfg.GetTenantID()
headers["Authorization"] = "Bearer " + token
headers["Content-Type"] = "application/json"
return headers, nil
}
// GetSubscriptionManager -
func (c *ServiceClient) GetSubscriptionManager() SubscriptionManager {
return c.subscriptionMgr
}
// SetSubscriptionManager -
func (c *ServiceClient) SetSubscriptionManager(mgr SubscriptionManager) {
c.subscriptionMgr = mgr
}
func (c *ServiceClient) healthcheck(name string) *hc.Status {
// Set a default response
s := hc.Status{
Result: hc.OK,
}
// Check that we can reach the platform
err := c.checkPlatformHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Check that appropriate settings for the API server are set
err = c.checkAPIServerHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Return our response
return &s
}
func (c *ServiceClient) checkPlatformHealth() error {
_, err := c.tokenRequester.GetToken()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
return nil
}
func (c *ServiceClient) checkAPIServerHealth() error {
headers, err := c.createHeader()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
apiEnvironment, err := c.getEnvironment(headers)
if err != nil || apiEnvironment == nil {
return err
}
if c.cfg.GetEnvironmentID() == "" {
// need to save this ID for the traceability agent for later
c.cfg.SetEnvironmentID(apiEnvironment.Metadata.ID)
err = c.updateEnvironmentStatus(apiEnvironment)
if err != nil {
return err
}
}
if c.cfg.GetTeamID() == "" {
// Validate if team exists
team, err := c.getCentralTeam(c.cfg.GetTeamName())
if err != nil {
return err
}
// Set the team Id
c.cfg.SetTeamID(team.ID)
}
return nil
}
func (c *ServiceClient) updateEnvironmentStatus(apiEnvironment *v1alpha1.Environment) error {
attribute := "x-axway-agent"
// check to see if x-axway-agent has already been set
if _, found := apiEnvironment.Attributes[attribute]; found {
log.Debugf("Environment attribute: %s is already set.", attribute)
return nil
}
apiEnvironment.Attributes[attribute] = "true"
buffer, err := json.Marshal(apiEnvironment)
if err != nil {
return nil
}
_, err = c.apiServiceDeployAPI(http.MethodPut, c.cfg.GetEnvironmentURL(), buffer)
if err != nil {
return err
}
log.Debugf("Updated environment attribute: %s to true.", attribute)
return nil
}
func (c *ServiceClient) getEnvironment(headers map[string]string) (*v1alpha1.Environment, error) {
queryParams := map[string]string{}
// do a request for the environment
apiEnvByte, err := c.sendServerRequest(c.cfg.GetEnvironmentURL(), headers, queryParams)
if err != nil {
return nil, err
}
// Get env id from apiServerEnvByte
var apiEnvironment v1alpha1.Environment
err = json.Unmarshal(apiEnvByte, &apiEnvironment)
if err != nil {
return nil, errors.Wrap(ErrEnvironmentQuery, err.Error())
}
// Validate that we actually get an environment ID back within the Metadata
if apiEnvironment.Metadata.ID == "" {
return nil, ErrEnvironmentQuery
}
return &apiEnvironment, nil
}
func (c *ServiceClient) sendServerRequest(url string, headers, query map[string]string) ([]byte, error) {
request := coreapi.Request{
Method: coreapi.GET,
URL: url,
QueryParams: query,
Headers: headers,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
// GetUserEmailAddress - request the user email
func (c *ServiceClient) GetUserEmailAddress(id string) (string, error) {
headers, err := c.createHeader()
if err != nil {
return "", err
}
platformURL := fmt.Sprintf("%s/api/v1/user/%s", c.cfg.GetPlatformURL(), id)
log.Debugf("Platform URL being used to get user information %s", platformURL)
platformUserBytes, reqErr := c.sendServerRequest(platformURL, headers, make(map[string]string, 0))
if reqErr != nil {
if reqErr.(*errors.AgentError).GetErrorCode() == ErrRequestQuery.GetErrorCode() |
return "", reqErr
}
// Get the email
var platformUserInfo PlatformUserInfo
err = json.Unmarshal(platformUserBytes, &platformUserInfo)
if err != nil {
return "", err
}
email := platformUserInfo.Result.Email
log.Debugf("Platform user email %s", email)
return email, nil
}
// getCentralTeam - returns the team based on team name
func (c *ServiceClient) getCentralTeam(teamName string) (*PlatformTeam, error) {
// Query for the default, if no teamName is given
queryParams := map[string]string{}
if teamName != "" {
queryParams = map[string]string{
"query": fmt.Sprintf("name==\"%s\"", teamName),
}
}
platformTeams, err := c.getTeam(queryParams)
if err != nil {
return nil, err
}
if len(platformTeams) == 0 {
return nil, ErrTeamNotFound.FormatError(teamName)
}
team := platformTeams[0]
if teamName == "" {
// Loop through to find the default team
for i, platformTeam := range platformTeams {
if platformTeam.Default {
// Found the default, set as the team var and break
team = platformTeams[i]
break
}
}
}
return &team, nil
}
// getTeam - returns the team ID based on filter
func (c *ServiceClient) getTeam(filterQueryParams map[string]string) ([]PlatformTeam, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
// Get the teams using Client registry service instead of from platform.
// Platform teams API require access and DOSA account will not have the access
platformURL := fmt.Sprintf("%s/api/v1/platformTeams", c.cfg.GetURL())
response, reqErr := c.sendServerRequest(platformURL, headers, filterQueryParams)
if reqErr != nil {
return nil, reqErr
}
var platformTeams []PlatformTeam
err = json.Unmarshal(response, &platformTeams)
if err != nil {
return nil, err
}
return platformTeams, nil
}
// ExecuteAPI - execute the api
func (c *ServiceClient) ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
request := coreapi.Request{
Method: method,
URL: url,
QueryParams: queryParam,
Headers: headers,
Body: buffer,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
| {
return "", ErrNoAddressFound.FormatError(id)
} | conditional_block |
client.go | package apic
import (
"encoding/json"
"fmt"
"net/http"
"strings"
coreapi "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/api"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/apic/apiserver/models/management/v1alpha1"
corecfg "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/config"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/errors"
hc "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/healthcheck"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/log"
"git.ecd.axway.org/apigov/service-mesh-agent/pkg/apicauth"
)
// constants for auth policy types
const (
Apikey = "verify-api-key"
Passthrough = "pass-through"
Oauth = "verify-oauth-token"
)
const serverName = "AMPLIFY Central"
// ValidPolicies - list of valid auth policies supported by Central. Add to this list as more policies are supported.
var ValidPolicies = []string{Apikey, Passthrough, Oauth}
// SubscriptionProcessor - callback method type to process subscriptions
type SubscriptionProcessor func(subscription Subscription)
// SubscriptionValidator - callback method type to validate subscription for processing
type SubscriptionValidator func(subscription Subscription) bool
// Client - interface
type Client interface {
PublishService(serviceBody ServiceBody) (*v1alpha1.APIService, error)
RegisterSubscriptionWebhook() error
RegisterSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
UpdateSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
GetSubscriptionManager() SubscriptionManager
GetCatalogItemIDForConsumerInstance(instanceID string) (string, error)
DeleteConsumerInstance(instanceName string) error
GetConsumerInstanceByID(consumerInstanceID string) (*v1alpha1.ConsumerInstance, error)
GetUserEmailAddress(ID string) (string, error)
GetSubscriptionsForCatalogItem(states []string, catalogItemID string) ([]CentralSubscription, error)
GetSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string) (SubscriptionSchema, error)
UpdateSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string, subscriptionSchema SubscriptionSchema) error
GetCatalogItemName(ID string) (string, error)
ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error)
}
type tokenGetter interface {
GetToken() (string, error)
}
type platformTokenGetter struct {
requester *apicauth.PlatformTokenGetter
}
func (p *platformTokenGetter) GetToken() (string, error) {
return p.requester.GetToken()
}
// New -
func New(cfg corecfg.CentralConfig) Client {
tokenURL := cfg.GetAuthConfig().GetTokenURL()
aud := cfg.GetAuthConfig().GetAudience()
priKey := cfg.GetAuthConfig().GetPrivateKey()
pubKey := cfg.GetAuthConfig().GetPublicKey()
keyPwd := cfg.GetAuthConfig().GetKeyPassword()
clientID := cfg.GetAuthConfig().GetClientID()
authTimeout := cfg.GetAuthConfig().GetTimeout()
platformTokenGetter := &platformTokenGetter{
requester: apicauth.NewPlatformTokenGetter(priKey, pubKey, keyPwd, tokenURL, aud, clientID, authTimeout),
}
serviceClient := &ServiceClient{
cfg: cfg,
tokenRequester: platformTokenGetter,
apiClient: coreapi.NewClient(cfg.GetTLSConfig(), cfg.GetProxyURL()),
DefaultSubscriptionSchema: NewSubscriptionSchema(cfg.GetEnvironmentName() + SubscriptionSchemaNameSuffix),
}
// set the default webhook if one has been configured
webCfg := cfg.GetSubscriptionConfig().GetSubscriptionApprovalWebhookConfig()
if webCfg != nil && webCfg.IsConfigured() {
serviceClient.DefaultSubscriptionApprovalWebhook = webCfg
}
serviceClient.subscriptionMgr = newSubscriptionManager(serviceClient)
hc.RegisterHealthcheck(serverName, "central", serviceClient.healthcheck)
return serviceClient
}
// mapToTagsArray -
func (c *ServiceClient) mapToTagsArray(m map[string]interface{}) []string {
strArr := []string{}
for key, val := range m {
var value string
v, ok := val.(*string)
if ok {
value = *v
} else {
v, ok := val.(string)
if ok {
value = v
}
}
if value == "" {
strArr = append(strArr, key)
} else {
strArr = append(strArr, key+"_"+value)
}
}
// Add any tags from config
additionalTags := c.cfg.GetTagsToPublish()
if additionalTags != "" {
additionalTagsArray := strings.Split(additionalTags, ",")
for _, tag := range additionalTagsArray {
strArr = append(strArr, strings.TrimSpace(tag))
}
}
return strArr
}
func logResponseErrors(body []byte) {
detail := make(map[string]*json.RawMessage)
json.Unmarshal(body, &detail)
for k, v := range detail {
buffer, _ := v.MarshalJSON()
log.Debugf("HTTP response %v: %v", k, string(buffer))
}
}
func (c *ServiceClient) createHeader() (map[string]string, error) {
token, err := c.tokenRequester.GetToken()
if err != nil {
return nil, err
}
headers := make(map[string]string)
headers["X-Axway-Tenant-Id"] = c.cfg.GetTenantID()
headers["Authorization"] = "Bearer " + token
headers["Content-Type"] = "application/json"
return headers, nil
}
// GetSubscriptionManager -
func (c *ServiceClient) GetSubscriptionManager() SubscriptionManager {
return c.subscriptionMgr
}
// SetSubscriptionManager -
func (c *ServiceClient) SetSubscriptionManager(mgr SubscriptionManager) {
c.subscriptionMgr = mgr
}
func (c *ServiceClient) healthcheck(name string) *hc.Status {
// Set a default response
s := hc.Status{
Result: hc.OK,
}
// Check that we can reach the platform
err := c.checkPlatformHealth() | }
}
// Check that appropriate settings for the API server are set
err = c.checkAPIServerHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Return our response
return &s
}
func (c *ServiceClient) checkPlatformHealth() error {
_, err := c.tokenRequester.GetToken()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
return nil
}
func (c *ServiceClient) checkAPIServerHealth() error {
headers, err := c.createHeader()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
apiEnvironment, err := c.getEnvironment(headers)
if err != nil || apiEnvironment == nil {
return err
}
if c.cfg.GetEnvironmentID() == "" {
// need to save this ID for the traceability agent for later
c.cfg.SetEnvironmentID(apiEnvironment.Metadata.ID)
err = c.updateEnvironmentStatus(apiEnvironment)
if err != nil {
return err
}
}
if c.cfg.GetTeamID() == "" {
// Validate if team exists
team, err := c.getCentralTeam(c.cfg.GetTeamName())
if err != nil {
return err
}
// Set the team Id
c.cfg.SetTeamID(team.ID)
}
return nil
}
func (c *ServiceClient) updateEnvironmentStatus(apiEnvironment *v1alpha1.Environment) error {
attribute := "x-axway-agent"
// check to see if x-axway-agent has already been set
if _, found := apiEnvironment.Attributes[attribute]; found {
log.Debugf("Environment attribute: %s is already set.", attribute)
return nil
}
apiEnvironment.Attributes[attribute] = "true"
buffer, err := json.Marshal(apiEnvironment)
if err != nil {
return nil
}
_, err = c.apiServiceDeployAPI(http.MethodPut, c.cfg.GetEnvironmentURL(), buffer)
if err != nil {
return err
}
log.Debugf("Updated environment attribute: %s to true.", attribute)
return nil
}
func (c *ServiceClient) getEnvironment(headers map[string]string) (*v1alpha1.Environment, error) {
queryParams := map[string]string{}
// do a request for the environment
apiEnvByte, err := c.sendServerRequest(c.cfg.GetEnvironmentURL(), headers, queryParams)
if err != nil {
return nil, err
}
// Get env id from apiServerEnvByte
var apiEnvironment v1alpha1.Environment
err = json.Unmarshal(apiEnvByte, &apiEnvironment)
if err != nil {
return nil, errors.Wrap(ErrEnvironmentQuery, err.Error())
}
// Validate that we actually get an environment ID back within the Metadata
if apiEnvironment.Metadata.ID == "" {
return nil, ErrEnvironmentQuery
}
return &apiEnvironment, nil
}
func (c *ServiceClient) sendServerRequest(url string, headers, query map[string]string) ([]byte, error) {
request := coreapi.Request{
Method: coreapi.GET,
URL: url,
QueryParams: query,
Headers: headers,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
// GetUserEmailAddress - request the user email
func (c *ServiceClient) GetUserEmailAddress(id string) (string, error) {
headers, err := c.createHeader()
if err != nil {
return "", err
}
platformURL := fmt.Sprintf("%s/api/v1/user/%s", c.cfg.GetPlatformURL(), id)
log.Debugf("Platform URL being used to get user information %s", platformURL)
platformUserBytes, reqErr := c.sendServerRequest(platformURL, headers, make(map[string]string, 0))
if reqErr != nil {
if reqErr.(*errors.AgentError).GetErrorCode() == ErrRequestQuery.GetErrorCode() {
return "", ErrNoAddressFound.FormatError(id)
}
return "", reqErr
}
// Get the email
var platformUserInfo PlatformUserInfo
err = json.Unmarshal(platformUserBytes, &platformUserInfo)
if err != nil {
return "", err
}
email := platformUserInfo.Result.Email
log.Debugf("Platform user email %s", email)
return email, nil
}
// getCentralTeam - returns the team based on team name
func (c *ServiceClient) getCentralTeam(teamName string) (*PlatformTeam, error) {
// Query for the default, if no teamName is given
queryParams := map[string]string{}
if teamName != "" {
queryParams = map[string]string{
"query": fmt.Sprintf("name==\"%s\"", teamName),
}
}
platformTeams, err := c.getTeam(queryParams)
if err != nil {
return nil, err
}
if len(platformTeams) == 0 {
return nil, ErrTeamNotFound.FormatError(teamName)
}
team := platformTeams[0]
if teamName == "" {
// Loop through to find the default team
for i, platformTeam := range platformTeams {
if platformTeam.Default {
// Found the default, set as the team var and break
team = platformTeams[i]
break
}
}
}
return &team, nil
}
// getTeam - returns the team ID based on filter
func (c *ServiceClient) getTeam(filterQueryParams map[string]string) ([]PlatformTeam, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
// Get the teams using Client registry service instead of from platform.
// Platform teams API require access and DOSA account will not have the access
platformURL := fmt.Sprintf("%s/api/v1/platformTeams", c.cfg.GetURL())
response, reqErr := c.sendServerRequest(platformURL, headers, filterQueryParams)
if reqErr != nil {
return nil, reqErr
}
var platformTeams []PlatformTeam
err = json.Unmarshal(response, &platformTeams)
if err != nil {
return nil, err
}
return platformTeams, nil
}
// ExecuteAPI - execute the api
func (c *ServiceClient) ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
request := coreapi.Request{
Method: method,
URL: url,
QueryParams: queryParam,
Headers: headers,
Body: buffer,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
} | if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(), | random_line_split |
client.go | package apic
import (
"encoding/json"
"fmt"
"net/http"
"strings"
coreapi "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/api"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/apic/apiserver/models/management/v1alpha1"
corecfg "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/config"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/errors"
hc "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/healthcheck"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/log"
"git.ecd.axway.org/apigov/service-mesh-agent/pkg/apicauth"
)
// constants for auth policy types
const (
Apikey = "verify-api-key"
Passthrough = "pass-through"
Oauth = "verify-oauth-token"
)
const serverName = "AMPLIFY Central"
// ValidPolicies - list of valid auth policies supported by Central. Add to this list as more policies are supported.
var ValidPolicies = []string{Apikey, Passthrough, Oauth}
// SubscriptionProcessor - callback method type to process subscriptions
type SubscriptionProcessor func(subscription Subscription)
// SubscriptionValidator - callback method type to validate subscription for processing
type SubscriptionValidator func(subscription Subscription) bool
// Client - interface
type Client interface {
PublishService(serviceBody ServiceBody) (*v1alpha1.APIService, error)
RegisterSubscriptionWebhook() error
RegisterSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
UpdateSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
GetSubscriptionManager() SubscriptionManager
GetCatalogItemIDForConsumerInstance(instanceID string) (string, error)
DeleteConsumerInstance(instanceName string) error
GetConsumerInstanceByID(consumerInstanceID string) (*v1alpha1.ConsumerInstance, error)
GetUserEmailAddress(ID string) (string, error)
GetSubscriptionsForCatalogItem(states []string, catalogItemID string) ([]CentralSubscription, error)
GetSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string) (SubscriptionSchema, error)
UpdateSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string, subscriptionSchema SubscriptionSchema) error
GetCatalogItemName(ID string) (string, error)
ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error)
}
type tokenGetter interface {
GetToken() (string, error)
}
type platformTokenGetter struct {
requester *apicauth.PlatformTokenGetter
}
func (p *platformTokenGetter) GetToken() (string, error) {
return p.requester.GetToken()
}
// New -
func New(cfg corecfg.CentralConfig) Client |
// mapToTagsArray -
func (c *ServiceClient) mapToTagsArray(m map[string]interface{}) []string {
strArr := []string{}
for key, val := range m {
var value string
v, ok := val.(*string)
if ok {
value = *v
} else {
v, ok := val.(string)
if ok {
value = v
}
}
if value == "" {
strArr = append(strArr, key)
} else {
strArr = append(strArr, key+"_"+value)
}
}
// Add any tags from config
additionalTags := c.cfg.GetTagsToPublish()
if additionalTags != "" {
additionalTagsArray := strings.Split(additionalTags, ",")
for _, tag := range additionalTagsArray {
strArr = append(strArr, strings.TrimSpace(tag))
}
}
return strArr
}
func logResponseErrors(body []byte) {
detail := make(map[string]*json.RawMessage)
json.Unmarshal(body, &detail)
for k, v := range detail {
buffer, _ := v.MarshalJSON()
log.Debugf("HTTP response %v: %v", k, string(buffer))
}
}
func (c *ServiceClient) createHeader() (map[string]string, error) {
token, err := c.tokenRequester.GetToken()
if err != nil {
return nil, err
}
headers := make(map[string]string)
headers["X-Axway-Tenant-Id"] = c.cfg.GetTenantID()
headers["Authorization"] = "Bearer " + token
headers["Content-Type"] = "application/json"
return headers, nil
}
// GetSubscriptionManager -
func (c *ServiceClient) GetSubscriptionManager() SubscriptionManager {
return c.subscriptionMgr
}
// SetSubscriptionManager -
func (c *ServiceClient) SetSubscriptionManager(mgr SubscriptionManager) {
c.subscriptionMgr = mgr
}
func (c *ServiceClient) healthcheck(name string) *hc.Status {
// Set a default response
s := hc.Status{
Result: hc.OK,
}
// Check that we can reach the platform
err := c.checkPlatformHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Check that appropriate settings for the API server are set
err = c.checkAPIServerHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Return our response
return &s
}
func (c *ServiceClient) checkPlatformHealth() error {
_, err := c.tokenRequester.GetToken()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
return nil
}
func (c *ServiceClient) checkAPIServerHealth() error {
headers, err := c.createHeader()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
apiEnvironment, err := c.getEnvironment(headers)
if err != nil || apiEnvironment == nil {
return err
}
if c.cfg.GetEnvironmentID() == "" {
// need to save this ID for the traceability agent for later
c.cfg.SetEnvironmentID(apiEnvironment.Metadata.ID)
err = c.updateEnvironmentStatus(apiEnvironment)
if err != nil {
return err
}
}
if c.cfg.GetTeamID() == "" {
// Validate if team exists
team, err := c.getCentralTeam(c.cfg.GetTeamName())
if err != nil {
return err
}
// Set the team Id
c.cfg.SetTeamID(team.ID)
}
return nil
}
func (c *ServiceClient) updateEnvironmentStatus(apiEnvironment *v1alpha1.Environment) error {
attribute := "x-axway-agent"
// check to see if x-axway-agent has already been set
if _, found := apiEnvironment.Attributes[attribute]; found {
log.Debugf("Environment attribute: %s is already set.", attribute)
return nil
}
apiEnvironment.Attributes[attribute] = "true"
buffer, err := json.Marshal(apiEnvironment)
if err != nil {
return nil
}
_, err = c.apiServiceDeployAPI(http.MethodPut, c.cfg.GetEnvironmentURL(), buffer)
if err != nil {
return err
}
log.Debugf("Updated environment attribute: %s to true.", attribute)
return nil
}
func (c *ServiceClient) getEnvironment(headers map[string]string) (*v1alpha1.Environment, error) {
queryParams := map[string]string{}
// do a request for the environment
apiEnvByte, err := c.sendServerRequest(c.cfg.GetEnvironmentURL(), headers, queryParams)
if err != nil {
return nil, err
}
// Get env id from apiServerEnvByte
var apiEnvironment v1alpha1.Environment
err = json.Unmarshal(apiEnvByte, &apiEnvironment)
if err != nil {
return nil, errors.Wrap(ErrEnvironmentQuery, err.Error())
}
// Validate that we actually get an environment ID back within the Metadata
if apiEnvironment.Metadata.ID == "" {
return nil, ErrEnvironmentQuery
}
return &apiEnvironment, nil
}
func (c *ServiceClient) sendServerRequest(url string, headers, query map[string]string) ([]byte, error) {
request := coreapi.Request{
Method: coreapi.GET,
URL: url,
QueryParams: query,
Headers: headers,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
// GetUserEmailAddress - request the user email
func (c *ServiceClient) GetUserEmailAddress(id string) (string, error) {
headers, err := c.createHeader()
if err != nil {
return "", err
}
platformURL := fmt.Sprintf("%s/api/v1/user/%s", c.cfg.GetPlatformURL(), id)
log.Debugf("Platform URL being used to get user information %s", platformURL)
platformUserBytes, reqErr := c.sendServerRequest(platformURL, headers, make(map[string]string, 0))
if reqErr != nil {
if reqErr.(*errors.AgentError).GetErrorCode() == ErrRequestQuery.GetErrorCode() {
return "", ErrNoAddressFound.FormatError(id)
}
return "", reqErr
}
// Get the email
var platformUserInfo PlatformUserInfo
err = json.Unmarshal(platformUserBytes, &platformUserInfo)
if err != nil {
return "", err
}
email := platformUserInfo.Result.Email
log.Debugf("Platform user email %s", email)
return email, nil
}
// getCentralTeam - returns the team based on team name
func (c *ServiceClient) getCentralTeam(teamName string) (*PlatformTeam, error) {
// Query for the default, if no teamName is given
queryParams := map[string]string{}
if teamName != "" {
queryParams = map[string]string{
"query": fmt.Sprintf("name==\"%s\"", teamName),
}
}
platformTeams, err := c.getTeam(queryParams)
if err != nil {
return nil, err
}
if len(platformTeams) == 0 {
return nil, ErrTeamNotFound.FormatError(teamName)
}
team := platformTeams[0]
if teamName == "" {
// Loop through to find the default team
for i, platformTeam := range platformTeams {
if platformTeam.Default {
// Found the default, set as the team var and break
team = platformTeams[i]
break
}
}
}
return &team, nil
}
// getTeam - returns the team ID based on filter
func (c *ServiceClient) getTeam(filterQueryParams map[string]string) ([]PlatformTeam, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
// Get the teams using Client registry service instead of from platform.
// Platform teams API require access and DOSA account will not have the access
platformURL := fmt.Sprintf("%s/api/v1/platformTeams", c.cfg.GetURL())
response, reqErr := c.sendServerRequest(platformURL, headers, filterQueryParams)
if reqErr != nil {
return nil, reqErr
}
var platformTeams []PlatformTeam
err = json.Unmarshal(response, &platformTeams)
if err != nil {
return nil, err
}
return platformTeams, nil
}
// ExecuteAPI - execute the api
func (c *ServiceClient) ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
request := coreapi.Request{
Method: method,
URL: url,
QueryParams: queryParam,
Headers: headers,
Body: buffer,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
| {
tokenURL := cfg.GetAuthConfig().GetTokenURL()
aud := cfg.GetAuthConfig().GetAudience()
priKey := cfg.GetAuthConfig().GetPrivateKey()
pubKey := cfg.GetAuthConfig().GetPublicKey()
keyPwd := cfg.GetAuthConfig().GetKeyPassword()
clientID := cfg.GetAuthConfig().GetClientID()
authTimeout := cfg.GetAuthConfig().GetTimeout()
platformTokenGetter := &platformTokenGetter{
requester: apicauth.NewPlatformTokenGetter(priKey, pubKey, keyPwd, tokenURL, aud, clientID, authTimeout),
}
serviceClient := &ServiceClient{
cfg: cfg,
tokenRequester: platformTokenGetter,
apiClient: coreapi.NewClient(cfg.GetTLSConfig(), cfg.GetProxyURL()),
DefaultSubscriptionSchema: NewSubscriptionSchema(cfg.GetEnvironmentName() + SubscriptionSchemaNameSuffix),
}
// set the default webhook if one has been configured
webCfg := cfg.GetSubscriptionConfig().GetSubscriptionApprovalWebhookConfig()
if webCfg != nil && webCfg.IsConfigured() {
serviceClient.DefaultSubscriptionApprovalWebhook = webCfg
}
serviceClient.subscriptionMgr = newSubscriptionManager(serviceClient)
hc.RegisterHealthcheck(serverName, "central", serviceClient.healthcheck)
return serviceClient
} | identifier_body |
client.go | package apic
import (
"encoding/json"
"fmt"
"net/http"
"strings"
coreapi "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/api"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/apic/apiserver/models/management/v1alpha1"
corecfg "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/config"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/errors"
hc "git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/healthcheck"
"git.ecd.axway.org/apigov/apic_agents_sdk/pkg/util/log"
"git.ecd.axway.org/apigov/service-mesh-agent/pkg/apicauth"
)
// constants for auth policy types
const (
Apikey = "verify-api-key"
Passthrough = "pass-through"
Oauth = "verify-oauth-token"
)
const serverName = "AMPLIFY Central"
// ValidPolicies - list of valid auth policies supported by Central. Add to this list as more policies are supported.
var ValidPolicies = []string{Apikey, Passthrough, Oauth}
// SubscriptionProcessor - callback method type to process subscriptions
type SubscriptionProcessor func(subscription Subscription)
// SubscriptionValidator - callback method type to validate subscription for processing
type SubscriptionValidator func(subscription Subscription) bool
// Client - interface
type Client interface {
PublishService(serviceBody ServiceBody) (*v1alpha1.APIService, error)
RegisterSubscriptionWebhook() error
RegisterSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
UpdateSubscriptionSchema(subscriptionSchema SubscriptionSchema) error
GetSubscriptionManager() SubscriptionManager
GetCatalogItemIDForConsumerInstance(instanceID string) (string, error)
DeleteConsumerInstance(instanceName string) error
GetConsumerInstanceByID(consumerInstanceID string) (*v1alpha1.ConsumerInstance, error)
GetUserEmailAddress(ID string) (string, error)
GetSubscriptionsForCatalogItem(states []string, catalogItemID string) ([]CentralSubscription, error)
GetSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string) (SubscriptionSchema, error)
UpdateSubscriptionDefinitionPropertiesForCatalogItem(catalogItemID, propertyKey string, subscriptionSchema SubscriptionSchema) error
GetCatalogItemName(ID string) (string, error)
ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error)
}
type tokenGetter interface {
GetToken() (string, error)
}
type platformTokenGetter struct {
requester *apicauth.PlatformTokenGetter
}
func (p *platformTokenGetter) GetToken() (string, error) {
return p.requester.GetToken()
}
// New -
func New(cfg corecfg.CentralConfig) Client {
tokenURL := cfg.GetAuthConfig().GetTokenURL()
aud := cfg.GetAuthConfig().GetAudience()
priKey := cfg.GetAuthConfig().GetPrivateKey()
pubKey := cfg.GetAuthConfig().GetPublicKey()
keyPwd := cfg.GetAuthConfig().GetKeyPassword()
clientID := cfg.GetAuthConfig().GetClientID()
authTimeout := cfg.GetAuthConfig().GetTimeout()
platformTokenGetter := &platformTokenGetter{
requester: apicauth.NewPlatformTokenGetter(priKey, pubKey, keyPwd, tokenURL, aud, clientID, authTimeout),
}
serviceClient := &ServiceClient{
cfg: cfg,
tokenRequester: platformTokenGetter,
apiClient: coreapi.NewClient(cfg.GetTLSConfig(), cfg.GetProxyURL()),
DefaultSubscriptionSchema: NewSubscriptionSchema(cfg.GetEnvironmentName() + SubscriptionSchemaNameSuffix),
}
// set the default webhook if one has been configured
webCfg := cfg.GetSubscriptionConfig().GetSubscriptionApprovalWebhookConfig()
if webCfg != nil && webCfg.IsConfigured() {
serviceClient.DefaultSubscriptionApprovalWebhook = webCfg
}
serviceClient.subscriptionMgr = newSubscriptionManager(serviceClient)
hc.RegisterHealthcheck(serverName, "central", serviceClient.healthcheck)
return serviceClient
}
// mapToTagsArray -
func (c *ServiceClient) mapToTagsArray(m map[string]interface{}) []string {
strArr := []string{}
for key, val := range m {
var value string
v, ok := val.(*string)
if ok {
value = *v
} else {
v, ok := val.(string)
if ok {
value = v
}
}
if value == "" {
strArr = append(strArr, key)
} else {
strArr = append(strArr, key+"_"+value)
}
}
// Add any tags from config
additionalTags := c.cfg.GetTagsToPublish()
if additionalTags != "" {
additionalTagsArray := strings.Split(additionalTags, ",")
for _, tag := range additionalTagsArray {
strArr = append(strArr, strings.TrimSpace(tag))
}
}
return strArr
}
func logResponseErrors(body []byte) {
detail := make(map[string]*json.RawMessage)
json.Unmarshal(body, &detail)
for k, v := range detail {
buffer, _ := v.MarshalJSON()
log.Debugf("HTTP response %v: %v", k, string(buffer))
}
}
func (c *ServiceClient) createHeader() (map[string]string, error) {
token, err := c.tokenRequester.GetToken()
if err != nil {
return nil, err
}
headers := make(map[string]string)
headers["X-Axway-Tenant-Id"] = c.cfg.GetTenantID()
headers["Authorization"] = "Bearer " + token
headers["Content-Type"] = "application/json"
return headers, nil
}
// GetSubscriptionManager -
func (c *ServiceClient) GetSubscriptionManager() SubscriptionManager {
return c.subscriptionMgr
}
// SetSubscriptionManager -
func (c *ServiceClient) SetSubscriptionManager(mgr SubscriptionManager) {
c.subscriptionMgr = mgr
}
func (c *ServiceClient) healthcheck(name string) *hc.Status {
// Set a default response
s := hc.Status{
Result: hc.OK,
}
// Check that we can reach the platform
err := c.checkPlatformHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Check that appropriate settings for the API server are set
err = c.checkAPIServerHealth()
if err != nil {
s = hc.Status{
Result: hc.FAIL,
Details: err.Error(),
}
}
// Return our response
return &s
}
func (c *ServiceClient) checkPlatformHealth() error {
_, err := c.tokenRequester.GetToken()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
return nil
}
func (c *ServiceClient) checkAPIServerHealth() error {
headers, err := c.createHeader()
if err != nil {
return errors.Wrap(ErrAuthenticationCall, err.Error())
}
apiEnvironment, err := c.getEnvironment(headers)
if err != nil || apiEnvironment == nil {
return err
}
if c.cfg.GetEnvironmentID() == "" {
// need to save this ID for the traceability agent for later
c.cfg.SetEnvironmentID(apiEnvironment.Metadata.ID)
err = c.updateEnvironmentStatus(apiEnvironment)
if err != nil {
return err
}
}
if c.cfg.GetTeamID() == "" {
// Validate if team exists
team, err := c.getCentralTeam(c.cfg.GetTeamName())
if err != nil {
return err
}
// Set the team Id
c.cfg.SetTeamID(team.ID)
}
return nil
}
func (c *ServiceClient) updateEnvironmentStatus(apiEnvironment *v1alpha1.Environment) error {
attribute := "x-axway-agent"
// check to see if x-axway-agent has already been set
if _, found := apiEnvironment.Attributes[attribute]; found {
log.Debugf("Environment attribute: %s is already set.", attribute)
return nil
}
apiEnvironment.Attributes[attribute] = "true"
buffer, err := json.Marshal(apiEnvironment)
if err != nil {
return nil
}
_, err = c.apiServiceDeployAPI(http.MethodPut, c.cfg.GetEnvironmentURL(), buffer)
if err != nil {
return err
}
log.Debugf("Updated environment attribute: %s to true.", attribute)
return nil
}
func (c *ServiceClient) getEnvironment(headers map[string]string) (*v1alpha1.Environment, error) {
queryParams := map[string]string{}
// do a request for the environment
apiEnvByte, err := c.sendServerRequest(c.cfg.GetEnvironmentURL(), headers, queryParams)
if err != nil {
return nil, err
}
// Get env id from apiServerEnvByte
var apiEnvironment v1alpha1.Environment
err = json.Unmarshal(apiEnvByte, &apiEnvironment)
if err != nil {
return nil, errors.Wrap(ErrEnvironmentQuery, err.Error())
}
// Validate that we actually get an environment ID back within the Metadata
if apiEnvironment.Metadata.ID == "" {
return nil, ErrEnvironmentQuery
}
return &apiEnvironment, nil
}
func (c *ServiceClient) sendServerRequest(url string, headers, query map[string]string) ([]byte, error) {
request := coreapi.Request{
Method: coreapi.GET,
URL: url,
QueryParams: query,
Headers: headers,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
// GetUserEmailAddress - request the user email
func (c *ServiceClient) GetUserEmailAddress(id string) (string, error) {
headers, err := c.createHeader()
if err != nil {
return "", err
}
platformURL := fmt.Sprintf("%s/api/v1/user/%s", c.cfg.GetPlatformURL(), id)
log.Debugf("Platform URL being used to get user information %s", platformURL)
platformUserBytes, reqErr := c.sendServerRequest(platformURL, headers, make(map[string]string, 0))
if reqErr != nil {
if reqErr.(*errors.AgentError).GetErrorCode() == ErrRequestQuery.GetErrorCode() {
return "", ErrNoAddressFound.FormatError(id)
}
return "", reqErr
}
// Get the email
var platformUserInfo PlatformUserInfo
err = json.Unmarshal(platformUserBytes, &platformUserInfo)
if err != nil {
return "", err
}
email := platformUserInfo.Result.Email
log.Debugf("Platform user email %s", email)
return email, nil
}
// getCentralTeam - returns the team based on team name
func (c *ServiceClient) getCentralTeam(teamName string) (*PlatformTeam, error) {
// Query for the default, if no teamName is given
queryParams := map[string]string{}
if teamName != "" {
queryParams = map[string]string{
"query": fmt.Sprintf("name==\"%s\"", teamName),
}
}
platformTeams, err := c.getTeam(queryParams)
if err != nil {
return nil, err
}
if len(platformTeams) == 0 {
return nil, ErrTeamNotFound.FormatError(teamName)
}
team := platformTeams[0]
if teamName == "" {
// Loop through to find the default team
for i, platformTeam := range platformTeams {
if platformTeam.Default {
// Found the default, set as the team var and break
team = platformTeams[i]
break
}
}
}
return &team, nil
}
// getTeam - returns the team ID based on filter
func (c *ServiceClient) | (filterQueryParams map[string]string) ([]PlatformTeam, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
// Get the teams using Client registry service instead of from platform.
// Platform teams API require access and DOSA account will not have the access
platformURL := fmt.Sprintf("%s/api/v1/platformTeams", c.cfg.GetURL())
response, reqErr := c.sendServerRequest(platformURL, headers, filterQueryParams)
if reqErr != nil {
return nil, reqErr
}
var platformTeams []PlatformTeam
err = json.Unmarshal(response, &platformTeams)
if err != nil {
return nil, err
}
return platformTeams, nil
}
// ExecuteAPI - execute the api
func (c *ServiceClient) ExecuteAPI(method, url string, queryParam map[string]string, buffer []byte) ([]byte, error) {
headers, err := c.createHeader()
if err != nil {
return nil, err
}
request := coreapi.Request{
Method: method,
URL: url,
QueryParams: queryParam,
Headers: headers,
Body: buffer,
}
response, err := c.apiClient.Send(request)
if err != nil {
return nil, errors.Wrap(ErrNetwork, err.Error())
}
switch response.Code {
case http.StatusOK:
return response.Body, nil
case http.StatusUnauthorized:
return nil, ErrAuthentication
default:
logResponseErrors(response.Body)
return nil, ErrRequestQuery
}
}
| getTeam | identifier_name |
git.go | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/transitional.dtd">
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Source file /src/pkg/encoding/git85/git.go</title>
<link rel="stylesheet" type="text/css" href="../../../../doc/style.css">
<script type="text/javascript" src="../../../../doc/godocs.js"></script>
</head>
<body>
<script>
// Catch 'enter' key down events and trigger the search form submission.
function codesearchKeyDown(event) {
if (event.which == 13) {
var form = document.getElementById('codesearch');
var query = document.getElementById('codesearchQuery');
form.q.value = "lang:go package:go.googlecode.com " + query.value;
document.getElementById('codesearch').submit();
} return true;
}
// Capture the submission event and construct the query parameter.
function codeSearchSubmit() {
var query = document.getElementById('codesearchQuery');
var form = document.getElementById('codesearch');
form.q.value = "lang:go package:go.googlecode.com " + query.value;
return true;
} </script>
<div id="topnav">
<table summary="">
<tr>
<td id="headerImage">
<a href="../../../../index.html"><img src="../../../../doc/logo-153x55.png" height="55" width="153" alt="Go Home Page" style="border:0" /></a>
</td>
<td>
<div id="headerDocSetTitle">The Go Programming Language</div>
</td>
<td>
<!-- <table>
<tr>
<td>
<! The input box is outside of the form because we want to add | appear in the box when the user presses 'back'. Thus we use a
hidden field in the form. However, there's no way to stop the
non-hidden text box from also submitting a value unless we move
it outside of the form
<input type="search" id="codesearchQuery" value="" size="30" onkeydown="return codesearchKeyDown(event);"/>
<form method="GET" action="http://www.google.com/codesearch" id="codesearch" class="search" onsubmit="return codeSearchSubmit();" style="display:inline;">
<input type="hidden" name="q" value=""/>
<input type="submit" value="Code search" />
<span style="color: red">(TODO: remove for now?)</span>
</form>
</td>
</tr>
<tr>
<td>
<span style="color: gray;">(e.g. “pem” or “xml”)</span>
</td>
</tr>
</table> -->
</td>
</tr>
</table>
</div>
<div id="linkList">
<ul>
<li class="navhead"><a href="../../../../index.html">Home</a></li>
<li class="blank"> </li>
<li class="navhead">Documents</li>
<li><a href="../../../../doc/go_tutorial.html">Tutorial</a></li>
<li><a href="../../../../doc/effective_go.html">Effective Go</a></li>
<li><a href="../../../../doc/go_faq.html">FAQ</a></li>
<li><a href="../../../../doc/go_lang_faq.html">Language Design FAQ</a></li>
<li><a href="http://www.youtube.com/watch?v=rKnDgT73v8s">Tech talk (1 hour)</a> (<a href="../../../../doc/go_talk-20091030.pdf">PDF</a>)</li>
<li><a href="../../../../doc/go_spec.html">Language Specification</a></li>
<li><a href="../../../../doc/go_mem.html">Memory Model</a></li>
<li><a href="../../../../doc/go_for_cpp_programmers.html">Go for C++ Programmers</a></li>
<li class="blank"> </li>
<li class="navhead">How To</li>
<li><a href="../../../../doc/install.html">Install Go</a></li>
<li><a href="../../../../doc/contribute.html">Contribute code</a></li>
<li class="blank"> </li>
<li class="navhead">Programming</li>
<li><a href="../../../../cmd/index.html">Command documentation</a></li>
<li><a href="../../../../pkg/index.html">Package documentation</a></li>
<li><a href="../../../index.html">Source files</a></li>
<li class="blank"> </li>
<li class="navhead">Help</li>
<li>#go-nuts on irc.freenode.net</li>
<li><a href="http://groups.google.com/group/golang-nuts">Go Nuts mailing list</a></li>
<li><a href="http://code.google.com/p/go/issues/list">Issue tracker</a></li>
<li class="blank"> </li>
<li class="navhead">Go code search</li>
<form method="GET" action="http://golang.org/search" class="search">
<input type="search" name="q" value="" size="25" style="width:80%; max-width:200px" />
<input type="submit" value="Go" />
</form>
<li class="blank"> </li>
<li class="navhead">Last update</li>
<li>Thu Nov 12 15:48:37 PST 2009</li>
</ul>
</div>
<div id="content">
<h1 id="generatedHeader">Source file /src/pkg/encoding/git85/git.go</h1>
<!-- The Table of Contents is automatically inserted in this <div>.
Do not delete this <div>. -->
<div id="nav"></div>
<!-- Content is HTML-escaped elsewhere -->
<pre>
<a id="L1"></a><span class="comment">// Copyright 2009 The Go Authors. All rights reserved.</span>
<a id="L2"></a><span class="comment">// Use of this source code is governed by a BSD-style</span>
<a id="L3"></a><span class="comment">// license that can be found in the LICENSE file.</span>
<a id="L5"></a><span class="comment">// Package git85 implements the radix 85 data encoding</span>
<a id="L6"></a><span class="comment">// used in the Git version control system.</span>
<a id="L7"></a>package git85
<a id="L9"></a>import (
<a id="L10"></a>"bytes";
<a id="L11"></a>"io";
<a id="L12"></a>"os";
<a id="L13"></a>"strconv";
<a id="L14"></a>)
<a id="L16"></a>type CorruptInputError int64
<a id="L18"></a>func (e CorruptInputError) String() string {
<a id="L19"></a>return "illegal git85 data at input byte" + strconv.Itoa64(int64(e))
<a id="L20"></a>}
<a id="L22"></a>const encode = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"
<a id="L24"></a><span class="comment">// The decodings are 1+ the actual value, so that the</span>
<a id="L25"></a><span class="comment">// default zero value can be used to mean "not valid".</span>
<a id="L26"></a>var decode = [256]uint8{
<a id="L27"></a>'0': 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
<a id="L28"></a>'A': 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
<a id="L29"></a>24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
<a id="L30"></a>'a': 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
<a id="L31"></a>50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
<a id="L32"></a>'!': 63,
<a id="L33"></a>'#': 64, 65, 66, 67,
<a id="L34"></a>'(': 68, 69, 70, 71,
<a id="L35"></a>'-': 72,
<a id="L36"></a>';': 73,
<a id="L37"></a>'<': 74, 75, 76, 77,
<a id="L38"></a>'@': 78,
<a id="L39"></a>'^': 79, 80, 81,
<a id="L40"></a>'{': 82, 83, 84, 85,
<a id="L41"></a>}
<a id="L43"></a><span class="comment">// Encode encodes src into EncodedLen(len(src))</span>
<a id="L44"></a><span class="comment">// bytes of dst. As a convenience, it returns the number</span>
<a id="L45"></a><span class="comment">// of bytes written to dst, but this value is always EncodedLen(len(src)).</span>
<a id="L46"></a><span class="comment">// Encode implements the radix 85 encoding used in the</span>
<a id="L47"></a><span class="comment">// Git version control tool.</span>
<a id="L48"></a><span class="comment">//</span>
<a id="L49"></a><span class="comment">// The encoding splits src into chunks of at most 52 bytes</span>
<a id="L50"></a><span class="comment">// and encodes each chunk on its own line.</span>
<a id="L51"></a>func Encode(dst, src []byte) int {
<a id="L52"></a>ndst := 0;
<a id="L53"></a>for len(src) > 0 {
<a id="L54"></a>n := len(src);
<a id="L55"></a>if n > 52 {
<a id="L56"></a>n = 52
<a id="L57"></a>}
<a id="L58"></a>if n <= 27 {
<a id="L59"></a>dst[ndst] = byte('A' + n - 1)
<a id="L60"></a>} else {
<a id="L61"></a>dst[ndst] = byte('a' + n - 26 - 1)
<a id="L62"></a>}
<a id="L63"></a>ndst++;
<a id="L64"></a>for i := 0; i < n; i += 4 {
<a id="L65"></a>var v uint32;
<a id="L66"></a>for j := 0; j < 4 && i+j < n; j++ {
<a id="L67"></a>v |= uint32(src[i+j]) << uint(24-j*8)
<a id="L68"></a>}
<a id="L69"></a>for j := 4; j >= 0; j-- {
<a id="L70"></a>dst[ndst+j] = encode[v%85];
<a id="L71"></a>v /= 85;
<a id="L72"></a>}
<a id="L73"></a>ndst += 5;
<a id="L74"></a>}
<a id="L75"></a>dst[ndst] = '\n';
<a id="L76"></a>ndst++;
<a id="L77"></a>src = src[n:len(src)];
<a id="L78"></a>}
<a id="L79"></a>return ndst;
<a id="L80"></a>}
<a id="L82"></a><span class="comment">// EncodedLen returns the length of an encoding of n source bytes.</span>
<a id="L83"></a>func EncodedLen(n int) int {
<a id="L84"></a>if n == 0 {
<a id="L85"></a>return 0
<a id="L86"></a>}
<a id="L87"></a><span class="comment">// 5 bytes per 4 bytes of input, rounded up.</span>
<a id="L88"></a><span class="comment">// 2 extra bytes for each line of 52 src bytes, rounded up.</span>
<a id="L89"></a>return (n+3)/4*5 + (n+51)/52*2;
<a id="L90"></a>}
<a id="L92"></a>var newline = []byte{'\n'}
<a id="L94"></a><span class="comment">// Decode decodes src into at most MaxDecodedLen(len(src))</span>
<a id="L95"></a><span class="comment">// bytes, returning the actual number of bytes written to dst.</span>
<a id="L96"></a><span class="comment">//</span>
<a id="L97"></a><span class="comment">// If Decode encounters invalid input, it returns a CorruptInputError.</span>
<a id="L98"></a><span class="comment">//</span>
<a id="L99"></a>func Decode(dst, src []byte) (n int, err os.Error) {
<a id="L100"></a>ndst := 0;
<a id="L101"></a>nsrc := 0;
<a id="L102"></a>for nsrc < len(src) {
<a id="L103"></a>var l int;
<a id="L104"></a>switch ch := int(src[nsrc]); {
<a id="L105"></a>case 'A' <= ch && ch <= 'Z':
<a id="L106"></a>l = ch - 'A' + 1
<a id="L107"></a>case 'a' <= ch && ch <= 'z':
<a id="L108"></a>l = ch - 'a' + 26 + 1
<a id="L109"></a>default:
<a id="L110"></a>return ndst, CorruptInputError(nsrc)
<a id="L111"></a>}
<a id="L112"></a>if nsrc+1+l > len(src) {
<a id="L113"></a>return ndst, CorruptInputError(nsrc)
<a id="L114"></a>}
<a id="L115"></a>el := (l + 3) / 4 * 5; <span class="comment">// encoded len</span>
<a id="L116"></a>if nsrc+1+el+1 > len(src) || src[nsrc+1+el] != '\n' {
<a id="L117"></a>return ndst, CorruptInputError(nsrc)
<a id="L118"></a>}
<a id="L119"></a>line := src[nsrc+1 : nsrc+1+el];
<a id="L120"></a>for i := 0; i < el; i += 5 {
<a id="L121"></a>var v uint32;
<a id="L122"></a>for j := 0; j < 5; j++ {
<a id="L123"></a>ch := decode[line[i+j]];
<a id="L124"></a>if ch == 0 {
<a id="L125"></a>return ndst, CorruptInputError(nsrc + 1 + i + j)
<a id="L126"></a>}
<a id="L127"></a>v = v*85 + uint32(ch-1);
<a id="L128"></a>}
<a id="L129"></a>for j := 0; j < 4; j++ {
<a id="L130"></a>dst[ndst] = byte(v >> 24);
<a id="L131"></a>v <<= 8;
<a id="L132"></a>ndst++;
<a id="L133"></a>}
<a id="L134"></a>}
<a id="L135"></a><span class="comment">// Last fragment may have run too far (but there was room in dst).</span>
<a id="L136"></a><span class="comment">// Back up.</span>
<a id="L137"></a>if l%4 != 0 {
<a id="L138"></a>ndst -= 4 - l%4
<a id="L139"></a>}
<a id="L140"></a>nsrc += 1 + el + 1;
<a id="L141"></a>}
<a id="L142"></a>return ndst, nil;
<a id="L143"></a>}
<a id="L145"></a>func MaxDecodedLen(n int) int { return n / 5 * 4 }
<a id="L147"></a><span class="comment">// NewEncoder returns a new Git base85 stream encoder. Data written to</span>
<a id="L148"></a><span class="comment">// the returned writer will be encoded and then written to w.</span>
<a id="L149"></a><span class="comment">// The Git encoding operates on 52-byte blocks; when finished</span>
<a id="L150"></a><span class="comment">// writing, the caller must Close the returned encoder to flush any</span>
<a id="L151"></a><span class="comment">// partially written blocks.</span>
<a id="L152"></a>func NewEncoder(w io.Writer) io.WriteCloser { return &encoder{w: w} }
<a id="L154"></a>type encoder struct {
<a id="L155"></a>w io.Writer;
<a id="L156"></a>err os.Error;
<a id="L157"></a>buf [52]byte;
<a id="L158"></a>nbuf int;
<a id="L159"></a>out [1024]byte;
<a id="L160"></a>nout int;
<a id="L161"></a>}
<a id="L163"></a>func (e *encoder) Write(p []byte) (n int, err os.Error) {
<a id="L164"></a>if e.err != nil {
<a id="L165"></a>return 0, e.err
<a id="L166"></a>}
<a id="L168"></a><span class="comment">// Leading fringe.</span>
<a id="L169"></a>if e.nbuf > 0 {
<a id="L170"></a>var i int;
<a id="L171"></a>for i = 0; i < len(p) && e.nbuf < 52; i++ {
<a id="L172"></a>e.buf[e.nbuf] = p[i];
<a id="L173"></a>e.nbuf++;
<a id="L174"></a>}
<a id="L175"></a>n += i;
<a id="L176"></a>p = p[i:len(p)];
<a id="L177"></a>if e.nbuf < 52 {
<a id="L178"></a>return
<a id="L179"></a>}
<a id="L180"></a>nout := Encode(&e.out, &e.buf);
<a id="L181"></a>if _, e.err = e.w.Write(e.out[0:nout]); e.err != nil {
<a id="L182"></a>return n, e.err
<a id="L183"></a>}
<a id="L184"></a>e.nbuf = 0;
<a id="L185"></a>}
<a id="L187"></a><span class="comment">// Large interior chunks.</span>
<a id="L188"></a>for len(p) >= 52 {
<a id="L189"></a>nn := len(e.out) / (1 + 52/4*5 + 1) * 52;
<a id="L190"></a>if nn > len(p) {
<a id="L191"></a>nn = len(p) / 52 * 52
<a id="L192"></a>}
<a id="L193"></a>if nn > 0 {
<a id="L194"></a>nout := Encode(&e.out, p[0:nn]);
<a id="L195"></a>if _, e.err = e.w.Write(e.out[0:nout]); e.err != nil {
<a id="L196"></a>return n, e.err
<a id="L197"></a>}
<a id="L198"></a>}
<a id="L199"></a>n += nn;
<a id="L200"></a>p = p[nn:len(p)];
<a id="L201"></a>}
<a id="L203"></a><span class="comment">// Trailing fringe.</span>
<a id="L204"></a>for i := 0; i < len(p); i++ {
<a id="L205"></a>e.buf[i] = p[i]
<a id="L206"></a>}
<a id="L207"></a>e.nbuf = len(p);
<a id="L208"></a>n += len(p);
<a id="L209"></a>return;
<a id="L210"></a>}
<a id="L212"></a>func (e *encoder) Close() os.Error {
<a id="L213"></a><span class="comment">// If there's anything left in the buffer, flush it out</span>
<a id="L214"></a>if e.err == nil && e.nbuf > 0 {
<a id="L215"></a>nout := Encode(&e.out, e.buf[0:e.nbuf]);
<a id="L216"></a>e.nbuf = 0;
<a id="L217"></a>_, e.err = e.w.Write(e.out[0:nout]);
<a id="L218"></a>}
<a id="L219"></a>return e.err;
<a id="L220"></a>}
<a id="L222"></a><span class="comment">// NewDecoder returns a new Git base85 stream decoder.</span>
<a id="L223"></a>func NewDecoder(r io.Reader) io.Reader { return &decoder{r: r} }
<a id="L225"></a>type decoder struct {
<a id="L226"></a>r io.Reader;
<a id="L227"></a>err os.Error;
<a id="L228"></a>readErr os.Error;
<a id="L229"></a>buf [1024]byte;
<a id="L230"></a>nbuf int;
<a id="L231"></a>out []byte;
<a id="L232"></a>outbuf [1024]byte;
<a id="L233"></a>off int64;
<a id="L234"></a>}
<a id="L236"></a>func (d *decoder) Read(p []byte) (n int, err os.Error) {
<a id="L237"></a>if len(p) == 0 {
<a id="L238"></a>return 0, nil
<a id="L239"></a>}
<a id="L241"></a>for {
<a id="L242"></a><span class="comment">// Copy leftover output from last decode.</span>
<a id="L243"></a>if len(d.out) > 0 {
<a id="L244"></a>n = bytes.Copy(p, d.out);
<a id="L245"></a>d.out = d.out[n:len(d.out)];
<a id="L246"></a>return;
<a id="L247"></a>}
<a id="L249"></a><span class="comment">// Out of decoded output. Check errors.</span>
<a id="L250"></a>if d.err != nil {
<a id="L251"></a>return 0, d.err
<a id="L252"></a>}
<a id="L253"></a>if d.readErr != nil {
<a id="L254"></a>d.err = d.readErr;
<a id="L255"></a>return 0, d.err;
<a id="L256"></a>}
<a id="L258"></a><span class="comment">// Read and decode more input.</span>
<a id="L259"></a>var nn int;
<a id="L260"></a>nn, d.readErr = d.r.Read(d.buf[d.nbuf:len(d.buf)]);
<a id="L261"></a>d.nbuf += nn;
<a id="L263"></a><span class="comment">// Send complete lines to Decode.</span>
<a id="L264"></a>nl := bytes.LastIndex(d.buf[0:d.nbuf], newline);
<a id="L265"></a>if nl < 0 {
<a id="L266"></a>continue
<a id="L267"></a>}
<a id="L268"></a>nn, d.err = Decode(&d.outbuf, d.buf[0:nl+1]);
<a id="L269"></a>if e, ok := d.err.(CorruptInputError); ok {
<a id="L270"></a>d.err = CorruptInputError(int64(e) + d.off)
<a id="L271"></a>}
<a id="L272"></a>d.out = d.outbuf[0:nn];
<a id="L273"></a>d.nbuf = bytes.Copy(&d.buf, d.buf[nl+1:d.nbuf]);
<a id="L274"></a>d.off += int64(nl + 1);
<a id="L275"></a>}
<a id="L276"></a>panic("unreacahable");
<a id="L277"></a>}
</pre>
</div>
<div id="footer">
<p>Except as noted, this content is
licensed under <a href="http://creativecommons.org/licenses/by/3.0/">
Creative Commons Attribution 3.0</a>.
</div>
<script type="text/javascript">
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
</script>
<script type="text/javascript">
var pageTracker = _gat._getTracker("UA-11222381-2");
pageTracker._trackPageview();
</script>
</body>
</html>
<!-- generated at Thu Nov 12 15:42:51 PST 2009 --> | a couple of restricts to the query before submitting. If we just
add the restricts to the text box before submitting, then they | random_line_split |
base.py | import os
from datetime import datetime
from random import randint
from Products.Five.browser import BrowserView
from Products.CMFCore.utils import getToolByName
from collective.sendaspdf import transforms
from collective.sendaspdf.utils import find_filename, update_relative_url
from collective.sendaspdf.utils import md5_hash
from collective.sendaspdf.utils import extract_from_url
from collective.sendaspdf.interfaces import ISendAsPDFOptionsMaker
from plone.transformchain.zpublisher import applyTransform
class BaseView(BrowserView):
""" Class used to factorize some code for the different views
used in the product.
"""
error_mapping = {}
def __init__(self, *args, **kwargs):
    """ We just need to define some instance variables.
    """
    super(BaseView, self).__init__(*args, **kwargs)
    # The list of errors found when checking the form.
    self.errors = []
    # We get the configuration from the portal_sendaspdf
    self.pdf_tool = getToolByName(self.context,
                                  'portal_sendaspdf')
    # Cached tool settings: directory for generated PDF files, the salt
    # used when hashing user emails into filename prefixes, the name of
    # the generator backend (looked up in collective.sendaspdf.transforms)
    # and the filename advertised in outgoing mails.
    self.tempdir = self.pdf_tool.tempdir
    self.salt = self.pdf_tool.salt
    self.pdf_generator = self.pdf_tool.pdf_generator
    self.filename_in_mail = self.pdf_tool.filename_in_mail
    # Filled in by generate_pdf_file(): the (closed) file object of the
    # generated PDF and its name inside self.tempdir.
    self.pdf_file = None
    self.filename = ''
def get_lang(self):
    """Return the site's default language code, falling back to 'en'."""
    properties_tool = getToolByName(self.context,
                                    'portal_properties')
    default = properties_tool.site_properties.getProperty('default_language')
    return default or 'en'
def get_user(self):
    """Return the authenticated member object, or None for anonymous
    visitors.
    """
    membership = getToolByName(self.context, 'portal_membership')
    if not membership.isAnonymousUser():
        return membership.getAuthenticatedMember()
    return None
def get_user_fullname(self):
    """Return the logged-in user's 'fullname' property, or None when
    anonymous.
    """
    member = self.get_user()
    return member.getProperty('fullname') if member else None
def get_user_email(self):
    """Return the logged-in user's 'email' property, or None when
    anonymous.
    """
    member = self.get_user()
    return member.getProperty('email') if member else None
def generate_filename_prefix(self):
    """Return '<md5(email, salt)>_' for the current user, or '' when no
    email is available (anonymous user, or email not mandatory).

    The prefix doubles as an access check: check_pdf_accessibility only
    lets a user fetch files whose names start with the hash of his own
    email.
    """
    email = self.get_user_email()
    if email:
        return '%s_' % md5_hash(email, self.salt)
    return ''
def get_page_source(self):
    """ Returns the HTML source of a web page, considering
    that the URL of the page is contained in the form under
    the 'page_url' key.

    Records 'no_source' and returns None when the form does not
    provide 'page_url'.
    """
    if not 'page_url' in self.request.form:
        self.errors.append('no_source')
        return
    url = self.request.form['page_url']
    context_url = self.context.absolute_url()
    # Split the submitted URL into a traversable view name and its
    # query-string parameters, relative to the current context.
    view_name, get_params = extract_from_url(url, context_url)
    # Now we will reinject the GET parameters in the request.
    if get_params:
        for key in get_params:
            self.request.form[key] = get_params[key]
    try:
        view = self.context.restrictedTraverse(view_name)
    except:
        # For some reason, 'view' as view name can fail (at least
        # in a client project in Plone 3.3.5), where calling the
        # same url in a browser works just fine... The error here is:
        # AttributeError: 'view' object has no attribute '__of__'
        # Just take the context as view then.
        # Note that this has the same effect as calling
        # self.context.restrictedTraverse(x), where x is None or
        # an empty string.
        view = self.context
    body = view()
    # Run the rendered body through plone.transformchain (theming,
    # etc.), then rewrite relative links so they resolve in the PDF.
    result = applyTransform(self.request, body=body)
    return update_relative_url(result, self.context)
def generate_temp_filename(self):
    """Build a unique name for the temporary PDF file.

    The name is the user prefix (md5 hash of the email, empty for
    anonymous users) followed by a timestamp and a random number;
    find_filename resolves any remaining collision by appending '-x'.
    """
    now = datetime.now()
    # Crude but sufficient timestamp: all timetuple fields glued
    # together, plus microseconds and a random component.
    parts = [''.join(str(x) for x in now.timetuple()),
             str(now.microsecond),
             str(randint(10000, 99999))]
    candidate = self.generate_filename_prefix() + '-'.join(parts)
    return find_filename(self.tempdir,
                         candidate)
def _get_adapter_options(self):
    """Return (options, override_all) from the ISendAsPDFOptionsMaker
    adapter, or ({}, None) when no adapter is registered for the
    context.
    """
    try:
        maker = ISendAsPDFOptionsMaker(self.context)
    except TypeError:
        # No component registered for this interface and context.
        return {}, None
    return maker.getOptions(), maker.overrideAll()
def get_extra_options(self):
    """ Fetches the options defined in the request, the tool
    or an adapter.

    Returns a flat list of command-line style options for the
    configured PDF generator; lookup priority is request, then tool,
    then adapter (adapter first when it declares overrideAll()).
    """
    # Options change depending on the pdf generator..
    try:
        transform_module = getattr(transforms, self.pdf_generator)
    except AttributeError:
        return []
    options = []
    tool_options = self.pdf_tool.make_options()
    adapter_options, adapter_overrides = self._get_adapter_options()
    opts_order = [self.request, tool_options]
    if adapter_overrides:
        opts_order.insert(0, adapter_options)
    else:
        opts_order.append(adapter_options)
    # First we check the options for which no value is
    # needed.
    # For each one, it is possible to define a --no-xxx
    # option.
    for opt_name in transform_module.simple_options:
        for opts in opts_order:
            if opts.get('--no-%s' % opt_name):
                break
            if opts.get(opt_name, None):
                options.append('--%s' % opt_name)
                break
    # Then we check values that expect a value.
    for opt_name in transform_module.valued_options:
        for opts in opts_order:
            opt_val = opts.get(opt_name, None)
            if opt_val is None:
                continue
            # Value is put before the option name as we
            # insert them after in another list using l.insert(2, opt)
            if isinstance(opt_val, list):
                for x in reversed(opt_val):
                    options.append(str(x))
            else:
                options.append(str(opt_val))
            options.append('--%s' % opt_name)
            break
    return options
def generate_pdf_file(self, source):
    """ Generates a PDF file from the given source
    (string containing the HTML source of a page).

    On success, registers the file with portal_sendaspdf and stores
    the (closed) file object in self.pdf_file and its name in
    self.filename; on failure, appends an error code to self.errors.
    """
    filename = self.generate_temp_filename()
    if not filename:
        self.errors.append('filename_generation_failed')
        return
    try:
        transform_module = getattr(transforms, self.pdf_generator)
    except AttributeError:
        # The configured generator name has no matching module in
        # collective.sendaspdf.transforms.
        self.errors.append('wrong_generator_configuration')
        return
    self.filename = filename
    url = self.context.absolute_url()
    print_css = (self.pdf_tool.always_print_css or
                 self.context.portal_type in self.pdf_tool.print_css_types)
    # When the source is sent through Ajax, it's already encoded
    # as a utf-8 string. When using it without javascript, the
    # source comes from a view, which always returns unicode. In
    # that case we need to encode it.
    # NOTE(review): the 'unicode' builtin makes this Python 2 only.
    if isinstance(source, unicode):
        source = source.encode('utf-8')
    export_file, err = transform_module.html_to_pdf(
        source,
        self.tempdir,
        filename,
        url,
        print_css,
        self.get_extra_options())
    if err:
        self.errors.append('pdf_creation_failed')
        return
    # registerPDF presumably records the file in the tool for later
    # retrieval/cleanup — TODO confirm against portal_sendaspdf.
    self.pdf_tool.registerPDF(filename)
    self.pdf_file = export_file
    # The file object is closed here; consumers reopen the file from
    # self.tempdir using self.filename.
    self.pdf_file.close()
def make_pdf(self):
    """Fetch the rendered page source and turn it into a PDF file.

    Any problem is recorded in self.errors; generation is skipped as
    soon as an error has been registered.
    """
    source = self.get_page_source()
    if not source:
        self.errors.append('no_source')
    if self.errors:
        return
    self.generate_pdf_file(source)
def show_error_message(self, error_name):
    """ Tells if an error message should be shown in the template.
    """
    # True when this error code was recorded in self.errors while
    # checking the form.
    return error_name in self.errors
def class_for_field(self, fieldname):
    """Return the CSS class for a form field.

    Always 'field'; ' error' is appended when the field name itself
    (no mapping defined) or one of its mapped error codes appears in
    self.errors.
    """
    base_class = 'field'
    error_class = ' error'
    mapped_errors = self.error_mapping.get(fieldname)
    if mapped_errors is None:
        # No mapping: the field name doubles as its error code.
        if fieldname in self.errors:
            return base_class + error_class
        return base_class
    for error_name in mapped_errors:
        if self.show_error_message(error_name):
            return base_class + error_class
    return base_class
def check_pdf_accessibility(self):
""" Check that the filename given in the request
can be accessed by the user.
"""
if not 'pdf_name' in self.request.form:
# Should not happen.
self.errors.append('file_not_specified')
return
filename = self.request.form['pdf_name']
prefix = self.generate_filename_prefix()
if not filename.startswith(prefix):
# The user should not be able to see this file.
self.errors.append('file_unauthorized')
return
if not filename in os.listdir(self.tempdir):
self.errors.append('file_not_found')
self.request.response.setStatus(404)
return | random_line_split | |
base.py | import os
from datetime import datetime
from random import randint
from Products.Five.browser import BrowserView
from Products.CMFCore.utils import getToolByName
from collective.sendaspdf import transforms
from collective.sendaspdf.utils import find_filename, update_relative_url
from collective.sendaspdf.utils import md5_hash
from collective.sendaspdf.utils import extract_from_url
from collective.sendaspdf.interfaces import ISendAsPDFOptionsMaker
from plone.transformchain.zpublisher import applyTransform
class BaseView(BrowserView):
""" Class used to factorize some code for the different views
used in the product.
"""
error_mapping = {}
def __init__(self, *args, **kwargs):
""" We just need to define some instance variables.
"""
super(BaseView, self).__init__(*args, **kwargs)
# The list of errors found when checking the form.
self.errors = []
# We get the configuration from the portal_sendaspdf
self.pdf_tool = getToolByName(self.context,
'portal_sendaspdf')
self.tempdir = self.pdf_tool.tempdir
self.salt = self.pdf_tool.salt
self.pdf_generator = self.pdf_tool.pdf_generator
self.filename_in_mail = self.pdf_tool.filename_in_mail
self.pdf_file = None
self.filename = ''
def get_lang(self):
""" Finds the language to use.
"""
props = getToolByName(self.context,
'portal_properties')
return props.site_properties.getProperty('default_language') or 'en'
def get_user(self):
    """ Returns the currently logged-in user.

    Returns None for anonymous visitors.
    """
    mtool = getToolByName(self.context, 'portal_membership')
    if mtool.isAnonymousUser():
        # Anonymous: nothing to return.
        return
    return mtool.getAuthenticatedMember()
def get_user_fullname(self):
""" Returns the currently logged-in user's fullname.
"""
member = self.get_user()
if member:
return member.getProperty('fullname')
def get_user_email(self):
""" Returns the currently logged-in user's email.
"""
member = self.get_user()
if member:
return member.getProperty('email')
def generate_filename_prefix(self):
""" Returns the user's email hashed in md5 followed
by an underscore.
If we can not get an email (the user is anonymous or
email is not mandatory in the system), returns an empty
string.
We extract it from 'generate_temp_filename as we will
also use this sytem to be sure that the user has
access to the file when sending it.
"""
email = self.get_user_email()
if not email:
return ''
return '%s_' % md5_hash(email, self.salt)
def get_page_source(self):
""" Returns the HTML source of a web page, considering
that the URL of the page is contained in the form under
the 'page_url' key.
"""
if not 'page_url' in self.request.form:
self.errors.append('no_source')
return
url = self.request.form['page_url']
context_url = self.context.absolute_url()
view_name, get_params = extract_from_url(url, context_url)
# Now we will reinject the GET parameters in the request.
if get_params:
for key in get_params:
self.request.form[key] = get_params[key]
try:
view = self.context.restrictedTraverse(view_name)
except:
# For some reason, 'view' as view name can fail (at least
# in a client project in Plone 3.3.5), where calling the
# same url in a browser works just fine... The error here is:
# AttributeError: 'view' object has no attribute '__of__'
# Just take the context as view then.
# Note that this has the same effect as calling
# self.context.restrictedTraverse(x), where x is None or
# an empty string.
view = self.context
body = view()
result = applyTransform(self.request, body=body)
return update_relative_url(result, self.context)
def generate_temp_filename(self):
""" Generates the filename used to store the PDF file.
Basically the md5 hash of th user's email followed
by a timestamp.
If the user is anonymous, just use the timestamp.
In case of conflict, we just happen '-x' at the end.
"""
prefix = self.generate_filename_prefix()
now = datetime.now()
# Ok that might not be the best timestamp system, but it's
# enough for our needs.
timestamp = '-'.join([
''.join([str(x) for x in now.timetuple()]),
str(now.microsecond),
str(randint(10000, 99999))])
filename = prefix + timestamp
return find_filename(self.tempdir,
filename)
def _get_adapter_options(self):
try:
adapter = ISendAsPDFOptionsMaker(self.context)
except TypeError:
# There's no product implementing the adapter
# available.
return {}, None
return adapter.getOptions(), adapter.overrideAll()
def get_extra_options(self):
""" Fetches the options defined in the request, the tool
or an adapter.
"""
# Options change depending on the pdf generator..
try:
transform_module = getattr(transforms, self.pdf_generator)
except AttributeError:
return []
options = []
tool_options = self.pdf_tool.make_options()
adapter_options, adapter_overrides = self._get_adapter_options()
opts_order = [self.request, tool_options]
if adapter_overrides:
opts_order.insert(0, adapter_options)
else:
opts_order.append(adapter_options)
# First we check the options for which no value is
# needed.
# For each one, it is possible to define a --no-xxx
# option.
for opt_name in transform_module.simple_options:
for opts in opts_order:
if opts.get('--no-%s' % opt_name):
break
if opts.get(opt_name, None):
options.append('--%s' % opt_name)
break
# Then we check values that expect a value.
for opt_name in transform_module.valued_options:
for opts in opts_order:
opt_val = opts.get(opt_name, None)
if opt_val is None:
continue
# Value is put before the option name as we
# insert them after in another list using l.insert(2, opt)
if isinstance(opt_val, list):
for x in reversed(opt_val):
options.append(str(x))
else:
options.append(str(opt_val))
options.append('--%s' % opt_name)
break
return options
def generate_pdf_file(self, source):
""" Generates a PDF file from the given source
(string containing the HTML source of a page).
"""
filename = self.generate_temp_filename()
if not filename:
self.errors.append('filename_generation_failed')
return
try:
transform_module = getattr(transforms, self.pdf_generator)
except AttributeError:
self.errors.append('wrong_generator_configuration')
return
self.filename = filename
url = self.context.absolute_url()
print_css = (self.pdf_tool.always_print_css or
self.context.portal_type in self.pdf_tool.print_css_types)
# When the source is sent through Ajax, it's already encoded
# as a utf-8 string. When using it without javascript, the
# source comes from a view, which always returns unicode. In
# that case we need to encode it.
if isinstance(source, unicode):
source = source.encode('utf-8')
export_file, err = transform_module.html_to_pdf(
source,
self.tempdir,
filename,
url,
print_css,
self.get_extra_options())
if err:
self.errors.append('pdf_creation_failed')
return
self.pdf_tool.registerPDF(filename)
self.pdf_file = export_file
self.pdf_file.close()
def make_pdf(self):
""" Fetches the page source and generates a PDF.
"""
source = self.get_page_source()
if not source:
self.errors.append('no_source')
if not self.errors:
self.generate_pdf_file(source)
def show_error_message(self, error_name):
""" Tells if an error message should be shown in the template.
"""
return error_name in self.errors
def class_for_field(self, fieldname):
""" Returns the class that should be applied to a field
in the forms displayed by the product.
"""
base_class = 'field'
error_class = ' error'
if not fieldname in self.error_mapping:
if fieldname in self.errors:
base_class += error_class
return base_class
for error_name in self.error_mapping[fieldname]:
if self.show_error_message(error_name):
return base_class + error_class
return base_class
def check_pdf_accessibility(self):
""" Check that the filename given in the request
can be accessed by the user.
"""
if not 'pdf_name' in self.request.form:
# Should not happen.
self.errors.append('file_not_specified')
return
filename = self.request.form['pdf_name']
prefix = self.generate_filename_prefix()
if not filename.startswith(prefix):
# The user should not be able to see this file.
self.errors.append('file_unauthorized')
return
if not filename in os.listdir(self.tempdir):
self.errors.append('file_not_found')
self.request.response.setStatus(404)
return
| return | conditional_block |
base.py | import os
from datetime import datetime
from random import randint
from Products.Five.browser import BrowserView
from Products.CMFCore.utils import getToolByName
from collective.sendaspdf import transforms
from collective.sendaspdf.utils import find_filename, update_relative_url
from collective.sendaspdf.utils import md5_hash
from collective.sendaspdf.utils import extract_from_url
from collective.sendaspdf.interfaces import ISendAsPDFOptionsMaker
from plone.transformchain.zpublisher import applyTransform
class BaseView(BrowserView):
""" Class used to factorize some code for the different views
used in the product.
"""
error_mapping = {}
def __init__(self, *args, **kwargs):
""" We just need to define some instance variables.
"""
super(BaseView, self).__init__(*args, **kwargs)
# The list of errors found when checking the form.
self.errors = []
# We get the configuration from the portal_sendaspdf
self.pdf_tool = getToolByName(self.context,
'portal_sendaspdf')
self.tempdir = self.pdf_tool.tempdir
self.salt = self.pdf_tool.salt
self.pdf_generator = self.pdf_tool.pdf_generator
self.filename_in_mail = self.pdf_tool.filename_in_mail
self.pdf_file = None
self.filename = ''
def get_lang(self):
""" Finds the language to use.
"""
props = getToolByName(self.context,
'portal_properties')
return props.site_properties.getProperty('default_language') or 'en'
def get_user(self):
""" Returns the currently logged-in user.
"""
mtool = getToolByName(self.context, 'portal_membership')
if mtool.isAnonymousUser():
return
return mtool.getAuthenticatedMember()
def get_user_fullname(self):
|
def get_user_email(self):
""" Returns the currently logged-in user's email.
"""
member = self.get_user()
if member:
return member.getProperty('email')
def generate_filename_prefix(self):
""" Returns the user's email hashed in md5 followed
by an underscore.
If we can not get an email (the user is anonymous or
email is not mandatory in the system), returns an empty
string.
We extract it from 'generate_temp_filename as we will
also use this sytem to be sure that the user has
access to the file when sending it.
"""
email = self.get_user_email()
if not email:
return ''
return '%s_' % md5_hash(email, self.salt)
def get_page_source(self):
""" Returns the HTML source of a web page, considering
that the URL of the page is contained in the form under
the 'page_url' key.
"""
if not 'page_url' in self.request.form:
self.errors.append('no_source')
return
url = self.request.form['page_url']
context_url = self.context.absolute_url()
view_name, get_params = extract_from_url(url, context_url)
# Now we will reinject the GET parameters in the request.
if get_params:
for key in get_params:
self.request.form[key] = get_params[key]
try:
view = self.context.restrictedTraverse(view_name)
except:
# For some reason, 'view' as view name can fail (at least
# in a client project in Plone 3.3.5), where calling the
# same url in a browser works just fine... The error here is:
# AttributeError: 'view' object has no attribute '__of__'
# Just take the context as view then.
# Note that this has the same effect as calling
# self.context.restrictedTraverse(x), where x is None or
# an empty string.
view = self.context
body = view()
result = applyTransform(self.request, body=body)
return update_relative_url(result, self.context)
def generate_temp_filename(self):
""" Generates the filename used to store the PDF file.
Basically the md5 hash of th user's email followed
by a timestamp.
If the user is anonymous, just use the timestamp.
In case of conflict, we just happen '-x' at the end.
"""
prefix = self.generate_filename_prefix()
now = datetime.now()
# Ok that might not be the best timestamp system, but it's
# enough for our needs.
timestamp = '-'.join([
''.join([str(x) for x in now.timetuple()]),
str(now.microsecond),
str(randint(10000, 99999))])
filename = prefix + timestamp
return find_filename(self.tempdir,
filename)
def _get_adapter_options(self):
try:
adapter = ISendAsPDFOptionsMaker(self.context)
except TypeError:
# There's no product implementing the adapter
# available.
return {}, None
return adapter.getOptions(), adapter.overrideAll()
def get_extra_options(self):
""" Fetches the options defined in the request, the tool
or an adapter.
"""
# Options change depending on the pdf generator..
try:
transform_module = getattr(transforms, self.pdf_generator)
except AttributeError:
return []
options = []
tool_options = self.pdf_tool.make_options()
adapter_options, adapter_overrides = self._get_adapter_options()
opts_order = [self.request, tool_options]
if adapter_overrides:
opts_order.insert(0, adapter_options)
else:
opts_order.append(adapter_options)
# First we check the options for which no value is
# needed.
# For each one, it is possible to define a --no-xxx
# option.
for opt_name in transform_module.simple_options:
for opts in opts_order:
if opts.get('--no-%s' % opt_name):
break
if opts.get(opt_name, None):
options.append('--%s' % opt_name)
break
# Then we check values that expect a value.
for opt_name in transform_module.valued_options:
for opts in opts_order:
opt_val = opts.get(opt_name, None)
if opt_val is None:
continue
# Value is put before the option name as we
# insert them after in another list using l.insert(2, opt)
if isinstance(opt_val, list):
for x in reversed(opt_val):
options.append(str(x))
else:
options.append(str(opt_val))
options.append('--%s' % opt_name)
break
return options
def generate_pdf_file(self, source):
""" Generates a PDF file from the given source
(string containing the HTML source of a page).
"""
filename = self.generate_temp_filename()
if not filename:
self.errors.append('filename_generation_failed')
return
try:
transform_module = getattr(transforms, self.pdf_generator)
except AttributeError:
self.errors.append('wrong_generator_configuration')
return
self.filename = filename
url = self.context.absolute_url()
print_css = (self.pdf_tool.always_print_css or
self.context.portal_type in self.pdf_tool.print_css_types)
# When the source is sent through Ajax, it's already encoded
# as a utf-8 string. When using it without javascript, the
# source comes from a view, which always returns unicode. In
# that case we need to encode it.
if isinstance(source, unicode):
source = source.encode('utf-8')
export_file, err = transform_module.html_to_pdf(
source,
self.tempdir,
filename,
url,
print_css,
self.get_extra_options())
if err:
self.errors.append('pdf_creation_failed')
return
self.pdf_tool.registerPDF(filename)
self.pdf_file = export_file
self.pdf_file.close()
def make_pdf(self):
""" Fetches the page source and generates a PDF.
"""
source = self.get_page_source()
if not source:
self.errors.append('no_source')
if not self.errors:
self.generate_pdf_file(source)
def show_error_message(self, error_name):
""" Tells if an error message should be shown in the template.
"""
return error_name in self.errors
def class_for_field(self, fieldname):
""" Returns the class that should be applied to a field
in the forms displayed by the product.
"""
base_class = 'field'
error_class = ' error'
if not fieldname in self.error_mapping:
if fieldname in self.errors:
base_class += error_class
return base_class
for error_name in self.error_mapping[fieldname]:
if self.show_error_message(error_name):
return base_class + error_class
return base_class
def check_pdf_accessibility(self):
""" Check that the filename given in the request
can be accessed by the user.
"""
if not 'pdf_name' in self.request.form:
# Should not happen.
self.errors.append('file_not_specified')
return
filename = self.request.form['pdf_name']
prefix = self.generate_filename_prefix()
if not filename.startswith(prefix):
# The user should not be able to see this file.
self.errors.append('file_unauthorized')
return
if not filename in os.listdir(self.tempdir):
self.errors.append('file_not_found')
self.request.response.setStatus(404)
return
| """ Returns the currently logged-in user's fullname.
"""
member = self.get_user()
if member:
return member.getProperty('fullname') | identifier_body |
base.py | import os
from datetime import datetime
from random import randint
from Products.Five.browser import BrowserView
from Products.CMFCore.utils import getToolByName
from collective.sendaspdf import transforms
from collective.sendaspdf.utils import find_filename, update_relative_url
from collective.sendaspdf.utils import md5_hash
from collective.sendaspdf.utils import extract_from_url
from collective.sendaspdf.interfaces import ISendAsPDFOptionsMaker
from plone.transformchain.zpublisher import applyTransform
class BaseView(BrowserView):
""" Class used to factorize some code for the different views
used in the product.
"""
error_mapping = {}
def __init__(self, *args, **kwargs):
""" We just need to define some instance variables.
"""
super(BaseView, self).__init__(*args, **kwargs)
# The list of errors found when checking the form.
self.errors = []
# We get the configuration from the portal_sendaspdf
self.pdf_tool = getToolByName(self.context,
'portal_sendaspdf')
self.tempdir = self.pdf_tool.tempdir
self.salt = self.pdf_tool.salt
self.pdf_generator = self.pdf_tool.pdf_generator
self.filename_in_mail = self.pdf_tool.filename_in_mail
self.pdf_file = None
self.filename = ''
def get_lang(self):
""" Finds the language to use.
"""
props = getToolByName(self.context,
'portal_properties')
return props.site_properties.getProperty('default_language') or 'en'
def get_user(self):
""" Returns the currently logged-in user.
"""
mtool = getToolByName(self.context, 'portal_membership')
if mtool.isAnonymousUser():
return
return mtool.getAuthenticatedMember()
def get_user_fullname(self):
""" Returns the currently logged-in user's fullname.
"""
member = self.get_user()
if member:
return member.getProperty('fullname')
def get_user_email(self):
""" Returns the currently logged-in user's email.
"""
member = self.get_user()
if member:
return member.getProperty('email')
def generate_filename_prefix(self):
""" Returns the user's email hashed in md5 followed
by an underscore.
If we can not get an email (the user is anonymous or
email is not mandatory in the system), returns an empty
string.
We extract it from 'generate_temp_filename as we will
also use this sytem to be sure that the user has
access to the file when sending it.
"""
email = self.get_user_email()
if not email:
return ''
return '%s_' % md5_hash(email, self.salt)
def get_page_source(self):
""" Returns the HTML source of a web page, considering
that the URL of the page is contained in the form under
the 'page_url' key.
"""
if not 'page_url' in self.request.form:
self.errors.append('no_source')
return
url = self.request.form['page_url']
context_url = self.context.absolute_url()
view_name, get_params = extract_from_url(url, context_url)
# Now we will reinject the GET parameters in the request.
if get_params:
for key in get_params:
self.request.form[key] = get_params[key]
try:
view = self.context.restrictedTraverse(view_name)
except:
# For some reason, 'view' as view name can fail (at least
# in a client project in Plone 3.3.5), where calling the
# same url in a browser works just fine... The error here is:
# AttributeError: 'view' object has no attribute '__of__'
# Just take the context as view then.
# Note that this has the same effect as calling
# self.context.restrictedTraverse(x), where x is None or
# an empty string.
view = self.context
body = view()
result = applyTransform(self.request, body=body)
return update_relative_url(result, self.context)
def generate_temp_filename(self):
""" Generates the filename used to store the PDF file.
Basically the md5 hash of th user's email followed
by a timestamp.
If the user is anonymous, just use the timestamp.
In case of conflict, we just happen '-x' at the end.
"""
prefix = self.generate_filename_prefix()
now = datetime.now()
# Ok that might not be the best timestamp system, but it's
# enough for our needs.
timestamp = '-'.join([
''.join([str(x) for x in now.timetuple()]),
str(now.microsecond),
str(randint(10000, 99999))])
filename = prefix + timestamp
return find_filename(self.tempdir,
filename)
def _get_adapter_options(self):
try:
adapter = ISendAsPDFOptionsMaker(self.context)
except TypeError:
# There's no product implementing the adapter
# available.
return {}, None
return adapter.getOptions(), adapter.overrideAll()
def get_extra_options(self):
""" Fetches the options defined in the request, the tool
or an adapter.
"""
# Options change depending on the pdf generator..
try:
transform_module = getattr(transforms, self.pdf_generator)
except AttributeError:
return []
options = []
tool_options = self.pdf_tool.make_options()
adapter_options, adapter_overrides = self._get_adapter_options()
opts_order = [self.request, tool_options]
if adapter_overrides:
opts_order.insert(0, adapter_options)
else:
opts_order.append(adapter_options)
# First we check the options for which no value is
# needed.
# For each one, it is possible to define a --no-xxx
# option.
for opt_name in transform_module.simple_options:
for opts in opts_order:
if opts.get('--no-%s' % opt_name):
break
if opts.get(opt_name, None):
options.append('--%s' % opt_name)
break
# Then we check values that expect a value.
for opt_name in transform_module.valued_options:
for opts in opts_order:
opt_val = opts.get(opt_name, None)
if opt_val is None:
continue
# Value is put before the option name as we
# insert them after in another list using l.insert(2, opt)
if isinstance(opt_val, list):
for x in reversed(opt_val):
options.append(str(x))
else:
options.append(str(opt_val))
options.append('--%s' % opt_name)
break
return options
def generate_pdf_file(self, source):
""" Generates a PDF file from the given source
(string containing the HTML source of a page).
"""
filename = self.generate_temp_filename()
if not filename:
self.errors.append('filename_generation_failed')
return
try:
transform_module = getattr(transforms, self.pdf_generator)
except AttributeError:
self.errors.append('wrong_generator_configuration')
return
self.filename = filename
url = self.context.absolute_url()
print_css = (self.pdf_tool.always_print_css or
self.context.portal_type in self.pdf_tool.print_css_types)
# When the source is sent through Ajax, it's already encoded
# as a utf-8 string. When using it without javascript, the
# source comes from a view, which always returns unicode. In
# that case we need to encode it.
if isinstance(source, unicode):
source = source.encode('utf-8')
export_file, err = transform_module.html_to_pdf(
source,
self.tempdir,
filename,
url,
print_css,
self.get_extra_options())
if err:
self.errors.append('pdf_creation_failed')
return
self.pdf_tool.registerPDF(filename)
self.pdf_file = export_file
self.pdf_file.close()
def make_pdf(self):
""" Fetches the page source and generates a PDF.
"""
source = self.get_page_source()
if not source:
self.errors.append('no_source')
if not self.errors:
self.generate_pdf_file(source)
def show_error_message(self, error_name):
""" Tells if an error message should be shown in the template.
"""
return error_name in self.errors
def | (self, fieldname):
""" Returns the class that should be applied to a field
in the forms displayed by the product.
"""
base_class = 'field'
error_class = ' error'
if not fieldname in self.error_mapping:
if fieldname in self.errors:
base_class += error_class
return base_class
for error_name in self.error_mapping[fieldname]:
if self.show_error_message(error_name):
return base_class + error_class
return base_class
def check_pdf_accessibility(self):
""" Check that the filename given in the request
can be accessed by the user.
"""
if not 'pdf_name' in self.request.form:
# Should not happen.
self.errors.append('file_not_specified')
return
filename = self.request.form['pdf_name']
prefix = self.generate_filename_prefix()
if not filename.startswith(prefix):
# The user should not be able to see this file.
self.errors.append('file_unauthorized')
return
if not filename in os.listdir(self.tempdir):
self.errors.append('file_not_found')
self.request.response.setStatus(404)
return
| class_for_field | identifier_name |
apply.rs | use crate::{
patch::{Hunk, Line, Patch},
utils::LineIter,
};
use std::collections::VecDeque;
use std::{fmt, iter};
/// An error returned when [`apply`]ing a `Patch` fails
///
/// [`apply`]: fn.apply.html
#[derive(Debug)]
pub struct ApplyError(usize);
impl fmt::Display for ApplyError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "error applying hunk #{}", self.0)
}
}
impl std::error::Error for ApplyError {}
#[derive(Debug)]
enum ImageLine<'a, T: ?Sized> {
Unpatched(&'a T),
Patched(&'a T),
}
impl<'a, T: ?Sized> ImageLine<'a, T> {
fn inner(&self) -> &'a T {
match self {
ImageLine::Unpatched(inner) | ImageLine::Patched(inner) => inner,
}
}
fn into_inner(self) -> &'a T {
self.inner()
}
fn is_patched(&self) -> bool {
match self {
ImageLine::Unpatched(_) => false,
ImageLine::Patched(_) => true,
}
}
}
impl<T: ?Sized> Copy for ImageLine<'_, T> {}
impl<T: ?Sized> Clone for ImageLine<'_, T> {
fn clone(&self) -> Self {
*self
}
}
#[derive(Debug)]
pub struct ApplyOptions {
max_fuzzy: usize,
}
impl Default for ApplyOptions {
fn default() -> Self {
ApplyOptions::new()
}
}
impl ApplyOptions {
pub fn new() -> Self {
ApplyOptions { max_fuzzy: 0 }
}
pub fn with_max_fuzzy(mut self, max_fuzzy: usize) -> Self {
self.max_fuzzy = max_fuzzy;
self
}
}
/// Apply a `Patch` to a base image
///
/// ```
/// use diffy::{apply, Patch};
///
/// let s = "\
/// --- a/ideals
/// +++ b/ideals
/// @@ -1,4 +1,6 @@
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// +Second:
/// + I will protect those who cannot protect themselves.
/// ";
///
/// let patch = Patch::from_str(s).unwrap();
///
/// let base_image = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// ";
///
/// let expected = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// Second:
/// I will protect those who cannot protect themselves.
/// ";
///
/// assert_eq!(apply(base_image, &patch).unwrap(), expected);
/// ```
pub fn apply(base_image: &str, patch: &Patch<'_, str>) -> Result<String, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image.into_iter().map(ImageLine::into_inner).collect())
}
/// Apply a non-utf8 `Patch` to a base image
pub fn apply_bytes(base_image: &[u8], patch: &Patch<'_, [u8]>) -> Result<Vec<u8>, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect())
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all_bytes(
base_image: &[u8],
patch: &Patch<'_, [u8]>,
options: ApplyOptions,
) -> (Vec<u8>, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect(),
failed_indices,
)
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all(
base_image: &str,
patch: &Patch<'_, str>,
options: ApplyOptions,
) -> (String, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image.into_iter().map(ImageLine::into_inner).collect(),
failed_indices,
)
}
fn apply_hunk<'a, T: PartialEq + ?Sized>(
image: &mut Vec<ImageLine<'a, T>>,
hunk: &Hunk<'a, T>,
options: &ApplyOptions,
) -> Result<(), ()> {
// Find position
let max_fuzzy = pre_context_line_count(hunk.lines())
.min(post_context_line_count(hunk.lines()))
.min(options.max_fuzzy);
let (pos, fuzzy) = find_position(image, hunk, max_fuzzy).ok_or(())?;
let begin = pos + fuzzy;
let end = pos
+ pre_image_line_count(hunk.lines())
.checked_sub(fuzzy)
.unwrap_or(0);
// update image
image.splice(
begin..end,
skip_last(post_image(hunk.lines()).skip(fuzzy), fuzzy).map(ImageLine::Patched),
);
Ok(())
}
// Search in `image` for a palce to apply hunk.
// This follows the general algorithm (minus fuzzy-matching context lines) described in GNU patch's
// man page.
//
// It might be worth looking into other possible positions to apply the hunk to as described here:
// https://neil.fraser.name/writing/patch/
fn find_position<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
hunk: &Hunk<'_, T>,
max_fuzzy: usize,
) -> Option<(usize, usize)> {
let pos = hunk.new_range().start().saturating_sub(1);
for fuzzy in 0..=max_fuzzy {
// Create an iterator that starts with 'pos' and then interleaves
// moving pos backward/foward by one.
let backward = (0..pos).rev();
let forward = pos + 1..image.len();
for pos in iter::once(pos).chain(interleave(backward, forward)) {
if match_fragment(image, hunk.lines(), pos, fuzzy) {
return Some((pos, fuzzy));
}
}
}
None
}
fn pre_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn post_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.rev()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn pre_image_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
pre_image(lines).count()
}
fn post_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Insert(l) => Some(*l),
Line::Delete(_) => None,
})
}
fn pre_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Delete(l) => Some(*l),
Line::Insert(_) => None,
})
}
fn match_fragment<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
lines: &[Line<'_, T>],
pos: usize,
fuzzy: usize,
) -> bool {
let len = pre_image_line_count(lines);
let begin = pos + fuzzy;
let end = pos + len.checked_sub(fuzzy).unwrap_or(0);
let image = if let Some(image) = image.get(begin..end) {
image
} else | ;
// If any of these lines have already been patched then we can't match at this position
if image.iter().any(ImageLine::is_patched) {
return false;
}
pre_image(lines).eq(image.iter().map(ImageLine::inner))
}
#[derive(Debug)]
struct Interleave<I, J> {
a: iter::Fuse<I>,
b: iter::Fuse<J>,
flag: bool,
}
fn interleave<I, J>(
i: I,
j: J,
) -> Interleave<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
where
I: IntoIterator,
J: IntoIterator<Item = I::Item>,
{
Interleave {
a: i.into_iter().fuse(),
b: j.into_iter().fuse(),
flag: false,
}
}
impl<I, J> Iterator for Interleave<I, J>
where
I: Iterator,
J: Iterator<Item = I::Item>,
{
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
self.flag = !self.flag;
if self.flag {
match self.a.next() {
None => self.b.next(),
item => item,
}
} else {
match self.b.next() {
None => self.a.next(),
item => item,
}
}
}
}
fn skip_last<I: Iterator>(iter: I, count: usize) -> SkipLast<I, I::Item> {
SkipLast {
iter: iter.fuse(),
buffer: VecDeque::with_capacity(count),
count,
}
}
#[derive(Debug)]
struct SkipLast<Iter: Iterator<Item = Item>, Item> {
iter: iter::Fuse<Iter>,
buffer: VecDeque<Item>,
count: usize,
}
impl<Iter: Iterator<Item = Item>, Item> Iterator for SkipLast<Iter, Item> {
type Item = Item;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return self.iter.next();
}
while self.buffer.len() != self.count {
self.buffer.push_front(self.iter.next()?);
}
let next = self.iter.next()?;
let res = self.buffer.pop_back()?;
self.buffer.push_front(next);
Some(res)
}
}
#[cfg(test)]
mod skip_last_test {
use crate::apply::skip_last;
#[test]
fn skip_last_test() {
let a = [1, 2, 3, 4, 5, 6, 7];
assert_eq!(
skip_last(a.iter().copied(), 0)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2, 3, 4, 5, 6, 7]
);
assert_eq!(
skip_last(a.iter().copied(), 5)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2]
);
assert_eq!(
skip_last(a.iter().copied(), 7)
.collect::<Vec<_>>()
.as_slice(),
&[]
);
}
}
| {
return false;
} | conditional_block |
apply.rs | use crate::{
patch::{Hunk, Line, Patch},
utils::LineIter,
};
use std::collections::VecDeque;
use std::{fmt, iter};
/// An error returned when [`apply`]ing a `Patch` fails
///
/// [`apply`]: fn.apply.html
#[derive(Debug)]
pub struct ApplyError(usize);
impl fmt::Display for ApplyError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "error applying hunk #{}", self.0)
}
}
impl std::error::Error for ApplyError {}
#[derive(Debug)]
enum ImageLine<'a, T: ?Sized> {
Unpatched(&'a T),
Patched(&'a T),
}
impl<'a, T: ?Sized> ImageLine<'a, T> {
fn inner(&self) -> &'a T {
match self {
ImageLine::Unpatched(inner) | ImageLine::Patched(inner) => inner,
}
}
fn into_inner(self) -> &'a T {
self.inner()
}
fn is_patched(&self) -> bool {
match self {
ImageLine::Unpatched(_) => false,
ImageLine::Patched(_) => true,
}
}
}
impl<T: ?Sized> Copy for ImageLine<'_, T> {}
impl<T: ?Sized> Clone for ImageLine<'_, T> {
fn clone(&self) -> Self {
*self
}
}
#[derive(Debug)]
pub struct ApplyOptions {
max_fuzzy: usize,
}
impl Default for ApplyOptions {
fn default() -> Self {
ApplyOptions::new()
}
}
impl ApplyOptions {
pub fn new() -> Self {
ApplyOptions { max_fuzzy: 0 }
}
pub fn with_max_fuzzy(mut self, max_fuzzy: usize) -> Self {
self.max_fuzzy = max_fuzzy;
self
}
}
/// Apply a `Patch` to a base image
///
/// ```
/// use diffy::{apply, Patch};
///
/// let s = "\
/// --- a/ideals
/// +++ b/ideals
/// @@ -1,4 +1,6 @@
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// +Second:
/// + I will protect those who cannot protect themselves.
/// ";
///
/// let patch = Patch::from_str(s).unwrap();
///
/// let base_image = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// ";
///
/// let expected = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// Second:
/// I will protect those who cannot protect themselves.
/// ";
///
/// assert_eq!(apply(base_image, &patch).unwrap(), expected);
/// ```
pub fn apply(base_image: &str, patch: &Patch<'_, str>) -> Result<String, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image.into_iter().map(ImageLine::into_inner).collect())
}
/// Apply a non-utf8 `Patch` to a base image
pub fn apply_bytes(base_image: &[u8], patch: &Patch<'_, [u8]>) -> Result<Vec<u8>, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect())
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all_bytes(
base_image: &[u8],
patch: &Patch<'_, [u8]>,
options: ApplyOptions,
) -> (Vec<u8>, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect(),
failed_indices,
)
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all(
base_image: &str,
patch: &Patch<'_, str>,
options: ApplyOptions,
) -> (String, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image.into_iter().map(ImageLine::into_inner).collect(),
failed_indices,
)
}
fn apply_hunk<'a, T: PartialEq + ?Sized>(
image: &mut Vec<ImageLine<'a, T>>,
hunk: &Hunk<'a, T>,
options: &ApplyOptions,
) -> Result<(), ()> {
// Find position
let max_fuzzy = pre_context_line_count(hunk.lines())
.min(post_context_line_count(hunk.lines()))
.min(options.max_fuzzy);
let (pos, fuzzy) = find_position(image, hunk, max_fuzzy).ok_or(())?;
let begin = pos + fuzzy;
let end = pos
+ pre_image_line_count(hunk.lines())
.checked_sub(fuzzy)
.unwrap_or(0);
// update image
image.splice(
begin..end,
skip_last(post_image(hunk.lines()).skip(fuzzy), fuzzy).map(ImageLine::Patched),
);
Ok(())
}
// Search in `image` for a palce to apply hunk.
// This follows the general algorithm (minus fuzzy-matching context lines) described in GNU patch's
// man page.
//
// It might be worth looking into other possible positions to apply the hunk to as described here:
// https://neil.fraser.name/writing/patch/
fn find_position<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
hunk: &Hunk<'_, T>,
max_fuzzy: usize,
) -> Option<(usize, usize)> {
let pos = hunk.new_range().start().saturating_sub(1);
for fuzzy in 0..=max_fuzzy {
// Create an iterator that starts with 'pos' and then interleaves
// moving pos backward/foward by one.
let backward = (0..pos).rev();
let forward = pos + 1..image.len();
for pos in iter::once(pos).chain(interleave(backward, forward)) {
if match_fragment(image, hunk.lines(), pos, fuzzy) {
return Some((pos, fuzzy));
}
}
}
None
}
fn pre_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn post_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.rev()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn pre_image_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
pre_image(lines).count()
}
fn post_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Insert(l) => Some(*l),
Line::Delete(_) => None,
})
}
fn pre_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Delete(l) => Some(*l),
Line::Insert(_) => None,
})
}
fn match_fragment<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
lines: &[Line<'_, T>],
pos: usize,
fuzzy: usize,
) -> bool {
let len = pre_image_line_count(lines);
let begin = pos + fuzzy;
let end = pos + len.checked_sub(fuzzy).unwrap_or(0);
let image = if let Some(image) = image.get(begin..end) {
image
} else {
return false;
};
// If any of these lines have already been patched then we can't match at this position
if image.iter().any(ImageLine::is_patched) {
return false;
}
pre_image(lines).eq(image.iter().map(ImageLine::inner))
}
#[derive(Debug)]
struct Interleave<I, J> {
a: iter::Fuse<I>,
b: iter::Fuse<J>,
flag: bool,
}
fn interleave<I, J>(
i: I,
j: J,
) -> Interleave<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
where
I: IntoIterator,
J: IntoIterator<Item = I::Item>,
{
Interleave {
a: i.into_iter().fuse(),
b: j.into_iter().fuse(),
flag: false,
}
}
impl<I, J> Iterator for Interleave<I, J>
where
I: Iterator,
J: Iterator<Item = I::Item>,
{
type Item = I::Item;
fn | (&mut self) -> Option<I::Item> {
self.flag = !self.flag;
if self.flag {
match self.a.next() {
None => self.b.next(),
item => item,
}
} else {
match self.b.next() {
None => self.a.next(),
item => item,
}
}
}
}
fn skip_last<I: Iterator>(iter: I, count: usize) -> SkipLast<I, I::Item> {
SkipLast {
iter: iter.fuse(),
buffer: VecDeque::with_capacity(count),
count,
}
}
#[derive(Debug)]
struct SkipLast<Iter: Iterator<Item = Item>, Item> {
iter: iter::Fuse<Iter>,
buffer: VecDeque<Item>,
count: usize,
}
impl<Iter: Iterator<Item = Item>, Item> Iterator for SkipLast<Iter, Item> {
type Item = Item;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return self.iter.next();
}
while self.buffer.len() != self.count {
self.buffer.push_front(self.iter.next()?);
}
let next = self.iter.next()?;
let res = self.buffer.pop_back()?;
self.buffer.push_front(next);
Some(res)
}
}
#[cfg(test)]
mod skip_last_test {
use crate::apply::skip_last;
#[test]
fn skip_last_test() {
let a = [1, 2, 3, 4, 5, 6, 7];
assert_eq!(
skip_last(a.iter().copied(), 0)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2, 3, 4, 5, 6, 7]
);
assert_eq!(
skip_last(a.iter().copied(), 5)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2]
);
assert_eq!(
skip_last(a.iter().copied(), 7)
.collect::<Vec<_>>()
.as_slice(),
&[]
);
}
}
| next | identifier_name |
apply.rs | use crate::{
patch::{Hunk, Line, Patch},
utils::LineIter,
};
use std::collections::VecDeque;
use std::{fmt, iter};
/// An error returned when [`apply`]ing a `Patch` fails
///
/// [`apply`]: fn.apply.html
#[derive(Debug)]
pub struct ApplyError(usize);
impl fmt::Display for ApplyError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "error applying hunk #{}", self.0)
}
}
impl std::error::Error for ApplyError {}
#[derive(Debug)]
enum ImageLine<'a, T: ?Sized> {
Unpatched(&'a T),
Patched(&'a T),
}
impl<'a, T: ?Sized> ImageLine<'a, T> {
fn inner(&self) -> &'a T |
fn into_inner(self) -> &'a T {
self.inner()
}
fn is_patched(&self) -> bool {
match self {
ImageLine::Unpatched(_) => false,
ImageLine::Patched(_) => true,
}
}
}
impl<T: ?Sized> Copy for ImageLine<'_, T> {}
impl<T: ?Sized> Clone for ImageLine<'_, T> {
fn clone(&self) -> Self {
*self
}
}
#[derive(Debug)]
pub struct ApplyOptions {
max_fuzzy: usize,
}
impl Default for ApplyOptions {
fn default() -> Self {
ApplyOptions::new()
}
}
impl ApplyOptions {
pub fn new() -> Self {
ApplyOptions { max_fuzzy: 0 }
}
pub fn with_max_fuzzy(mut self, max_fuzzy: usize) -> Self {
self.max_fuzzy = max_fuzzy;
self
}
}
/// Apply a `Patch` to a base image
///
/// ```
/// use diffy::{apply, Patch};
///
/// let s = "\
/// --- a/ideals
/// +++ b/ideals
/// @@ -1,4 +1,6 @@
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// +Second:
/// + I will protect those who cannot protect themselves.
/// ";
///
/// let patch = Patch::from_str(s).unwrap();
///
/// let base_image = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// ";
///
/// let expected = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// Second:
/// I will protect those who cannot protect themselves.
/// ";
///
/// assert_eq!(apply(base_image, &patch).unwrap(), expected);
/// ```
pub fn apply(base_image: &str, patch: &Patch<'_, str>) -> Result<String, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image.into_iter().map(ImageLine::into_inner).collect())
}
/// Apply a non-utf8 `Patch` to a base image
pub fn apply_bytes(base_image: &[u8], patch: &Patch<'_, [u8]>) -> Result<Vec<u8>, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect())
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all_bytes(
base_image: &[u8],
patch: &Patch<'_, [u8]>,
options: ApplyOptions,
) -> (Vec<u8>, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect(),
failed_indices,
)
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all(
base_image: &str,
patch: &Patch<'_, str>,
options: ApplyOptions,
) -> (String, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image.into_iter().map(ImageLine::into_inner).collect(),
failed_indices,
)
}
fn apply_hunk<'a, T: PartialEq + ?Sized>(
image: &mut Vec<ImageLine<'a, T>>,
hunk: &Hunk<'a, T>,
options: &ApplyOptions,
) -> Result<(), ()> {
// Find position
let max_fuzzy = pre_context_line_count(hunk.lines())
.min(post_context_line_count(hunk.lines()))
.min(options.max_fuzzy);
let (pos, fuzzy) = find_position(image, hunk, max_fuzzy).ok_or(())?;
let begin = pos + fuzzy;
let end = pos
+ pre_image_line_count(hunk.lines())
.checked_sub(fuzzy)
.unwrap_or(0);
// update image
image.splice(
begin..end,
skip_last(post_image(hunk.lines()).skip(fuzzy), fuzzy).map(ImageLine::Patched),
);
Ok(())
}
// Search in `image` for a palce to apply hunk.
// This follows the general algorithm (minus fuzzy-matching context lines) described in GNU patch's
// man page.
//
// It might be worth looking into other possible positions to apply the hunk to as described here:
// https://neil.fraser.name/writing/patch/
fn find_position<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
hunk: &Hunk<'_, T>,
max_fuzzy: usize,
) -> Option<(usize, usize)> {
let pos = hunk.new_range().start().saturating_sub(1);
for fuzzy in 0..=max_fuzzy {
// Create an iterator that starts with 'pos' and then interleaves
// moving pos backward/foward by one.
let backward = (0..pos).rev();
let forward = pos + 1..image.len();
for pos in iter::once(pos).chain(interleave(backward, forward)) {
if match_fragment(image, hunk.lines(), pos, fuzzy) {
return Some((pos, fuzzy));
}
}
}
None
}
fn pre_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn post_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.rev()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn pre_image_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
pre_image(lines).count()
}
fn post_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Insert(l) => Some(*l),
Line::Delete(_) => None,
})
}
fn pre_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Delete(l) => Some(*l),
Line::Insert(_) => None,
})
}
fn match_fragment<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
lines: &[Line<'_, T>],
pos: usize,
fuzzy: usize,
) -> bool {
let len = pre_image_line_count(lines);
let begin = pos + fuzzy;
let end = pos + len.checked_sub(fuzzy).unwrap_or(0);
let image = if let Some(image) = image.get(begin..end) {
image
} else {
return false;
};
// If any of these lines have already been patched then we can't match at this position
if image.iter().any(ImageLine::is_patched) {
return false;
}
pre_image(lines).eq(image.iter().map(ImageLine::inner))
}
#[derive(Debug)]
struct Interleave<I, J> {
a: iter::Fuse<I>,
b: iter::Fuse<J>,
flag: bool,
}
fn interleave<I, J>(
i: I,
j: J,
) -> Interleave<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
where
I: IntoIterator,
J: IntoIterator<Item = I::Item>,
{
Interleave {
a: i.into_iter().fuse(),
b: j.into_iter().fuse(),
flag: false,
}
}
impl<I, J> Iterator for Interleave<I, J>
where
I: Iterator,
J: Iterator<Item = I::Item>,
{
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
self.flag = !self.flag;
if self.flag {
match self.a.next() {
None => self.b.next(),
item => item,
}
} else {
match self.b.next() {
None => self.a.next(),
item => item,
}
}
}
}
fn skip_last<I: Iterator>(iter: I, count: usize) -> SkipLast<I, I::Item> {
SkipLast {
iter: iter.fuse(),
buffer: VecDeque::with_capacity(count),
count,
}
}
#[derive(Debug)]
struct SkipLast<Iter: Iterator<Item = Item>, Item> {
iter: iter::Fuse<Iter>,
buffer: VecDeque<Item>,
count: usize,
}
impl<Iter: Iterator<Item = Item>, Item> Iterator for SkipLast<Iter, Item> {
type Item = Item;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return self.iter.next();
}
while self.buffer.len() != self.count {
self.buffer.push_front(self.iter.next()?);
}
let next = self.iter.next()?;
let res = self.buffer.pop_back()?;
self.buffer.push_front(next);
Some(res)
}
}
#[cfg(test)]
mod skip_last_test {
use crate::apply::skip_last;
#[test]
fn skip_last_test() {
let a = [1, 2, 3, 4, 5, 6, 7];
assert_eq!(
skip_last(a.iter().copied(), 0)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2, 3, 4, 5, 6, 7]
);
assert_eq!(
skip_last(a.iter().copied(), 5)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2]
);
assert_eq!(
skip_last(a.iter().copied(), 7)
.collect::<Vec<_>>()
.as_slice(),
&[]
);
}
}
| {
match self {
ImageLine::Unpatched(inner) | ImageLine::Patched(inner) => inner,
}
} | identifier_body |
apply.rs | use crate::{
patch::{Hunk, Line, Patch},
utils::LineIter,
};
use std::collections::VecDeque;
use std::{fmt, iter};
/// An error returned when [`apply`]ing a `Patch` fails
///
/// [`apply`]: fn.apply.html
#[derive(Debug)]
pub struct ApplyError(usize);
impl fmt::Display for ApplyError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "error applying hunk #{}", self.0)
}
}
impl std::error::Error for ApplyError {}
#[derive(Debug)]
enum ImageLine<'a, T: ?Sized> {
Unpatched(&'a T),
Patched(&'a T),
}
impl<'a, T: ?Sized> ImageLine<'a, T> {
fn inner(&self) -> &'a T {
match self {
ImageLine::Unpatched(inner) | ImageLine::Patched(inner) => inner,
}
}
fn into_inner(self) -> &'a T {
self.inner()
}
fn is_patched(&self) -> bool {
match self {
ImageLine::Unpatched(_) => false,
ImageLine::Patched(_) => true,
}
}
}
impl<T: ?Sized> Copy for ImageLine<'_, T> {}
impl<T: ?Sized> Clone for ImageLine<'_, T> {
fn clone(&self) -> Self {
*self
}
}
#[derive(Debug)]
pub struct ApplyOptions {
max_fuzzy: usize,
}
impl Default for ApplyOptions {
fn default() -> Self {
ApplyOptions::new()
}
}
impl ApplyOptions {
pub fn new() -> Self {
ApplyOptions { max_fuzzy: 0 }
}
pub fn with_max_fuzzy(mut self, max_fuzzy: usize) -> Self {
self.max_fuzzy = max_fuzzy;
self
}
}
/// Apply a `Patch` to a base image
///
/// ```
/// use diffy::{apply, Patch};
///
/// let s = "\
/// --- a/ideals
/// +++ b/ideals
/// @@ -1,4 +1,6 @@
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// +Second:
/// + I will protect those who cannot protect themselves.
/// ";
///
/// let patch = Patch::from_str(s).unwrap();
///
/// let base_image = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// ";
///
/// let expected = "\
/// First:
/// Life before death,
/// strength before weakness,
/// journey before destination.
/// Second:
/// I will protect those who cannot protect themselves.
/// ";
///
/// assert_eq!(apply(base_image, &patch).unwrap(), expected);
/// ```
pub fn apply(base_image: &str, patch: &Patch<'_, str>) -> Result<String, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image.into_iter().map(ImageLine::into_inner).collect()) | }
/// Apply a non-utf8 `Patch` to a base image
pub fn apply_bytes(base_image: &[u8], patch: &Patch<'_, [u8]>) -> Result<Vec<u8>, ApplyError> {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
for (i, hunk) in patch.hunks().iter().enumerate() {
apply_hunk(&mut image, hunk, &ApplyOptions::new()).map_err(|_| ApplyError(i + 1))?;
}
Ok(image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect())
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all_bytes(
base_image: &[u8],
patch: &Patch<'_, [u8]>,
options: ApplyOptions,
) -> (Vec<u8>, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image
.into_iter()
.flat_map(ImageLine::into_inner)
.copied()
.collect(),
failed_indices,
)
}
/// Try applying all hunks a `Patch` to a base image
pub fn apply_all(
base_image: &str,
patch: &Patch<'_, str>,
options: ApplyOptions,
) -> (String, Vec<usize>) {
let mut image: Vec<_> = LineIter::new(base_image)
.map(ImageLine::Unpatched)
.collect();
let mut failed_indices = Vec::new();
for (i, hunk) in patch.hunks().iter().enumerate() {
if let Some(_) = apply_hunk(&mut image, hunk, &options).err() {
failed_indices.push(i);
}
}
(
image.into_iter().map(ImageLine::into_inner).collect(),
failed_indices,
)
}
fn apply_hunk<'a, T: PartialEq + ?Sized>(
image: &mut Vec<ImageLine<'a, T>>,
hunk: &Hunk<'a, T>,
options: &ApplyOptions,
) -> Result<(), ()> {
// Find position
let max_fuzzy = pre_context_line_count(hunk.lines())
.min(post_context_line_count(hunk.lines()))
.min(options.max_fuzzy);
let (pos, fuzzy) = find_position(image, hunk, max_fuzzy).ok_or(())?;
let begin = pos + fuzzy;
let end = pos
+ pre_image_line_count(hunk.lines())
.checked_sub(fuzzy)
.unwrap_or(0);
// update image
image.splice(
begin..end,
skip_last(post_image(hunk.lines()).skip(fuzzy), fuzzy).map(ImageLine::Patched),
);
Ok(())
}
// Search in `image` for a palce to apply hunk.
// This follows the general algorithm (minus fuzzy-matching context lines) described in GNU patch's
// man page.
//
// It might be worth looking into other possible positions to apply the hunk to as described here:
// https://neil.fraser.name/writing/patch/
fn find_position<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
hunk: &Hunk<'_, T>,
max_fuzzy: usize,
) -> Option<(usize, usize)> {
let pos = hunk.new_range().start().saturating_sub(1);
for fuzzy in 0..=max_fuzzy {
// Create an iterator that starts with 'pos' and then interleaves
// moving pos backward/foward by one.
let backward = (0..pos).rev();
let forward = pos + 1..image.len();
for pos in iter::once(pos).chain(interleave(backward, forward)) {
if match_fragment(image, hunk.lines(), pos, fuzzy) {
return Some((pos, fuzzy));
}
}
}
None
}
fn pre_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn post_context_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
lines
.iter()
.rev()
.take_while(|x| matches!(x, Line::Context(_)))
.count()
}
fn pre_image_line_count<T: ?Sized>(lines: &[Line<'_, T>]) -> usize {
pre_image(lines).count()
}
fn post_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Insert(l) => Some(*l),
Line::Delete(_) => None,
})
}
fn pre_image<'a, 'b, T: ?Sized>(lines: &'b [Line<'a, T>]) -> impl Iterator<Item = &'a T> + 'b {
lines.iter().filter_map(|line| match line {
Line::Context(l) | Line::Delete(l) => Some(*l),
Line::Insert(_) => None,
})
}
fn match_fragment<T: PartialEq + ?Sized>(
image: &[ImageLine<T>],
lines: &[Line<'_, T>],
pos: usize,
fuzzy: usize,
) -> bool {
let len = pre_image_line_count(lines);
let begin = pos + fuzzy;
let end = pos + len.checked_sub(fuzzy).unwrap_or(0);
let image = if let Some(image) = image.get(begin..end) {
image
} else {
return false;
};
// If any of these lines have already been patched then we can't match at this position
if image.iter().any(ImageLine::is_patched) {
return false;
}
pre_image(lines).eq(image.iter().map(ImageLine::inner))
}
#[derive(Debug)]
struct Interleave<I, J> {
a: iter::Fuse<I>,
b: iter::Fuse<J>,
flag: bool,
}
fn interleave<I, J>(
i: I,
j: J,
) -> Interleave<<I as IntoIterator>::IntoIter, <J as IntoIterator>::IntoIter>
where
I: IntoIterator,
J: IntoIterator<Item = I::Item>,
{
Interleave {
a: i.into_iter().fuse(),
b: j.into_iter().fuse(),
flag: false,
}
}
impl<I, J> Iterator for Interleave<I, J>
where
I: Iterator,
J: Iterator<Item = I::Item>,
{
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
self.flag = !self.flag;
if self.flag {
match self.a.next() {
None => self.b.next(),
item => item,
}
} else {
match self.b.next() {
None => self.a.next(),
item => item,
}
}
}
}
fn skip_last<I: Iterator>(iter: I, count: usize) -> SkipLast<I, I::Item> {
SkipLast {
iter: iter.fuse(),
buffer: VecDeque::with_capacity(count),
count,
}
}
#[derive(Debug)]
struct SkipLast<Iter: Iterator<Item = Item>, Item> {
iter: iter::Fuse<Iter>,
buffer: VecDeque<Item>,
count: usize,
}
impl<Iter: Iterator<Item = Item>, Item> Iterator for SkipLast<Iter, Item> {
type Item = Item;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return self.iter.next();
}
while self.buffer.len() != self.count {
self.buffer.push_front(self.iter.next()?);
}
let next = self.iter.next()?;
let res = self.buffer.pop_back()?;
self.buffer.push_front(next);
Some(res)
}
}
#[cfg(test)]
mod skip_last_test {
use crate::apply::skip_last;
#[test]
fn skip_last_test() {
let a = [1, 2, 3, 4, 5, 6, 7];
assert_eq!(
skip_last(a.iter().copied(), 0)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2, 3, 4, 5, 6, 7]
);
assert_eq!(
skip_last(a.iter().copied(), 5)
.collect::<Vec<_>>()
.as_slice(),
&[1, 2]
);
assert_eq!(
skip_last(a.iter().copied(), 7)
.collect::<Vec<_>>()
.as_slice(),
&[]
);
}
} | random_line_split | |
views.py | # -*- coding: utf-8 -*-
# Django
from django.shortcuts import render
from django.contrib.auth.decorators import login_required, user_passes_test
from django.db.models import Count
from django.forms.models import model_to_dict
from django_filters import rest_framework as filters
# REST
from rest_framework import status
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.decorators import api_view, renderer_classes
from rest_framework.renderers import JSONRenderer
# Tarteel
from evaluation.models import TajweedEvaluation, Evaluation
from evaluation.serializers import TajweedEvaluationSerializer, EvaluationSerializer
from restapi.models import AnnotatedRecording
from quran.models import Ayah, AyahWord, Translation
# Python
import io
import json
import os
import random
# =============================================== #
# Constant Global Definitions #
# =============================================== #
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# ===================================== #
# Utility Functions #
# ===================================== #
# TODO: Update to use Quran DB
def get_tajweed_rule(surah_num=0, ayah_num=0, random_rule=False):
"""If random_rule is true then we get a random tajweed rule. Otherwise returns a
specific rule. Both options return the text and word index.
:return: A tuple with the surah & ayah number, text, rule, and word position
:rtype: tuple(int, int, str, str, int) or tuple(str, str, int)
"""
TAJWEED_FILE = os.path.join(BASE_DIR, 'utils/data-rules.json')
with io.open(TAJWEED_FILE) as file:
tajweed_rules = json.load(file)
tajweed_rules = tajweed_rules['quran']
file.close()
UTHMANI_FILE = os.path.join(BASE_DIR, 'utils/data-uthmani.json')
with io.open(UTHMANI_FILE, 'r', encoding="utf-8-sig") as file:
uthmani_q = json.load(file)
uthmani_q = uthmani_q['quran']
file.close()
if random_rule:
random_surah = random.choice(tajweed_rules['surahs'])
surah_num = random_surah['num']
random_ayah = random.choice(random_surah['ayahs'])
ayah_num = random_ayah['num']
rule_dict = random.choice(random_ayah['rules'])
else:
rule_dict = tajweed_rules['surah'][surah_num - 1]['ayahs'][ayah_num - 1]
rule = rule_dict['rule']
rule_start = rule_dict['start']
rule_end = rule_dict['end']
# 1-indexed
ayah_text = uthmani_q['surahs'][surah_num - 1]['ayahs'][ayah_num - 1]['text']
ayah_text_list = ayah_text.split(" ")
# Get the index of the word we're looking for
position = 0
curr_word_ind = 0
for i, word in enumerate(ayah_text_list):
position += len(word)
if position >= rule_start:
curr_word_ind = i
break
if random_rule:
return surah_num, ayah_num, ayah_text, rule, curr_word_ind
return ayah_text, rule, curr_word_ind
def is_evaluator(user):
if user:
return user.groups.filter(name='evaluator').exists()
return False
# TODO: Deprecated
def get_low_evaluation_count():
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: AnnotatedRecording
"""
recording_evals = AnnotatedRecording.objects.annotate(total=Count('evaluation'))
recording_evals_dict = {entry : entry.total for entry in recording_evals}
min_evals = min(recording_evals_dict.values())
min_evals_recordings = [k for k, v in recording_evals_dict.items() if v==min_evals]
return random.choice(min_evals_recordings)
def get_no_evaluation_recording(surah_num=None, ayah_num=None):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
along with its words, url and recording ID.
:rtype: dict
"""
# Get recordings with a file.
if surah_num is not None and ayah_num is not None:
recording_evals = AnnotatedRecording.objects.filter(
surah_num=surah_num, ayah_num=ayah_num, file__gt='',
file__isnull=False).annotate(total=Count('evaluation'))
# If no recordings, move on to random one
try:
random_recording = random.choice(recording_evals)
except IndexError:
surah_num = None
ayah_num = None
if surah_num is None and ayah_num is None:
recording_evals = AnnotatedRecording.objects.filter(
file__gt='', file__isnull=False).annotate(total=Count('evaluation'))
try:
random_recording = random.choice(recording_evals)
except IndexError:
error_str = "No more unevaluated recordings!"
print(error_str)
return {'detail': error_str}
surah_num = random_recording.surah_num
ayah_num = random_recording.ayah_num
audio_url = random_recording.file.url
recording_id = random_recording.id
# Prep response
ayah = Ayah.objects.get(chapter_id__number=surah_num, verse_number=ayah_num)
ayah = model_to_dict(ayah)
# Get all the words
words = AyahWord.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
translations = Translation.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
# Convert to list of dicts, note that order is usually flipped.
ayah['words'] = list(reversed(words.values()))
ayah['translations'] = list(translations.values())
ayah["audio_url"] = audio_url
ayah["recording_id"] = recording_id
return ayah
# ============================= #
# API Views #
# ============================= #
class EvaluationFilter(filters.FilterSet):
|
class EvaluationViewSet(viewsets.ModelViewSet):
"""API to handle query parameters
Example: v1/evaluations/?surah=114&ayah=1&evaluation=correct
"""
serializer_class = EvaluationSerializer
queryset = Evaluation.objects.all()
filter_backends = (filters.DjangoFilterBackend,)
filter_class = EvaluationFilter
@action(detail=False, methods=['get'])
def low_count(self, request):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
ayah = get_no_evaluation_recording()
return Response(ayah)
@low_count.mapping.post
def low_count_specific(self, request):
"""Get a recording of a specific surah and ayah with no evaluation.
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
surah_num = int(request.data['surah'])
ayah_num = int(request.data['ayah'])
ayah = get_no_evaluation_recording(surah_num=surah_num, ayah_num=ayah_num)
return Response(ayah)
class TajweedEvaluationList(APIView):
"""API Endpoint that allows tajweed evaluations to be posted or
retrieved """
def get(self, request, format=None):
evaluations = TajweedEvaluation.objects.all().order_by('-timestamp')
tajweed_serializer = TajweedEvaluationSerializer(evaluations, many=True)
return Response(tajweed_serializer.data)
def post(self, request, *args, **kwargs):
print("EVALUATOR: Received a tajweed evaluation:\n{}".format(request.data))
new_evaluation = TajweedEvaluationSerializer(data=request.data)
if new_evaluation.is_valid(raise_exception=True):
new_evaluation.save()
return Response(new_evaluation.data, status=status.HTTP_201_CREATED)
return Response(new_evaluation.errors, status=status.HTTP_400_BAD_REQUEST)
# ===================================== #
# Static Page Views #
# ===================================== #
@api_view(('GET',))
@renderer_classes((JSONRenderer,))
def get_evaluations_count(request, format=None):
evaluations = Evaluation.objects.all().count()
res = {
"count": evaluations
}
return Response(res)
@login_required
@user_passes_test(is_evaluator, login_url='/')
def tajweed_evaluator(request):
"""Returns a random ayah for an expert to evaluate for any mistakes.
:param request: rest API request object.
:type request: Request
:return: Rendered view of evaluator page with form, ayah info, and URL.
:rtype: HttpResponse
"""
# User tracking - Ensure there is always a session key.
if not request.session.session_key:
request.session.create()
session_key = request.session.session_key
# Get a random tajweed rule and make sure we have something to display
recordings = None
while not recordings:
surah_num, ayah_num, ayah_text, rule, word_index = get_tajweed_rule(random_rule=True)
recordings = AnnotatedRecording.objects.filter(file__gt='', file__isnull=False,
surah_num=surah_num,
ayah_num=ayah_num)
random_recording = random.choice(recordings)
# Make sure we avoid negative count
prev_word_ind = word_index - 1 if word_index > 0 else None
# Make sure we avoid overflow
ayah_text_list = ayah_text.split(" ")
next_word_ind = word_index + 1 if word_index + 1 < len(ayah_text_list) else None
# Fields
audio_url = random_recording.file.url
recording_id = random_recording.id
# Get text rep of rule
category_dict = dict(TajweedEvaluation.CATEGORY_CHOICES)
rule_text = category_dict[rule]
return render(request, 'evaluation/tajweed_evaluator.html',
{'session_key': session_key,
'rule_text': rule_text,
'rule_id': rule,
'surah_num': surah_num,
'ayah_num': ayah_num,
'ayah_text': ayah_text_list,
'word_index': word_index,
'prev_word_index': prev_word_ind,
'next_word_index': next_word_ind,
'audio_url': audio_url,
'recording_id': recording_id})
| """Custom filter based on surah, ayah, evaluation type or recording."""
EVAL_CHOICES = (
('correct', 'Correct'),
('incorrect', 'Incorrect')
)
surah = filters.NumberFilter(field_name='associated_recording__surah_num')
ayah = filters.NumberFilter(field_name='associated_recording__ayah_num')
evaluation = filters.ChoiceFilter(choices=EVAL_CHOICES)
associated_recording = filters.ModelChoiceFilter(
queryset=AnnotatedRecording.objects.all())
class Meta:
model = Evaluation
fields = ['surah', 'ayah', 'evaluation', 'associated_recording'] | identifier_body |
views.py | # -*- coding: utf-8 -*-
# Django
from django.shortcuts import render
from django.contrib.auth.decorators import login_required, user_passes_test
from django.db.models import Count
from django.forms.models import model_to_dict
from django_filters import rest_framework as filters
# REST
from rest_framework import status
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.decorators import api_view, renderer_classes
from rest_framework.renderers import JSONRenderer
# Tarteel
from evaluation.models import TajweedEvaluation, Evaluation
from evaluation.serializers import TajweedEvaluationSerializer, EvaluationSerializer
from restapi.models import AnnotatedRecording
from quran.models import Ayah, AyahWord, Translation
# Python
import io
import json
import os
import random
# =============================================== #
# Constant Global Definitions #
# =============================================== #
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# ===================================== #
# Utility Functions #
# ===================================== #
# TODO: Update to use Quran DB
def | (surah_num=0, ayah_num=0, random_rule=False):
"""If random_rule is true then we get a random tajweed rule. Otherwise returns a
specific rule. Both options return the text and word index.
:return: A tuple with the surah & ayah number, text, rule, and word position
:rtype: tuple(int, int, str, str, int) or tuple(str, str, int)
"""
TAJWEED_FILE = os.path.join(BASE_DIR, 'utils/data-rules.json')
with io.open(TAJWEED_FILE) as file:
tajweed_rules = json.load(file)
tajweed_rules = tajweed_rules['quran']
file.close()
UTHMANI_FILE = os.path.join(BASE_DIR, 'utils/data-uthmani.json')
with io.open(UTHMANI_FILE, 'r', encoding="utf-8-sig") as file:
uthmani_q = json.load(file)
uthmani_q = uthmani_q['quran']
file.close()
if random_rule:
random_surah = random.choice(tajweed_rules['surahs'])
surah_num = random_surah['num']
random_ayah = random.choice(random_surah['ayahs'])
ayah_num = random_ayah['num']
rule_dict = random.choice(random_ayah['rules'])
else:
rule_dict = tajweed_rules['surah'][surah_num - 1]['ayahs'][ayah_num - 1]
rule = rule_dict['rule']
rule_start = rule_dict['start']
rule_end = rule_dict['end']
# 1-indexed
ayah_text = uthmani_q['surahs'][surah_num - 1]['ayahs'][ayah_num - 1]['text']
ayah_text_list = ayah_text.split(" ")
# Get the index of the word we're looking for
position = 0
curr_word_ind = 0
for i, word in enumerate(ayah_text_list):
position += len(word)
if position >= rule_start:
curr_word_ind = i
break
if random_rule:
return surah_num, ayah_num, ayah_text, rule, curr_word_ind
return ayah_text, rule, curr_word_ind
def is_evaluator(user):
if user:
return user.groups.filter(name='evaluator').exists()
return False
# TODO: Deprecated
def get_low_evaluation_count():
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: AnnotatedRecording
"""
recording_evals = AnnotatedRecording.objects.annotate(total=Count('evaluation'))
recording_evals_dict = {entry : entry.total for entry in recording_evals}
min_evals = min(recording_evals_dict.values())
min_evals_recordings = [k for k, v in recording_evals_dict.items() if v==min_evals]
return random.choice(min_evals_recordings)
def get_no_evaluation_recording(surah_num=None, ayah_num=None):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
along with its words, url and recording ID.
:rtype: dict
"""
# Get recordings with a file.
if surah_num is not None and ayah_num is not None:
recording_evals = AnnotatedRecording.objects.filter(
surah_num=surah_num, ayah_num=ayah_num, file__gt='',
file__isnull=False).annotate(total=Count('evaluation'))
# If no recordings, move on to random one
try:
random_recording = random.choice(recording_evals)
except IndexError:
surah_num = None
ayah_num = None
if surah_num is None and ayah_num is None:
recording_evals = AnnotatedRecording.objects.filter(
file__gt='', file__isnull=False).annotate(total=Count('evaluation'))
try:
random_recording = random.choice(recording_evals)
except IndexError:
error_str = "No more unevaluated recordings!"
print(error_str)
return {'detail': error_str}
surah_num = random_recording.surah_num
ayah_num = random_recording.ayah_num
audio_url = random_recording.file.url
recording_id = random_recording.id
# Prep response
ayah = Ayah.objects.get(chapter_id__number=surah_num, verse_number=ayah_num)
ayah = model_to_dict(ayah)
# Get all the words
words = AyahWord.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
translations = Translation.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
# Convert to list of dicts, note that order is usually flipped.
ayah['words'] = list(reversed(words.values()))
ayah['translations'] = list(translations.values())
ayah["audio_url"] = audio_url
ayah["recording_id"] = recording_id
return ayah
# ============================= #
# API Views #
# ============================= #
class EvaluationFilter(filters.FilterSet):
"""Custom filter based on surah, ayah, evaluation type or recording."""
EVAL_CHOICES = (
('correct', 'Correct'),
('incorrect', 'Incorrect')
)
surah = filters.NumberFilter(field_name='associated_recording__surah_num')
ayah = filters.NumberFilter(field_name='associated_recording__ayah_num')
evaluation = filters.ChoiceFilter(choices=EVAL_CHOICES)
associated_recording = filters.ModelChoiceFilter(
queryset=AnnotatedRecording.objects.all())
class Meta:
model = Evaluation
fields = ['surah', 'ayah', 'evaluation', 'associated_recording']
class EvaluationViewSet(viewsets.ModelViewSet):
"""API to handle query parameters
Example: v1/evaluations/?surah=114&ayah=1&evaluation=correct
"""
serializer_class = EvaluationSerializer
queryset = Evaluation.objects.all()
filter_backends = (filters.DjangoFilterBackend,)
filter_class = EvaluationFilter
@action(detail=False, methods=['get'])
def low_count(self, request):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
ayah = get_no_evaluation_recording()
return Response(ayah)
@low_count.mapping.post
def low_count_specific(self, request):
"""Get a recording of a specific surah and ayah with no evaluation.
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
surah_num = int(request.data['surah'])
ayah_num = int(request.data['ayah'])
ayah = get_no_evaluation_recording(surah_num=surah_num, ayah_num=ayah_num)
return Response(ayah)
class TajweedEvaluationList(APIView):
"""API Endpoint that allows tajweed evaluations to be posted or
retrieved """
def get(self, request, format=None):
evaluations = TajweedEvaluation.objects.all().order_by('-timestamp')
tajweed_serializer = TajweedEvaluationSerializer(evaluations, many=True)
return Response(tajweed_serializer.data)
def post(self, request, *args, **kwargs):
print("EVALUATOR: Received a tajweed evaluation:\n{}".format(request.data))
new_evaluation = TajweedEvaluationSerializer(data=request.data)
if new_evaluation.is_valid(raise_exception=True):
new_evaluation.save()
return Response(new_evaluation.data, status=status.HTTP_201_CREATED)
return Response(new_evaluation.errors, status=status.HTTP_400_BAD_REQUEST)
# ===================================== #
# Static Page Views #
# ===================================== #
@api_view(('GET',))
@renderer_classes((JSONRenderer,))
def get_evaluations_count(request, format=None):
evaluations = Evaluation.objects.all().count()
res = {
"count": evaluations
}
return Response(res)
@login_required
@user_passes_test(is_evaluator, login_url='/')
def tajweed_evaluator(request):
"""Returns a random ayah for an expert to evaluate for any mistakes.
:param request: rest API request object.
:type request: Request
:return: Rendered view of evaluator page with form, ayah info, and URL.
:rtype: HttpResponse
"""
# User tracking - Ensure there is always a session key.
if not request.session.session_key:
request.session.create()
session_key = request.session.session_key
# Get a random tajweed rule and make sure we have something to display
recordings = None
while not recordings:
surah_num, ayah_num, ayah_text, rule, word_index = get_tajweed_rule(random_rule=True)
recordings = AnnotatedRecording.objects.filter(file__gt='', file__isnull=False,
surah_num=surah_num,
ayah_num=ayah_num)
random_recording = random.choice(recordings)
# Make sure we avoid negative count
prev_word_ind = word_index - 1 if word_index > 0 else None
# Make sure we avoid overflow
ayah_text_list = ayah_text.split(" ")
next_word_ind = word_index + 1 if word_index + 1 < len(ayah_text_list) else None
# Fields
audio_url = random_recording.file.url
recording_id = random_recording.id
# Get text rep of rule
category_dict = dict(TajweedEvaluation.CATEGORY_CHOICES)
rule_text = category_dict[rule]
return render(request, 'evaluation/tajweed_evaluator.html',
{'session_key': session_key,
'rule_text': rule_text,
'rule_id': rule,
'surah_num': surah_num,
'ayah_num': ayah_num,
'ayah_text': ayah_text_list,
'word_index': word_index,
'prev_word_index': prev_word_ind,
'next_word_index': next_word_ind,
'audio_url': audio_url,
'recording_id': recording_id})
| get_tajweed_rule | identifier_name |
views.py | # -*- coding: utf-8 -*-
# Django
from django.shortcuts import render
from django.contrib.auth.decorators import login_required, user_passes_test
from django.db.models import Count
from django.forms.models import model_to_dict
from django_filters import rest_framework as filters
# REST
from rest_framework import status
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.decorators import api_view, renderer_classes
from rest_framework.renderers import JSONRenderer
# Tarteel
from evaluation.models import TajweedEvaluation, Evaluation
from evaluation.serializers import TajweedEvaluationSerializer, EvaluationSerializer
from restapi.models import AnnotatedRecording
from quran.models import Ayah, AyahWord, Translation
# Python
import io
import json
import os
import random
# =============================================== #
# Constant Global Definitions #
# =============================================== #
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# ===================================== #
# Utility Functions #
# ===================================== #
# TODO: Update to use Quran DB
def get_tajweed_rule(surah_num=0, ayah_num=0, random_rule=False):
"""If random_rule is true then we get a random tajweed rule. Otherwise returns a
specific rule. Both options return the text and word index.
:return: A tuple with the surah & ayah number, text, rule, and word position
:rtype: tuple(int, int, str, str, int) or tuple(str, str, int)
"""
TAJWEED_FILE = os.path.join(BASE_DIR, 'utils/data-rules.json')
with io.open(TAJWEED_FILE) as file:
tajweed_rules = json.load(file)
tajweed_rules = tajweed_rules['quran']
file.close()
UTHMANI_FILE = os.path.join(BASE_DIR, 'utils/data-uthmani.json')
with io.open(UTHMANI_FILE, 'r', encoding="utf-8-sig") as file:
uthmani_q = json.load(file)
uthmani_q = uthmani_q['quran']
file.close()
if random_rule:
random_surah = random.choice(tajweed_rules['surahs'])
surah_num = random_surah['num']
random_ayah = random.choice(random_surah['ayahs'])
ayah_num = random_ayah['num']
rule_dict = random.choice(random_ayah['rules'])
else:
rule_dict = tajweed_rules['surah'][surah_num - 1]['ayahs'][ayah_num - 1]
rule = rule_dict['rule']
rule_start = rule_dict['start']
rule_end = rule_dict['end']
# 1-indexed
ayah_text = uthmani_q['surahs'][surah_num - 1]['ayahs'][ayah_num - 1]['text']
ayah_text_list = ayah_text.split(" ")
# Get the index of the word we're looking for
position = 0
curr_word_ind = 0
for i, word in enumerate(ayah_text_list):
position += len(word)
if position >= rule_start:
curr_word_ind = i
break
if random_rule:
return surah_num, ayah_num, ayah_text, rule, curr_word_ind
return ayah_text, rule, curr_word_ind
def is_evaluator(user):
if user:
return user.groups.filter(name='evaluator').exists()
return False
# TODO: Deprecated
def get_low_evaluation_count():
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: AnnotatedRecording
"""
recording_evals = AnnotatedRecording.objects.annotate(total=Count('evaluation'))
recording_evals_dict = {entry : entry.total for entry in recording_evals}
min_evals = min(recording_evals_dict.values())
min_evals_recordings = [k for k, v in recording_evals_dict.items() if v==min_evals]
return random.choice(min_evals_recordings)
def get_no_evaluation_recording(surah_num=None, ayah_num=None): | along with its words, url and recording ID.
:rtype: dict
"""
# Get recordings with a file.
if surah_num is not None and ayah_num is not None:
recording_evals = AnnotatedRecording.objects.filter(
surah_num=surah_num, ayah_num=ayah_num, file__gt='',
file__isnull=False).annotate(total=Count('evaluation'))
# If no recordings, move on to random one
try:
random_recording = random.choice(recording_evals)
except IndexError:
surah_num = None
ayah_num = None
if surah_num is None and ayah_num is None:
recording_evals = AnnotatedRecording.objects.filter(
file__gt='', file__isnull=False).annotate(total=Count('evaluation'))
try:
random_recording = random.choice(recording_evals)
except IndexError:
error_str = "No more unevaluated recordings!"
print(error_str)
return {'detail': error_str}
surah_num = random_recording.surah_num
ayah_num = random_recording.ayah_num
audio_url = random_recording.file.url
recording_id = random_recording.id
# Prep response
ayah = Ayah.objects.get(chapter_id__number=surah_num, verse_number=ayah_num)
ayah = model_to_dict(ayah)
# Get all the words
words = AyahWord.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
translations = Translation.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
# Convert to list of dicts, note that order is usually flipped.
ayah['words'] = list(reversed(words.values()))
ayah['translations'] = list(translations.values())
ayah["audio_url"] = audio_url
ayah["recording_id"] = recording_id
return ayah
# ============================= #
# API Views #
# ============================= #
class EvaluationFilter(filters.FilterSet):
"""Custom filter based on surah, ayah, evaluation type or recording."""
EVAL_CHOICES = (
('correct', 'Correct'),
('incorrect', 'Incorrect')
)
surah = filters.NumberFilter(field_name='associated_recording__surah_num')
ayah = filters.NumberFilter(field_name='associated_recording__ayah_num')
evaluation = filters.ChoiceFilter(choices=EVAL_CHOICES)
associated_recording = filters.ModelChoiceFilter(
queryset=AnnotatedRecording.objects.all())
class Meta:
model = Evaluation
fields = ['surah', 'ayah', 'evaluation', 'associated_recording']
class EvaluationViewSet(viewsets.ModelViewSet):
"""API to handle query parameters
Example: v1/evaluations/?surah=114&ayah=1&evaluation=correct
"""
serializer_class = EvaluationSerializer
queryset = Evaluation.objects.all()
filter_backends = (filters.DjangoFilterBackend,)
filter_class = EvaluationFilter
@action(detail=False, methods=['get'])
def low_count(self, request):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
ayah = get_no_evaluation_recording()
return Response(ayah)
@low_count.mapping.post
def low_count_specific(self, request):
"""Get a recording of a specific surah and ayah with no evaluation.
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
surah_num = int(request.data['surah'])
ayah_num = int(request.data['ayah'])
ayah = get_no_evaluation_recording(surah_num=surah_num, ayah_num=ayah_num)
return Response(ayah)
class TajweedEvaluationList(APIView):
"""API Endpoint that allows tajweed evaluations to be posted or
retrieved """
def get(self, request, format=None):
evaluations = TajweedEvaluation.objects.all().order_by('-timestamp')
tajweed_serializer = TajweedEvaluationSerializer(evaluations, many=True)
return Response(tajweed_serializer.data)
def post(self, request, *args, **kwargs):
print("EVALUATOR: Received a tajweed evaluation:\n{}".format(request.data))
new_evaluation = TajweedEvaluationSerializer(data=request.data)
if new_evaluation.is_valid(raise_exception=True):
new_evaluation.save()
return Response(new_evaluation.data, status=status.HTTP_201_CREATED)
return Response(new_evaluation.errors, status=status.HTTP_400_BAD_REQUEST)
# ===================================== #
# Static Page Views #
# ===================================== #
@api_view(('GET',))
@renderer_classes((JSONRenderer,))
def get_evaluations_count(request, format=None):
evaluations = Evaluation.objects.all().count()
res = {
"count": evaluations
}
return Response(res)
@login_required
@user_passes_test(is_evaluator, login_url='/')
def tajweed_evaluator(request):
"""Returns a random ayah for an expert to evaluate for any mistakes.
:param request: rest API request object.
:type request: Request
:return: Rendered view of evaluator page with form, ayah info, and URL.
:rtype: HttpResponse
"""
# User tracking - Ensure there is always a session key.
if not request.session.session_key:
request.session.create()
session_key = request.session.session_key
# Get a random tajweed rule and make sure we have something to display
recordings = None
while not recordings:
surah_num, ayah_num, ayah_text, rule, word_index = get_tajweed_rule(random_rule=True)
recordings = AnnotatedRecording.objects.filter(file__gt='', file__isnull=False,
surah_num=surah_num,
ayah_num=ayah_num)
random_recording = random.choice(recordings)
# Make sure we avoid negative count
prev_word_ind = word_index - 1 if word_index > 0 else None
# Make sure we avoid overflow
ayah_text_list = ayah_text.split(" ")
next_word_ind = word_index + 1 if word_index + 1 < len(ayah_text_list) else None
# Fields
audio_url = random_recording.file.url
recording_id = random_recording.id
# Get text rep of rule
category_dict = dict(TajweedEvaluation.CATEGORY_CHOICES)
rule_text = category_dict[rule]
return render(request, 'evaluation/tajweed_evaluator.html',
{'session_key': session_key,
'rule_text': rule_text,
'rule_id': rule,
'surah_num': surah_num,
'ayah_num': ayah_num,
'ayah_text': ayah_text_list,
'word_index': word_index,
'prev_word_index': prev_word_ind,
'next_word_index': next_word_ind,
'audio_url': audio_url,
'recording_id': recording_id}) | """Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations | random_line_split |
views.py | # -*- coding: utf-8 -*-
# Django
from django.shortcuts import render
from django.contrib.auth.decorators import login_required, user_passes_test
from django.db.models import Count
from django.forms.models import model_to_dict
from django_filters import rest_framework as filters
# REST
from rest_framework import status
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.decorators import api_view, renderer_classes
from rest_framework.renderers import JSONRenderer
# Tarteel
from evaluation.models import TajweedEvaluation, Evaluation
from evaluation.serializers import TajweedEvaluationSerializer, EvaluationSerializer
from restapi.models import AnnotatedRecording
from quran.models import Ayah, AyahWord, Translation
# Python
import io
import json
import os
import random
# =============================================== #
# Constant Global Definitions #
# =============================================== #
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# ===================================== #
# Utility Functions #
# ===================================== #
# TODO: Update to use Quran DB
def get_tajweed_rule(surah_num=0, ayah_num=0, random_rule=False):
"""If random_rule is true then we get a random tajweed rule. Otherwise returns a
specific rule. Both options return the text and word index.
:return: A tuple with the surah & ayah number, text, rule, and word position
:rtype: tuple(int, int, str, str, int) or tuple(str, str, int)
"""
TAJWEED_FILE = os.path.join(BASE_DIR, 'utils/data-rules.json')
with io.open(TAJWEED_FILE) as file:
tajweed_rules = json.load(file)
tajweed_rules = tajweed_rules['quran']
file.close()
UTHMANI_FILE = os.path.join(BASE_DIR, 'utils/data-uthmani.json')
with io.open(UTHMANI_FILE, 'r', encoding="utf-8-sig") as file:
uthmani_q = json.load(file)
uthmani_q = uthmani_q['quran']
file.close()
if random_rule:
random_surah = random.choice(tajweed_rules['surahs'])
surah_num = random_surah['num']
random_ayah = random.choice(random_surah['ayahs'])
ayah_num = random_ayah['num']
rule_dict = random.choice(random_ayah['rules'])
else:
rule_dict = tajweed_rules['surah'][surah_num - 1]['ayahs'][ayah_num - 1]
rule = rule_dict['rule']
rule_start = rule_dict['start']
rule_end = rule_dict['end']
# 1-indexed
ayah_text = uthmani_q['surahs'][surah_num - 1]['ayahs'][ayah_num - 1]['text']
ayah_text_list = ayah_text.split(" ")
# Get the index of the word we're looking for
position = 0
curr_word_ind = 0
for i, word in enumerate(ayah_text_list):
position += len(word)
if position >= rule_start:
curr_word_ind = i
break
if random_rule:
return surah_num, ayah_num, ayah_text, rule, curr_word_ind
return ayah_text, rule, curr_word_ind
def is_evaluator(user):
if user:
return user.groups.filter(name='evaluator').exists()
return False
# TODO: Deprecated
def get_low_evaluation_count():
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: AnnotatedRecording
"""
recording_evals = AnnotatedRecording.objects.annotate(total=Count('evaluation'))
recording_evals_dict = {entry : entry.total for entry in recording_evals}
min_evals = min(recording_evals_dict.values())
min_evals_recordings = [k for k, v in recording_evals_dict.items() if v==min_evals]
return random.choice(min_evals_recordings)
def get_no_evaluation_recording(surah_num=None, ayah_num=None):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
along with its words, url and recording ID.
:rtype: dict
"""
# Get recordings with a file.
if surah_num is not None and ayah_num is not None:
recording_evals = AnnotatedRecording.objects.filter(
surah_num=surah_num, ayah_num=ayah_num, file__gt='',
file__isnull=False).annotate(total=Count('evaluation'))
# If no recordings, move on to random one
try:
random_recording = random.choice(recording_evals)
except IndexError:
surah_num = None
ayah_num = None
if surah_num is None and ayah_num is None:
recording_evals = AnnotatedRecording.objects.filter(
file__gt='', file__isnull=False).annotate(total=Count('evaluation'))
try:
random_recording = random.choice(recording_evals)
except IndexError:
error_str = "No more unevaluated recordings!"
print(error_str)
return {'detail': error_str}
surah_num = random_recording.surah_num
ayah_num = random_recording.ayah_num
audio_url = random_recording.file.url
recording_id = random_recording.id
# Prep response
ayah = Ayah.objects.get(chapter_id__number=surah_num, verse_number=ayah_num)
ayah = model_to_dict(ayah)
# Get all the words
words = AyahWord.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
translations = Translation.objects.filter(ayah__verse_number=ayah_num,
ayah__chapter_id__number=surah_num)
# Convert to list of dicts, note that order is usually flipped.
ayah['words'] = list(reversed(words.values()))
ayah['translations'] = list(translations.values())
ayah["audio_url"] = audio_url
ayah["recording_id"] = recording_id
return ayah
# ============================= #
# API Views #
# ============================= #
class EvaluationFilter(filters.FilterSet):
"""Custom filter based on surah, ayah, evaluation type or recording."""
EVAL_CHOICES = (
('correct', 'Correct'),
('incorrect', 'Incorrect')
)
surah = filters.NumberFilter(field_name='associated_recording__surah_num')
ayah = filters.NumberFilter(field_name='associated_recording__ayah_num')
evaluation = filters.ChoiceFilter(choices=EVAL_CHOICES)
associated_recording = filters.ModelChoiceFilter(
queryset=AnnotatedRecording.objects.all())
class Meta:
model = Evaluation
fields = ['surah', 'ayah', 'evaluation', 'associated_recording']
class EvaluationViewSet(viewsets.ModelViewSet):
"""API to handle query parameters
Example: v1/evaluations/?surah=114&ayah=1&evaluation=correct
"""
serializer_class = EvaluationSerializer
queryset = Evaluation.objects.all()
filter_backends = (filters.DjangoFilterBackend,)
filter_class = EvaluationFilter
@action(detail=False, methods=['get'])
def low_count(self, request):
"""Finds a recording with the lowest number of evaluations
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
ayah = get_no_evaluation_recording()
return Response(ayah)
@low_count.mapping.post
def low_count_specific(self, request):
"""Get a recording of a specific surah and ayah with no evaluation.
:returns: A random AnnotatedRecording object which has the minimum evaluations
:rtype: Response
"""
surah_num = int(request.data['surah'])
ayah_num = int(request.data['ayah'])
ayah = get_no_evaluation_recording(surah_num=surah_num, ayah_num=ayah_num)
return Response(ayah)
class TajweedEvaluationList(APIView):
"""API Endpoint that allows tajweed evaluations to be posted or
retrieved """
def get(self, request, format=None):
evaluations = TajweedEvaluation.objects.all().order_by('-timestamp')
tajweed_serializer = TajweedEvaluationSerializer(evaluations, many=True)
return Response(tajweed_serializer.data)
def post(self, request, *args, **kwargs):
print("EVALUATOR: Received a tajweed evaluation:\n{}".format(request.data))
new_evaluation = TajweedEvaluationSerializer(data=request.data)
if new_evaluation.is_valid(raise_exception=True):
|
return Response(new_evaluation.errors, status=status.HTTP_400_BAD_REQUEST)
# ===================================== #
# Static Page Views #
# ===================================== #
@api_view(('GET',))
@renderer_classes((JSONRenderer,))
def get_evaluations_count(request, format=None):
evaluations = Evaluation.objects.all().count()
res = {
"count": evaluations
}
return Response(res)
@login_required
@user_passes_test(is_evaluator, login_url='/')
def tajweed_evaluator(request):
"""Returns a random ayah for an expert to evaluate for any mistakes.
:param request: rest API request object.
:type request: Request
:return: Rendered view of evaluator page with form, ayah info, and URL.
:rtype: HttpResponse
"""
# User tracking - Ensure there is always a session key.
if not request.session.session_key:
request.session.create()
session_key = request.session.session_key
# Get a random tajweed rule and make sure we have something to display
recordings = None
while not recordings:
surah_num, ayah_num, ayah_text, rule, word_index = get_tajweed_rule(random_rule=True)
recordings = AnnotatedRecording.objects.filter(file__gt='', file__isnull=False,
surah_num=surah_num,
ayah_num=ayah_num)
random_recording = random.choice(recordings)
# Make sure we avoid negative count
prev_word_ind = word_index - 1 if word_index > 0 else None
# Make sure we avoid overflow
ayah_text_list = ayah_text.split(" ")
next_word_ind = word_index + 1 if word_index + 1 < len(ayah_text_list) else None
# Fields
audio_url = random_recording.file.url
recording_id = random_recording.id
# Get text rep of rule
category_dict = dict(TajweedEvaluation.CATEGORY_CHOICES)
rule_text = category_dict[rule]
return render(request, 'evaluation/tajweed_evaluator.html',
{'session_key': session_key,
'rule_text': rule_text,
'rule_id': rule,
'surah_num': surah_num,
'ayah_num': ayah_num,
'ayah_text': ayah_text_list,
'word_index': word_index,
'prev_word_index': prev_word_ind,
'next_word_index': next_word_ind,
'audio_url': audio_url,
'recording_id': recording_id})
| new_evaluation.save()
return Response(new_evaluation.data, status=status.HTTP_201_CREATED) | conditional_block |
memory.rs | //! Interface with Screeps' `Memory` global variable
//!
//! Screeps' memory lives in the javascript `Memory` global variable and is
//! encoded as a javascript object. This object's reference is tracked within
//! rust as a `MemoryReference`. The [`root`] function gives access to a
//! reference to the `Memory` global object.
//!
//! # Typing
//! Contrary to accessing the memory in javascript, rust's strong type system,
//! requires that read values be assigned a type. To facilitate this, the
//! `MemoryReference` provides methods to read a part of the memory as a
//! certain type. If the value read cannot be transformed to the requested
//! type, the method return `None`.
//!
//! # Accessing the memory
//! Memory can be accessed in two ways:
//! - via _keys_
//! - via _paths_ (methods prefixed with `path_`)
//!
//! In both cases, if the value requested is `undefined`, `null`, or even just
//! of the wrong type, the method returns `None`.
//!
//! ## Accessing memory with a _key_
//! Since a `MemoryReference` represents a javascript object, its children can
//! be accessed using the `object["key"]` javascript syntax using type methods.
//! ```no_run
//! let mem = screeps::memory::root();
//! let cpu_used_last_tick = mem.i32("cpu_used_last_tick").unwrap();
//! ```
//!
//! ## Accessing memory with a _path_
//! A quality of life improvement upon the key access is through full path. In
//! javascript, it is possible to query a value with a full path:
//! ```javascript
//! var creep_time = Memory.creeps.John.time;
//! ```
//!
//! To emulate this behavior in rust, you can write such a path to a string and
//! it will fetch the javascript object using
//! [lodash](https://lodash.com/docs/4.17.10#get) and convert the result
//! depending on the method used. For example,
//! ```no_run
//! let mem = screeps::memory::root();
//! let creep_time = mem.path_i32("creeps.John.time").unwrap();
//! ```
//!
//! # Other methods that provide `MemoryReference`s
//! In addition to accessing the memory from the root, it is possible to
//! access the memory via creeps, spawns, rooms and flags. Accessing the memory
//! from those objects will also result in a `MemoryReference` which instead
//! points at the root of this object's memory.
//!
//! [`root`]: crate::memory::root
use std::fmt;
use stdweb::{JsSerialize, Reference, Value};
use crate::{
traits::{TryFrom, TryInto},
ConversionError,
};
#[derive(Clone, Debug)]
pub struct UnexpectedTypeError;
impl fmt::Display for UnexpectedTypeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// TODO: include &'static str references to the type names in this error...
write!(f, "expected one memory type, found another")
}
}
// TODO: do we even need this over just a raw 'Reference'?
/// A [`Reference`] to a screeps memory object
///
/// [`Reference`]: stdweb::Reference
pub struct MemoryReference(Reference);
impl AsRef<Reference> for MemoryReference {
fn as_ref(&self) -> &Reference {
&self.0
}
}
impl Default for MemoryReference {
fn default() -> Self {
Self::new()
}
}
impl MemoryReference {
pub fn new() -> Self {
js_unwrap!({})
}
/// Creates a MemoryReference from some JavaScript reference.
///
/// Warning: `MemoryReference` is only designed to work with "plain"
/// JavaScript objects, and passing an array or a non-plain object
/// into this method probably won't be what you want. `MemoryReference`
/// also gives access to all properties, so if this is indeed a plain
/// object, all of its values should also be plain objects.
///
/// Passing a non-plain-object reference into this function won't
/// invoke undefined behavior in and of itself, but other functions
/// can rely on `MemoryReference` being "plain".
pub unsafe fn from_reference_unchecked(reference: Reference) -> Self {
MemoryReference(reference)
}
pub fn bool(&self, key: &str) -> bool {
js_unwrap!(Boolean(@{self.as_ref()}[@{key}]))
}
pub fn path_bool(&self, path: &str) -> bool {
js_unwrap!(Boolean(_.get(@{self.as_ref()}, @{path})))
}
pub fn f64(&self, key: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_f64(&self, path: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn i32(&self, key: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_i32(&self, path: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn string(&self, key: &str) -> Result<Option<String>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_string(&self, path: &str) -> Result<Option<String>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn dict(&self, key: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_dict(&self, path: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! { | return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
/// Get a dictionary value or create it if it does not exist.
///
/// If the value exists but is a different type, this will return `None`.
pub fn dict_or_create(&self, key: &str) -> Result<MemoryReference, UnexpectedTypeError> {
(js! {
var map = (@{self.as_ref()});
var key = (@{key});
var value = map[key];
if (value === null || value === undefined) {
map[key] = value = {};
}
if ((typeof value) == "object" && !_.isArray(value)) {
return value;
} else {
return null;
}
})
.try_into()
.map_err(|_| UnexpectedTypeError)
.map(MemoryReference)
}
pub fn keys(&self) -> Vec<String> {
js_unwrap!(Object.keys(@{self.as_ref()}))
}
pub fn del(&self, key: &str) {
js! { @(no_return)
(@{self.as_ref()})[@{key}] = undefined;
}
}
pub fn path_del(&self, path: &str) {
js! {
_.set(@{self.as_ref()}, @{path}, undefined);
}
}
/// Gets a custom type. Will return `None` if `null` or `undefined`, and
/// `Err` if incorrect type.
///
/// # Example
///
/// ```no_run
/// use log::info;
/// use screeps::{prelude::*, Position};
///
/// let creep = screeps::game::creeps::get("mycreepname").unwrap();
/// let mem = creep.memory();
/// let pos = mem.get::<Position>("saved_pos").unwrap();
/// let pos = match pos {
/// Some(pos) => {
/// info!("found position: {}", pos);
/// pos
/// }
/// None => {
/// info!("no position. saving new one!");
/// let pos = creep.pos();
/// mem.set("saved_pos", pos);
/// pos
/// }
/// };
/// info!("final position: {}", pos);
/// creep.move_to(&pos);
/// ```
pub fn get<T>(&self, key: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return (@{self.as_ref()})[@{key}];
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
/// Gets a custom type at a memory path. Will return `None` if `null` or
/// `undefined`, and `Err` if incorrect type.
///
/// Uses lodash in JavaScript to evaluate the path. See https://lodash.com/docs/#get.
pub fn get_path<T>(&self, path: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return _.get(@{self.as_ref()}, @{path});
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
pub fn set<T>(&self, key: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
(@{self.as_ref()})[@{key}] = @{value};
}
}
pub fn path_set<T>(&self, path: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
_.set(@{self.as_ref()}, @{path}, @{value});
}
}
pub fn arr<T>(&self, key: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_arr<T>(&self, path: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
}
impl TryFrom<Value> for MemoryReference {
type Error = ConversionError;
fn try_from(v: Value) -> Result<Self, Self::Error> {
let r: Reference = v.try_into()?; // fail early.
Ok(MemoryReference(
(js! {
var v = (@{r});
if (_.isArray(v)) {
return null;
} else {
return v;
}
})
.try_into()?,
))
}
}
/// Get a reference to the `Memory` global object
pub fn root() -> MemoryReference {
js_unwrap!(Memory)
} | random_line_split | |
memory.rs | //! Interface with Screeps' `Memory` global variable
//!
//! Screeps' memory lives in the javascript `Memory` global variable and is
//! encoded as a javascript object. This object's reference is tracked within
//! rust as a `MemoryReference`. The [`root`] function gives access to a
//! reference to the `Memory` global object.
//!
//! # Typing
//! Contrary to accessing the memory in javascript, rust's strong type system,
//! requires that read values be assigned a type. To facilitate this, the
//! `MemoryReference` provides methods to read a part of the memory as a
//! certain type. If the value read cannot be transformed to the requested
//! type, the method return `None`.
//!
//! # Accessing the memory
//! Memory can be accessed in two ways:
//! - via _keys_
//! - via _paths_ (methods prefixed with `path_`)
//!
//! In both cases, if the value requested is `undefined`, `null`, or even just
//! of the wrong type, the method returns `None`.
//!
//! ## Accessing memory with a _key_
//! Since a `MemoryReference` represents a javascript object, its children can
//! be accessed using the `object["key"]` javascript syntax using type methods.
//! ```no_run
//! let mem = screeps::memory::root();
//! let cpu_used_last_tick = mem.i32("cpu_used_last_tick").unwrap();
//! ```
//!
//! ## Accessing memory with a _path_
//! A quality of life improvement upon the key access is through full path. In
//! javascript, it is possible to query a value with a full path:
//! ```javascript
//! var creep_time = Memory.creeps.John.time;
//! ```
//!
//! To emulate this behavior in rust, you can write such a path to a string and
//! it will fetch the javascript object using
//! [lodash](https://lodash.com/docs/4.17.10#get) and convert the result
//! depending on the method used. For example,
//! ```no_run
//! let mem = screeps::memory::root();
//! let creep_time = mem.path_i32("creeps.John.time").unwrap();
//! ```
//!
//! # Other methods that provide `MemoryReference`s
//! In addition to accessing the memory from the root, it is possible to
//! access the memory via creeps, spawns, rooms and flags. Accessing the memory
//! from those objects will also result in a `MemoryReference` which instead
//! points at the root of this object's memory.
//!
//! [`root`]: crate::memory::root
use std::fmt;
use stdweb::{JsSerialize, Reference, Value};
use crate::{
traits::{TryFrom, TryInto},
ConversionError,
};
#[derive(Clone, Debug)]
pub struct UnexpectedTypeError;
impl fmt::Display for UnexpectedTypeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// TODO: include &'static str references to the type names in this error...
write!(f, "expected one memory type, found another")
}
}
// TODO: do we even need this over just a raw 'Reference'?
/// A [`Reference`] to a screeps memory object
///
/// [`Reference`]: stdweb::Reference
pub struct MemoryReference(Reference);
impl AsRef<Reference> for MemoryReference {
fn as_ref(&self) -> &Reference {
&self.0
}
}
impl Default for MemoryReference {
fn default() -> Self {
Self::new()
}
}
impl MemoryReference {
pub fn new() -> Self {
js_unwrap!({})
}
/// Creates a MemoryReference from some JavaScript reference.
///
/// Warning: `MemoryReference` is only designed to work with "plain"
/// JavaScript objects, and passing an array or a non-plain object
/// into this method probably won't be what you want. `MemoryReference`
/// also gives access to all properties, so if this is indeed a plain
/// object, all of its values should also be plain objects.
///
/// Passing a non-plain-object reference into this function won't
/// invoke undefined behavior in and of itself, but other functions
/// can rely on `MemoryReference` being "plain".
pub unsafe fn from_reference_unchecked(reference: Reference) -> Self {
MemoryReference(reference)
}
pub fn bool(&self, key: &str) -> bool {
js_unwrap!(Boolean(@{self.as_ref()}[@{key}]))
}
pub fn path_bool(&self, path: &str) -> bool {
js_unwrap!(Boolean(_.get(@{self.as_ref()}, @{path})))
}
pub fn f64(&self, key: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_f64(&self, path: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn i32(&self, key: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_i32(&self, path: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn string(&self, key: &str) -> Result<Option<String>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_string(&self, path: &str) -> Result<Option<String>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn dict(&self, key: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_dict(&self, path: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
/// Get a dictionary value or create it if it does not exist.
///
/// If the value exists but is a different type, this will return `None`.
pub fn dict_or_create(&self, key: &str) -> Result<MemoryReference, UnexpectedTypeError> {
(js! {
var map = (@{self.as_ref()});
var key = (@{key});
var value = map[key];
if (value === null || value === undefined) {
map[key] = value = {};
}
if ((typeof value) == "object" && !_.isArray(value)) {
return value;
} else {
return null;
}
})
.try_into()
.map_err(|_| UnexpectedTypeError)
.map(MemoryReference)
}
pub fn keys(&self) -> Vec<String> {
js_unwrap!(Object.keys(@{self.as_ref()}))
}
pub fn del(&self, key: &str) {
js! { @(no_return)
(@{self.as_ref()})[@{key}] = undefined;
}
}
pub fn path_del(&self, path: &str) {
js! {
_.set(@{self.as_ref()}, @{path}, undefined);
}
}
/// Gets a custom type. Will return `None` if `null` or `undefined`, and
/// `Err` if incorrect type.
///
/// # Example
///
/// ```no_run
/// use log::info;
/// use screeps::{prelude::*, Position};
///
/// let creep = screeps::game::creeps::get("mycreepname").unwrap();
/// let mem = creep.memory();
/// let pos = mem.get::<Position>("saved_pos").unwrap();
/// let pos = match pos {
/// Some(pos) => {
/// info!("found position: {}", pos);
/// pos
/// }
/// None => {
/// info!("no position. saving new one!");
/// let pos = creep.pos();
/// mem.set("saved_pos", pos);
/// pos
/// }
/// };
/// info!("final position: {}", pos);
/// creep.move_to(&pos);
/// ```
pub fn get<T>(&self, key: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return (@{self.as_ref()})[@{key}];
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
/// Gets a custom type at a memory path. Will return `None` if `null` or
/// `undefined`, and `Err` if incorrect type.
///
/// Uses lodash in JavaScript to evaluate the path. See https://lodash.com/docs/#get.
pub fn get_path<T>(&self, path: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return _.get(@{self.as_ref()}, @{path});
};
if val == Value::Null || val == Value::Undefined | else {
Some(val.try_into()).transpose()
}
}
pub fn set<T>(&self, key: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
(@{self.as_ref()})[@{key}] = @{value};
}
}
pub fn path_set<T>(&self, path: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
_.set(@{self.as_ref()}, @{path}, @{value});
}
}
pub fn arr<T>(&self, key: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_arr<T>(&self, path: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
}
impl TryFrom<Value> for MemoryReference {
type Error = ConversionError;
fn try_from(v: Value) -> Result<Self, Self::Error> {
let r: Reference = v.try_into()?; // fail early.
Ok(MemoryReference(
(js! {
var v = (@{r});
if (_.isArray(v)) {
return null;
} else {
return v;
}
})
.try_into()?,
))
}
}
/// Get a reference to the `Memory` global object
pub fn root() -> MemoryReference {
js_unwrap!(Memory)
}
| {
Ok(None)
} | conditional_block |
memory.rs | //! Interface with Screeps' `Memory` global variable
//!
//! Screeps' memory lives in the javascript `Memory` global variable and is
//! encoded as a javascript object. This object's reference is tracked within
//! rust as a `MemoryReference`. The [`root`] function gives access to a
//! reference to the `Memory` global object.
//!
//! # Typing
//! Contrary to accessing the memory in javascript, rust's strong type system,
//! requires that read values be assigned a type. To facilitate this, the
//! `MemoryReference` provides methods to read a part of the memory as a
//! certain type. If the value read cannot be transformed to the requested
//! type, the method return `None`.
//!
//! # Accessing the memory
//! Memory can be accessed in two ways:
//! - via _keys_
//! - via _paths_ (methods prefixed with `path_`)
//!
//! In both cases, if the value requested is `undefined`, `null`, or even just
//! of the wrong type, the method returns `None`.
//!
//! ## Accessing memory with a _key_
//! Since a `MemoryReference` represents a javascript object, its children can
//! be accessed using the `object["key"]` javascript syntax using type methods.
//! ```no_run
//! let mem = screeps::memory::root();
//! let cpu_used_last_tick = mem.i32("cpu_used_last_tick").unwrap();
//! ```
//!
//! ## Accessing memory with a _path_
//! A quality of life improvement upon the key access is through full path. In
//! javascript, it is possible to query a value with a full path:
//! ```javascript
//! var creep_time = Memory.creeps.John.time;
//! ```
//!
//! To emulate this behavior in rust, you can write such a path to a string and
//! it will fetch the javascript object using
//! [lodash](https://lodash.com/docs/4.17.10#get) and convert the result
//! depending on the method used. For example,
//! ```no_run
//! let mem = screeps::memory::root();
//! let creep_time = mem.path_i32("creeps.John.time").unwrap();
//! ```
//!
//! # Other methods that provide `MemoryReference`s
//! In addition to accessing the memory from the root, it is possible to
//! access the memory via creeps, spawns, rooms and flags. Accessing the memory
//! from those objects will also result in a `MemoryReference` which instead
//! points at the root of this object's memory.
//!
//! [`root`]: crate::memory::root
use std::fmt;
use stdweb::{JsSerialize, Reference, Value};
use crate::{
traits::{TryFrom, TryInto},
ConversionError,
};
#[derive(Clone, Debug)]
pub struct UnexpectedTypeError;
impl fmt::Display for UnexpectedTypeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// TODO: include &'static str references to the type names in this error...
write!(f, "expected one memory type, found another")
}
}
// TODO: do we even need this over just a raw 'Reference'?
/// A [`Reference`] to a screeps memory object
///
/// [`Reference`]: stdweb::Reference
pub struct MemoryReference(Reference);
impl AsRef<Reference> for MemoryReference {
fn as_ref(&self) -> &Reference {
&self.0
}
}
impl Default for MemoryReference {
fn default() -> Self {
Self::new()
}
}
impl MemoryReference {
pub fn new() -> Self {
js_unwrap!({})
}
/// Creates a MemoryReference from some JavaScript reference.
///
/// Warning: `MemoryReference` is only designed to work with "plain"
/// JavaScript objects, and passing an array or a non-plain object
/// into this method probably won't be what you want. `MemoryReference`
/// also gives access to all properties, so if this is indeed a plain
/// object, all of its values should also be plain objects.
///
/// Passing a non-plain-object reference into this function won't
/// invoke undefined behavior in and of itself, but other functions
/// can rely on `MemoryReference` being "plain".
pub unsafe fn from_reference_unchecked(reference: Reference) -> Self {
MemoryReference(reference)
}
pub fn bool(&self, key: &str) -> bool {
js_unwrap!(Boolean(@{self.as_ref()}[@{key}]))
}
pub fn path_bool(&self, path: &str) -> bool {
js_unwrap!(Boolean(_.get(@{self.as_ref()}, @{path})))
}
pub fn | (&self, key: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_f64(&self, path: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn i32(&self, key: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_i32(&self, path: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn string(&self, key: &str) -> Result<Option<String>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_string(&self, path: &str) -> Result<Option<String>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn dict(&self, key: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_dict(&self, path: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
/// Get a dictionary value or create it if it does not exist.
///
/// If the value exists but is a different type, this will return `None`.
pub fn dict_or_create(&self, key: &str) -> Result<MemoryReference, UnexpectedTypeError> {
(js! {
var map = (@{self.as_ref()});
var key = (@{key});
var value = map[key];
if (value === null || value === undefined) {
map[key] = value = {};
}
if ((typeof value) == "object" && !_.isArray(value)) {
return value;
} else {
return null;
}
})
.try_into()
.map_err(|_| UnexpectedTypeError)
.map(MemoryReference)
}
pub fn keys(&self) -> Vec<String> {
js_unwrap!(Object.keys(@{self.as_ref()}))
}
pub fn del(&self, key: &str) {
js! { @(no_return)
(@{self.as_ref()})[@{key}] = undefined;
}
}
pub fn path_del(&self, path: &str) {
js! {
_.set(@{self.as_ref()}, @{path}, undefined);
}
}
/// Gets a custom type. Will return `None` if `null` or `undefined`, and
/// `Err` if incorrect type.
///
/// # Example
///
/// ```no_run
/// use log::info;
/// use screeps::{prelude::*, Position};
///
/// let creep = screeps::game::creeps::get("mycreepname").unwrap();
/// let mem = creep.memory();
/// let pos = mem.get::<Position>("saved_pos").unwrap();
/// let pos = match pos {
/// Some(pos) => {
/// info!("found position: {}", pos);
/// pos
/// }
/// None => {
/// info!("no position. saving new one!");
/// let pos = creep.pos();
/// mem.set("saved_pos", pos);
/// pos
/// }
/// };
/// info!("final position: {}", pos);
/// creep.move_to(&pos);
/// ```
pub fn get<T>(&self, key: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return (@{self.as_ref()})[@{key}];
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
/// Gets a custom type at a memory path. Will return `None` if `null` or
/// `undefined`, and `Err` if incorrect type.
///
/// Uses lodash in JavaScript to evaluate the path. See https://lodash.com/docs/#get.
pub fn get_path<T>(&self, path: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return _.get(@{self.as_ref()}, @{path});
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
pub fn set<T>(&self, key: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
(@{self.as_ref()})[@{key}] = @{value};
}
}
pub fn path_set<T>(&self, path: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
_.set(@{self.as_ref()}, @{path}, @{value});
}
}
pub fn arr<T>(&self, key: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_arr<T>(&self, path: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
}
impl TryFrom<Value> for MemoryReference {
type Error = ConversionError;
fn try_from(v: Value) -> Result<Self, Self::Error> {
let r: Reference = v.try_into()?; // fail early.
Ok(MemoryReference(
(js! {
var v = (@{r});
if (_.isArray(v)) {
return null;
} else {
return v;
}
})
.try_into()?,
))
}
}
/// Get a reference to the `Memory` global object
pub fn root() -> MemoryReference {
js_unwrap!(Memory)
}
| f64 | identifier_name |
memory.rs | //! Interface with Screeps' `Memory` global variable
//!
//! Screeps' memory lives in the javascript `Memory` global variable and is
//! encoded as a javascript object. This object's reference is tracked within
//! rust as a `MemoryReference`. The [`root`] function gives access to a
//! reference to the `Memory` global object.
//!
//! # Typing
//! Contrary to accessing the memory in javascript, rust's strong type system,
//! requires that read values be assigned a type. To facilitate this, the
//! `MemoryReference` provides methods to read a part of the memory as a
//! certain type. If the value read cannot be transformed to the requested
//! type, the method return `None`.
//!
//! # Accessing the memory
//! Memory can be accessed in two ways:
//! - via _keys_
//! - via _paths_ (methods prefixed with `path_`)
//!
//! In both cases, if the value requested is `undefined`, `null`, or even just
//! of the wrong type, the method returns `None`.
//!
//! ## Accessing memory with a _key_
//! Since a `MemoryReference` represents a javascript object, its children can
//! be accessed using the `object["key"]` javascript syntax using type methods.
//! ```no_run
//! let mem = screeps::memory::root();
//! let cpu_used_last_tick = mem.i32("cpu_used_last_tick").unwrap();
//! ```
//!
//! ## Accessing memory with a _path_
//! A quality of life improvement upon the key access is through full path. In
//! javascript, it is possible to query a value with a full path:
//! ```javascript
//! var creep_time = Memory.creeps.John.time;
//! ```
//!
//! To emulate this behavior in rust, you can write such a path to a string and
//! it will fetch the javascript object using
//! [lodash](https://lodash.com/docs/4.17.10#get) and convert the result
//! depending on the method used. For example,
//! ```no_run
//! let mem = screeps::memory::root();
//! let creep_time = mem.path_i32("creeps.John.time").unwrap();
//! ```
//!
//! # Other methods that provide `MemoryReference`s
//! In addition to accessing the memory from the root, it is possible to
//! access the memory via creeps, spawns, rooms and flags. Accessing the memory
//! from those objects will also result in a `MemoryReference` which instead
//! points at the root of this object's memory.
//!
//! [`root`]: crate::memory::root
use std::fmt;
use stdweb::{JsSerialize, Reference, Value};
use crate::{
traits::{TryFrom, TryInto},
ConversionError,
};
#[derive(Clone, Debug)]
pub struct UnexpectedTypeError;
impl fmt::Display for UnexpectedTypeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// TODO: include &'static str references to the type names in this error...
write!(f, "expected one memory type, found another")
}
}
// TODO: do we even need this over just a raw 'Reference'?
/// A [`Reference`] to a screeps memory object
///
/// [`Reference`]: stdweb::Reference
pub struct MemoryReference(Reference);
impl AsRef<Reference> for MemoryReference {
fn as_ref(&self) -> &Reference {
&self.0
}
}
impl Default for MemoryReference {
fn default() -> Self {
Self::new()
}
}
impl MemoryReference {
pub fn new() -> Self {
js_unwrap!({})
}
/// Creates a MemoryReference from some JavaScript reference.
///
/// Warning: `MemoryReference` is only designed to work with "plain"
/// JavaScript objects, and passing an array or a non-plain object
/// into this method probably won't be what you want. `MemoryReference`
/// also gives access to all properties, so if this is indeed a plain
/// object, all of its values should also be plain objects.
///
/// Passing a non-plain-object reference into this function won't
/// invoke undefined behavior in and of itself, but other functions
/// can rely on `MemoryReference` being "plain".
pub unsafe fn from_reference_unchecked(reference: Reference) -> Self {
MemoryReference(reference)
}
pub fn bool(&self, key: &str) -> bool {
js_unwrap!(Boolean(@{self.as_ref()}[@{key}]))
}
pub fn path_bool(&self, path: &str) -> bool {
js_unwrap!(Boolean(_.get(@{self.as_ref()}, @{path})))
}
pub fn f64(&self, key: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_f64(&self, path: &str) -> Result<Option<f64>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn i32(&self, key: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_i32(&self, path: &str) -> Result<Option<i32>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn string(&self, key: &str) -> Result<Option<String>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_string(&self, path: &str) -> Result<Option<String>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
pub fn dict(&self, key: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
}
pub fn path_dict(&self, path: &str) -> Result<Option<MemoryReference>, ConversionError> {
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
/// Get a dictionary value or create it if it does not exist.
///
/// If the value exists but is a different type, this will return `None`.
pub fn dict_or_create(&self, key: &str) -> Result<MemoryReference, UnexpectedTypeError> {
(js! {
var map = (@{self.as_ref()});
var key = (@{key});
var value = map[key];
if (value === null || value === undefined) {
map[key] = value = {};
}
if ((typeof value) == "object" && !_.isArray(value)) {
return value;
} else {
return null;
}
})
.try_into()
.map_err(|_| UnexpectedTypeError)
.map(MemoryReference)
}
pub fn keys(&self) -> Vec<String> {
js_unwrap!(Object.keys(@{self.as_ref()}))
}
pub fn del(&self, key: &str) {
js! { @(no_return)
(@{self.as_ref()})[@{key}] = undefined;
}
}
pub fn path_del(&self, path: &str) {
js! {
_.set(@{self.as_ref()}, @{path}, undefined);
}
}
/// Gets a custom type. Will return `None` if `null` or `undefined`, and
/// `Err` if incorrect type.
///
/// # Example
///
/// ```no_run
/// use log::info;
/// use screeps::{prelude::*, Position};
///
/// let creep = screeps::game::creeps::get("mycreepname").unwrap();
/// let mem = creep.memory();
/// let pos = mem.get::<Position>("saved_pos").unwrap();
/// let pos = match pos {
/// Some(pos) => {
/// info!("found position: {}", pos);
/// pos
/// }
/// None => {
/// info!("no position. saving new one!");
/// let pos = creep.pos();
/// mem.set("saved_pos", pos);
/// pos
/// }
/// };
/// info!("final position: {}", pos);
/// creep.move_to(&pos);
/// ```
pub fn get<T>(&self, key: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return (@{self.as_ref()})[@{key}];
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
/// Gets a custom type at a memory path. Will return `None` if `null` or
/// `undefined`, and `Err` if incorrect type.
///
/// Uses lodash in JavaScript to evaluate the path. See https://lodash.com/docs/#get.
pub fn get_path<T>(&self, path: &str) -> Result<Option<T>, <T as TryFrom<Value>>::Error>
where
T: TryFrom<Value>,
{
let val = js! {
return _.get(@{self.as_ref()}, @{path});
};
if val == Value::Null || val == Value::Undefined {
Ok(None)
} else {
Some(val.try_into()).transpose()
}
}
pub fn set<T>(&self, key: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
(@{self.as_ref()})[@{key}] = @{value};
}
}
pub fn path_set<T>(&self, path: &str, value: T)
where
T: JsSerialize,
{
js! { @(no_return)
_.set(@{self.as_ref()}, @{path}, @{value});
}
}
pub fn arr<T>(&self, key: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
|
pub fn path_arr<T>(&self, path: &str) -> Result<Option<Vec<T>>, ConversionError>
where
T: TryFrom<Value, Error = ConversionError>,
{
(js! {
return _.get(@{self.as_ref()}, @{path});
})
.try_into()
}
}
impl TryFrom<Value> for MemoryReference {
type Error = ConversionError;
fn try_from(v: Value) -> Result<Self, Self::Error> {
let r: Reference = v.try_into()?; // fail early.
Ok(MemoryReference(
(js! {
var v = (@{r});
if (_.isArray(v)) {
return null;
} else {
return v;
}
})
.try_into()?,
))
}
}
/// Get a reference to the `Memory` global object
pub fn root() -> MemoryReference {
js_unwrap!(Memory)
}
| {
(js! {
return (@{self.as_ref()})[@{key}];
})
.try_into()
} | identifier_body |
crud.go | // Copyright 2020 The Okteto Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package deployments
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/okteto/okteto/pkg/config"
"github.com/okteto/okteto/pkg/errors"
okLabels "github.com/okteto/okteto/pkg/k8s/labels"
"github.com/okteto/okteto/pkg/log"
"github.com/okteto/okteto/pkg/model"
appsv1 "k8s.io/api/apps/v1"
apiv1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
//List returns the list of deployments
func List(namespace string, c kubernetes.Interface) ([]appsv1.Deployment, error) {
dList, err := c.AppsV1().Deployments(namespace).List(metav1.ListOptions{})
if err != nil {
return nil, err
}
return dList.Items, nil
}
//Get returns a deployment object given its name and namespace
func Get(dev *model.Dev, namespace string, c kubernetes.Interface) (*appsv1.Deployment, error) {
if namespace == "" {
return nil, fmt.Errorf("empty namespace")
}
var d *appsv1.Deployment
var err error
if len(dev.Labels) == 0 {
d, err = c.AppsV1().Deployments(namespace).Get(dev.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", namespace, dev.Name, err)
return nil, err
}
} else {
deploys, err := c.AppsV1().Deployments(namespace).List(
metav1.ListOptions{
LabelSelector: dev.LabelsSelector(),
},
)
if err != nil {
return nil, err
}
if len(deploys.Items) == 0 {
return nil, fmt.Errorf("deployment for labels '%s' not found", dev.LabelsSelector())
}
if len(deploys.Items) > 1 {
return nil, fmt.Errorf("Found '%d' deployments for labels '%s' instead of 1", len(deploys.Items), dev.LabelsSelector())
}
d = &deploys.Items[0]
}
return d, nil
}
//GetRevisionAnnotatedDeploymentOrFailed returns a deployment object if it is healthy and annotated with its revision or an error
func GetRevisionAnnotatedDeploymentOrFailed(dev *model.Dev, c *kubernetes.Clientset, waitUntilDeployed bool) (*appsv1.Deployment, error) {
d, err := Get(dev, dev.Namespace, c)
if err != nil |
for _, c := range d.Status.Conditions {
if c.Type == appsv1.DeploymentReplicaFailure && c.Reason == "FailedCreate" && c.Status == apiv1.ConditionTrue {
if strings.Contains(c.Message, "exceeded quota") {
log.Infof("%s: %s", errors.ErrQuota, c.Message)
return nil, errors.ErrQuota
}
return nil, fmt.Errorf(c.Message)
}
}
if d.Generation != d.Status.ObservedGeneration {
return nil, nil
}
return d, nil
}
//GetTranslations fills all the deployments pointed by a development container
func GetTranslations(dev *model.Dev, d *appsv1.Deployment, c *kubernetes.Clientset) (map[string]*model.Translation, error) {
result := map[string]*model.Translation{}
if d != nil {
rule := dev.ToTranslationRule(dev)
result[d.Name] = &model.Translation{
Interactive: true,
Name: dev.Name,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
if err := loadServiceTranslations(dev, result, c); err != nil {
return nil, err
}
return result, nil
}
func loadServiceTranslations(dev *model.Dev, result map[string]*model.Translation, c kubernetes.Interface) error {
for _, s := range dev.Services {
d, err := Get(s, dev.Namespace, c)
if err != nil {
return err
}
rule := s.ToTranslationRule(dev)
if _, ok := result[d.Name]; ok {
result[d.Name].Rules = append(result[d.Name].Rules, rule)
continue
}
result[d.Name] = &model.Translation{
Name: dev.Name,
Interactive: false,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
return nil
}
//Deploy creates or updates a deployment
func Deploy(d *appsv1.Deployment, forceCreate bool, client *kubernetes.Clientset) error {
if forceCreate {
if err := create(d, client); err != nil {
return err
}
} else {
if err := update(d, client); err != nil {
return err
}
}
return nil
}
//UpdateOktetoRevision updates the okteto version annotation
func UpdateOktetoRevision(ctx context.Context, d *appsv1.Deployment, client *kubernetes.Clientset) error {
ticker := time.NewTicker(200 * time.Millisecond)
timeout := time.Now().Add(2 * config.GetTimeout()) // 60 seconds
for i := 0; ; i++ {
updated, err := client.AppsV1().Deployments(d.Namespace).Get(d.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", d.Namespace, d.Name, err)
return err
}
revision := updated.Annotations[revisionAnnotation]
if revision != "" {
d.Annotations[okLabels.RevisionAnnotation] = revision
return update(d, client)
}
if time.Now().After(timeout) {
return fmt.Errorf("kubernetes is taking too long to update the '%s' annotation of the deployment '%s'. Please check for errors and try again", revisionAnnotation, d.Name)
}
select {
case <-ticker.C:
continue
case <-ctx.Done():
log.Debug("cancelling call to update okteto revision")
return ctx.Err()
}
}
}
//TranslateDevMode translates the deployment manifests to put them in dev mode
func TranslateDevMode(tr map[string]*model.Translation, ns *apiv1.Namespace, c *kubernetes.Clientset) error {
for _, t := range tr {
err := translate(t, ns, c)
if err != nil {
return err
}
}
return nil
}
//IsDevModeOn returns if a deployment is in devmode
func IsDevModeOn(d *appsv1.Deployment) bool {
labels := d.GetObjectMeta().GetLabels()
if labels == nil {
return false
}
_, ok := labels[okLabels.DevLabel]
return ok
}
//HasBeenChanged returns if a deployment has been updated since the development container was activated
func HasBeenChanged(d *appsv1.Deployment) bool {
oktetoRevision := d.Annotations[okLabels.RevisionAnnotation]
if oktetoRevision == "" {
return false
}
return oktetoRevision != d.Annotations[revisionAnnotation]
}
// UpdateDeployments update all deployments in the given translation list
func UpdateDeployments(trList map[string]*model.Translation, c *kubernetes.Clientset) error {
for _, tr := range trList {
if tr.Deployment == nil {
continue
}
if err := update(tr.Deployment, c); err != nil {
return err
}
}
return nil
}
//TranslateDevModeOff reverses the dev mode translation
func TranslateDevModeOff(d *appsv1.Deployment) (*appsv1.Deployment, error) {
trRulesJSON := getAnnotation(d.Spec.Template.GetObjectMeta(), okLabels.TranslationAnnotation)
if trRulesJSON == "" {
dManifest := getAnnotation(d.GetObjectMeta(), oktetoDeploymentAnnotation)
if dManifest == "" {
log.Infof("%s/%s is not a development container", d.Namespace, d.Name)
return d, nil
}
dOrig := &appsv1.Deployment{}
if err := json.Unmarshal([]byte(dManifest), dOrig); err != nil {
return nil, fmt.Errorf("malformed manifest: %s", err)
}
return dOrig, nil
}
trRules := &model.Translation{}
if err := json.Unmarshal([]byte(trRulesJSON), trRules); err != nil {
return nil, fmt.Errorf("malformed tr rules: %s", err)
}
d.Spec.Replicas = &trRules.Replicas
annotations := d.GetObjectMeta().GetAnnotations()
delete(annotations, oktetoVersionAnnotation)
if err := deleteUserAnnotations(annotations, trRules); err != nil {
return nil, err
}
d.GetObjectMeta().SetAnnotations(annotations)
annotations = d.Spec.Template.GetObjectMeta().GetAnnotations()
delete(annotations, okLabels.TranslationAnnotation)
delete(annotations, model.OktetoRestartAnnotation)
d.Spec.Template.GetObjectMeta().SetAnnotations(annotations)
labels := d.GetObjectMeta().GetLabels()
delete(labels, okLabels.DevLabel)
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.GetObjectMeta().SetLabels(labels)
labels = d.Spec.Template.GetObjectMeta().GetLabels()
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.Spec.Template.GetObjectMeta().SetLabels(labels)
return d, nil
}
func create(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("creating deployment %s/%s", d.Namespace, d.Name)
_, err := c.AppsV1().Deployments(d.Namespace).Create(d)
if err != nil {
return err
}
return nil
}
func update(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("updating deployment %s/%s", d.Namespace, d.Name)
d.ResourceVersion = ""
d.Status = appsv1.DeploymentStatus{}
_, err := c.AppsV1().Deployments(d.Namespace).Update(d)
if err != nil {
return err
}
return nil
}
func deleteUserAnnotations(annotations map[string]string, tr *model.Translation) error {
if tr.Annotations == nil {
return nil
}
for key := range tr.Annotations {
delete(annotations, key)
}
return nil
}
//Destroy destroys a k8s service
func Destroy(dev *model.Dev, c *kubernetes.Clientset) error {
log.Infof("deleting deployment '%s'", dev.Name)
dClient := c.AppsV1().Deployments(dev.Namespace)
err := dClient.Delete(dev.Name, &metav1.DeleteOptions{GracePeriodSeconds: &devTerminationGracePeriodSeconds})
if err != nil {
if strings.Contains(err.Error(), "not found") {
log.Infof("deployment '%s' was already deleted.", dev.Name)
return nil
}
return fmt.Errorf("error deleting kubernetes deployment: %s", err)
}
log.Infof("deployment '%s' deleted", dev.Name)
return nil
}
| {
if waitUntilDeployed && errors.IsNotFound(err) {
return nil, nil
}
return nil, err
} | conditional_block |
crud.go | // Copyright 2020 The Okteto Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package deployments
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/okteto/okteto/pkg/config"
"github.com/okteto/okteto/pkg/errors"
okLabels "github.com/okteto/okteto/pkg/k8s/labels"
"github.com/okteto/okteto/pkg/log"
"github.com/okteto/okteto/pkg/model"
appsv1 "k8s.io/api/apps/v1"
apiv1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
//List returns the list of deployments
func List(namespace string, c kubernetes.Interface) ([]appsv1.Deployment, error) {
dList, err := c.AppsV1().Deployments(namespace).List(metav1.ListOptions{})
if err != nil {
return nil, err
}
return dList.Items, nil
}
//Get returns a deployment object given its name and namespace
func Get(dev *model.Dev, namespace string, c kubernetes.Interface) (*appsv1.Deployment, error) {
if namespace == "" {
return nil, fmt.Errorf("empty namespace")
}
var d *appsv1.Deployment
var err error
if len(dev.Labels) == 0 {
d, err = c.AppsV1().Deployments(namespace).Get(dev.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", namespace, dev.Name, err)
return nil, err
}
} else {
deploys, err := c.AppsV1().Deployments(namespace).List(
metav1.ListOptions{
LabelSelector: dev.LabelsSelector(),
},
)
if err != nil {
return nil, err
}
if len(deploys.Items) == 0 {
return nil, fmt.Errorf("deployment for labels '%s' not found", dev.LabelsSelector())
}
if len(deploys.Items) > 1 {
return nil, fmt.Errorf("Found '%d' deployments for labels '%s' instead of 1", len(deploys.Items), dev.LabelsSelector())
}
d = &deploys.Items[0]
}
return d, nil
}
//GetRevisionAnnotatedDeploymentOrFailed returns a deployment object if it is healthy and annotated with its revision or an error
func GetRevisionAnnotatedDeploymentOrFailed(dev *model.Dev, c *kubernetes.Clientset, waitUntilDeployed bool) (*appsv1.Deployment, error) {
d, err := Get(dev, dev.Namespace, c)
if err != nil {
if waitUntilDeployed && errors.IsNotFound(err) {
return nil, nil
}
return nil, err
}
for _, c := range d.Status.Conditions {
if c.Type == appsv1.DeploymentReplicaFailure && c.Reason == "FailedCreate" && c.Status == apiv1.ConditionTrue {
if strings.Contains(c.Message, "exceeded quota") {
log.Infof("%s: %s", errors.ErrQuota, c.Message)
return nil, errors.ErrQuota
}
return nil, fmt.Errorf(c.Message)
}
}
if d.Generation != d.Status.ObservedGeneration {
return nil, nil
}
return d, nil
}
//GetTranslations fills all the deployments pointed by a development container
func GetTranslations(dev *model.Dev, d *appsv1.Deployment, c *kubernetes.Clientset) (map[string]*model.Translation, error) {
result := map[string]*model.Translation{}
if d != nil {
rule := dev.ToTranslationRule(dev)
result[d.Name] = &model.Translation{
Interactive: true,
Name: dev.Name,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
if err := loadServiceTranslations(dev, result, c); err != nil {
return nil, err
}
return result, nil
}
func loadServiceTranslations(dev *model.Dev, result map[string]*model.Translation, c kubernetes.Interface) error |
//Deploy creates or updates a deployment
func Deploy(d *appsv1.Deployment, forceCreate bool, client *kubernetes.Clientset) error {
if forceCreate {
if err := create(d, client); err != nil {
return err
}
} else {
if err := update(d, client); err != nil {
return err
}
}
return nil
}
//UpdateOktetoRevision updates the okteto version annotation
func UpdateOktetoRevision(ctx context.Context, d *appsv1.Deployment, client *kubernetes.Clientset) error {
ticker := time.NewTicker(200 * time.Millisecond)
timeout := time.Now().Add(2 * config.GetTimeout()) // 60 seconds
for i := 0; ; i++ {
updated, err := client.AppsV1().Deployments(d.Namespace).Get(d.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", d.Namespace, d.Name, err)
return err
}
revision := updated.Annotations[revisionAnnotation]
if revision != "" {
d.Annotations[okLabels.RevisionAnnotation] = revision
return update(d, client)
}
if time.Now().After(timeout) {
return fmt.Errorf("kubernetes is taking too long to update the '%s' annotation of the deployment '%s'. Please check for errors and try again", revisionAnnotation, d.Name)
}
select {
case <-ticker.C:
continue
case <-ctx.Done():
log.Debug("cancelling call to update okteto revision")
return ctx.Err()
}
}
}
//TranslateDevMode translates the deployment manifests to put them in dev mode
func TranslateDevMode(tr map[string]*model.Translation, ns *apiv1.Namespace, c *kubernetes.Clientset) error {
for _, t := range tr {
err := translate(t, ns, c)
if err != nil {
return err
}
}
return nil
}
//IsDevModeOn returns if a deployment is in devmode
func IsDevModeOn(d *appsv1.Deployment) bool {
labels := d.GetObjectMeta().GetLabels()
if labels == nil {
return false
}
_, ok := labels[okLabels.DevLabel]
return ok
}
//HasBeenChanged returns if a deployment has been updated since the development container was activated
func HasBeenChanged(d *appsv1.Deployment) bool {
oktetoRevision := d.Annotations[okLabels.RevisionAnnotation]
if oktetoRevision == "" {
return false
}
return oktetoRevision != d.Annotations[revisionAnnotation]
}
// UpdateDeployments update all deployments in the given translation list
func UpdateDeployments(trList map[string]*model.Translation, c *kubernetes.Clientset) error {
for _, tr := range trList {
if tr.Deployment == nil {
continue
}
if err := update(tr.Deployment, c); err != nil {
return err
}
}
return nil
}
//TranslateDevModeOff reverses the dev mode translation
func TranslateDevModeOff(d *appsv1.Deployment) (*appsv1.Deployment, error) {
trRulesJSON := getAnnotation(d.Spec.Template.GetObjectMeta(), okLabels.TranslationAnnotation)
if trRulesJSON == "" {
dManifest := getAnnotation(d.GetObjectMeta(), oktetoDeploymentAnnotation)
if dManifest == "" {
log.Infof("%s/%s is not a development container", d.Namespace, d.Name)
return d, nil
}
dOrig := &appsv1.Deployment{}
if err := json.Unmarshal([]byte(dManifest), dOrig); err != nil {
return nil, fmt.Errorf("malformed manifest: %s", err)
}
return dOrig, nil
}
trRules := &model.Translation{}
if err := json.Unmarshal([]byte(trRulesJSON), trRules); err != nil {
return nil, fmt.Errorf("malformed tr rules: %s", err)
}
d.Spec.Replicas = &trRules.Replicas
annotations := d.GetObjectMeta().GetAnnotations()
delete(annotations, oktetoVersionAnnotation)
if err := deleteUserAnnotations(annotations, trRules); err != nil {
return nil, err
}
d.GetObjectMeta().SetAnnotations(annotations)
annotations = d.Spec.Template.GetObjectMeta().GetAnnotations()
delete(annotations, okLabels.TranslationAnnotation)
delete(annotations, model.OktetoRestartAnnotation)
d.Spec.Template.GetObjectMeta().SetAnnotations(annotations)
labels := d.GetObjectMeta().GetLabels()
delete(labels, okLabels.DevLabel)
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.GetObjectMeta().SetLabels(labels)
labels = d.Spec.Template.GetObjectMeta().GetLabels()
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.Spec.Template.GetObjectMeta().SetLabels(labels)
return d, nil
}
func create(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("creating deployment %s/%s", d.Namespace, d.Name)
_, err := c.AppsV1().Deployments(d.Namespace).Create(d)
if err != nil {
return err
}
return nil
}
func update(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("updating deployment %s/%s", d.Namespace, d.Name)
d.ResourceVersion = ""
d.Status = appsv1.DeploymentStatus{}
_, err := c.AppsV1().Deployments(d.Namespace).Update(d)
if err != nil {
return err
}
return nil
}
func deleteUserAnnotations(annotations map[string]string, tr *model.Translation) error {
if tr.Annotations == nil {
return nil
}
for key := range tr.Annotations {
delete(annotations, key)
}
return nil
}
//Destroy destroys a k8s service
func Destroy(dev *model.Dev, c *kubernetes.Clientset) error {
log.Infof("deleting deployment '%s'", dev.Name)
dClient := c.AppsV1().Deployments(dev.Namespace)
err := dClient.Delete(dev.Name, &metav1.DeleteOptions{GracePeriodSeconds: &devTerminationGracePeriodSeconds})
if err != nil {
if strings.Contains(err.Error(), "not found") {
log.Infof("deployment '%s' was already deleted.", dev.Name)
return nil
}
return fmt.Errorf("error deleting kubernetes deployment: %s", err)
}
log.Infof("deployment '%s' deleted", dev.Name)
return nil
}
| {
for _, s := range dev.Services {
d, err := Get(s, dev.Namespace, c)
if err != nil {
return err
}
rule := s.ToTranslationRule(dev)
if _, ok := result[d.Name]; ok {
result[d.Name].Rules = append(result[d.Name].Rules, rule)
continue
}
result[d.Name] = &model.Translation{
Name: dev.Name,
Interactive: false,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
return nil
} | identifier_body |
crud.go | // Copyright 2020 The Okteto Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package deployments
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/okteto/okteto/pkg/config"
"github.com/okteto/okteto/pkg/errors"
okLabels "github.com/okteto/okteto/pkg/k8s/labels"
"github.com/okteto/okteto/pkg/log"
"github.com/okteto/okteto/pkg/model"
appsv1 "k8s.io/api/apps/v1"
apiv1 "k8s.io/api/core/v1" | //List returns the list of deployments
func List(namespace string, c kubernetes.Interface) ([]appsv1.Deployment, error) {
dList, err := c.AppsV1().Deployments(namespace).List(metav1.ListOptions{})
if err != nil {
return nil, err
}
return dList.Items, nil
}
//Get returns a deployment object given its name and namespace
func Get(dev *model.Dev, namespace string, c kubernetes.Interface) (*appsv1.Deployment, error) {
if namespace == "" {
return nil, fmt.Errorf("empty namespace")
}
var d *appsv1.Deployment
var err error
if len(dev.Labels) == 0 {
d, err = c.AppsV1().Deployments(namespace).Get(dev.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", namespace, dev.Name, err)
return nil, err
}
} else {
deploys, err := c.AppsV1().Deployments(namespace).List(
metav1.ListOptions{
LabelSelector: dev.LabelsSelector(),
},
)
if err != nil {
return nil, err
}
if len(deploys.Items) == 0 {
return nil, fmt.Errorf("deployment for labels '%s' not found", dev.LabelsSelector())
}
if len(deploys.Items) > 1 {
return nil, fmt.Errorf("Found '%d' deployments for labels '%s' instead of 1", len(deploys.Items), dev.LabelsSelector())
}
d = &deploys.Items[0]
}
return d, nil
}
//GetRevisionAnnotatedDeploymentOrFailed returns a deployment object if it is healthy and annotated with its revision or an error
func GetRevisionAnnotatedDeploymentOrFailed(dev *model.Dev, c *kubernetes.Clientset, waitUntilDeployed bool) (*appsv1.Deployment, error) {
d, err := Get(dev, dev.Namespace, c)
if err != nil {
if waitUntilDeployed && errors.IsNotFound(err) {
return nil, nil
}
return nil, err
}
for _, c := range d.Status.Conditions {
if c.Type == appsv1.DeploymentReplicaFailure && c.Reason == "FailedCreate" && c.Status == apiv1.ConditionTrue {
if strings.Contains(c.Message, "exceeded quota") {
log.Infof("%s: %s", errors.ErrQuota, c.Message)
return nil, errors.ErrQuota
}
return nil, fmt.Errorf(c.Message)
}
}
if d.Generation != d.Status.ObservedGeneration {
return nil, nil
}
return d, nil
}
//GetTranslations fills all the deployments pointed by a development container
func GetTranslations(dev *model.Dev, d *appsv1.Deployment, c *kubernetes.Clientset) (map[string]*model.Translation, error) {
result := map[string]*model.Translation{}
if d != nil {
rule := dev.ToTranslationRule(dev)
result[d.Name] = &model.Translation{
Interactive: true,
Name: dev.Name,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
if err := loadServiceTranslations(dev, result, c); err != nil {
return nil, err
}
return result, nil
}
func loadServiceTranslations(dev *model.Dev, result map[string]*model.Translation, c kubernetes.Interface) error {
for _, s := range dev.Services {
d, err := Get(s, dev.Namespace, c)
if err != nil {
return err
}
rule := s.ToTranslationRule(dev)
if _, ok := result[d.Name]; ok {
result[d.Name].Rules = append(result[d.Name].Rules, rule)
continue
}
result[d.Name] = &model.Translation{
Name: dev.Name,
Interactive: false,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
return nil
}
//Deploy creates or updates a deployment
func Deploy(d *appsv1.Deployment, forceCreate bool, client *kubernetes.Clientset) error {
if forceCreate {
if err := create(d, client); err != nil {
return err
}
} else {
if err := update(d, client); err != nil {
return err
}
}
return nil
}
//UpdateOktetoRevision updates the okteto version annotation
func UpdateOktetoRevision(ctx context.Context, d *appsv1.Deployment, client *kubernetes.Clientset) error {
ticker := time.NewTicker(200 * time.Millisecond)
timeout := time.Now().Add(2 * config.GetTimeout()) // 60 seconds
for i := 0; ; i++ {
updated, err := client.AppsV1().Deployments(d.Namespace).Get(d.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", d.Namespace, d.Name, err)
return err
}
revision := updated.Annotations[revisionAnnotation]
if revision != "" {
d.Annotations[okLabels.RevisionAnnotation] = revision
return update(d, client)
}
if time.Now().After(timeout) {
return fmt.Errorf("kubernetes is taking too long to update the '%s' annotation of the deployment '%s'. Please check for errors and try again", revisionAnnotation, d.Name)
}
select {
case <-ticker.C:
continue
case <-ctx.Done():
log.Debug("cancelling call to update okteto revision")
return ctx.Err()
}
}
}
//TranslateDevMode translates the deployment manifests to put them in dev mode
func TranslateDevMode(tr map[string]*model.Translation, ns *apiv1.Namespace, c *kubernetes.Clientset) error {
for _, t := range tr {
err := translate(t, ns, c)
if err != nil {
return err
}
}
return nil
}
//IsDevModeOn returns if a deployment is in devmode
func IsDevModeOn(d *appsv1.Deployment) bool {
labels := d.GetObjectMeta().GetLabels()
if labels == nil {
return false
}
_, ok := labels[okLabels.DevLabel]
return ok
}
//HasBeenChanged returns if a deployment has been updated since the development container was activated
func HasBeenChanged(d *appsv1.Deployment) bool {
oktetoRevision := d.Annotations[okLabels.RevisionAnnotation]
if oktetoRevision == "" {
return false
}
return oktetoRevision != d.Annotations[revisionAnnotation]
}
// UpdateDeployments update all deployments in the given translation list
func UpdateDeployments(trList map[string]*model.Translation, c *kubernetes.Clientset) error {
for _, tr := range trList {
if tr.Deployment == nil {
continue
}
if err := update(tr.Deployment, c); err != nil {
return err
}
}
return nil
}
//TranslateDevModeOff reverses the dev mode translation
func TranslateDevModeOff(d *appsv1.Deployment) (*appsv1.Deployment, error) {
trRulesJSON := getAnnotation(d.Spec.Template.GetObjectMeta(), okLabels.TranslationAnnotation)
if trRulesJSON == "" {
dManifest := getAnnotation(d.GetObjectMeta(), oktetoDeploymentAnnotation)
if dManifest == "" {
log.Infof("%s/%s is not a development container", d.Namespace, d.Name)
return d, nil
}
dOrig := &appsv1.Deployment{}
if err := json.Unmarshal([]byte(dManifest), dOrig); err != nil {
return nil, fmt.Errorf("malformed manifest: %s", err)
}
return dOrig, nil
}
trRules := &model.Translation{}
if err := json.Unmarshal([]byte(trRulesJSON), trRules); err != nil {
return nil, fmt.Errorf("malformed tr rules: %s", err)
}
d.Spec.Replicas = &trRules.Replicas
annotations := d.GetObjectMeta().GetAnnotations()
delete(annotations, oktetoVersionAnnotation)
if err := deleteUserAnnotations(annotations, trRules); err != nil {
return nil, err
}
d.GetObjectMeta().SetAnnotations(annotations)
annotations = d.Spec.Template.GetObjectMeta().GetAnnotations()
delete(annotations, okLabels.TranslationAnnotation)
delete(annotations, model.OktetoRestartAnnotation)
d.Spec.Template.GetObjectMeta().SetAnnotations(annotations)
labels := d.GetObjectMeta().GetLabels()
delete(labels, okLabels.DevLabel)
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.GetObjectMeta().SetLabels(labels)
labels = d.Spec.Template.GetObjectMeta().GetLabels()
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.Spec.Template.GetObjectMeta().SetLabels(labels)
return d, nil
}
func create(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("creating deployment %s/%s", d.Namespace, d.Name)
_, err := c.AppsV1().Deployments(d.Namespace).Create(d)
if err != nil {
return err
}
return nil
}
func update(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("updating deployment %s/%s", d.Namespace, d.Name)
d.ResourceVersion = ""
d.Status = appsv1.DeploymentStatus{}
_, err := c.AppsV1().Deployments(d.Namespace).Update(d)
if err != nil {
return err
}
return nil
}
func deleteUserAnnotations(annotations map[string]string, tr *model.Translation) error {
if tr.Annotations == nil {
return nil
}
for key := range tr.Annotations {
delete(annotations, key)
}
return nil
}
//Destroy destroys a k8s service
func Destroy(dev *model.Dev, c *kubernetes.Clientset) error {
log.Infof("deleting deployment '%s'", dev.Name)
dClient := c.AppsV1().Deployments(dev.Namespace)
err := dClient.Delete(dev.Name, &metav1.DeleteOptions{GracePeriodSeconds: &devTerminationGracePeriodSeconds})
if err != nil {
if strings.Contains(err.Error(), "not found") {
log.Infof("deployment '%s' was already deleted.", dev.Name)
return nil
}
return fmt.Errorf("error deleting kubernetes deployment: %s", err)
}
log.Infof("deployment '%s' deleted", dev.Name)
return nil
} | metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
| random_line_split |
crud.go | // Copyright 2020 The Okteto Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package deployments
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/okteto/okteto/pkg/config"
"github.com/okteto/okteto/pkg/errors"
okLabels "github.com/okteto/okteto/pkg/k8s/labels"
"github.com/okteto/okteto/pkg/log"
"github.com/okteto/okteto/pkg/model"
appsv1 "k8s.io/api/apps/v1"
apiv1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
//List returns the list of deployments
func List(namespace string, c kubernetes.Interface) ([]appsv1.Deployment, error) {
dList, err := c.AppsV1().Deployments(namespace).List(metav1.ListOptions{})
if err != nil {
return nil, err
}
return dList.Items, nil
}
//Get returns a deployment object given its name and namespace
func Get(dev *model.Dev, namespace string, c kubernetes.Interface) (*appsv1.Deployment, error) {
if namespace == "" {
return nil, fmt.Errorf("empty namespace")
}
var d *appsv1.Deployment
var err error
if len(dev.Labels) == 0 {
d, err = c.AppsV1().Deployments(namespace).Get(dev.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", namespace, dev.Name, err)
return nil, err
}
} else {
deploys, err := c.AppsV1().Deployments(namespace).List(
metav1.ListOptions{
LabelSelector: dev.LabelsSelector(),
},
)
if err != nil {
return nil, err
}
if len(deploys.Items) == 0 {
return nil, fmt.Errorf("deployment for labels '%s' not found", dev.LabelsSelector())
}
if len(deploys.Items) > 1 {
return nil, fmt.Errorf("Found '%d' deployments for labels '%s' instead of 1", len(deploys.Items), dev.LabelsSelector())
}
d = &deploys.Items[0]
}
return d, nil
}
//GetRevisionAnnotatedDeploymentOrFailed returns a deployment object if it is healthy and annotated with its revision or an error
func | (dev *model.Dev, c *kubernetes.Clientset, waitUntilDeployed bool) (*appsv1.Deployment, error) {
d, err := Get(dev, dev.Namespace, c)
if err != nil {
if waitUntilDeployed && errors.IsNotFound(err) {
return nil, nil
}
return nil, err
}
for _, c := range d.Status.Conditions {
if c.Type == appsv1.DeploymentReplicaFailure && c.Reason == "FailedCreate" && c.Status == apiv1.ConditionTrue {
if strings.Contains(c.Message, "exceeded quota") {
log.Infof("%s: %s", errors.ErrQuota, c.Message)
return nil, errors.ErrQuota
}
return nil, fmt.Errorf(c.Message)
}
}
if d.Generation != d.Status.ObservedGeneration {
return nil, nil
}
return d, nil
}
//GetTranslations fills all the deployments pointed by a development container
func GetTranslations(dev *model.Dev, d *appsv1.Deployment, c *kubernetes.Clientset) (map[string]*model.Translation, error) {
result := map[string]*model.Translation{}
if d != nil {
rule := dev.ToTranslationRule(dev)
result[d.Name] = &model.Translation{
Interactive: true,
Name: dev.Name,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
if err := loadServiceTranslations(dev, result, c); err != nil {
return nil, err
}
return result, nil
}
func loadServiceTranslations(dev *model.Dev, result map[string]*model.Translation, c kubernetes.Interface) error {
for _, s := range dev.Services {
d, err := Get(s, dev.Namespace, c)
if err != nil {
return err
}
rule := s.ToTranslationRule(dev)
if _, ok := result[d.Name]; ok {
result[d.Name].Rules = append(result[d.Name].Rules, rule)
continue
}
result[d.Name] = &model.Translation{
Name: dev.Name,
Interactive: false,
Version: model.TranslationVersion,
Deployment: d,
Annotations: dev.Annotations,
Tolerations: dev.Tolerations,
Replicas: *d.Spec.Replicas,
Rules: []*model.TranslationRule{rule},
}
}
return nil
}
//Deploy creates or updates a deployment
func Deploy(d *appsv1.Deployment, forceCreate bool, client *kubernetes.Clientset) error {
if forceCreate {
if err := create(d, client); err != nil {
return err
}
} else {
if err := update(d, client); err != nil {
return err
}
}
return nil
}
//UpdateOktetoRevision updates the okteto version annotation
func UpdateOktetoRevision(ctx context.Context, d *appsv1.Deployment, client *kubernetes.Clientset) error {
ticker := time.NewTicker(200 * time.Millisecond)
timeout := time.Now().Add(2 * config.GetTimeout()) // 60 seconds
for i := 0; ; i++ {
updated, err := client.AppsV1().Deployments(d.Namespace).Get(d.Name, metav1.GetOptions{})
if err != nil {
log.Debugf("error while retrieving deployment %s/%s: %s", d.Namespace, d.Name, err)
return err
}
revision := updated.Annotations[revisionAnnotation]
if revision != "" {
d.Annotations[okLabels.RevisionAnnotation] = revision
return update(d, client)
}
if time.Now().After(timeout) {
return fmt.Errorf("kubernetes is taking too long to update the '%s' annotation of the deployment '%s'. Please check for errors and try again", revisionAnnotation, d.Name)
}
select {
case <-ticker.C:
continue
case <-ctx.Done():
log.Debug("cancelling call to update okteto revision")
return ctx.Err()
}
}
}
//TranslateDevMode translates the deployment manifests to put them in dev mode
func TranslateDevMode(tr map[string]*model.Translation, ns *apiv1.Namespace, c *kubernetes.Clientset) error {
for _, t := range tr {
err := translate(t, ns, c)
if err != nil {
return err
}
}
return nil
}
//IsDevModeOn returns if a deployment is in devmode
func IsDevModeOn(d *appsv1.Deployment) bool {
labels := d.GetObjectMeta().GetLabels()
if labels == nil {
return false
}
_, ok := labels[okLabels.DevLabel]
return ok
}
//HasBeenChanged returns if a deployment has been updated since the development container was activated
func HasBeenChanged(d *appsv1.Deployment) bool {
oktetoRevision := d.Annotations[okLabels.RevisionAnnotation]
if oktetoRevision == "" {
return false
}
return oktetoRevision != d.Annotations[revisionAnnotation]
}
// UpdateDeployments update all deployments in the given translation list
func UpdateDeployments(trList map[string]*model.Translation, c *kubernetes.Clientset) error {
for _, tr := range trList {
if tr.Deployment == nil {
continue
}
if err := update(tr.Deployment, c); err != nil {
return err
}
}
return nil
}
//TranslateDevModeOff reverses the dev mode translation
func TranslateDevModeOff(d *appsv1.Deployment) (*appsv1.Deployment, error) {
trRulesJSON := getAnnotation(d.Spec.Template.GetObjectMeta(), okLabels.TranslationAnnotation)
if trRulesJSON == "" {
dManifest := getAnnotation(d.GetObjectMeta(), oktetoDeploymentAnnotation)
if dManifest == "" {
log.Infof("%s/%s is not a development container", d.Namespace, d.Name)
return d, nil
}
dOrig := &appsv1.Deployment{}
if err := json.Unmarshal([]byte(dManifest), dOrig); err != nil {
return nil, fmt.Errorf("malformed manifest: %s", err)
}
return dOrig, nil
}
trRules := &model.Translation{}
if err := json.Unmarshal([]byte(trRulesJSON), trRules); err != nil {
return nil, fmt.Errorf("malformed tr rules: %s", err)
}
d.Spec.Replicas = &trRules.Replicas
annotations := d.GetObjectMeta().GetAnnotations()
delete(annotations, oktetoVersionAnnotation)
if err := deleteUserAnnotations(annotations, trRules); err != nil {
return nil, err
}
d.GetObjectMeta().SetAnnotations(annotations)
annotations = d.Spec.Template.GetObjectMeta().GetAnnotations()
delete(annotations, okLabels.TranslationAnnotation)
delete(annotations, model.OktetoRestartAnnotation)
d.Spec.Template.GetObjectMeta().SetAnnotations(annotations)
labels := d.GetObjectMeta().GetLabels()
delete(labels, okLabels.DevLabel)
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.GetObjectMeta().SetLabels(labels)
labels = d.Spec.Template.GetObjectMeta().GetLabels()
delete(labels, okLabels.InteractiveDevLabel)
delete(labels, okLabels.DetachedDevLabel)
d.Spec.Template.GetObjectMeta().SetLabels(labels)
return d, nil
}
func create(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("creating deployment %s/%s", d.Namespace, d.Name)
_, err := c.AppsV1().Deployments(d.Namespace).Create(d)
if err != nil {
return err
}
return nil
}
func update(d *appsv1.Deployment, c *kubernetes.Clientset) error {
log.Debugf("updating deployment %s/%s", d.Namespace, d.Name)
d.ResourceVersion = ""
d.Status = appsv1.DeploymentStatus{}
_, err := c.AppsV1().Deployments(d.Namespace).Update(d)
if err != nil {
return err
}
return nil
}
func deleteUserAnnotations(annotations map[string]string, tr *model.Translation) error {
if tr.Annotations == nil {
return nil
}
for key := range tr.Annotations {
delete(annotations, key)
}
return nil
}
//Destroy destroys a k8s service
func Destroy(dev *model.Dev, c *kubernetes.Clientset) error {
log.Infof("deleting deployment '%s'", dev.Name)
dClient := c.AppsV1().Deployments(dev.Namespace)
err := dClient.Delete(dev.Name, &metav1.DeleteOptions{GracePeriodSeconds: &devTerminationGracePeriodSeconds})
if err != nil {
if strings.Contains(err.Error(), "not found") {
log.Infof("deployment '%s' was already deleted.", dev.Name)
return nil
}
return fmt.Errorf("error deleting kubernetes deployment: %s", err)
}
log.Infof("deployment '%s' deleted", dev.Name)
return nil
}
| GetRevisionAnnotatedDeploymentOrFailed | identifier_name |
appinfo.pyi | from google.appengine.api import appinfo_errors as appinfo_errors, backendinfo as backendinfo, validation as validation, yaml_builder as yaml_builder, yaml_listener as yaml_listener, yaml_object as yaml_object
from typing import Any
APP_ID_MAX_LEN: int
MODULE_ID_MAX_LEN: int
MODULE_VERSION_ID_MAX_LEN: int
MAX_URL_MAPS: int
PARTITION_SEPARATOR: str
DOMAIN_SEPARATOR: str
VERSION_SEPARATOR: str
MODULE_SEPARATOR: str
DEFAULT_MODULE: str
PARTITION_RE_STRING_WITHOUT_SEPARATOR: Any
PARTITION_RE_STRING: Any
DOMAIN_RE_STRING_WITHOUT_SEPARATOR: Any
DOMAIN_RE_STRING: Any
DISPLAY_APP_ID_RE_STRING: Any
APPLICATION_RE_STRING: Any
MODULE_ID_RE_STRING: Any
MODULE_VERSION_ID_RE_STRING: Any
GCE_RESOURCE_PATH_REGEX: str
GCE_RESOURCE_NAME_REGEX: str
VPC_ACCESS_CONNECTOR_NAME_REGEX: str
ALTERNATE_HOSTNAME_SEPARATOR: str
BUILTIN_NAME_PREFIX: str
RUNTIME_RE_STRING: str
API_VERSION_RE_STRING: str
ENV_RE_STRING: str
SOURCE_LANGUAGE_RE_STRING: str
HANDLER_STATIC_FILES: str
HANDLER_STATIC_DIR: str
HANDLER_SCRIPT: str
HANDLER_API_ENDPOINT: str
LOGIN_OPTIONAL: str
LOGIN_REQUIRED: str
LOGIN_ADMIN: str
AUTH_FAIL_ACTION_REDIRECT: str
AUTH_FAIL_ACTION_UNAUTHORIZED: str
DATASTORE_ID_POLICY_LEGACY: str
DATASTORE_ID_POLICY_DEFAULT: str
SECURE_HTTP: str
SECURE_HTTPS: str
SECURE_HTTP_OR_HTTPS: str
SECURE_DEFAULT: str
REQUIRE_MATCHING_FILE: str
DEFAULT_SKIP_FILES: str
SKIP_NO_FILES: str
DEFAULT_NOBUILD_FILES: str
LOGIN: str
AUTH_FAIL_ACTION: str
SECURE: str
URL: str
POSITION: str
POSITION_HEAD: str
POSITION_TAIL: str
STATIC_FILES: str
UPLOAD: str
STATIC_DIR: str
MIME_TYPE: str
SCRIPT: str
EXPIRATION: str
API_ENDPOINT: str
HTTP_HEADERS: str
APPLICATION_READABLE: str
REDIRECT_HTTP_RESPONSE_CODE: str
APPLICATION: str
PROJECT: str
MODULE: str
SERVICE: str
AUTOMATIC_SCALING: str
MANUAL_SCALING: str
BASIC_SCALING: str
VM: str
VM_SETTINGS: str
ZONES: str
BETA_SETTINGS: str
VM_HEALTH_CHECK: str
HEALTH_CHECK: str
RESOURCES: str
LIVENESS_CHECK: str
READINESS_CHECK: str
NETWORK: str
VPC_ACCESS_CONNECTOR: str
VERSION: str
MAJOR_VERSION: str
MINOR_VERSION: str
RUNTIME: str
RUNTIME_CHANNEL: str
API_VERSION: str
MAIN: str
ENDPOINTS_API_SERVICE: str
ENV: str
ENTRYPOINT: str
RUNTIME_CONFIG: str
SOURCE_LANGUAGE: str
BUILTINS: str
INCLUDES: str
HANDLERS: str
LIBRARIES: str
DEFAULT_EXPIRATION: str
SKIP_FILES: str
NOBUILD_FILES: str
SERVICES: str
DERIVED_FILE_TYPE: str
JAVA_PRECOMPILED: str
PYTHON_PRECOMPILED: str
ADMIN_CONSOLE: str
ERROR_HANDLERS: str
BACKENDS: str
THREADSAFE: str
SERVICEACCOUNT: str
DATASTORE_AUTO_ID_POLICY: str
API_CONFIG: str
CODE_LOCK: str
ENV_VARIABLES: str
BUILD_ENV_VARIABLES: str
STANDARD_WEBSOCKET: str
APP_ENGINE_APIS: str
SOURCE_REPO_RE_STRING: str
SOURCE_REVISION_RE_STRING: str
SOURCE_REFERENCES_MAX_SIZE: int
INSTANCE_CLASS: str
MINIMUM_PENDING_LATENCY: str
MAXIMUM_PENDING_LATENCY: str
MINIMUM_IDLE_INSTANCES: str
MAXIMUM_IDLE_INSTANCES: str
MAXIMUM_CONCURRENT_REQUEST: str
MIN_NUM_INSTANCES: str
MAX_NUM_INSTANCES: str
COOL_DOWN_PERIOD_SEC: str
CPU_UTILIZATION: str
CPU_UTILIZATION_UTILIZATION: str
CPU_UTILIZATION_AGGREGATION_WINDOW_LENGTH_SEC: str
TARGET_NETWORK_SENT_BYTES_PER_SEC: str
TARGET_NETWORK_SENT_PACKETS_PER_SEC: str
TARGET_NETWORK_RECEIVED_BYTES_PER_SEC: str
TARGET_NETWORK_RECEIVED_PACKETS_PER_SEC: str
TARGET_DISK_WRITE_BYTES_PER_SEC: str
TARGET_DISK_WRITE_OPS_PER_SEC: str
TARGET_DISK_READ_BYTES_PER_SEC: str
TARGET_DISK_READ_OPS_PER_SEC: str
TARGET_REQUEST_COUNT_PER_SEC: str
TARGET_CONCURRENT_REQUESTS: str
CUSTOM_METRICS: str
METRIC_NAME: str
TARGET_TYPE: str
TARGET_TYPE_REGEX: str
CUSTOM_METRIC_UTILIZATION: str
SINGLE_INSTANCE_ASSIGNMENT: str
FILTER: str
INSTANCES: str
MAX_INSTANCES: str
IDLE_TIMEOUT: str
PAGES: str
NAME: str
ENDPOINTS_NAME: str
CONFIG_ID: str
ROLLOUT_STRATEGY: str
ROLLOUT_STRATEGY_FIXED: str
ROLLOUT_STRATEGY_MANAGED: str
TRACE_SAMPLING: str
ERROR_CODE: str
FILE: str
ON: str
ON_ALIASES: Any
OFF: str
OFF_ALIASES: Any
ENABLE_HEALTH_CHECK: str
CHECK_INTERVAL_SEC: str
TIMEOUT_SEC: str
APP_START_TIMEOUT_SEC: str
UNHEALTHY_THRESHOLD: str
HEALTHY_THRESHOLD: str
FAILURE_THRESHOLD: str
SUCCESS_THRESHOLD: str
RESTART_THRESHOLD: str
INITIAL_DELAY_SEC: str
HOST: str
PATH: str
CPU: str
MEMORY_GB: str
DISK_SIZE_GB: str
VOLUMES: str
VOLUME_NAME: str
VOLUME_TYPE: str
SIZE_GB: str
FORWARDED_PORTS: str
INSTANCE_TAG: str
NETWORK_NAME: str
SUBNETWORK_NAME: str
SESSION_AFFINITY: str
STANDARD_MIN_INSTANCES: str
STANDARD_MAX_INSTANCES: str
STANDARD_TARGET_CPU_UTILIZATION: str
STANDARD_TARGET_THROUGHPUT_UTILIZATION: str
VPC_ACCESS_CONNECTOR_NAME: str
VPC_ACCESS_CONNECTOR_EGRESS_SETTING: str
EGRESS_SETTING_ALL_TRAFFIC: str
EGRESS_SETTING_PRIVATE_RANGES_ONLY: str
class _VersionedLibrary:
name: Any
url: Any
description: Any
supported_versions: Any
latest_version: Any
default_version: Any
deprecated_versions: Any
experimental_versions: Any
hidden_versions: Any
def __init__(self, name, url, description, supported_versions, latest_version, default_version: Any | None = ..., deprecated_versions: Any | None = ..., experimental_versions: Any | None = ..., hidden_versions: Any | None = ...) -> None: ...
@property
def hidden(self): ...
@property
def non_deprecated_versions(self): ...
REQUIRED_LIBRARIES: Any
def GetAllRuntimes(): ...
def EnsureAsciiString(s, err): ...
class HandlerBase(validation.Validated):
ATTRIBUTES: Any
class HttpHeadersDict(validation.ValidatedDict):
DISALLOWED_HEADERS: Any
MAX_HEADER_LENGTH: int
MAX_HEADER_VALUE_LENGTHS: Any
MAX_LEN: int
class KeyValidator(validation.Validator):
def Validate(self, name, unused_key: Any | None = ...): ...
class ValueValidator(validation.Validator):
def Validate(self, value, key: Any | None = ...): ...
@staticmethod
def AssertHeaderNotTooLong(name, value) -> None: ...
KEY_VALIDATOR: Any | def Get(self, header_name): ...
def __setitem__(self, key, value) -> None: ...
class URLMap(HandlerBase):
ATTRIBUTES: Any
COMMON_FIELDS: Any
ALLOWED_FIELDS: Any
def GetHandler(self): ...
def GetHandlerType(self): ...
def CheckInitialized(self) -> None: ...
def AssertUniqueContentType(self) -> None: ...
secure: Any
def FixSecureDefaults(self) -> None: ...
def WarnReservedURLs(self) -> None: ...
def ErrorOnPositionForAppInfo(self) -> None: ...
def PrettyRepr(self): ...
class AdminConsolePage(validation.Validated):
ATTRIBUTES: Any
class AdminConsole(validation.Validated):
ATTRIBUTES: Any
@classmethod
def Merge(cls, adminconsole_one, adminconsole_two): ...
class ErrorHandlers(validation.Validated):
ATTRIBUTES: Any
class BuiltinHandler(validation.Validated):
class DynamicAttributes(dict):
def __init__(self, return_value, **parameters) -> None: ...
def __contains__(self, _): ...
def __getitem__(self, _): ...
ATTRIBUTES: Any
builtin_name: str
def __init__(self, **attributes) -> None: ...
def __setattr__(self, key, value) -> None: ...
def __getattr__(self, key) -> None: ...
def GetUnnormalized(self, key): ...
def ToDict(self): ...
@classmethod
def IsDefined(cls, builtins_list, builtin_name): ...
@classmethod
def ListToTuples(cls, builtins_list): ...
@classmethod
def Validate(cls, builtins_list, runtime: Any | None = ...) -> None: ...
class ApiConfigHandler(HandlerBase):
ATTRIBUTES: Any
class Library(validation.Validated):
ATTRIBUTES: Any
version: Any
def CheckInitialized(self) -> None: ...
class CpuUtilization(validation.Validated):
ATTRIBUTES: Any
class CustomMetric(validation.Validated):
ATTRIBUTES: Any
def CheckInitialized(self) -> None: ...
class EndpointsApiService(validation.Validated):
ATTRIBUTES: Any
def CheckInitialized(self) -> None: ...
class AutomaticScaling(validation.Validated):
ATTRIBUTES: Any
class ManualScaling(validation.Validated):
ATTRIBUTES: Any
class BasicScaling(validation.Validated):
ATTRIBUTES: Any
class RuntimeConfig(validation.ValidatedDict):
KEY_VALIDATOR: Any
VALUE_VALIDATOR: Any
class VmSettings(validation.ValidatedDict):
KEY_VALIDATOR: Any
VALUE_VALIDATOR: Any
@classmethod
def Merge(cls, vm_settings_one, vm_settings_two): ...
class BetaSettings(VmSettings):
@classmethod
def Merge(cls, beta_settings_one, beta_settings_two): ...
class EnvironmentVariables(validation.ValidatedDict):
KEY_VALIDATOR: Any
VALUE_VALIDATOR: Any
@classmethod
def Merge(cls, env_variables_one, env_variables_two): ...
def ValidateSourceReference(ref) -> None: ...
def ValidateCombinedSourceReferencesString(source_refs) -> None: ...
class HealthCheck(validation.Validated):
ATTRIBUTES: Any
class LivenessCheck(validation.Validated):
ATTRIBUTES: Any
class ReadinessCheck(validation.Validated):
ATTRIBUTES: Any
class VmHealthCheck(HealthCheck): ...
class Volume(validation.Validated):
ATTRIBUTES: Any
class Resources(validation.Validated):
ATTRIBUTES: Any
class Network(validation.Validated):
ATTRIBUTES: Any
class VpcAccessConnector(validation.Validated):
ATTRIBUTES: Any
class AppInclude(validation.Validated):
ATTRIBUTES: Any
@classmethod
def MergeManualScaling(cls, appinclude_one, appinclude_two): ...
@classmethod
def MergeAppYamlAppInclude(cls, appyaml, appinclude): ...
@classmethod
def MergeAppIncludes(cls, appinclude_one, appinclude_two): ...
@staticmethod
def MergeSkipFiles(skip_files_one, skip_files_two): ...
class AppInfoExternal(validation.Validated):
ATTRIBUTES: Any
runtime: str
def CheckInitialized(self) -> None: ...
def GetAllLibraries(self): ...
def GetNormalizedLibraries(self): ...
version: Any
def ApplyBackendSettings(self, backend_name) -> None: ...
def GetEffectiveRuntime(self): ...
vm_settings: Any
def SetEffectiveRuntime(self, runtime) -> None: ...
def NormalizeVmSettings(self) -> None: ...
def IsVm(self): ...
def IsThreadsafe(self): ...
def ValidateHandlers(handlers, is_include_file: bool = ...) -> None: ...
def LoadSingleAppInfo(app_info): ...
class AppInfoSummary(validation.Validated):
ATTRIBUTES: Any
def LoadAppInclude(app_include): ...
def ParseExpiration(expiration): ...
def ValidFilename(filename): ... | VALUE_VALIDATOR: Any | random_line_split |
Website_Version.py | # Overview
# Instructions [Line 17]
# Imported modules [Line 36]
# Item scanner [Line 44]
# Drug class [Line 79]
# File converter [Line 78]
# Date transformer [Line 95]
# Redirection to pharmacy website [Line 105]
# Manual reorder request [Line 190]
# Request inventory [Line 197]
# Search inventory [Line 207]
# Check expiry date [Line 219]
# Remove drugs from inventory [Line 233]
# Object converter [Line 244]
# Instructions
# 1) This code requires to be in the same folder as the corresponding Website_Server.py file.
# Furthermore, it requires the MyMedibox.html file to be in another 'templates' folder.
# Furthermore, it requires the cc_style.css file to be in another 'static' folder.
# 2) Another condition exists in regard to the scanner. QR- and barcodes must contain the following data structure:
# name_of_drug
# purchase_date in format: %Y-%m-%d %H:%M:%S
# expiry_date in format: %Y-%m-%d %H:%M:%S
# An example: Adrenalin
# 2021-09-10 12:00:00
# 2021-09-20 12:00:00
# 3) If these conditions are fulfilled the user can simply run the Website_Server.py file to get to the website.
# Imported modules
from datetime import datetime
from pyzbar import pyzbar
import webbrowser
import cv2
# Item scanner:
# This function scans the QR- or barcode and writes its contents into a file.
def read_barcodes(frame):
barcodes = pyzbar.decode(frame)
for barcode in barcodes:
barcode_info = barcode.data.decode('utf-8')
with open("barcode_file.txt", mode ='a') as file:
file.write(barcode_info + "\n" )
return len(barcodes)
def scan_item():
camera = cv2.VideoCapture(0)
ret, frame = camera.read()
while ret:
ret, frame = camera.read()
cv2.imshow('Barcode/QR code reader', frame)
number_barcodes = read_barcodes(frame)
if cv2.waitKey(1) & 0xFF == 27 or number_barcodes > 0:
break
camera.release()
cv2.destroyAllWindows()
# Drug class
class Drug:
def __init__(self, name, purchase_date, expiry_date):
self.name = name
self.purchase_date = purchase_date
self.expiry_date = expiry_date
# File converter:
# This function converts the contents of the file into objects.
def convert_file_into_objects(file):
drug_inventory = []
with open(file, "r") as f:
while True:
line1 = f.readline().rstrip()
if line1 == "":
break
line2 = f.readline().rstrip()
line3 = f.readline().rstrip()
drug_name = Drug(line1, line2, line3)
drug_inventory.append(drug_name)
return drug_inventory
# Date transformer:
# This function transforms the dates of type string into the datetime format.
def transform_dates(drug_inventory):
for drug in drug_inventory:
drug.purchase_date = datetime.strptime(drug.purchase_date, "%Y-%m-%d %H:%M:%S")
drug.expiry_date = datetime.strptime(drug.expiry_date, "%Y-%m-%d %H:%M:%S")
return drug_inventory
# Redirection to pharmacy website:
# This function redirects the user to the website of a pharmacy to buy new items.
def website_forward(drug):
if drug == "Aspirin":
webbrowser.open("https://www.amavita.ch/de/aspirin-s-tabl-500-mg-20-stk.html")
elif drug == "Ibuprofen":
webbrowser.open("https://www.amavita.ch/de/amavita-ibuprofen-filmtabl-400-mg-10-stk.html")
elif drug == "Eye drops":
webbrowser.open("https://www.amavita.ch/de/bepanthenr-augentropfen-fur-trockene-und-irritierte-augen-10-ml.html")
elif drug == "Ear drops":
webbrowser.open("https://www.amavita.ch/de/similasan-ohrentropfen-10-ml.html")
elif drug == "Nasal spray":
webbrowser.open("https://www.amavita.ch/de/rinosedin-nasenspray-0-1-10-ml.html")
elif drug == "Paracetamol":
webbrowser.open("https://www.amavita.ch/de/amavita-paracetamol-tabl-500-mg-20-stk.html")
elif drug == "Voltaren":
webbrowser.open("https://www.amavita.ch/de/voltaren-dolo-forte-drag-25-mg-10-stk.html")
elif drug == "Imodium":
webbrowser.open("https://www.amavita.ch/de/imodium-lingual-schmelztabl-2-mg-20-stk.html")
elif drug == "Bepanthen":
webbrowser.open("https://www.amavita.ch/de/bepanthen-plus-creme-5-tb-30-g.html")
elif drug == "Adrenalin":
webbrowser.open("https://www.amavita.ch/de/rubimed-adrenalin-comp-glob-45-g.html")
elif drug == "Effervescent tablets":
webbrowser.open("https://www.amavita.ch/de/nasobol-inhalo-brausetabl-30-stk.html")
elif drug == "Bandage":
webbrowser.open("https://www.amavita.ch/de/emosan-medi-ellbogen-bandage-l.html")
elif drug == "Syrup":
webbrowser.open("https://www.amavita.ch/de/supradynr-junior-sirup-325-ml.html")
elif drug == "Magnesium":
webbrowser.open("https://www.amavita.ch/de/magnesium-vital-complex-kaps-1-25-mmol-100-stk.html")
elif drug == "NeoCitran":
webbrowser.open("https://www.amavita.ch/de/neocitran-schnupfen-erkaltung-filmtabl-12-stk.html")
elif drug == "Cough Syrup":
webbrowser.open("https://www.amavita.ch/de/prospanex-hustensaft-fl-200-ml.html")
elif drug == "Nasal ointment":
webbrowser.open("https://www.amavita.ch/de/amavita-panthoben-nasensalbe-10-g.html")
elif drug == "Eukalyptus":
|
elif drug == "Lozenges":
webbrowser.open("https://www.amavita.ch/de/solmucol-erkaltungshusten-lutschtabl-100-mg-24-stk.html")
elif drug == "Dextromethorphan":
webbrowser.open("https://www.amavita.ch/de/bisolvon-dextromethorphan-pastillen-10-5-mg-20-stk.html")
elif drug == "Effervescent salt":
webbrowser.open("https://www.amavita.ch/de/siesta-1-brausesalz-150-g.html")
elif drug == "Lactease":
webbrowser.open("https://www.amavita.ch/de/lactease-4500-fcc-kautabl-40-stk.html")
elif drug == "Glycerin":
webbrowser.open("https://www.amavita.ch/de/glycerin-elk-pharma-supp-18-stk.html")
elif drug == "Normolytoral":
webbrowser.open("https://www.amavita.ch/de/normolytoral-plv-btl-10-stk.html")
elif drug == "Riopan Gel":
webbrowser.open("https://www.amavita.ch/de/riopan-gel-800-mg-fl-250-ml.html")
elif drug == "Itinerol":
webbrowser.open("https://www.amavita.ch/de/itinerol-b6-supp-erw-10-stk.html")
elif drug == "Disflatyl drops":
webbrowser.open("https://www.amavita.ch/de/disflatyl-tropfen-fl-30-ml.html")
elif drug == "Vitamin C + zinc":
webbrowser.open("https://www.amavita.ch/de/redoxon-zinc-brausetabl-30-stk.html")
elif drug == "Vitamin D3":
webbrowser.open("https://www.amavita.ch/de/vita-d3-protect-losung-zum-einnehmen-fl-20-ml.html")
elif drug == "Supradyn Gummies":
webbrowser.open("https://www.amavita.ch/de/supradyn-junior-gummies-ds-60-stk.html")
elif drug == "Power essence":
webbrowser.open("https://www.amavita.ch/de/wonnensteiner-kraftessenz-liq-fl-750-ml.html")
elif drug == "Dibase solution":
webbrowser.open("https://www.amavita.ch/de/dibase-los-25000-ie-fl-2-5-ml.html")
elif drug == "Vitamin B6 tablets":
webbrowser.open("https://www.amavita.ch/de/vitamin-b6-streuli-tabl-300-mg-ds-20-stk.html")
elif drug == "Mint Spray":
webbrowser.open("https://www.amavita.ch/de/nicorette-mint-spray-zur-anwendung-in-der-mundhohle-150-dos.html")
elif drug == "Nail polish":
webbrowser.open("https://www.amavita.ch/de/kloril-p-nagellack-fl-3-3-ml.html")
elif drug == "Nicotinell Gum":
webbrowser.open("https://www.amavita.ch/de/nicotinell-gum-4-mg-fruit-96-stk.html")
elif drug == "Biotin Merz":
webbrowser.open("https://www.amavita.ch/de/biotin-merz-tabl-5-mg-25-stk.html")
elif drug == "Nasal rinsing salt":
webbrowser.open("https://www.amavita.ch/de/emser-r-nasenspulsalz-2-5-g-50-beutel.html")
else:
return "We cannot seem to find your requested item. Please try again."
return "We opened the website for you."
# Manual reorder request:
# This function lets the user order new items manually.
def manual_reorder_request(drug):
return website_forward(drug)
# Request inventory:
# This function lets the user check the current inventory.
def request_inventory(drug_inventory):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug in drug_inventory:
html_return += "<tr><td>" + drug.name + "</td><td>" + str(drug.purchase_date) + "</td><td>" + str(drug.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
# Search inventory:
# This function lets the user search for specific items in his current inventory.
def search_inventory(drug_inventory, drug):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
html_return += "<tr><td>" + drug_in_inventory.name + "</td><td>" + str(drug_in_inventory.purchase_date) + "</td><td>" + str(drug_in_inventory.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
return """This drug does not exist in your inventory.<br><a href="/">Return to homepage</a>"""
# Check expiry date:
# This function lets the user check the expiry dates of his items.
def check_expiry_date(drug_inventory):
html_return = ""
for drug in drug_inventory:
day_difference = abs(drug.expiry_date - datetime.today()).days
if datetime.today() > drug.expiry_date:
html_return += "You should have disposed of your item '" + drug.name + "' " + str(day_difference) + " days ago. Please go to your local pharmacy and dispose of it there.<br>"
elif day_difference < 14:
html_return += "Your item '" + drug.name + "' expires in " + str(day_difference) + " days.<br>"
return html_return + """<br><a href="/">Return to homepage</a>"""
# Remove drugs from inventory:
# This function lets the user remove items from his current inventory when he has disposed of them.
def remove_drugs(drug_inventory, drug):
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
drug_inventory.remove(drug_in_inventory)
return """The item was removed from your inventary.<br><a href="/check-inventory">Go to inventory</a>"""
return """No such drug currently exists in your inventory.<br><a href="/check-inventory">Go to inventory</a>"""
# Object converter:
# This function converts the objects back into a file to save them after the program is closed.
def convert_objects_into_file(file, drug_inventory):
with open(file, "w") as f:
for drug in drug_inventory:
f.write(drug.name + "\n")
f.seek
f.write(str(drug.purchase_date) + "\n")
f.write(str(drug.expiry_date) + "\n") | webbrowser.open("https://www.amavita.ch/de/otrivin-natural-plus-mit-eukalyptus-spray-20-ml.html") | conditional_block |
Website_Version.py | # Overview
# Instructions [Line 17]
# Imported modules [Line 36]
# Item scanner [Line 44]
# Drug class [Line 79]
# File converter [Line 78]
# Date transformer [Line 95]
# Redirection to pharmacy website [Line 105]
# Manual reorder request [Line 190]
# Request inventory [Line 197]
# Search inventory [Line 207]
# Check expiry date [Line 219]
# Remove drugs from inventory [Line 233]
# Object converter [Line 244]
# Instructions
# 1) This code requires to be in the same folder as the corresponding Website_Server.py file.
# Furthermore, it requires the MyMedibox.html file to be in another 'templates' folder.
# Furthermore, it requires the cc_style.css file to be in another 'static' folder.
# 2) Another condition exists in regard to the scanner. QR- and barcodes must contain the following data structure:
# name_of_drug
# purchase_date in format: %Y-%m-%d %H:%M:%S
# expiry_date in format: %Y-%m-%d %H:%M:%S
# An example: Adrenalin
# 2021-09-10 12:00:00
# 2021-09-20 12:00:00
# 3) If these conditions are fulfilled the user can simply run the Website_Server.py file to get to the website.
# Imported modules
from datetime import datetime
from pyzbar import pyzbar
import webbrowser
import cv2
# Item scanner:
# This function scans the QR- or barcode and writes its contents into a file.
def read_barcodes(frame):
barcodes = pyzbar.decode(frame)
for barcode in barcodes:
barcode_info = barcode.data.decode('utf-8')
with open("barcode_file.txt", mode ='a') as file:
file.write(barcode_info + "\n" )
return len(barcodes)
def scan_item():
camera = cv2.VideoCapture(0)
ret, frame = camera.read()
while ret:
ret, frame = camera.read()
cv2.imshow('Barcode/QR code reader', frame)
number_barcodes = read_barcodes(frame)
if cv2.waitKey(1) & 0xFF == 27 or number_barcodes > 0:
break
camera.release()
cv2.destroyAllWindows()
# Drug class
class Drug:
def __init__(self, name, purchase_date, expiry_date):
self.name = name
self.purchase_date = purchase_date
self.expiry_date = expiry_date
# File converter:
# This function converts the contents of the file into objects.
def convert_file_into_objects(file):
drug_inventory = []
with open(file, "r") as f:
while True:
line1 = f.readline().rstrip()
if line1 == "":
break
line2 = f.readline().rstrip()
line3 = f.readline().rstrip()
drug_name = Drug(line1, line2, line3)
drug_inventory.append(drug_name)
return drug_inventory
# Date transformer:
# This function transforms the dates of type string into the datetime format.
def transform_dates(drug_inventory):
for drug in drug_inventory:
drug.purchase_date = datetime.strptime(drug.purchase_date, "%Y-%m-%d %H:%M:%S")
drug.expiry_date = datetime.strptime(drug.expiry_date, "%Y-%m-%d %H:%M:%S")
return drug_inventory
# Redirection to pharmacy website:
# This function redirects the user to the website of a pharmacy to buy new items.
def website_forward(drug):
if drug == "Aspirin":
webbrowser.open("https://www.amavita.ch/de/aspirin-s-tabl-500-mg-20-stk.html")
elif drug == "Ibuprofen":
webbrowser.open("https://www.amavita.ch/de/amavita-ibuprofen-filmtabl-400-mg-10-stk.html")
elif drug == "Eye drops":
webbrowser.open("https://www.amavita.ch/de/bepanthenr-augentropfen-fur-trockene-und-irritierte-augen-10-ml.html")
elif drug == "Ear drops":
webbrowser.open("https://www.amavita.ch/de/similasan-ohrentropfen-10-ml.html")
elif drug == "Nasal spray":
webbrowser.open("https://www.amavita.ch/de/rinosedin-nasenspray-0-1-10-ml.html")
elif drug == "Paracetamol":
webbrowser.open("https://www.amavita.ch/de/amavita-paracetamol-tabl-500-mg-20-stk.html")
elif drug == "Voltaren":
webbrowser.open("https://www.amavita.ch/de/voltaren-dolo-forte-drag-25-mg-10-stk.html")
elif drug == "Imodium":
webbrowser.open("https://www.amavita.ch/de/imodium-lingual-schmelztabl-2-mg-20-stk.html")
elif drug == "Bepanthen":
webbrowser.open("https://www.amavita.ch/de/bepanthen-plus-creme-5-tb-30-g.html")
elif drug == "Adrenalin":
webbrowser.open("https://www.amavita.ch/de/rubimed-adrenalin-comp-glob-45-g.html")
elif drug == "Effervescent tablets":
webbrowser.open("https://www.amavita.ch/de/nasobol-inhalo-brausetabl-30-stk.html")
elif drug == "Bandage":
webbrowser.open("https://www.amavita.ch/de/emosan-medi-ellbogen-bandage-l.html")
elif drug == "Syrup":
webbrowser.open("https://www.amavita.ch/de/supradynr-junior-sirup-325-ml.html")
elif drug == "Magnesium":
webbrowser.open("https://www.amavita.ch/de/magnesium-vital-complex-kaps-1-25-mmol-100-stk.html")
elif drug == "NeoCitran":
webbrowser.open("https://www.amavita.ch/de/neocitran-schnupfen-erkaltung-filmtabl-12-stk.html")
elif drug == "Cough Syrup":
webbrowser.open("https://www.amavita.ch/de/prospanex-hustensaft-fl-200-ml.html")
elif drug == "Nasal ointment":
webbrowser.open("https://www.amavita.ch/de/amavita-panthoben-nasensalbe-10-g.html")
elif drug == "Eukalyptus":
webbrowser.open("https://www.amavita.ch/de/otrivin-natural-plus-mit-eukalyptus-spray-20-ml.html")
elif drug == "Lozenges":
webbrowser.open("https://www.amavita.ch/de/solmucol-erkaltungshusten-lutschtabl-100-mg-24-stk.html")
elif drug == "Dextromethorphan":
| webbrowser.open("https://www.amavita.ch/de/siesta-1-brausesalz-150-g.html")
elif drug == "Lactease":
webbrowser.open("https://www.amavita.ch/de/lactease-4500-fcc-kautabl-40-stk.html")
elif drug == "Glycerin":
webbrowser.open("https://www.amavita.ch/de/glycerin-elk-pharma-supp-18-stk.html")
elif drug == "Normolytoral":
webbrowser.open("https://www.amavita.ch/de/normolytoral-plv-btl-10-stk.html")
elif drug == "Riopan Gel":
webbrowser.open("https://www.amavita.ch/de/riopan-gel-800-mg-fl-250-ml.html")
elif drug == "Itinerol":
webbrowser.open("https://www.amavita.ch/de/itinerol-b6-supp-erw-10-stk.html")
elif drug == "Disflatyl drops":
webbrowser.open("https://www.amavita.ch/de/disflatyl-tropfen-fl-30-ml.html")
elif drug == "Vitamin C + zinc":
webbrowser.open("https://www.amavita.ch/de/redoxon-zinc-brausetabl-30-stk.html")
elif drug == "Vitamin D3":
webbrowser.open("https://www.amavita.ch/de/vita-d3-protect-losung-zum-einnehmen-fl-20-ml.html")
elif drug == "Supradyn Gummies":
webbrowser.open("https://www.amavita.ch/de/supradyn-junior-gummies-ds-60-stk.html")
elif drug == "Power essence":
webbrowser.open("https://www.amavita.ch/de/wonnensteiner-kraftessenz-liq-fl-750-ml.html")
elif drug == "Dibase solution":
webbrowser.open("https://www.amavita.ch/de/dibase-los-25000-ie-fl-2-5-ml.html")
elif drug == "Vitamin B6 tablets":
webbrowser.open("https://www.amavita.ch/de/vitamin-b6-streuli-tabl-300-mg-ds-20-stk.html")
elif drug == "Mint Spray":
webbrowser.open("https://www.amavita.ch/de/nicorette-mint-spray-zur-anwendung-in-der-mundhohle-150-dos.html")
elif drug == "Nail polish":
webbrowser.open("https://www.amavita.ch/de/kloril-p-nagellack-fl-3-3-ml.html")
elif drug == "Nicotinell Gum":
webbrowser.open("https://www.amavita.ch/de/nicotinell-gum-4-mg-fruit-96-stk.html")
elif drug == "Biotin Merz":
webbrowser.open("https://www.amavita.ch/de/biotin-merz-tabl-5-mg-25-stk.html")
elif drug == "Nasal rinsing salt":
webbrowser.open("https://www.amavita.ch/de/emser-r-nasenspulsalz-2-5-g-50-beutel.html")
else:
return "We cannot seem to find your requested item. Please try again."
return "We opened the website for you."
# Manual reorder request:
# This function lets the user order new items manually.
def manual_reorder_request(drug):
return website_forward(drug)
# Request inventory:
# This function lets the user check the current inventory.
def request_inventory(drug_inventory):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug in drug_inventory:
html_return += "<tr><td>" + drug.name + "</td><td>" + str(drug.purchase_date) + "</td><td>" + str(drug.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
# Search inventory:
# This function lets the user search for specific items in his current inventory.
def search_inventory(drug_inventory, drug):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
html_return += "<tr><td>" + drug_in_inventory.name + "</td><td>" + str(drug_in_inventory.purchase_date) + "</td><td>" + str(drug_in_inventory.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
return """This drug does not exist in your inventory.<br><a href="/">Return to homepage</a>"""
# Check expiry date:
# This function lets the user check the expiry dates of his items.
def check_expiry_date(drug_inventory):
html_return = ""
for drug in drug_inventory:
day_difference = abs(drug.expiry_date - datetime.today()).days
if datetime.today() > drug.expiry_date:
html_return += "You should have disposed of your item '" + drug.name + "' " + str(day_difference) + " days ago. Please go to your local pharmacy and dispose of it there.<br>"
elif day_difference < 14:
html_return += "Your item '" + drug.name + "' expires in " + str(day_difference) + " days.<br>"
return html_return + """<br><a href="/">Return to homepage</a>"""
# Remove drugs from inventory:
# This function lets the user remove items from his current inventory when he has disposed of them.
def remove_drugs(drug_inventory, drug):
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
drug_inventory.remove(drug_in_inventory)
return """The item was removed from your inventary.<br><a href="/check-inventory">Go to inventory</a>"""
return """No such drug currently exists in your inventory.<br><a href="/check-inventory">Go to inventory</a>"""
# Object converter:
# This function converts the objects back into a file to save them after the program is closed.
def convert_objects_into_file(file, drug_inventory):
with open(file, "w") as f:
for drug in drug_inventory:
f.write(drug.name + "\n")
f.seek
f.write(str(drug.purchase_date) + "\n")
f.write(str(drug.expiry_date) + "\n") | webbrowser.open("https://www.amavita.ch/de/bisolvon-dextromethorphan-pastillen-10-5-mg-20-stk.html")
elif drug == "Effervescent salt":
| random_line_split |
Website_Version.py | # Overview
# Instructions [Line 17]
# Imported modules [Line 36]
# Item scanner [Line 44]
# Drug class [Line 79]
# File converter [Line 78]
# Date transformer [Line 95]
# Redirection to pharmacy website [Line 105]
# Manual reorder request [Line 190]
# Request inventory [Line 197]
# Search inventory [Line 207]
# Check expiry date [Line 219]
# Remove drugs from inventory [Line 233]
# Object converter [Line 244]
# Instructions
# 1) This code requires to be in the same folder as the corresponding Website_Server.py file.
# Furthermore, it requires the MyMedibox.html file to be in another 'templates' folder.
# Furthermore, it requires the cc_style.css file to be in another 'static' folder.
# 2) Another condition exists in regard to the scanner. QR- and barcodes must contain the following data structure:
# name_of_drug
# purchase_date in format: %Y-%m-%d %H:%M:%S
# expiry_date in format: %Y-%m-%d %H:%M:%S
# An example: Adrenalin
# 2021-09-10 12:00:00
# 2021-09-20 12:00:00
# 3) If these conditions are fulfilled the user can simply run the Website_Server.py file to get to the website.
# Imported modules
from datetime import datetime
from pyzbar import pyzbar
import webbrowser
import cv2
# Item scanner:
# This function scans the QR- or barcode and writes its contents into a file.
def read_barcodes(frame):
barcodes = pyzbar.decode(frame)
for barcode in barcodes:
barcode_info = barcode.data.decode('utf-8')
with open("barcode_file.txt", mode ='a') as file:
file.write(barcode_info + "\n" )
return len(barcodes)
def scan_item():
camera = cv2.VideoCapture(0)
ret, frame = camera.read()
while ret:
ret, frame = camera.read()
cv2.imshow('Barcode/QR code reader', frame)
number_barcodes = read_barcodes(frame)
if cv2.waitKey(1) & 0xFF == 27 or number_barcodes > 0:
break
camera.release()
cv2.destroyAllWindows()
# Drug class
class Drug:
def __init__(self, name, purchase_date, expiry_date):
self.name = name
self.purchase_date = purchase_date
self.expiry_date = expiry_date
# File converter:
# This function converts the contents of the file into objects.
def convert_file_into_objects(file):
drug_inventory = []
with open(file, "r") as f:
while True:
line1 = f.readline().rstrip()
if line1 == "":
break
line2 = f.readline().rstrip()
line3 = f.readline().rstrip()
drug_name = Drug(line1, line2, line3)
drug_inventory.append(drug_name)
return drug_inventory
# Date transformer:
# This function transforms the dates of type string into the datetime format.
def transform_dates(drug_inventory):
for drug in drug_inventory:
drug.purchase_date = datetime.strptime(drug.purchase_date, "%Y-%m-%d %H:%M:%S")
drug.expiry_date = datetime.strptime(drug.expiry_date, "%Y-%m-%d %H:%M:%S")
return drug_inventory
# Redirection to pharmacy website:
# This function redirects the user to the website of a pharmacy to buy new items.
def website_forward(drug):
|
# Manual reorder request:
# This function lets the user order new items manually.
def manual_reorder_request(drug):
return website_forward(drug)
# Request inventory:
# This function lets the user check the current inventory.
def request_inventory(drug_inventory):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug in drug_inventory:
html_return += "<tr><td>" + drug.name + "</td><td>" + str(drug.purchase_date) + "</td><td>" + str(drug.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
# Search inventory:
# This function lets the user search for specific items in his current inventory.
def search_inventory(drug_inventory, drug):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
html_return += "<tr><td>" + drug_in_inventory.name + "</td><td>" + str(drug_in_inventory.purchase_date) + "</td><td>" + str(drug_in_inventory.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
return """This drug does not exist in your inventory.<br><a href="/">Return to homepage</a>"""
# Check expiry date:
# This function lets the user check the expiry dates of his items.
def check_expiry_date(drug_inventory):
html_return = ""
for drug in drug_inventory:
day_difference = abs(drug.expiry_date - datetime.today()).days
if datetime.today() > drug.expiry_date:
html_return += "You should have disposed of your item '" + drug.name + "' " + str(day_difference) + " days ago. Please go to your local pharmacy and dispose of it there.<br>"
elif day_difference < 14:
html_return += "Your item '" + drug.name + "' expires in " + str(day_difference) + " days.<br>"
return html_return + """<br><a href="/">Return to homepage</a>"""
# Remove drugs from inventory:
# This function lets the user remove items from his current inventory when he has disposed of them.
def remove_drugs(drug_inventory, drug):
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
drug_inventory.remove(drug_in_inventory)
return """The item was removed from your inventary.<br><a href="/check-inventory">Go to inventory</a>"""
return """No such drug currently exists in your inventory.<br><a href="/check-inventory">Go to inventory</a>"""
# Object converter:
# This function converts the objects back into a file to save them after the program is closed.
def convert_objects_into_file(file, drug_inventory):
with open(file, "w") as f:
for drug in drug_inventory:
f.write(drug.name + "\n")
f.seek
f.write(str(drug.purchase_date) + "\n")
f.write(str(drug.expiry_date) + "\n") | if drug == "Aspirin":
webbrowser.open("https://www.amavita.ch/de/aspirin-s-tabl-500-mg-20-stk.html")
elif drug == "Ibuprofen":
webbrowser.open("https://www.amavita.ch/de/amavita-ibuprofen-filmtabl-400-mg-10-stk.html")
elif drug == "Eye drops":
webbrowser.open("https://www.amavita.ch/de/bepanthenr-augentropfen-fur-trockene-und-irritierte-augen-10-ml.html")
elif drug == "Ear drops":
webbrowser.open("https://www.amavita.ch/de/similasan-ohrentropfen-10-ml.html")
elif drug == "Nasal spray":
webbrowser.open("https://www.amavita.ch/de/rinosedin-nasenspray-0-1-10-ml.html")
elif drug == "Paracetamol":
webbrowser.open("https://www.amavita.ch/de/amavita-paracetamol-tabl-500-mg-20-stk.html")
elif drug == "Voltaren":
webbrowser.open("https://www.amavita.ch/de/voltaren-dolo-forte-drag-25-mg-10-stk.html")
elif drug == "Imodium":
webbrowser.open("https://www.amavita.ch/de/imodium-lingual-schmelztabl-2-mg-20-stk.html")
elif drug == "Bepanthen":
webbrowser.open("https://www.amavita.ch/de/bepanthen-plus-creme-5-tb-30-g.html")
elif drug == "Adrenalin":
webbrowser.open("https://www.amavita.ch/de/rubimed-adrenalin-comp-glob-45-g.html")
elif drug == "Effervescent tablets":
webbrowser.open("https://www.amavita.ch/de/nasobol-inhalo-brausetabl-30-stk.html")
elif drug == "Bandage":
webbrowser.open("https://www.amavita.ch/de/emosan-medi-ellbogen-bandage-l.html")
elif drug == "Syrup":
webbrowser.open("https://www.amavita.ch/de/supradynr-junior-sirup-325-ml.html")
elif drug == "Magnesium":
webbrowser.open("https://www.amavita.ch/de/magnesium-vital-complex-kaps-1-25-mmol-100-stk.html")
elif drug == "NeoCitran":
webbrowser.open("https://www.amavita.ch/de/neocitran-schnupfen-erkaltung-filmtabl-12-stk.html")
elif drug == "Cough Syrup":
webbrowser.open("https://www.amavita.ch/de/prospanex-hustensaft-fl-200-ml.html")
elif drug == "Nasal ointment":
webbrowser.open("https://www.amavita.ch/de/amavita-panthoben-nasensalbe-10-g.html")
elif drug == "Eukalyptus":
webbrowser.open("https://www.amavita.ch/de/otrivin-natural-plus-mit-eukalyptus-spray-20-ml.html")
elif drug == "Lozenges":
webbrowser.open("https://www.amavita.ch/de/solmucol-erkaltungshusten-lutschtabl-100-mg-24-stk.html")
elif drug == "Dextromethorphan":
webbrowser.open("https://www.amavita.ch/de/bisolvon-dextromethorphan-pastillen-10-5-mg-20-stk.html")
elif drug == "Effervescent salt":
webbrowser.open("https://www.amavita.ch/de/siesta-1-brausesalz-150-g.html")
elif drug == "Lactease":
webbrowser.open("https://www.amavita.ch/de/lactease-4500-fcc-kautabl-40-stk.html")
elif drug == "Glycerin":
webbrowser.open("https://www.amavita.ch/de/glycerin-elk-pharma-supp-18-stk.html")
elif drug == "Normolytoral":
webbrowser.open("https://www.amavita.ch/de/normolytoral-plv-btl-10-stk.html")
elif drug == "Riopan Gel":
webbrowser.open("https://www.amavita.ch/de/riopan-gel-800-mg-fl-250-ml.html")
elif drug == "Itinerol":
webbrowser.open("https://www.amavita.ch/de/itinerol-b6-supp-erw-10-stk.html")
elif drug == "Disflatyl drops":
webbrowser.open("https://www.amavita.ch/de/disflatyl-tropfen-fl-30-ml.html")
elif drug == "Vitamin C + zinc":
webbrowser.open("https://www.amavita.ch/de/redoxon-zinc-brausetabl-30-stk.html")
elif drug == "Vitamin D3":
webbrowser.open("https://www.amavita.ch/de/vita-d3-protect-losung-zum-einnehmen-fl-20-ml.html")
elif drug == "Supradyn Gummies":
webbrowser.open("https://www.amavita.ch/de/supradyn-junior-gummies-ds-60-stk.html")
elif drug == "Power essence":
webbrowser.open("https://www.amavita.ch/de/wonnensteiner-kraftessenz-liq-fl-750-ml.html")
elif drug == "Dibase solution":
webbrowser.open("https://www.amavita.ch/de/dibase-los-25000-ie-fl-2-5-ml.html")
elif drug == "Vitamin B6 tablets":
webbrowser.open("https://www.amavita.ch/de/vitamin-b6-streuli-tabl-300-mg-ds-20-stk.html")
elif drug == "Mint Spray":
webbrowser.open("https://www.amavita.ch/de/nicorette-mint-spray-zur-anwendung-in-der-mundhohle-150-dos.html")
elif drug == "Nail polish":
webbrowser.open("https://www.amavita.ch/de/kloril-p-nagellack-fl-3-3-ml.html")
elif drug == "Nicotinell Gum":
webbrowser.open("https://www.amavita.ch/de/nicotinell-gum-4-mg-fruit-96-stk.html")
elif drug == "Biotin Merz":
webbrowser.open("https://www.amavita.ch/de/biotin-merz-tabl-5-mg-25-stk.html")
elif drug == "Nasal rinsing salt":
webbrowser.open("https://www.amavita.ch/de/emser-r-nasenspulsalz-2-5-g-50-beutel.html")
else:
return "We cannot seem to find your requested item. Please try again."
return "We opened the website for you." | identifier_body |
Website_Version.py | # Overview
# Instructions [Line 17]
# Imported modules [Line 36]
# Item scanner [Line 44]
# Drug class [Line 79]
# File converter [Line 78]
# Date transformer [Line 95]
# Redirection to pharmacy website [Line 105]
# Manual reorder request [Line 190]
# Request inventory [Line 197]
# Search inventory [Line 207]
# Check expiry date [Line 219]
# Remove drugs from inventory [Line 233]
# Object converter [Line 244]
# Instructions
# 1) This code requires to be in the same folder as the corresponding Website_Server.py file.
# Furthermore, it requires the MyMedibox.html file to be in another 'templates' folder.
# Furthermore, it requires the cc_style.css file to be in another 'static' folder.
# 2) Another condition exists in regard to the scanner. QR- and barcodes must contain the following data structure:
# name_of_drug
# purchase_date in format: %Y-%m-%d %H:%M:%S
# expiry_date in format: %Y-%m-%d %H:%M:%S
# An example: Adrenalin
# 2021-09-10 12:00:00
# 2021-09-20 12:00:00
# 3) If these conditions are fulfilled the user can simply run the Website_Server.py file to get to the website.
# Imported modules
from datetime import datetime
from pyzbar import pyzbar
import webbrowser
import cv2
# Item scanner:
# This function scans the QR- or barcode and writes its contents into a file.
def read_barcodes(frame):
barcodes = pyzbar.decode(frame)
for barcode in barcodes:
barcode_info = barcode.data.decode('utf-8')
with open("barcode_file.txt", mode ='a') as file:
file.write(barcode_info + "\n" )
return len(barcodes)
def scan_item():
camera = cv2.VideoCapture(0)
ret, frame = camera.read()
while ret:
ret, frame = camera.read()
cv2.imshow('Barcode/QR code reader', frame)
number_barcodes = read_barcodes(frame)
if cv2.waitKey(1) & 0xFF == 27 or number_barcodes > 0:
break
camera.release()
cv2.destroyAllWindows()
# Drug class
class Drug:
def __init__(self, name, purchase_date, expiry_date):
self.name = name
self.purchase_date = purchase_date
self.expiry_date = expiry_date
# File converter:
# This function converts the contents of the file into objects.
def convert_file_into_objects(file):
drug_inventory = []
with open(file, "r") as f:
while True:
line1 = f.readline().rstrip()
if line1 == "":
break
line2 = f.readline().rstrip()
line3 = f.readline().rstrip()
drug_name = Drug(line1, line2, line3)
drug_inventory.append(drug_name)
return drug_inventory
# Date transformer:
# This function transforms the dates of type string into the datetime format.
def | (drug_inventory):
for drug in drug_inventory:
drug.purchase_date = datetime.strptime(drug.purchase_date, "%Y-%m-%d %H:%M:%S")
drug.expiry_date = datetime.strptime(drug.expiry_date, "%Y-%m-%d %H:%M:%S")
return drug_inventory
# Redirection to pharmacy website:
# This function redirects the user to the website of a pharmacy to buy new items.
def website_forward(drug):
if drug == "Aspirin":
webbrowser.open("https://www.amavita.ch/de/aspirin-s-tabl-500-mg-20-stk.html")
elif drug == "Ibuprofen":
webbrowser.open("https://www.amavita.ch/de/amavita-ibuprofen-filmtabl-400-mg-10-stk.html")
elif drug == "Eye drops":
webbrowser.open("https://www.amavita.ch/de/bepanthenr-augentropfen-fur-trockene-und-irritierte-augen-10-ml.html")
elif drug == "Ear drops":
webbrowser.open("https://www.amavita.ch/de/similasan-ohrentropfen-10-ml.html")
elif drug == "Nasal spray":
webbrowser.open("https://www.amavita.ch/de/rinosedin-nasenspray-0-1-10-ml.html")
elif drug == "Paracetamol":
webbrowser.open("https://www.amavita.ch/de/amavita-paracetamol-tabl-500-mg-20-stk.html")
elif drug == "Voltaren":
webbrowser.open("https://www.amavita.ch/de/voltaren-dolo-forte-drag-25-mg-10-stk.html")
elif drug == "Imodium":
webbrowser.open("https://www.amavita.ch/de/imodium-lingual-schmelztabl-2-mg-20-stk.html")
elif drug == "Bepanthen":
webbrowser.open("https://www.amavita.ch/de/bepanthen-plus-creme-5-tb-30-g.html")
elif drug == "Adrenalin":
webbrowser.open("https://www.amavita.ch/de/rubimed-adrenalin-comp-glob-45-g.html")
elif drug == "Effervescent tablets":
webbrowser.open("https://www.amavita.ch/de/nasobol-inhalo-brausetabl-30-stk.html")
elif drug == "Bandage":
webbrowser.open("https://www.amavita.ch/de/emosan-medi-ellbogen-bandage-l.html")
elif drug == "Syrup":
webbrowser.open("https://www.amavita.ch/de/supradynr-junior-sirup-325-ml.html")
elif drug == "Magnesium":
webbrowser.open("https://www.amavita.ch/de/magnesium-vital-complex-kaps-1-25-mmol-100-stk.html")
elif drug == "NeoCitran":
webbrowser.open("https://www.amavita.ch/de/neocitran-schnupfen-erkaltung-filmtabl-12-stk.html")
elif drug == "Cough Syrup":
webbrowser.open("https://www.amavita.ch/de/prospanex-hustensaft-fl-200-ml.html")
elif drug == "Nasal ointment":
webbrowser.open("https://www.amavita.ch/de/amavita-panthoben-nasensalbe-10-g.html")
elif drug == "Eukalyptus":
webbrowser.open("https://www.amavita.ch/de/otrivin-natural-plus-mit-eukalyptus-spray-20-ml.html")
elif drug == "Lozenges":
webbrowser.open("https://www.amavita.ch/de/solmucol-erkaltungshusten-lutschtabl-100-mg-24-stk.html")
elif drug == "Dextromethorphan":
webbrowser.open("https://www.amavita.ch/de/bisolvon-dextromethorphan-pastillen-10-5-mg-20-stk.html")
elif drug == "Effervescent salt":
webbrowser.open("https://www.amavita.ch/de/siesta-1-brausesalz-150-g.html")
elif drug == "Lactease":
webbrowser.open("https://www.amavita.ch/de/lactease-4500-fcc-kautabl-40-stk.html")
elif drug == "Glycerin":
webbrowser.open("https://www.amavita.ch/de/glycerin-elk-pharma-supp-18-stk.html")
elif drug == "Normolytoral":
webbrowser.open("https://www.amavita.ch/de/normolytoral-plv-btl-10-stk.html")
elif drug == "Riopan Gel":
webbrowser.open("https://www.amavita.ch/de/riopan-gel-800-mg-fl-250-ml.html")
elif drug == "Itinerol":
webbrowser.open("https://www.amavita.ch/de/itinerol-b6-supp-erw-10-stk.html")
elif drug == "Disflatyl drops":
webbrowser.open("https://www.amavita.ch/de/disflatyl-tropfen-fl-30-ml.html")
elif drug == "Vitamin C + zinc":
webbrowser.open("https://www.amavita.ch/de/redoxon-zinc-brausetabl-30-stk.html")
elif drug == "Vitamin D3":
webbrowser.open("https://www.amavita.ch/de/vita-d3-protect-losung-zum-einnehmen-fl-20-ml.html")
elif drug == "Supradyn Gummies":
webbrowser.open("https://www.amavita.ch/de/supradyn-junior-gummies-ds-60-stk.html")
elif drug == "Power essence":
webbrowser.open("https://www.amavita.ch/de/wonnensteiner-kraftessenz-liq-fl-750-ml.html")
elif drug == "Dibase solution":
webbrowser.open("https://www.amavita.ch/de/dibase-los-25000-ie-fl-2-5-ml.html")
elif drug == "Vitamin B6 tablets":
webbrowser.open("https://www.amavita.ch/de/vitamin-b6-streuli-tabl-300-mg-ds-20-stk.html")
elif drug == "Mint Spray":
webbrowser.open("https://www.amavita.ch/de/nicorette-mint-spray-zur-anwendung-in-der-mundhohle-150-dos.html")
elif drug == "Nail polish":
webbrowser.open("https://www.amavita.ch/de/kloril-p-nagellack-fl-3-3-ml.html")
elif drug == "Nicotinell Gum":
webbrowser.open("https://www.amavita.ch/de/nicotinell-gum-4-mg-fruit-96-stk.html")
elif drug == "Biotin Merz":
webbrowser.open("https://www.amavita.ch/de/biotin-merz-tabl-5-mg-25-stk.html")
elif drug == "Nasal rinsing salt":
webbrowser.open("https://www.amavita.ch/de/emser-r-nasenspulsalz-2-5-g-50-beutel.html")
else:
return "We cannot seem to find your requested item. Please try again."
return "We opened the website for you."
# Manual reorder request:
# This function lets the user order new items manually.
def manual_reorder_request(drug):
return website_forward(drug)
# Request inventory:
# This function lets the user check the current inventory.
def request_inventory(drug_inventory):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug in drug_inventory:
html_return += "<tr><td>" + drug.name + "</td><td>" + str(drug.purchase_date) + "</td><td>" + str(drug.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
# Search inventory:
# This function lets the user search for specific items in his current inventory.
def search_inventory(drug_inventory, drug):
html_return = "<table border='1'><thead><tr><th>Name</th><th>Purchase Date</th><th>Expiry Date</th></tr></thead>"
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
html_return += "<tr><td>" + drug_in_inventory.name + "</td><td>" + str(drug_in_inventory.purchase_date) + "</td><td>" + str(drug_in_inventory.expiry_date) + "</td></tr>"
return html_return + """</table><br><a href="/">Return to homepage</a>"""
return """This drug does not exist in your inventory.<br><a href="/">Return to homepage</a>"""
# Check expiry date:
# This function lets the user check the expiry dates of his items.
def check_expiry_date(drug_inventory):
html_return = ""
for drug in drug_inventory:
day_difference = abs(drug.expiry_date - datetime.today()).days
if datetime.today() > drug.expiry_date:
html_return += "You should have disposed of your item '" + drug.name + "' " + str(day_difference) + " days ago. Please go to your local pharmacy and dispose of it there.<br>"
elif day_difference < 14:
html_return += "Your item '" + drug.name + "' expires in " + str(day_difference) + " days.<br>"
return html_return + """<br><a href="/">Return to homepage</a>"""
# Remove drugs from inventory:
# This function lets the user remove items from his current inventory when he has disposed of them.
def remove_drugs(drug_inventory, drug):
for drug_in_inventory in drug_inventory:
if drug == drug_in_inventory.name:
drug_inventory.remove(drug_in_inventory)
return """The item was removed from your inventary.<br><a href="/check-inventory">Go to inventory</a>"""
return """No such drug currently exists in your inventory.<br><a href="/check-inventory">Go to inventory</a>"""
# Object converter:
# This function converts the objects back into a file to save them after the program is closed.
def convert_objects_into_file(file, drug_inventory):
with open(file, "w") as f:
for drug in drug_inventory:
f.write(drug.name + "\n")
f.seek
f.write(str(drug.purchase_date) + "\n")
f.write(str(drug.expiry_date) + "\n") | transform_dates | identifier_name |
utils.py | import numpy as np
from dateutil import parser
import pandas as pd
from datetime import datetime
def equal_interval_creation(
particles,
fields,
time_intervals,
time_horizon,
time_passed):
"""Instrumental function for inference evaluation and pllotting in the artificial experiment.
Create subsets of particles with growing length,
assigning a time-passed variable based splitting this time on the overall samples set.
Return a list of different samples subsets."""
ret = []
time_ratio = time_horizon / time_passed
for i in range(1, time_intervals + 1):
w = {}
for key in particles.keys():
if (key in fields) | (fields is None):
x = particles[key]
# N0 = int(x.shape[0] * time_ratio * ((i - 1) / time_intervals))
N1 = int(x.shape[0] * time_ratio * (i / time_intervals))
N0 = int(N1 * 0.1)
w[key] = x[N0:N1]
t_i = time_passed * (N1 / x.shape[0])
w = {'particles': w}
w['time'] = t_i
w['epoch'] = i
ret.append(w)
return ret
def asymmetric_mix_dict(d1, d2):
"""Merge the keys of d2 into d1. Common keys are overrided."""
ret = d1
for k in d2:
ret[k] = d2[k]
return ret
def concat_dict(d1, d2):
"""Apply np.concatenate to every key in common between two dictionaries."""
if d1 is None:
return d2
if d2 is None:
return d1
else:
assert set(d1.keys()) == set(d2.keys())
return {k: np.concatenate([d1[k], d2[k]], axis=0) for k in d1}
def dict_tf2numpy(self):
'''
Transform a dictionary of list of tensor into a dict or list of numpy objects.
The single objects transformations happens through .numpy method.
:param self: Any dict or list of objects with a .numpy method
:type self: dict or list
:return: dict or list of objects of class numpy
:rtype: dict or list
'''
ret = {}
for k in self:
x = self[k]
if type(x) == dict:
ret[k] = {v: dict_tf2numpy(x[v]) for v in x}
elif type(x) == list:
ret[k] = [dict_tf2numpy(v) for v in x]
else:
ret[k] = x.numpy()
return ret
def inv_softplus(x, _limit_upper=30, _limit_lower=1e-12):
'''
Returns y (float32), s.t. softplus(y)=x
'''
if isinstance(x, np.float) or isinstance(x, np.int):
if x < _limit_upper:
ret = np.log(np.exp(x) - 1)
else:
ret = x
else:
ret = np.zeros(x.shape, dtype=np.float32)
under_limit = x < _limit_upper
over_limit = np.logical_not(under_limit)
ret[under_limit] = np.float32(np.log(np.exp(x[under_limit]) - 1 + _limit_lower))
ret[over_limit] = x[over_limit]
return ret
def safe_softplus(x, limit=10):
"""Softplus function correction to avoid numeric overflow."""
ret = x
_under_limit = x < limit
ret[_under_limit] = np.log(1.0 + np.exp(x[_under_limit]))
return ret
def lagify(y, p):
'''
Taken time series y (vertical), returns columns with the last p lags of y.
Returns both y and ylag, aligned so that ylag sees just until yesterday.
'''
T, N = y.shape
ylag = np.ones([T, N * p + 1])
for pp in range(p):
ylag[pp + 1:T, N * pp + 1:pp * N + N + 1] = y[:T - pp - 1, :]
return np.float32(y[p:, :]), np.float32(ylag[p:, :])
def VAR_data_generation(T, N, par_p, cov_wn, const_terms):
'''
generates T x N data, with par_p VAR structure, cov_wn noise covariance and a vector of constant terms cont_terms.
'''
p = int(par_p.shape[0] / N)
eps = np.random.multivariate_normal(np.zeros(N), cov_wn, size=T)
y = np.zeros([T, N])
last_y = np.zeros([p, N])
ylag = np.zeros([T, N * p + 1])
for t in range(T):
ylag[t] = np.concatenate([np.ones([1, 1]), last_y.reshape(1, -1)], axis=1)
y[t, :] = const_terms + np.matmul(last_y.reshape(1, -1), par_p) + eps[t]
last_y[:p - 1] = last_y[1:]
last_y[p - 1] = y[t]
return y, ylag
def spiral_indexes(N):
'''
return the indexes of a line vector that corresponds to the elements of a triangular matrix.
spiral means that the elements in the matrix are inserted using a spiral sequence (as tensorflow.fill_triangular does).
'''
spiral_matrix = np.zeros([N, N], dtype=np.int)
spiral_line_tril = np.zeros(int(N * (N + 1) / 2), dtype=np.int)
last_num = 0
ln = 0
for n in range(N):
if (n % 2) == 0:
# assigns the inverted rows
val_n = N - int(n / 2)
spiral_matrix[N - 1 - int(n / 2), :N - int(n / 2)] = np.flip(last_num + np.arange(val_n))
# print(ln,ln+N-int(n/2))
qn = N ** 2 - int(n / 2) * N
inds = (np.arange(qn - N, qn - int(n / 2)))
spiral_line_tril[ln:ln + N - int(n / 2)] = np.flip(inds)
last_num += val_n
ln += N - (int(n / 2))
else:
# assign the rows
val_n = int((n + 1) / 2)
spiral_matrix[int((n - 1) / 2), :int((n + 1) / 2)] = last_num + np.arange(val_n)
last_num += val_n
qn = (val_n - 1) * N # int(val_n*(val_n-1)/2)
inds = np.arange(qn, qn + val_n)
spiral_line_tril[ln:ln + val_n] = inds
ln += val_n
return spiral_matrix[np.diag_indices(N)], spiral_matrix[np.tril_indices(N, -1)], spiral_matrix[
np.tril_indices(N)], spiral_matrix, spiral_line_tril
def fromMat2diag_udiag(mat):
'''
Given a matrix returns the diagonal and the strictly lower triangular part of Cholesky(mat).
The strict lower matrix returned is normalized per the diagonal elements corresponding.
'''
N = mat.shape[0]
cholmat = np.linalg.cholesky(mat)
choldiag = np.diag(cholmat)
normmat = np.tile(np.reshape(choldiag, [1, N]), [N, 1])
choludiag = (cholmat / normmat)[np.tril_indices(N, -1)]
return choldiag, choludiag
def arctanh(x):
'''
returns arctanh(x), doesn't check for nans.
'''
ret = 0.5 * np.log((1 + x) / (1 - x))
if (np.sum(np.isnan(ret)) > 0):
print(x)
ret[np.isnan(ret)] = 0.0
return ret
class indexes_librarian:
'''
A single class that collects different set of indexes, useful to gather ndarrays.
'''
def __init__(self, N):
self.spiral_diag, self.spiral_udiag, self.spiral_tril, self.spiral_matrix, self.spiral_line = spiral_indexes(N)
self.diag = np.diag_indices(N)
self.udiag = np.tril_indices(N, -1)
self.tril = np.tril_indices(N)
def from_daily2_monthly(y, log_returns=False):
'''
Transform the pandas Dataframe with time-index to a montly series. log_returns parameter controls if log-returns must be computed.
'''
ind_dates = np.zeros(y.shape[0], dtype=np.int)
last_date = None
jj = 0
for ii in range(y.index.shape[0]):
date_ii = parser.parse(y.index[ii])
if ii == 0 or not date_ii.month == last_date.month:
ind_dates[jj] = ii
jj += 1
last_date = date_ii
ind_dates = ind_dates[:jj]
ret = y.iloc[ind_dates, :].values
if log_returns:
ret = np.log(ret[1:, :]) - np.log(ret[:-1, :])
ret = pd.DataFrame(ret, y.index[ind_dates[1:]])
return ret
def init_BetaPdfLowVariance_fromPoint(x, b=10.0, _min_a=1e-1):
'''
Given x, a ndarray of observed vaues from different Beta distributions, returns a pair of parameters a,b that corresponds to Beta distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * (x) / (1.0 - x)
if isinstance(x, np.float):
if xa / xb < _min_a:
xa = _min_a * xb
else:
under_min = xa / xb < _min_a
xa[under_min] = _min_a * xb[under_min]
return xa, xb
def init_GammaPdfLowVariance_fromPoint(x, b=10.0):
'''
Given x, a ndarray of observed vaues from different Gamma distributions, returns a pair of parameters a,b that corresponds to Gamma distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * x
return xa, xb
def view_stats(x, axis=None):
if axis is None:
print(f'min: {np.min(x)}\nmean: {np.mean(x)}\nmax: {np.max(x)}\nstd: {np.std(x)}')
else:
print(
f'min: {np.min(x, axis=axis)}\nmean: {np.mean(x, axis=axis)}\nmax: {np.max(x, axis=axis)}\nstd: {np.std(x, axis=axis)}')
def matrix2line_diagFunc(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
N = M.shape[0]
assert N == M.shape[1]
ind = indexes_librarian(N)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[ind.diag]
_cholvcv[ind.diag] = inv_func(diag_cholvcv)
ret = _cholvcv[ind.tril]
ret[ind.spiral_diag] = _cholvcv[ind.diag]
ret[ind.spiral_udiag] = _cholvcv[ind.udiag]
return ret
def matrix2line_diagFunc_timeseries(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
t, n, _ = M.shape
assert n == _
ind = indexes_librarian(n)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[:, ind.diag[0], ind.diag[1]]
_cholvcv[:, ind.diag[0], ind.diag[1]] = inv_func(diag_cholvcv)
ret = _cholvcv[:, ind.tril[0], ind.tril[1]]
ret[:, ind.spiral_diag] = _cholvcv[:, ind.diag[0], ind.diag[1]]
ret[:, ind.spiral_udiag] = _cholvcv[:, ind.udiag[0], ind.udiag[1]]
return ret
def decorator_multilevel(f):
"""Decorator the apply hierarchically the decorated function to every element in:
-dictionaries
-list
-single elements"""
def f_decorated(x):
if type(x) == dict:
fx = {k: f_decorated(x[k]) for k in x}
elif (type(x) == list) | (type(x) == tuple):
fx = [f_decorated(k) for k in x]
else:
fx = f(x)
return fx
return f_decorated
def format_number(x):
return np.round(x, 2)
def runtime_print(f):
"""Decorate a function to print its runtime"""
def decorated_fun(*args, **kwargs):
t0 = datetime.now()
ret = f(*args, **kwargs)
t1 = datetime.now() | return decorated_fun
def print_formatted_values(**kwargs):
"""Print all the values with ',' as a separator."""
string = ', '.join([f'{k}: {format_number(kwargs[k])}' for k in kwargs])
print(string)
if __name__ == '__main__':
M = np.repeat(np.eye(3)[np.newaxis, :, :], 5, axis=0)
print(M)
M_line = matrix2line_diagFunc_timeseries(M)
print(M_line) | print(f'Runtime: {t1 - t0}')
return ret
| random_line_split |
utils.py | import numpy as np
from dateutil import parser
import pandas as pd
from datetime import datetime
def equal_interval_creation(
particles,
fields,
time_intervals,
time_horizon,
time_passed):
"""Instrumental function for inference evaluation and pllotting in the artificial experiment.
Create subsets of particles with growing length,
assigning a time-passed variable based splitting this time on the overall samples set.
Return a list of different samples subsets."""
ret = []
time_ratio = time_horizon / time_passed
for i in range(1, time_intervals + 1):
w = {}
for key in particles.keys():
if (key in fields) | (fields is None):
x = particles[key]
# N0 = int(x.shape[0] * time_ratio * ((i - 1) / time_intervals))
N1 = int(x.shape[0] * time_ratio * (i / time_intervals))
N0 = int(N1 * 0.1)
w[key] = x[N0:N1]
t_i = time_passed * (N1 / x.shape[0])
w = {'particles': w}
w['time'] = t_i
w['epoch'] = i
ret.append(w)
return ret
def asymmetric_mix_dict(d1, d2):
"""Merge the keys of d2 into d1. Common keys are overrided."""
ret = d1
for k in d2:
ret[k] = d2[k]
return ret
def concat_dict(d1, d2):
"""Apply np.concatenate to every key in common between two dictionaries."""
if d1 is None:
return d2
if d2 is None:
return d1
else:
assert set(d1.keys()) == set(d2.keys())
return {k: np.concatenate([d1[k], d2[k]], axis=0) for k in d1}
def dict_tf2numpy(self):
'''
Transform a dictionary of list of tensor into a dict or list of numpy objects.
The single objects transformations happens through .numpy method.
:param self: Any dict or list of objects with a .numpy method
:type self: dict or list
:return: dict or list of objects of class numpy
:rtype: dict or list
'''
ret = {}
for k in self:
x = self[k]
if type(x) == dict:
ret[k] = {v: dict_tf2numpy(x[v]) for v in x}
elif type(x) == list:
ret[k] = [dict_tf2numpy(v) for v in x]
else:
ret[k] = x.numpy()
return ret
def inv_softplus(x, _limit_upper=30, _limit_lower=1e-12):
'''
Returns y (float32), s.t. softplus(y)=x
'''
if isinstance(x, np.float) or isinstance(x, np.int):
if x < _limit_upper:
ret = np.log(np.exp(x) - 1)
else:
ret = x
else:
ret = np.zeros(x.shape, dtype=np.float32)
under_limit = x < _limit_upper
over_limit = np.logical_not(under_limit)
ret[under_limit] = np.float32(np.log(np.exp(x[under_limit]) - 1 + _limit_lower))
ret[over_limit] = x[over_limit]
return ret
def safe_softplus(x, limit=10):
"""Softplus function correction to avoid numeric overflow."""
ret = x
_under_limit = x < limit
ret[_under_limit] = np.log(1.0 + np.exp(x[_under_limit]))
return ret
def lagify(y, p):
'''
Taken time series y (vertical), returns columns with the last p lags of y.
Returns both y and ylag, aligned so that ylag sees just until yesterday.
'''
T, N = y.shape
ylag = np.ones([T, N * p + 1])
for pp in range(p):
ylag[pp + 1:T, N * pp + 1:pp * N + N + 1] = y[:T - pp - 1, :]
return np.float32(y[p:, :]), np.float32(ylag[p:, :])
def VAR_data_generation(T, N, par_p, cov_wn, const_terms):
'''
generates T x N data, with par_p VAR structure, cov_wn noise covariance and a vector of constant terms cont_terms.
'''
p = int(par_p.shape[0] / N)
eps = np.random.multivariate_normal(np.zeros(N), cov_wn, size=T)
y = np.zeros([T, N])
last_y = np.zeros([p, N])
ylag = np.zeros([T, N * p + 1])
for t in range(T):
ylag[t] = np.concatenate([np.ones([1, 1]), last_y.reshape(1, -1)], axis=1)
y[t, :] = const_terms + np.matmul(last_y.reshape(1, -1), par_p) + eps[t]
last_y[:p - 1] = last_y[1:]
last_y[p - 1] = y[t]
return y, ylag
def spiral_indexes(N):
'''
return the indexes of a line vector that corresponds to the elements of a triangular matrix.
spiral means that the elements in the matrix are inserted using a spiral sequence (as tensorflow.fill_triangular does).
'''
spiral_matrix = np.zeros([N, N], dtype=np.int)
spiral_line_tril = np.zeros(int(N * (N + 1) / 2), dtype=np.int)
last_num = 0
ln = 0
for n in range(N):
if (n % 2) == 0:
# assigns the inverted rows
val_n = N - int(n / 2)
spiral_matrix[N - 1 - int(n / 2), :N - int(n / 2)] = np.flip(last_num + np.arange(val_n))
# print(ln,ln+N-int(n/2))
qn = N ** 2 - int(n / 2) * N
inds = (np.arange(qn - N, qn - int(n / 2)))
spiral_line_tril[ln:ln + N - int(n / 2)] = np.flip(inds)
last_num += val_n
ln += N - (int(n / 2))
else:
# assign the rows
val_n = int((n + 1) / 2)
spiral_matrix[int((n - 1) / 2), :int((n + 1) / 2)] = last_num + np.arange(val_n)
last_num += val_n
qn = (val_n - 1) * N # int(val_n*(val_n-1)/2)
inds = np.arange(qn, qn + val_n)
spiral_line_tril[ln:ln + val_n] = inds
ln += val_n
return spiral_matrix[np.diag_indices(N)], spiral_matrix[np.tril_indices(N, -1)], spiral_matrix[
np.tril_indices(N)], spiral_matrix, spiral_line_tril
def fromMat2diag_udiag(mat):
'''
Given a matrix returns the diagonal and the strictly lower triangular part of Cholesky(mat).
The strict lower matrix returned is normalized per the diagonal elements corresponding.
'''
N = mat.shape[0]
cholmat = np.linalg.cholesky(mat)
choldiag = np.diag(cholmat)
normmat = np.tile(np.reshape(choldiag, [1, N]), [N, 1])
choludiag = (cholmat / normmat)[np.tril_indices(N, -1)]
return choldiag, choludiag
def arctanh(x):
'''
returns arctanh(x), doesn't check for nans.
'''
ret = 0.5 * np.log((1 + x) / (1 - x))
if (np.sum(np.isnan(ret)) > 0):
|
return ret
class indexes_librarian:
'''
A single class that collects different set of indexes, useful to gather ndarrays.
'''
def __init__(self, N):
self.spiral_diag, self.spiral_udiag, self.spiral_tril, self.spiral_matrix, self.spiral_line = spiral_indexes(N)
self.diag = np.diag_indices(N)
self.udiag = np.tril_indices(N, -1)
self.tril = np.tril_indices(N)
def from_daily2_monthly(y, log_returns=False):
'''
Transform the pandas Dataframe with time-index to a montly series. log_returns parameter controls if log-returns must be computed.
'''
ind_dates = np.zeros(y.shape[0], dtype=np.int)
last_date = None
jj = 0
for ii in range(y.index.shape[0]):
date_ii = parser.parse(y.index[ii])
if ii == 0 or not date_ii.month == last_date.month:
ind_dates[jj] = ii
jj += 1
last_date = date_ii
ind_dates = ind_dates[:jj]
ret = y.iloc[ind_dates, :].values
if log_returns:
ret = np.log(ret[1:, :]) - np.log(ret[:-1, :])
ret = pd.DataFrame(ret, y.index[ind_dates[1:]])
return ret
def init_BetaPdfLowVariance_fromPoint(x, b=10.0, _min_a=1e-1):
'''
Given x, a ndarray of observed vaues from different Beta distributions, returns a pair of parameters a,b that corresponds to Beta distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * (x) / (1.0 - x)
if isinstance(x, np.float):
if xa / xb < _min_a:
xa = _min_a * xb
else:
under_min = xa / xb < _min_a
xa[under_min] = _min_a * xb[under_min]
return xa, xb
def init_GammaPdfLowVariance_fromPoint(x, b=10.0):
'''
Given x, a ndarray of observed vaues from different Gamma distributions, returns a pair of parameters a,b that corresponds to Gamma distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * x
return xa, xb
def view_stats(x, axis=None):
if axis is None:
print(f'min: {np.min(x)}\nmean: {np.mean(x)}\nmax: {np.max(x)}\nstd: {np.std(x)}')
else:
print(
f'min: {np.min(x, axis=axis)}\nmean: {np.mean(x, axis=axis)}\nmax: {np.max(x, axis=axis)}\nstd: {np.std(x, axis=axis)}')
def matrix2line_diagFunc(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
N = M.shape[0]
assert N == M.shape[1]
ind = indexes_librarian(N)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[ind.diag]
_cholvcv[ind.diag] = inv_func(diag_cholvcv)
ret = _cholvcv[ind.tril]
ret[ind.spiral_diag] = _cholvcv[ind.diag]
ret[ind.spiral_udiag] = _cholvcv[ind.udiag]
return ret
def matrix2line_diagFunc_timeseries(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
t, n, _ = M.shape
assert n == _
ind = indexes_librarian(n)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[:, ind.diag[0], ind.diag[1]]
_cholvcv[:, ind.diag[0], ind.diag[1]] = inv_func(diag_cholvcv)
ret = _cholvcv[:, ind.tril[0], ind.tril[1]]
ret[:, ind.spiral_diag] = _cholvcv[:, ind.diag[0], ind.diag[1]]
ret[:, ind.spiral_udiag] = _cholvcv[:, ind.udiag[0], ind.udiag[1]]
return ret
def decorator_multilevel(f):
"""Decorator the apply hierarchically the decorated function to every element in:
-dictionaries
-list
-single elements"""
def f_decorated(x):
if type(x) == dict:
fx = {k: f_decorated(x[k]) for k in x}
elif (type(x) == list) | (type(x) == tuple):
fx = [f_decorated(k) for k in x]
else:
fx = f(x)
return fx
return f_decorated
def format_number(x):
return np.round(x, 2)
def runtime_print(f):
"""Decorate a function to print its runtime"""
def decorated_fun(*args, **kwargs):
t0 = datetime.now()
ret = f(*args, **kwargs)
t1 = datetime.now()
print(f'Runtime: {t1 - t0}')
return ret
return decorated_fun
def print_formatted_values(**kwargs):
"""Print all the values with ',' as a separator."""
string = ', '.join([f'{k}: {format_number(kwargs[k])}' for k in kwargs])
print(string)
if __name__ == '__main__':
M = np.repeat(np.eye(3)[np.newaxis, :, :], 5, axis=0)
print(M)
M_line = matrix2line_diagFunc_timeseries(M)
print(M_line)
| print(x)
ret[np.isnan(ret)] = 0.0 | conditional_block |
utils.py | import numpy as np
from dateutil import parser
import pandas as pd
from datetime import datetime
def equal_interval_creation(
particles,
fields,
time_intervals,
time_horizon,
time_passed):
"""Instrumental function for inference evaluation and pllotting in the artificial experiment.
Create subsets of particles with growing length,
assigning a time-passed variable based splitting this time on the overall samples set.
Return a list of different samples subsets."""
ret = []
time_ratio = time_horizon / time_passed
for i in range(1, time_intervals + 1):
w = {}
for key in particles.keys():
if (key in fields) | (fields is None):
x = particles[key]
# N0 = int(x.shape[0] * time_ratio * ((i - 1) / time_intervals))
N1 = int(x.shape[0] * time_ratio * (i / time_intervals))
N0 = int(N1 * 0.1)
w[key] = x[N0:N1]
t_i = time_passed * (N1 / x.shape[0])
w = {'particles': w}
w['time'] = t_i
w['epoch'] = i
ret.append(w)
return ret
def asymmetric_mix_dict(d1, d2):
"""Merge the keys of d2 into d1. Common keys are overrided."""
ret = d1
for k in d2:
ret[k] = d2[k]
return ret
def concat_dict(d1, d2):
"""Apply np.concatenate to every key in common between two dictionaries."""
if d1 is None:
return d2
if d2 is None:
return d1
else:
assert set(d1.keys()) == set(d2.keys())
return {k: np.concatenate([d1[k], d2[k]], axis=0) for k in d1}
def dict_tf2numpy(self):
'''
Transform a dictionary of list of tensor into a dict or list of numpy objects.
The single objects transformations happens through .numpy method.
:param self: Any dict or list of objects with a .numpy method
:type self: dict or list
:return: dict or list of objects of class numpy
:rtype: dict or list
'''
ret = {}
for k in self:
x = self[k]
if type(x) == dict:
ret[k] = {v: dict_tf2numpy(x[v]) for v in x}
elif type(x) == list:
ret[k] = [dict_tf2numpy(v) for v in x]
else:
ret[k] = x.numpy()
return ret
def inv_softplus(x, _limit_upper=30, _limit_lower=1e-12):
'''
Returns y (float32), s.t. softplus(y)=x
'''
if isinstance(x, np.float) or isinstance(x, np.int):
if x < _limit_upper:
ret = np.log(np.exp(x) - 1)
else:
ret = x
else:
ret = np.zeros(x.shape, dtype=np.float32)
under_limit = x < _limit_upper
over_limit = np.logical_not(under_limit)
ret[under_limit] = np.float32(np.log(np.exp(x[under_limit]) - 1 + _limit_lower))
ret[over_limit] = x[over_limit]
return ret
def safe_softplus(x, limit=10):
"""Softplus function correction to avoid numeric overflow."""
ret = x
_under_limit = x < limit
ret[_under_limit] = np.log(1.0 + np.exp(x[_under_limit]))
return ret
def lagify(y, p):
'''
Taken time series y (vertical), returns columns with the last p lags of y.
Returns both y and ylag, aligned so that ylag sees just until yesterday.
'''
T, N = y.shape
ylag = np.ones([T, N * p + 1])
for pp in range(p):
ylag[pp + 1:T, N * pp + 1:pp * N + N + 1] = y[:T - pp - 1, :]
return np.float32(y[p:, :]), np.float32(ylag[p:, :])
def VAR_data_generation(T, N, par_p, cov_wn, const_terms):
'''
generates T x N data, with par_p VAR structure, cov_wn noise covariance and a vector of constant terms cont_terms.
'''
p = int(par_p.shape[0] / N)
eps = np.random.multivariate_normal(np.zeros(N), cov_wn, size=T)
y = np.zeros([T, N])
last_y = np.zeros([p, N])
ylag = np.zeros([T, N * p + 1])
for t in range(T):
ylag[t] = np.concatenate([np.ones([1, 1]), last_y.reshape(1, -1)], axis=1)
y[t, :] = const_terms + np.matmul(last_y.reshape(1, -1), par_p) + eps[t]
last_y[:p - 1] = last_y[1:]
last_y[p - 1] = y[t]
return y, ylag
def spiral_indexes(N):
'''
return the indexes of a line vector that corresponds to the elements of a triangular matrix.
spiral means that the elements in the matrix are inserted using a spiral sequence (as tensorflow.fill_triangular does).
'''
spiral_matrix = np.zeros([N, N], dtype=np.int)
spiral_line_tril = np.zeros(int(N * (N + 1) / 2), dtype=np.int)
last_num = 0
ln = 0
for n in range(N):
if (n % 2) == 0:
# assigns the inverted rows
val_n = N - int(n / 2)
spiral_matrix[N - 1 - int(n / 2), :N - int(n / 2)] = np.flip(last_num + np.arange(val_n))
# print(ln,ln+N-int(n/2))
qn = N ** 2 - int(n / 2) * N
inds = (np.arange(qn - N, qn - int(n / 2)))
spiral_line_tril[ln:ln + N - int(n / 2)] = np.flip(inds)
last_num += val_n
ln += N - (int(n / 2))
else:
# assign the rows
val_n = int((n + 1) / 2)
spiral_matrix[int((n - 1) / 2), :int((n + 1) / 2)] = last_num + np.arange(val_n)
last_num += val_n
qn = (val_n - 1) * N # int(val_n*(val_n-1)/2)
inds = np.arange(qn, qn + val_n)
spiral_line_tril[ln:ln + val_n] = inds
ln += val_n
return spiral_matrix[np.diag_indices(N)], spiral_matrix[np.tril_indices(N, -1)], spiral_matrix[
np.tril_indices(N)], spiral_matrix, spiral_line_tril
def fromMat2diag_udiag(mat):
'''
Given a matrix returns the diagonal and the strictly lower triangular part of Cholesky(mat).
The strict lower matrix returned is normalized per the diagonal elements corresponding.
'''
N = mat.shape[0]
cholmat = np.linalg.cholesky(mat)
choldiag = np.diag(cholmat)
normmat = np.tile(np.reshape(choldiag, [1, N]), [N, 1])
choludiag = (cholmat / normmat)[np.tril_indices(N, -1)]
return choldiag, choludiag
def arctanh(x):
'''
returns arctanh(x), doesn't check for nans.
'''
ret = 0.5 * np.log((1 + x) / (1 - x))
if (np.sum(np.isnan(ret)) > 0):
print(x)
ret[np.isnan(ret)] = 0.0
return ret
class | :
'''
A single class that collects different set of indexes, useful to gather ndarrays.
'''
def __init__(self, N):
self.spiral_diag, self.spiral_udiag, self.spiral_tril, self.spiral_matrix, self.spiral_line = spiral_indexes(N)
self.diag = np.diag_indices(N)
self.udiag = np.tril_indices(N, -1)
self.tril = np.tril_indices(N)
def from_daily2_monthly(y, log_returns=False):
'''
Transform the pandas Dataframe with time-index to a montly series. log_returns parameter controls if log-returns must be computed.
'''
ind_dates = np.zeros(y.shape[0], dtype=np.int)
last_date = None
jj = 0
for ii in range(y.index.shape[0]):
date_ii = parser.parse(y.index[ii])
if ii == 0 or not date_ii.month == last_date.month:
ind_dates[jj] = ii
jj += 1
last_date = date_ii
ind_dates = ind_dates[:jj]
ret = y.iloc[ind_dates, :].values
if log_returns:
ret = np.log(ret[1:, :]) - np.log(ret[:-1, :])
ret = pd.DataFrame(ret, y.index[ind_dates[1:]])
return ret
def init_BetaPdfLowVariance_fromPoint(x, b=10.0, _min_a=1e-1):
'''
Given x, a ndarray of observed vaues from different Beta distributions, returns a pair of parameters a,b that corresponds to Beta distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * (x) / (1.0 - x)
if isinstance(x, np.float):
if xa / xb < _min_a:
xa = _min_a * xb
else:
under_min = xa / xb < _min_a
xa[under_min] = _min_a * xb[under_min]
return xa, xb
def init_GammaPdfLowVariance_fromPoint(x, b=10.0):
'''
Given x, a ndarray of observed vaues from different Gamma distributions, returns a pair of parameters a,b that corresponds to Gamma distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * x
return xa, xb
def view_stats(x, axis=None):
if axis is None:
print(f'min: {np.min(x)}\nmean: {np.mean(x)}\nmax: {np.max(x)}\nstd: {np.std(x)}')
else:
print(
f'min: {np.min(x, axis=axis)}\nmean: {np.mean(x, axis=axis)}\nmax: {np.max(x, axis=axis)}\nstd: {np.std(x, axis=axis)}')
def matrix2line_diagFunc(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
N = M.shape[0]
assert N == M.shape[1]
ind = indexes_librarian(N)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[ind.diag]
_cholvcv[ind.diag] = inv_func(diag_cholvcv)
ret = _cholvcv[ind.tril]
ret[ind.spiral_diag] = _cholvcv[ind.diag]
ret[ind.spiral_udiag] = _cholvcv[ind.udiag]
return ret
def matrix2line_diagFunc_timeseries(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
t, n, _ = M.shape
assert n == _
ind = indexes_librarian(n)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[:, ind.diag[0], ind.diag[1]]
_cholvcv[:, ind.diag[0], ind.diag[1]] = inv_func(diag_cholvcv)
ret = _cholvcv[:, ind.tril[0], ind.tril[1]]
ret[:, ind.spiral_diag] = _cholvcv[:, ind.diag[0], ind.diag[1]]
ret[:, ind.spiral_udiag] = _cholvcv[:, ind.udiag[0], ind.udiag[1]]
return ret
def decorator_multilevel(f):
"""Decorator the apply hierarchically the decorated function to every element in:
-dictionaries
-list
-single elements"""
def f_decorated(x):
if type(x) == dict:
fx = {k: f_decorated(x[k]) for k in x}
elif (type(x) == list) | (type(x) == tuple):
fx = [f_decorated(k) for k in x]
else:
fx = f(x)
return fx
return f_decorated
def format_number(x):
return np.round(x, 2)
def runtime_print(f):
"""Decorate a function to print its runtime"""
def decorated_fun(*args, **kwargs):
t0 = datetime.now()
ret = f(*args, **kwargs)
t1 = datetime.now()
print(f'Runtime: {t1 - t0}')
return ret
return decorated_fun
def print_formatted_values(**kwargs):
"""Print all the values with ',' as a separator."""
string = ', '.join([f'{k}: {format_number(kwargs[k])}' for k in kwargs])
print(string)
if __name__ == '__main__':
M = np.repeat(np.eye(3)[np.newaxis, :, :], 5, axis=0)
print(M)
M_line = matrix2line_diagFunc_timeseries(M)
print(M_line)
| indexes_librarian | identifier_name |
utils.py | import numpy as np
from dateutil import parser
import pandas as pd
from datetime import datetime
def equal_interval_creation(
particles,
fields,
time_intervals,
time_horizon,
time_passed):
"""Instrumental function for inference evaluation and pllotting in the artificial experiment.
Create subsets of particles with growing length,
assigning a time-passed variable based splitting this time on the overall samples set.
Return a list of different samples subsets."""
ret = []
time_ratio = time_horizon / time_passed
for i in range(1, time_intervals + 1):
w = {}
for key in particles.keys():
if (key in fields) | (fields is None):
x = particles[key]
# N0 = int(x.shape[0] * time_ratio * ((i - 1) / time_intervals))
N1 = int(x.shape[0] * time_ratio * (i / time_intervals))
N0 = int(N1 * 0.1)
w[key] = x[N0:N1]
t_i = time_passed * (N1 / x.shape[0])
w = {'particles': w}
w['time'] = t_i
w['epoch'] = i
ret.append(w)
return ret
def asymmetric_mix_dict(d1, d2):
"""Merge the keys of d2 into d1. Common keys are overrided."""
ret = d1
for k in d2:
ret[k] = d2[k]
return ret
def concat_dict(d1, d2):
"""Apply np.concatenate to every key in common between two dictionaries."""
if d1 is None:
return d2
if d2 is None:
return d1
else:
assert set(d1.keys()) == set(d2.keys())
return {k: np.concatenate([d1[k], d2[k]], axis=0) for k in d1}
def dict_tf2numpy(self):
'''
Transform a dictionary of list of tensor into a dict or list of numpy objects.
The single objects transformations happens through .numpy method.
:param self: Any dict or list of objects with a .numpy method
:type self: dict or list
:return: dict or list of objects of class numpy
:rtype: dict or list
'''
ret = {}
for k in self:
x = self[k]
if type(x) == dict:
ret[k] = {v: dict_tf2numpy(x[v]) for v in x}
elif type(x) == list:
ret[k] = [dict_tf2numpy(v) for v in x]
else:
ret[k] = x.numpy()
return ret
def inv_softplus(x, _limit_upper=30, _limit_lower=1e-12):
'''
Returns y (float32), s.t. softplus(y)=x
'''
if isinstance(x, np.float) or isinstance(x, np.int):
if x < _limit_upper:
ret = np.log(np.exp(x) - 1)
else:
ret = x
else:
ret = np.zeros(x.shape, dtype=np.float32)
under_limit = x < _limit_upper
over_limit = np.logical_not(under_limit)
ret[under_limit] = np.float32(np.log(np.exp(x[under_limit]) - 1 + _limit_lower))
ret[over_limit] = x[over_limit]
return ret
def safe_softplus(x, limit=10):
"""Softplus function correction to avoid numeric overflow."""
ret = x
_under_limit = x < limit
ret[_under_limit] = np.log(1.0 + np.exp(x[_under_limit]))
return ret
def lagify(y, p):
'''
Taken time series y (vertical), returns columns with the last p lags of y.
Returns both y and ylag, aligned so that ylag sees just until yesterday.
'''
T, N = y.shape
ylag = np.ones([T, N * p + 1])
for pp in range(p):
ylag[pp + 1:T, N * pp + 1:pp * N + N + 1] = y[:T - pp - 1, :]
return np.float32(y[p:, :]), np.float32(ylag[p:, :])
def VAR_data_generation(T, N, par_p, cov_wn, const_terms):
|
def spiral_indexes(N):
'''
return the indexes of a line vector that corresponds to the elements of a triangular matrix.
spiral means that the elements in the matrix are inserted using a spiral sequence (as tensorflow.fill_triangular does).
'''
spiral_matrix = np.zeros([N, N], dtype=np.int)
spiral_line_tril = np.zeros(int(N * (N + 1) / 2), dtype=np.int)
last_num = 0
ln = 0
for n in range(N):
if (n % 2) == 0:
# assigns the inverted rows
val_n = N - int(n / 2)
spiral_matrix[N - 1 - int(n / 2), :N - int(n / 2)] = np.flip(last_num + np.arange(val_n))
# print(ln,ln+N-int(n/2))
qn = N ** 2 - int(n / 2) * N
inds = (np.arange(qn - N, qn - int(n / 2)))
spiral_line_tril[ln:ln + N - int(n / 2)] = np.flip(inds)
last_num += val_n
ln += N - (int(n / 2))
else:
# assign the rows
val_n = int((n + 1) / 2)
spiral_matrix[int((n - 1) / 2), :int((n + 1) / 2)] = last_num + np.arange(val_n)
last_num += val_n
qn = (val_n - 1) * N # int(val_n*(val_n-1)/2)
inds = np.arange(qn, qn + val_n)
spiral_line_tril[ln:ln + val_n] = inds
ln += val_n
return spiral_matrix[np.diag_indices(N)], spiral_matrix[np.tril_indices(N, -1)], spiral_matrix[
np.tril_indices(N)], spiral_matrix, spiral_line_tril
def fromMat2diag_udiag(mat):
'''
Given a matrix returns the diagonal and the strictly lower triangular part of Cholesky(mat).
The strict lower matrix returned is normalized per the diagonal elements corresponding.
'''
N = mat.shape[0]
cholmat = np.linalg.cholesky(mat)
choldiag = np.diag(cholmat)
normmat = np.tile(np.reshape(choldiag, [1, N]), [N, 1])
choludiag = (cholmat / normmat)[np.tril_indices(N, -1)]
return choldiag, choludiag
def arctanh(x):
'''
returns arctanh(x), doesn't check for nans.
'''
ret = 0.5 * np.log((1 + x) / (1 - x))
if (np.sum(np.isnan(ret)) > 0):
print(x)
ret[np.isnan(ret)] = 0.0
return ret
class indexes_librarian:
'''
A single class that collects different set of indexes, useful to gather ndarrays.
'''
def __init__(self, N):
self.spiral_diag, self.spiral_udiag, self.spiral_tril, self.spiral_matrix, self.spiral_line = spiral_indexes(N)
self.diag = np.diag_indices(N)
self.udiag = np.tril_indices(N, -1)
self.tril = np.tril_indices(N)
def from_daily2_monthly(y, log_returns=False):
'''
Transform the pandas Dataframe with time-index to a montly series. log_returns parameter controls if log-returns must be computed.
'''
ind_dates = np.zeros(y.shape[0], dtype=np.int)
last_date = None
jj = 0
for ii in range(y.index.shape[0]):
date_ii = parser.parse(y.index[ii])
if ii == 0 or not date_ii.month == last_date.month:
ind_dates[jj] = ii
jj += 1
last_date = date_ii
ind_dates = ind_dates[:jj]
ret = y.iloc[ind_dates, :].values
if log_returns:
ret = np.log(ret[1:, :]) - np.log(ret[:-1, :])
ret = pd.DataFrame(ret, y.index[ind_dates[1:]])
return ret
def init_BetaPdfLowVariance_fromPoint(x, b=10.0, _min_a=1e-1):
'''
Given x, a ndarray of observed vaues from different Beta distributions, returns a pair of parameters a,b that corresponds to Beta distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * (x) / (1.0 - x)
if isinstance(x, np.float):
if xa / xb < _min_a:
xa = _min_a * xb
else:
under_min = xa / xb < _min_a
xa[under_min] = _min_a * xb[under_min]
return xa, xb
def init_GammaPdfLowVariance_fromPoint(x, b=10.0):
'''
Given x, a ndarray of observed vaues from different Gamma distributions, returns a pair of parameters a,b that corresponds to Gamma distributions with expected value equal to x and variance controlled by b (bigger the b, lower the variance).
'''
xb = np.ones(x.shape, dtype=np.float32) * b
xa = xb * x
return xa, xb
def view_stats(x, axis=None):
if axis is None:
print(f'min: {np.min(x)}\nmean: {np.mean(x)}\nmax: {np.max(x)}\nstd: {np.std(x)}')
else:
print(
f'min: {np.min(x, axis=axis)}\nmean: {np.mean(x, axis=axis)}\nmax: {np.max(x, axis=axis)}\nstd: {np.std(x, axis=axis)}')
def matrix2line_diagFunc(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
N = M.shape[0]
assert N == M.shape[1]
ind = indexes_librarian(N)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[ind.diag]
_cholvcv[ind.diag] = inv_func(diag_cholvcv)
ret = _cholvcv[ind.tril]
ret[ind.spiral_diag] = _cholvcv[ind.diag]
ret[ind.spiral_udiag] = _cholvcv[ind.udiag]
return ret
def matrix2line_diagFunc_timeseries(M, inv_func=inv_softplus):
'''
Takes a matrix and extract a line with the coefficient of the Cholesky decomposition.
The order is spyral, so this function is the numpy inverse of tensorflow.fill_triangular.
:param M: The matrix
:type M: np.array
:param inv_func: The function to apply to the coefficients on the diagonal
:type inv_func: tf.function with domain positive number and codomain the real line
:return: A line with the coefficients.
:rtype: np.array
'''
t, n, _ = M.shape
assert n == _
ind = indexes_librarian(n)
_cholvcv = np.linalg.cholesky(M)
diag_cholvcv = _cholvcv[:, ind.diag[0], ind.diag[1]]
_cholvcv[:, ind.diag[0], ind.diag[1]] = inv_func(diag_cholvcv)
ret = _cholvcv[:, ind.tril[0], ind.tril[1]]
ret[:, ind.spiral_diag] = _cholvcv[:, ind.diag[0], ind.diag[1]]
ret[:, ind.spiral_udiag] = _cholvcv[:, ind.udiag[0], ind.udiag[1]]
return ret
def decorator_multilevel(f):
"""Decorator the apply hierarchically the decorated function to every element in:
-dictionaries
-list
-single elements"""
def f_decorated(x):
if type(x) == dict:
fx = {k: f_decorated(x[k]) for k in x}
elif (type(x) == list) | (type(x) == tuple):
fx = [f_decorated(k) for k in x]
else:
fx = f(x)
return fx
return f_decorated
def format_number(x):
return np.round(x, 2)
def runtime_print(f):
"""Decorate a function to print its runtime"""
def decorated_fun(*args, **kwargs):
t0 = datetime.now()
ret = f(*args, **kwargs)
t1 = datetime.now()
print(f'Runtime: {t1 - t0}')
return ret
return decorated_fun
def print_formatted_values(**kwargs):
"""Print all the values with ',' as a separator."""
string = ', '.join([f'{k}: {format_number(kwargs[k])}' for k in kwargs])
print(string)
if __name__ == '__main__':
M = np.repeat(np.eye(3)[np.newaxis, :, :], 5, axis=0)
print(M)
M_line = matrix2line_diagFunc_timeseries(M)
print(M_line)
| '''
generates T x N data, with par_p VAR structure, cov_wn noise covariance and a vector of constant terms cont_terms.
'''
p = int(par_p.shape[0] / N)
eps = np.random.multivariate_normal(np.zeros(N), cov_wn, size=T)
y = np.zeros([T, N])
last_y = np.zeros([p, N])
ylag = np.zeros([T, N * p + 1])
for t in range(T):
ylag[t] = np.concatenate([np.ones([1, 1]), last_y.reshape(1, -1)], axis=1)
y[t, :] = const_terms + np.matmul(last_y.reshape(1, -1), par_p) + eps[t]
last_y[:p - 1] = last_y[1:]
last_y[p - 1] = y[t]
return y, ylag | identifier_body |
main.py | import argparse
from pytorch_msssim import SSIM, MS_SSIM
import torch
import torchvision as tv
import torchvision.transforms as transforms
import torch.nn as nn
from torch.autograd import Variable
from torchvision.utils import save_image
from torch.utils.data import SubsetRandomSampler
from tensorboardX import SummaryWriter
from adv_pred_model import AdversaryModelPred
from AEs import Autoencoder, MinimalDecoder
from dataset_utils import ImageTensorFolder, TensorPredictionData
import os
import numpy as np
from shutil import copytree, copy2
from glob import glob
# from generate_ir import get_client_model
random_seed = 100
torch.manual_seed(random_seed)
np.random.seed(random_seed)
FAIRFACE_SPLIT_LAYER_INPUT_NC = {1: 64, 2: 64, 3: 64, 4: 64, 5: 128, 6:256, 7:512}
def apply_transform(batch_size, train_split, goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task):
if task == 'gender':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_gender=True, tns_fmt=tns_fmt)
elif task == 'smile':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_smile=True, tns_fmt=tns_fmt)
elif task == 'race':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_race=True, tns_fmt=tns_fmt)
else:
trainTransform = transforms.Compose([transforms.ToTensor(),])
dataset = ImageTensorFolder(img_path=goal_data_dir, tensor_path=tensor_data_dir,
img_fmt=img_fmt, tns_fmt=tns_fmt, transform=trainTransform)
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(train_split * dataset_size))
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, test_indices = indices[:split], indices[split:]
train_sampler = SubsetRandomSampler(train_indices)
test_sampler = SubsetRandomSampler(test_indices)
trainloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=train_sampler)
testloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=test_sampler)
return trainloader, testloader
def denormalize(img, dataset="imagenet"):
|
def save_images(input_imgs, output_imgs, epoch, path, img_nums, offset=0, batch_size=64):
"""
"""
input_prefix = "inp_"
output_prefix = "out_"
out_folder = "{}/{}".format(path, epoch)
out_folder = os.path.abspath(out_folder)
if not os.path.isdir(out_folder):
os.makedirs(out_folder)
for img_idx in range(input_imgs.shape[0]):
inp_img_path = "{}/{}{}.jpg".format(out_folder, input_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
out_img_path = "{}/{}{}.jpg".format(out_folder, output_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
#inp_img = denormalize(input_imgs[img_idx])
#out_img = denormalize(output_imgs[img_idx])
save_image(input_imgs[img_idx], inp_img_path)
save_image(output_imgs[img_idx], out_img_path)
def copy_source_code(path):
if not os.path.isdir(path):
os.makedirs(path)
for file_ in glob(r'./*.py'):
copy2(file_, path)
copytree("clients/", path + "clients/")
def main(architecture, task, goal_data_dir, tensor_data_dir, img_fmt, tns_fmt,
loss_fn, train_split, batch_size, num_epochs, train_output_freq,
test_output_freq, split_layer, gpu_id):
device = torch.device('cuda:{}'.format(gpu_id)) if torch.cuda.is_available() else torch.device('cpu')
print("Using device as {}".format(device))
output_path = "./output/{}".format(architecture)
train_output_path = "{}/train".format(output_path)
test_output_path = "{}/test".format(output_path)
tensorboard_path = "{}/tensorboard/".format(output_path)
source_code_path = "{}/sourcecode/".format(output_path)
model_path = "{}/model.pt".format(output_path)
writer = SummaryWriter(logdir=tensorboard_path)
if task == 'gender' or task == 'smile' or task == 'race':
decoder = AdversaryModelPred(split_layer).to(device)
train_loss_fn = nn.CrossEntropyLoss()
else:
input_nc = FAIRFACE_SPLIT_LAYER_INPUT_NC[split_layer]
decoder = Autoencoder(input_nc=input_nc, output_nc=3, split_layer=split_layer).to(device)
#decoder = MinimalDecoder(input_nc=64, output_nc=3, input_dim=112, output_dim=224).to(device)
torch.save(decoder.state_dict(), model_path)
# decoder.load_state_dict(torch.load(model_path))
# copy_source_code(source_code_path)
if (loss_fn.lower() == 'mse'):
train_loss_fn = nn.MSELoss()
elif (loss_fn.lower() == 'ssim'):
ssim_value = SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
elif (loss_fn.lower() == 'ms_ssim'):
ssim_loss_fn = MS_SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
else:
raise ValueError("Loss function {} not recognized".format(loss_fn.lower()))
trainloader, testloader = apply_transform(batch_size, train_split,
goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task)
optimizer = torch.optim.Adam(decoder.parameters(), lr=1e-3)
#client_model = get_client_model().to(device)
#for param in client_model.parameters():
# param.requires_grad = False
round_ = 0
for epoch in range(round_ * num_epochs, (round_ + 1) * num_epochs):
for num, data in enumerate(trainloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race': # for gender 'img' is actually male/female label
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
reconstruction_loss = train_loss_fn(output, img)
train_loss = reconstruction_loss
writer.add_scalar('loss/train', train_loss.item(), len(trainloader) * epoch + num)
writer.add_scalar('loss/train_loss/reconstruction', reconstruction_loss.item(), len(trainloader) * epoch + num)
optimizer.zero_grad()
train_loss.backward()
optimizer.step()
if (epoch + 1) % train_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
save_images(img, output, epoch, train_output_path, img_nums) # offset=0, batch_size=batch_size)
pred_correct = 0
total = 0
for num, data in enumerate(testloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
if task == 'gender' or task == 'smile' or task == 'race':
pred_correct += (output.argmax(dim=1) == img).sum().item()
total += int(ir.shape[0])
reconstruction_loss = train_loss_fn(output, img)
test_loss = reconstruction_loss
writer.add_scalar('loss/test', test_loss.item(), len(testloader) * epoch + num)
writer.add_scalar('loss/test_loss/reconstruction', reconstruction_loss.item(), len(testloader) * epoch + num)
pred_acc = pred_correct / total
writer.add_scalar("test/pred_accuracy", pred_acc, len(testloader) * epoch)
if (epoch + 1) % test_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
for num, data in enumerate(testloader):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output_imgs = decoder(ir)
save_images(img, output_imgs, epoch, test_output_path, img_nums) # offset=num, batch_size=batch_size)
for name, param in decoder.named_parameters():
writer.add_histogram("params/{}".format(name), param.clone().cpu().data.numpy(), epoch)
model_path = "{}/model_{}.pt".format(output_path, epoch)
torch.save(decoder.state_dict(), model_path)
print("epoch [{}/{}], train_loss {:.4f}, test_loss {:.4f}, pred_acc {:.4f}".format(epoch + 1,
num_epochs, train_loss.item(), test_loss.item(), pred_acc))
writer.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--architecture', type=str, default='pruning-network-fairface-reconstruction-split6-ratio0.3-1')
parser.add_argument('--split_layer', type=int, default=1)
parser.add_argument('--tensor_data_dir', type=str, default='/home/emizhang/chap/experiments/pruning_network_fairface_resnet18_scratch_split5_ratio0.9_data_2/challenge',
help='intermediate image data directory')
parser.add_argument('--goal_data_dir', type=str, default='/mas/camera/Datasets/Faces/fairface/val/',
help='training image data directory or data labels')
parser.add_argument('--task', type=str, default="data", help='choose between data, and gender (for celeba)')
parser.add_argument('--img_fmt', type=str, default='jpg', help='format of training images, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--tns_fmt', type=str, default='pt', help='format of tensor data, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--train_split', type=float, default=0.9, help='ratio of data to use for training')
parser.add_argument('--loss_fn', type=str, default='mse', help='loss function to use for training, one of mse, ssim, ms_ssim')
parser.add_argument('--batch_size', type=int, default=32, help='size of each image batch')
# parser.add_argument('--optimizer_pick', type=str, default="Adam", help='choose optimizer between Adam and SGD')
parser.add_argument('--num_epochs', type=int, default=500, help='maximum number of epochs')
parser.add_argument("--train_output_freq", type=int, default=10, help="interval between saving model weights")
parser.add_argument("--test_output_freq", type=int, default=50, help="interval between saving model weights")
parser.add_argument("--gpu_id", type=int, default=0, help="gpu id to use if training on cuda")
opt = parser.parse_args()
main(architecture=opt.architecture,
task=opt.task,
goal_data_dir=opt.goal_data_dir,
tensor_data_dir=opt.tensor_data_dir,
img_fmt=opt.img_fmt,
tns_fmt=opt.tns_fmt,
loss_fn=opt.loss_fn,
train_split=opt.train_split,
batch_size=opt.batch_size,
num_epochs=opt.num_epochs,
train_output_freq=opt.train_output_freq,
test_output_freq=opt.test_output_freq,
split_layer=opt.split_layer,
gpu_id=opt.gpu_id)
| """
data is normalized with mu and sigma, this function puts it back
"""
if dataset == "cifar10":
c_std = [0.247, 0.243, 0.261]
c_mean = [0.4914, 0.4822, 0.4466]
elif dataset == "imagenet":
c_std = [0.229, 0.224, 0.225]
c_mean = [0.485, 0.456, 0.406]
for i in [0, 1, 2]:
img[i] = img[i] * c_std[i] + c_mean[i]
return img | identifier_body |
main.py | import argparse
from pytorch_msssim import SSIM, MS_SSIM
import torch
import torchvision as tv
import torchvision.transforms as transforms
import torch.nn as nn
from torch.autograd import Variable
from torchvision.utils import save_image
from torch.utils.data import SubsetRandomSampler
from tensorboardX import SummaryWriter
from adv_pred_model import AdversaryModelPred
from AEs import Autoencoder, MinimalDecoder
from dataset_utils import ImageTensorFolder, TensorPredictionData
import os
import numpy as np
from shutil import copytree, copy2
from glob import glob
# from generate_ir import get_client_model
random_seed = 100
torch.manual_seed(random_seed)
np.random.seed(random_seed)
FAIRFACE_SPLIT_LAYER_INPUT_NC = {1: 64, 2: 64, 3: 64, 4: 64, 5: 128, 6:256, 7:512}
def apply_transform(batch_size, train_split, goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task):
if task == 'gender':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_gender=True, tns_fmt=tns_fmt)
elif task == 'smile':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_smile=True, tns_fmt=tns_fmt)
elif task == 'race':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_race=True, tns_fmt=tns_fmt)
else:
trainTransform = transforms.Compose([transforms.ToTensor(),])
dataset = ImageTensorFolder(img_path=goal_data_dir, tensor_path=tensor_data_dir,
img_fmt=img_fmt, tns_fmt=tns_fmt, transform=trainTransform)
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(train_split * dataset_size))
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, test_indices = indices[:split], indices[split:]
train_sampler = SubsetRandomSampler(train_indices)
test_sampler = SubsetRandomSampler(test_indices)
trainloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=train_sampler)
testloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=test_sampler)
return trainloader, testloader
def denormalize(img, dataset="imagenet"):
"""
data is normalized with mu and sigma, this function puts it back
"""
if dataset == "cifar10":
c_std = [0.247, 0.243, 0.261]
c_mean = [0.4914, 0.4822, 0.4466]
elif dataset == "imagenet":
c_std = [0.229, 0.224, 0.225]
c_mean = [0.485, 0.456, 0.406]
for i in [0, 1, 2]:
img[i] = img[i] * c_std[i] + c_mean[i]
return img
def save_images(input_imgs, output_imgs, epoch, path, img_nums, offset=0, batch_size=64):
"""
"""
input_prefix = "inp_"
output_prefix = "out_"
out_folder = "{}/{}".format(path, epoch)
out_folder = os.path.abspath(out_folder)
if not os.path.isdir(out_folder):
os.makedirs(out_folder)
for img_idx in range(input_imgs.shape[0]):
inp_img_path = "{}/{}{}.jpg".format(out_folder, input_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
out_img_path = "{}/{}{}.jpg".format(out_folder, output_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
#inp_img = denormalize(input_imgs[img_idx])
#out_img = denormalize(output_imgs[img_idx])
save_image(input_imgs[img_idx], inp_img_path)
save_image(output_imgs[img_idx], out_img_path)
def | (path):
if not os.path.isdir(path):
os.makedirs(path)
for file_ in glob(r'./*.py'):
copy2(file_, path)
copytree("clients/", path + "clients/")
def main(architecture, task, goal_data_dir, tensor_data_dir, img_fmt, tns_fmt,
loss_fn, train_split, batch_size, num_epochs, train_output_freq,
test_output_freq, split_layer, gpu_id):
device = torch.device('cuda:{}'.format(gpu_id)) if torch.cuda.is_available() else torch.device('cpu')
print("Using device as {}".format(device))
output_path = "./output/{}".format(architecture)
train_output_path = "{}/train".format(output_path)
test_output_path = "{}/test".format(output_path)
tensorboard_path = "{}/tensorboard/".format(output_path)
source_code_path = "{}/sourcecode/".format(output_path)
model_path = "{}/model.pt".format(output_path)
writer = SummaryWriter(logdir=tensorboard_path)
if task == 'gender' or task == 'smile' or task == 'race':
decoder = AdversaryModelPred(split_layer).to(device)
train_loss_fn = nn.CrossEntropyLoss()
else:
input_nc = FAIRFACE_SPLIT_LAYER_INPUT_NC[split_layer]
decoder = Autoencoder(input_nc=input_nc, output_nc=3, split_layer=split_layer).to(device)
#decoder = MinimalDecoder(input_nc=64, output_nc=3, input_dim=112, output_dim=224).to(device)
torch.save(decoder.state_dict(), model_path)
# decoder.load_state_dict(torch.load(model_path))
# copy_source_code(source_code_path)
if (loss_fn.lower() == 'mse'):
train_loss_fn = nn.MSELoss()
elif (loss_fn.lower() == 'ssim'):
ssim_value = SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
elif (loss_fn.lower() == 'ms_ssim'):
ssim_loss_fn = MS_SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
else:
raise ValueError("Loss function {} not recognized".format(loss_fn.lower()))
trainloader, testloader = apply_transform(batch_size, train_split,
goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task)
optimizer = torch.optim.Adam(decoder.parameters(), lr=1e-3)
#client_model = get_client_model().to(device)
#for param in client_model.parameters():
# param.requires_grad = False
round_ = 0
for epoch in range(round_ * num_epochs, (round_ + 1) * num_epochs):
for num, data in enumerate(trainloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race': # for gender 'img' is actually male/female label
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
reconstruction_loss = train_loss_fn(output, img)
train_loss = reconstruction_loss
writer.add_scalar('loss/train', train_loss.item(), len(trainloader) * epoch + num)
writer.add_scalar('loss/train_loss/reconstruction', reconstruction_loss.item(), len(trainloader) * epoch + num)
optimizer.zero_grad()
train_loss.backward()
optimizer.step()
if (epoch + 1) % train_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
save_images(img, output, epoch, train_output_path, img_nums) # offset=0, batch_size=batch_size)
pred_correct = 0
total = 0
for num, data in enumerate(testloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
if task == 'gender' or task == 'smile' or task == 'race':
pred_correct += (output.argmax(dim=1) == img).sum().item()
total += int(ir.shape[0])
reconstruction_loss = train_loss_fn(output, img)
test_loss = reconstruction_loss
writer.add_scalar('loss/test', test_loss.item(), len(testloader) * epoch + num)
writer.add_scalar('loss/test_loss/reconstruction', reconstruction_loss.item(), len(testloader) * epoch + num)
pred_acc = pred_correct / total
writer.add_scalar("test/pred_accuracy", pred_acc, len(testloader) * epoch)
if (epoch + 1) % test_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
for num, data in enumerate(testloader):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output_imgs = decoder(ir)
save_images(img, output_imgs, epoch, test_output_path, img_nums) # offset=num, batch_size=batch_size)
for name, param in decoder.named_parameters():
writer.add_histogram("params/{}".format(name), param.clone().cpu().data.numpy(), epoch)
model_path = "{}/model_{}.pt".format(output_path, epoch)
torch.save(decoder.state_dict(), model_path)
print("epoch [{}/{}], train_loss {:.4f}, test_loss {:.4f}, pred_acc {:.4f}".format(epoch + 1,
num_epochs, train_loss.item(), test_loss.item(), pred_acc))
writer.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--architecture', type=str, default='pruning-network-fairface-reconstruction-split6-ratio0.3-1')
parser.add_argument('--split_layer', type=int, default=1)
parser.add_argument('--tensor_data_dir', type=str, default='/home/emizhang/chap/experiments/pruning_network_fairface_resnet18_scratch_split5_ratio0.9_data_2/challenge',
help='intermediate image data directory')
parser.add_argument('--goal_data_dir', type=str, default='/mas/camera/Datasets/Faces/fairface/val/',
help='training image data directory or data labels')
parser.add_argument('--task', type=str, default="data", help='choose between data, and gender (for celeba)')
parser.add_argument('--img_fmt', type=str, default='jpg', help='format of training images, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--tns_fmt', type=str, default='pt', help='format of tensor data, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--train_split', type=float, default=0.9, help='ratio of data to use for training')
parser.add_argument('--loss_fn', type=str, default='mse', help='loss function to use for training, one of mse, ssim, ms_ssim')
parser.add_argument('--batch_size', type=int, default=32, help='size of each image batch')
# parser.add_argument('--optimizer_pick', type=str, default="Adam", help='choose optimizer between Adam and SGD')
parser.add_argument('--num_epochs', type=int, default=500, help='maximum number of epochs')
parser.add_argument("--train_output_freq", type=int, default=10, help="interval between saving model weights")
parser.add_argument("--test_output_freq", type=int, default=50, help="interval between saving model weights")
parser.add_argument("--gpu_id", type=int, default=0, help="gpu id to use if training on cuda")
opt = parser.parse_args()
main(architecture=opt.architecture,
task=opt.task,
goal_data_dir=opt.goal_data_dir,
tensor_data_dir=opt.tensor_data_dir,
img_fmt=opt.img_fmt,
tns_fmt=opt.tns_fmt,
loss_fn=opt.loss_fn,
train_split=opt.train_split,
batch_size=opt.batch_size,
num_epochs=opt.num_epochs,
train_output_freq=opt.train_output_freq,
test_output_freq=opt.test_output_freq,
split_layer=opt.split_layer,
gpu_id=opt.gpu_id)
| copy_source_code | identifier_name |
main.py | import argparse
from pytorch_msssim import SSIM, MS_SSIM
import torch
import torchvision as tv
import torchvision.transforms as transforms
import torch.nn as nn
from torch.autograd import Variable
from torchvision.utils import save_image
from torch.utils.data import SubsetRandomSampler
from tensorboardX import SummaryWriter
from adv_pred_model import AdversaryModelPred
from AEs import Autoencoder, MinimalDecoder
from dataset_utils import ImageTensorFolder, TensorPredictionData
import os
import numpy as np
from shutil import copytree, copy2
from glob import glob
# from generate_ir import get_client_model
random_seed = 100
torch.manual_seed(random_seed)
np.random.seed(random_seed)
FAIRFACE_SPLIT_LAYER_INPUT_NC = {1: 64, 2: 64, 3: 64, 4: 64, 5: 128, 6:256, 7:512}
def apply_transform(batch_size, train_split, goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task):
if task == 'gender':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_gender=True, tns_fmt=tns_fmt)
elif task == 'smile':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_smile=True, tns_fmt=tns_fmt)
elif task == 'race':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_race=True, tns_fmt=tns_fmt)
else:
trainTransform = transforms.Compose([transforms.ToTensor(),])
dataset = ImageTensorFolder(img_path=goal_data_dir, tensor_path=tensor_data_dir,
img_fmt=img_fmt, tns_fmt=tns_fmt, transform=trainTransform)
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(train_split * dataset_size))
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, test_indices = indices[:split], indices[split:]
train_sampler = SubsetRandomSampler(train_indices)
test_sampler = SubsetRandomSampler(test_indices)
trainloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=train_sampler)
testloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=test_sampler)
return trainloader, testloader
def denormalize(img, dataset="imagenet"):
"""
data is normalized with mu and sigma, this function puts it back
"""
if dataset == "cifar10":
c_std = [0.247, 0.243, 0.261]
c_mean = [0.4914, 0.4822, 0.4466]
elif dataset == "imagenet":
c_std = [0.229, 0.224, 0.225]
c_mean = [0.485, 0.456, 0.406]
for i in [0, 1, 2]:
img[i] = img[i] * c_std[i] + c_mean[i]
return img
def save_images(input_imgs, output_imgs, epoch, path, img_nums, offset=0, batch_size=64):
"""
"""
input_prefix = "inp_"
output_prefix = "out_"
out_folder = "{}/{}".format(path, epoch)
out_folder = os.path.abspath(out_folder)
if not os.path.isdir(out_folder):
os.makedirs(out_folder)
for img_idx in range(input_imgs.shape[0]):
inp_img_path = "{}/{}{}.jpg".format(out_folder, input_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
out_img_path = "{}/{}{}.jpg".format(out_folder, output_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
#inp_img = denormalize(input_imgs[img_idx])
#out_img = denormalize(output_imgs[img_idx])
save_image(input_imgs[img_idx], inp_img_path)
save_image(output_imgs[img_idx], out_img_path)
def copy_source_code(path):
if not os.path.isdir(path):
os.makedirs(path)
for file_ in glob(r'./*.py'):
copy2(file_, path)
copytree("clients/", path + "clients/")
def main(architecture, task, goal_data_dir, tensor_data_dir, img_fmt, tns_fmt,
loss_fn, train_split, batch_size, num_epochs, train_output_freq,
test_output_freq, split_layer, gpu_id):
device = torch.device('cuda:{}'.format(gpu_id)) if torch.cuda.is_available() else torch.device('cpu')
print("Using device as {}".format(device))
output_path = "./output/{}".format(architecture)
train_output_path = "{}/train".format(output_path)
test_output_path = "{}/test".format(output_path)
tensorboard_path = "{}/tensorboard/".format(output_path)
source_code_path = "{}/sourcecode/".format(output_path)
model_path = "{}/model.pt".format(output_path)
writer = SummaryWriter(logdir=tensorboard_path)
if task == 'gender' or task == 'smile' or task == 'race':
decoder = AdversaryModelPred(split_layer).to(device)
train_loss_fn = nn.CrossEntropyLoss()
else:
input_nc = FAIRFACE_SPLIT_LAYER_INPUT_NC[split_layer]
decoder = Autoencoder(input_nc=input_nc, output_nc=3, split_layer=split_layer).to(device)
#decoder = MinimalDecoder(input_nc=64, output_nc=3, input_dim=112, output_dim=224).to(device)
torch.save(decoder.state_dict(), model_path)
# decoder.load_state_dict(torch.load(model_path))
# copy_source_code(source_code_path)
if (loss_fn.lower() == 'mse'):
train_loss_fn = nn.MSELoss()
elif (loss_fn.lower() == 'ssim'):
ssim_value = SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
elif (loss_fn.lower() == 'ms_ssim'):
ssim_loss_fn = MS_SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
else:
raise ValueError("Loss function {} not recognized".format(loss_fn.lower()))
trainloader, testloader = apply_transform(batch_size, train_split,
goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task)
optimizer = torch.optim.Adam(decoder.parameters(), lr=1e-3)
#client_model = get_client_model().to(device)
#for param in client_model.parameters():
# param.requires_grad = False
round_ = 0
for epoch in range(round_ * num_epochs, (round_ + 1) * num_epochs):
for num, data in enumerate(trainloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race': # for gender 'img' is actually male/female label
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
reconstruction_loss = train_loss_fn(output, img)
train_loss = reconstruction_loss
writer.add_scalar('loss/train', train_loss.item(), len(trainloader) * epoch + num)
writer.add_scalar('loss/train_loss/reconstruction', reconstruction_loss.item(), len(trainloader) * epoch + num)
optimizer.zero_grad()
train_loss.backward()
optimizer.step()
if (epoch + 1) % train_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
save_images(img, output, epoch, train_output_path, img_nums) # offset=0, batch_size=batch_size)
pred_correct = 0
total = 0
for num, data in enumerate(testloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
if task == 'gender' or task == 'smile' or task == 'race':
pred_correct += (output.argmax(dim=1) == img).sum().item()
total += int(ir.shape[0])
reconstruction_loss = train_loss_fn(output, img)
test_loss = reconstruction_loss
writer.add_scalar('loss/test', test_loss.item(), len(testloader) * epoch + num)
writer.add_scalar('loss/test_loss/reconstruction', reconstruction_loss.item(), len(testloader) * epoch + num)
pred_acc = pred_correct / total
writer.add_scalar("test/pred_accuracy", pred_acc, len(testloader) * epoch)
if (epoch + 1) % test_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
for num, data in enumerate(testloader):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output_imgs = decoder(ir)
save_images(img, output_imgs, epoch, test_output_path, img_nums) # offset=num, batch_size=batch_size)
for name, param in decoder.named_parameters():
writer.add_histogram("params/{}".format(name), param.clone().cpu().data.numpy(), epoch)
model_path = "{}/model_{}.pt".format(output_path, epoch)
torch.save(decoder.state_dict(), model_path)
print("epoch [{}/{}], train_loss {:.4f}, test_loss {:.4f}, pred_acc {:.4f}".format(epoch + 1,
num_epochs, train_loss.item(), test_loss.item(), pred_acc))
writer.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--architecture', type=str, default='pruning-network-fairface-reconstruction-split6-ratio0.3-1')
parser.add_argument('--split_layer', type=int, default=1)
parser.add_argument('--tensor_data_dir', type=str, default='/home/emizhang/chap/experiments/pruning_network_fairface_resnet18_scratch_split5_ratio0.9_data_2/challenge',
help='intermediate image data directory')
parser.add_argument('--goal_data_dir', type=str, default='/mas/camera/Datasets/Faces/fairface/val/',
help='training image data directory or data labels')
parser.add_argument('--task', type=str, default="data", help='choose between data, and gender (for celeba)')
parser.add_argument('--img_fmt', type=str, default='jpg', help='format of training images, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--tns_fmt', type=str, default='pt', help='format of tensor data, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--train_split', type=float, default=0.9, help='ratio of data to use for training')
parser.add_argument('--loss_fn', type=str, default='mse', help='loss function to use for training, one of mse, ssim, ms_ssim')
| parser.add_argument('--num_epochs', type=int, default=500, help='maximum number of epochs')
parser.add_argument("--train_output_freq", type=int, default=10, help="interval between saving model weights")
parser.add_argument("--test_output_freq", type=int, default=50, help="interval between saving model weights")
parser.add_argument("--gpu_id", type=int, default=0, help="gpu id to use if training on cuda")
opt = parser.parse_args()
main(architecture=opt.architecture,
task=opt.task,
goal_data_dir=opt.goal_data_dir,
tensor_data_dir=opt.tensor_data_dir,
img_fmt=opt.img_fmt,
tns_fmt=opt.tns_fmt,
loss_fn=opt.loss_fn,
train_split=opt.train_split,
batch_size=opt.batch_size,
num_epochs=opt.num_epochs,
train_output_freq=opt.train_output_freq,
test_output_freq=opt.test_output_freq,
split_layer=opt.split_layer,
gpu_id=opt.gpu_id) | parser.add_argument('--batch_size', type=int, default=32, help='size of each image batch')
# parser.add_argument('--optimizer_pick', type=str, default="Adam", help='choose optimizer between Adam and SGD')
| random_line_split |
main.py | import argparse
from pytorch_msssim import SSIM, MS_SSIM
import torch
import torchvision as tv
import torchvision.transforms as transforms
import torch.nn as nn
from torch.autograd import Variable
from torchvision.utils import save_image
from torch.utils.data import SubsetRandomSampler
from tensorboardX import SummaryWriter
from adv_pred_model import AdversaryModelPred
from AEs import Autoencoder, MinimalDecoder
from dataset_utils import ImageTensorFolder, TensorPredictionData
import os
import numpy as np
from shutil import copytree, copy2
from glob import glob
# from generate_ir import get_client_model
random_seed = 100
torch.manual_seed(random_seed)
np.random.seed(random_seed)
FAIRFACE_SPLIT_LAYER_INPUT_NC = {1: 64, 2: 64, 3: 64, 4: 64, 5: 128, 6:256, 7:512}
def apply_transform(batch_size, train_split, goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task):
if task == 'gender':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_gender=True, tns_fmt=tns_fmt)
elif task == 'smile':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_smile=True, tns_fmt=tns_fmt)
elif task == 'race':
dataset = TensorPredictionData(tensor_data_dir, goal_data_dir,
pred_race=True, tns_fmt=tns_fmt)
else:
|
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.floor(train_split * dataset_size))
np.random.seed(random_seed)
np.random.shuffle(indices)
train_indices, test_indices = indices[:split], indices[split:]
train_sampler = SubsetRandomSampler(train_indices)
test_sampler = SubsetRandomSampler(test_indices)
trainloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=train_sampler)
testloader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False, num_workers=4,
sampler=test_sampler)
return trainloader, testloader
def denormalize(img, dataset="imagenet"):
"""
data is normalized with mu and sigma, this function puts it back
"""
if dataset == "cifar10":
c_std = [0.247, 0.243, 0.261]
c_mean = [0.4914, 0.4822, 0.4466]
elif dataset == "imagenet":
c_std = [0.229, 0.224, 0.225]
c_mean = [0.485, 0.456, 0.406]
for i in [0, 1, 2]:
img[i] = img[i] * c_std[i] + c_mean[i]
return img
def save_images(input_imgs, output_imgs, epoch, path, img_nums, offset=0, batch_size=64):
"""
"""
input_prefix = "inp_"
output_prefix = "out_"
out_folder = "{}/{}".format(path, epoch)
out_folder = os.path.abspath(out_folder)
if not os.path.isdir(out_folder):
os.makedirs(out_folder)
for img_idx in range(input_imgs.shape[0]):
inp_img_path = "{}/{}{}.jpg".format(out_folder, input_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
out_img_path = "{}/{}{}.jpg".format(out_folder, output_prefix, img_nums[img_idx]) # offset * batch_size + img_idx)
#inp_img = denormalize(input_imgs[img_idx])
#out_img = denormalize(output_imgs[img_idx])
save_image(input_imgs[img_idx], inp_img_path)
save_image(output_imgs[img_idx], out_img_path)
def copy_source_code(path):
if not os.path.isdir(path):
os.makedirs(path)
for file_ in glob(r'./*.py'):
copy2(file_, path)
copytree("clients/", path + "clients/")
def main(architecture, task, goal_data_dir, tensor_data_dir, img_fmt, tns_fmt,
loss_fn, train_split, batch_size, num_epochs, train_output_freq,
test_output_freq, split_layer, gpu_id):
device = torch.device('cuda:{}'.format(gpu_id)) if torch.cuda.is_available() else torch.device('cpu')
print("Using device as {}".format(device))
output_path = "./output/{}".format(architecture)
train_output_path = "{}/train".format(output_path)
test_output_path = "{}/test".format(output_path)
tensorboard_path = "{}/tensorboard/".format(output_path)
source_code_path = "{}/sourcecode/".format(output_path)
model_path = "{}/model.pt".format(output_path)
writer = SummaryWriter(logdir=tensorboard_path)
if task == 'gender' or task == 'smile' or task == 'race':
decoder = AdversaryModelPred(split_layer).to(device)
train_loss_fn = nn.CrossEntropyLoss()
else:
input_nc = FAIRFACE_SPLIT_LAYER_INPUT_NC[split_layer]
decoder = Autoencoder(input_nc=input_nc, output_nc=3, split_layer=split_layer).to(device)
#decoder = MinimalDecoder(input_nc=64, output_nc=3, input_dim=112, output_dim=224).to(device)
torch.save(decoder.state_dict(), model_path)
# decoder.load_state_dict(torch.load(model_path))
# copy_source_code(source_code_path)
if (loss_fn.lower() == 'mse'):
train_loss_fn = nn.MSELoss()
elif (loss_fn.lower() == 'ssim'):
ssim_value = SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
elif (loss_fn.lower() == 'ms_ssim'):
ssim_loss_fn = MS_SSIM(data_range=255, size_average=True, channel=3)
train_loss_fn = lambda x, y: 1 - ssim_value(x, y)
else:
raise ValueError("Loss function {} not recognized".format(loss_fn.lower()))
trainloader, testloader = apply_transform(batch_size, train_split,
goal_data_dir, tensor_data_dir,
img_fmt, tns_fmt, task)
optimizer = torch.optim.Adam(decoder.parameters(), lr=1e-3)
#client_model = get_client_model().to(device)
#for param in client_model.parameters():
# param.requires_grad = False
round_ = 0
for epoch in range(round_ * num_epochs, (round_ + 1) * num_epochs):
for num, data in enumerate(trainloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race': # for gender 'img' is actually male/female label
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
reconstruction_loss = train_loss_fn(output, img)
train_loss = reconstruction_loss
writer.add_scalar('loss/train', train_loss.item(), len(trainloader) * epoch + num)
writer.add_scalar('loss/train_loss/reconstruction', reconstruction_loss.item(), len(trainloader) * epoch + num)
optimizer.zero_grad()
train_loss.backward()
optimizer.step()
if (epoch + 1) % train_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
save_images(img, output, epoch, train_output_path, img_nums) # offset=0, batch_size=batch_size)
pred_correct = 0
total = 0
for num, data in enumerate(testloader, 1):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output = decoder(ir)
if task == 'gender' or task == 'smile' or task == 'race':
pred_correct += (output.argmax(dim=1) == img).sum().item()
total += int(ir.shape[0])
reconstruction_loss = train_loss_fn(output, img)
test_loss = reconstruction_loss
writer.add_scalar('loss/test', test_loss.item(), len(testloader) * epoch + num)
writer.add_scalar('loss/test_loss/reconstruction', reconstruction_loss.item(), len(testloader) * epoch + num)
pred_acc = pred_correct / total
writer.add_scalar("test/pred_accuracy", pred_acc, len(testloader) * epoch)
if (epoch + 1) % test_output_freq == 0 and task != 'gender' and task != 'smile' and task != 'race':
for num, data in enumerate(testloader):
img, ir, img_nums = data
if task != 'gender' and task != 'smile' and task != 'race':
img, ir = img.type(torch.FloatTensor), ir.type(torch.FloatTensor)
img, ir = Variable(img).to(device), Variable(ir).to(device)
#ir = client_model(img)
output_imgs = decoder(ir)
save_images(img, output_imgs, epoch, test_output_path, img_nums) # offset=num, batch_size=batch_size)
for name, param in decoder.named_parameters():
writer.add_histogram("params/{}".format(name), param.clone().cpu().data.numpy(), epoch)
model_path = "{}/model_{}.pt".format(output_path, epoch)
torch.save(decoder.state_dict(), model_path)
print("epoch [{}/{}], train_loss {:.4f}, test_loss {:.4f}, pred_acc {:.4f}".format(epoch + 1,
num_epochs, train_loss.item(), test_loss.item(), pred_acc))
writer.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--architecture', type=str, default='pruning-network-fairface-reconstruction-split6-ratio0.3-1')
parser.add_argument('--split_layer', type=int, default=1)
parser.add_argument('--tensor_data_dir', type=str, default='/home/emizhang/chap/experiments/pruning_network_fairface_resnet18_scratch_split5_ratio0.9_data_2/challenge',
help='intermediate image data directory')
parser.add_argument('--goal_data_dir', type=str, default='/mas/camera/Datasets/Faces/fairface/val/',
help='training image data directory or data labels')
parser.add_argument('--task', type=str, default="data", help='choose between data, and gender (for celeba)')
parser.add_argument('--img_fmt', type=str, default='jpg', help='format of training images, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--tns_fmt', type=str, default='pt', help='format of tensor data, one of png, jpg, jpeg, npy, pt')
parser.add_argument('--train_split', type=float, default=0.9, help='ratio of data to use for training')
parser.add_argument('--loss_fn', type=str, default='mse', help='loss function to use for training, one of mse, ssim, ms_ssim')
parser.add_argument('--batch_size', type=int, default=32, help='size of each image batch')
# parser.add_argument('--optimizer_pick', type=str, default="Adam", help='choose optimizer between Adam and SGD')
parser.add_argument('--num_epochs', type=int, default=500, help='maximum number of epochs')
parser.add_argument("--train_output_freq", type=int, default=10, help="interval between saving model weights")
parser.add_argument("--test_output_freq", type=int, default=50, help="interval between saving model weights")
parser.add_argument("--gpu_id", type=int, default=0, help="gpu id to use if training on cuda")
opt = parser.parse_args()
main(architecture=opt.architecture,
task=opt.task,
goal_data_dir=opt.goal_data_dir,
tensor_data_dir=opt.tensor_data_dir,
img_fmt=opt.img_fmt,
tns_fmt=opt.tns_fmt,
loss_fn=opt.loss_fn,
train_split=opt.train_split,
batch_size=opt.batch_size,
num_epochs=opt.num_epochs,
train_output_freq=opt.train_output_freq,
test_output_freq=opt.test_output_freq,
split_layer=opt.split_layer,
gpu_id=opt.gpu_id)
| trainTransform = transforms.Compose([transforms.ToTensor(),])
dataset = ImageTensorFolder(img_path=goal_data_dir, tensor_path=tensor_data_dir,
img_fmt=img_fmt, tns_fmt=tns_fmt, transform=trainTransform) | conditional_block |
rsa-fdh-vrf.rs | // This crate implements RSA-FDH-VRF based on section 4 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
// The ciphersuite is RSA-FDH-VRF-SHA256, suite string can be changed if other hash function is desired
// The step comments refer to the corresponding steps in the IETF pseudocode for comparison with hacspec
use hacspec_lib::*;
use hacspec_sha256::*;
use hacspec_rsa_pkcs1::*;
bytes!(IntByte, 1);
#[rustfmt::skip]
const ONE: IntByte = IntByte(secret_array!(U8, [0x01u8]));
#[rustfmt::skip]
const TWO: IntByte = IntByte(secret_array!(U8, [0x02u8]));
const SUITE_STRING: IntByte = ONE;
// Helper function used by prove and verify to compute mgf1 of alpha
// mgf_salt currently part of cipher suite, could be optional input
fn vrf_mgf1(n: RSAInt, alpha: &ByteSeq) -> ByteSeqResult {
let mgf_salt1 = i2osp(RSAInt::from_literal(BYTE_SIZE as u128), 4u32)?;
let mgf_salt2 = i2osp(n, BYTE_SIZE)?;
let mgf_salt = mgf_salt1.concat(&mgf_salt2);
let mgf_string = SUITE_STRING
.concat(&ONE)
.concat(&mgf_salt)
.concat(alpha);
let mgf = mgf1(&mgf_string, BYTE_SIZE as usize - 1usize)?;
ByteSeqResult::Ok(mgf)
}
// Based on section 4.1 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn prove(sk: SK, alpha: &ByteSeq) -> ByteSeqResult {
let (n, _d) = sk.clone();
// STEP 1 and 2
let em = vrf_mgf1(n, alpha)?;
// STEP 3
let m = os2ip(&em);
// STEP 4
let s = rsasp1(sk, m)?;
// STEP 5 and 6
i2osp(s, BYTE_SIZE) | // STEP 1 and 2
let hash_string = SUITE_STRING.concat(&TWO.concat(pi_string));
// STEP 3
ByteSeqResult::Ok(sha256(&hash_string).slice(0,32))
}
// Based on section 4.3 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn verify(pk: PK, alpha: &ByteSeq, pi_string: &ByteSeq) -> ByteSeqResult {
let (n, _e) = pk.clone();
// STEP 1
let s = os2ip(pi_string);
// STEP 2
let m = rsavp1(pk, s)?;
// STEP 3 and 4
let em_prime = vrf_mgf1(n, alpha)?;
// STEP 5
let m_prime = os2ip(&em_prime);
// STEP 6
if m == m_prime {
proof_to_hash(pi_string)
} else {
ByteSeqResult::Err(Error::VerificationFailed)
}
}
#[cfg(test)]
mod tests {
use super::*;
use num_bigint::{BigInt,Sign};
use glass_pumpkin::prime;
use quickcheck::*;
// RSA key generation
// Taken from https://asecuritysite.com/rust/rsa01/
fn modinv(a0: BigInt, m0: BigInt) -> BigInt {
if m0 == one() { return one() }
let (mut a, mut m, mut x0, mut inv) =
(a0, m0.clone(), zero(), one());
while a > one() {
inv -= (&a / &m) * &x0;
a = &a % &m;
std::mem::swap(&mut a, &mut m);
std::mem::swap(&mut x0, &mut inv)
}
if inv < zero() { inv += m0 }
inv
}
fn rsa_key_gen() -> Keyp {
let p = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let q = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let n = RSAInt::from(p.clone()* q.clone());
let e = BigInt::parse_bytes(b"65537", 10).unwrap();
let totient = (p - BigInt::one()) * (q - BigInt::one());
let d = modinv(e.clone(), totient.clone());
Keyp { n, d: RSAInt::from(d), e: RSAInt::from(e) }
}
// quickcheck generation
#[derive(Clone, Copy, Debug)]
struct Keyp {n: RSAInt, d: RSAInt, e: RSAInt}
#[derive(Clone, Copy, Debug)]
struct Wrapper(RSAInt);
impl Arbitrary for Wrapper {
fn arbitrary(g: &mut Gen) -> Wrapper {
const NUM_BYTES: u32 = 127;
let mut a: [u8; NUM_BYTES as usize] = [0; NUM_BYTES as usize];
for i in 0..NUM_BYTES as usize {
a[i] = u8::arbitrary(g);
}
Wrapper(RSAInt::from_byte_seq_be(&Seq::<U8>::from_public_slice(&a)))
}
}
impl Arbitrary for Keyp {
fn arbitrary(_g: &mut Gen) -> Keyp {
rsa_key_gen()
}
}
// quickcheck tests
const NUM_TESTS: u64 = 5;
#[test]
fn test_rsafdhvrf() {
fn rsafdhvrf(kp: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
beta_prime == beta
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(rsafdhvrf as fn(Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_rsafdhvrf() {
fn neg_rsafdhvrf(kp: Keyp, fake: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
match verify((fake.n, fake.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_rsafdhvrf as fn(Keyp, Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_alpha_rsafdhvrf() {
fn neg_alpha_rsafdhvrf(
kp: Keyp, alpha: Wrapper, fake_alpha: Wrapper
) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let fake_alpha = i2osp(fake_alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &fake_alpha).unwrap();
match verify((kp.n, kp.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_alpha_rsafdhvrf as
fn(Keyp, Wrapper, Wrapper) -> bool);
}
// Test vector generation
// Strings should be given in hexadecimal
fn generate_test_vector(
alpha: &str, kp: Keyp
) -> Result<(String, String), Error> {
let alpha = ByteSeq::from_hex(&alpha);
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
let n = i2osp(kp.n, BYTE_SIZE)?;
let d = i2osp(kp.d, BYTE_SIZE)?;
let e = i2osp(kp.e, BYTE_SIZE)?;
println!("n:\n{}", ByteSeq::to_hex(&n));
println!("d:\n{}", ByteSeq::to_hex(&d));
println!("e:\n{}", ByteSeq::to_hex(&e));
println!("alpha:\n{}", ByteSeq::to_hex(&alpha));
println!("pi:\n{}", ByteSeq::to_hex(&pi));
println!("beta:\n{}", ByteSeq::to_hex(&beta));
Result::Ok((ByteSeq::to_hex(&pi), ByteSeq::to_hex(&beta)))
}
// Run with cargo test test_vector -- --ignored --nocapture in this crate
#[test]
#[ignore]
fn test_vector() {
// Pass alpha in hexadecimal
let kp = rsa_key_gen();
// let kp = get_test_key();
assert!(!generate_test_vector("af82", kp).is_err());
}
fn get_test_key() -> Keyp {
let n = RSAInt::from_hex("64f70acdc41c0ee7cb4961760368e34889c058ad3c7e578e8e72ed0d2fd1c7cfbb8beffd107204d544919db9d2470669c969e178d4deb8393daec4584ca9f162805c9ba46e617d89d4ab6480b0873b1cb92cf7232c88f013931ffe30f8ddf2cddbff4402bcb721985d2bb2eee5382dd09210b5d1da6b6b8207fe3e526de54efb55b56cd52d97cd77df6315569d5b59823c85ad99c57ad2959ec7d12cdf0c3e66cc57eaa1e644da9b0ca69b0df43945b0bd88ac66903ec98fe0e770b683ca7a332e69cba9229115a5295273aeeb4af2662063a312cbb4b871323f71888fd39557a5f4610ea7a590b021d43e5a89b69de68c728ce147f2743e0b97a5b3eb0d6ab1");
let d = RSAInt::from_hex("39134e9033a488e8900ad3859b37d804519ae2864c04400ade8c2965a2fabc31ba9bc8f70e2ce67e895ca8053bd1dad6427e106ff626518e4a4859c670d0411ca5e3b438a80d84a23e0f05a99a2158514c7d16d8537cb5fadad8e3215c0e5c0bf3a9c210aa0dfc77dd73ae9b4e090c1d33f52e538b5dde508ba43626f2e906546773ba7401aa6b68ab1151da528336ddafc9a6f2995d89ec282bc555fe41e776216576c0aafb66ef00b718e6c62afd51faf82e7b5a1d430591465b2188fa286ce778eb6a1b346b58331c7820b4142fb808e59ec910aa9b6d340dea673ae7be2d9e1fa91494e40372bcfb92da5fe236dc93b30b0a59b20af8edf3a10e3ea6dfe1");
let e = RSAInt::from_hex("010001");
Keyp {n, d, e}
}
// Note that the test vectors have been generated using this code
#[test]
fn test_empty() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("");
let pi = ByteSeq::from_hex("406581e350c601d6d7518ac928f6753929c56a7480a4a3d011ed65e5f61ca033accd45c03cac2dddcd61b909cedd0df517a1bba4705c9d04a2a8c7d735d24bc3e59b263cc8c18d5f6e2712747d809df7868ac720f90ffd3d7c7b78f3d75f14a9755ea8138804806f4739429d1a313b3abaaf89ce97fbdf10bc01d66723b0b38ad5dc51c87e5f852e2c8fc923cf0f9c86bb7bf8ae808532fcb8a981338d5b13278e66e19915e41c6fbd09f1fce3300da422fbf46f706d1c79f298c740926e14069f83dae52a25bad684e420ad5fc8af3b02e0cf3f79782fb6e7e65abe5e1f6b4fe41f20339b2986fe39f7ce4ceb9c2490d5229e9bfda93150d6800880c411daae");
let beta = ByteSeq::from_hex("d065ca3be8716236e99f64139adf481090f0a0c839f86ffda3c4fad948166af0");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_72() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("72");
let pi = ByteSeq::from_hex("3d396dc417bee1975ff63c4e8b43b9417be03a91d5eb47309790d74100271342d6dc11511333ec4bc42aea3e02640dc870665044e85085c3dea43eedeb266d9b2de3824aca18b8de3e4d198bde808d80a2a10f0f4bd73fbc7cc36da44cb68af3161b2264e737dcd2d669252abb29f275c971ff6b8234876b7d1ff3d4d05197fe563d6ae92685dccbbbb689b4837da42fe47433019d9bfc50001b11708bf9f656532febf674119c0d67e27714195722fd977e0fc35d7325b5fb3ecb54df53986e01a809d0e5ec442fdacc3d271e7ab5480b8eb18f25cd3baf6a47abc6bf027e8dedef911f2bec367fa5d65e106f314b64cc1d9534d4f26fa034035a43852be66a");
let beta = ByteSeq::from_hex("a229210b84f0bb43b296075f226dee433cf2727cd6c2e4871afdeb77414f6a47");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_af82() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("af82");
let pi = ByteSeq::from_hex("57b07056abc6851330b21ae890fd43ea53b4435319748cf8dba82148ee381c11d21a8660a8714aa59abaac2b7d0141ac4e85b1113b144328eb11461a7f26086896036fc49579a58a2516cecd274946f8dd82fef31652dfe2e2b495966cd6193a1bd197ef6e3472f30bfe14827dd968ea3bf8310dc002a765a0d54b12c3c9627309800b74701a3f7d07a02db0a6ca3a639e60726059727313818a6b671bebe18f078713ced33e50acbfd1e661ec89c5e82b8e1e07f6293f45474aa57d084da46a2437932491d92a87b3393bb0ec62254a3eca19e1004756867839671f84f7a2378097f334832f4aa0442fc5f8637fb2220bb3f2dca247927f0d49ae1c1b2e7455");
let beta = ByteSeq::from_hex("ebc5582b6aaf23c424ec1c74e1b8250327c957967fa37566284dac8400e62032");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
} | }
// Based on section 4.2 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn proof_to_hash(pi_string: &ByteSeq) -> ByteSeqResult { | random_line_split |
rsa-fdh-vrf.rs | // This crate implements RSA-FDH-VRF based on section 4 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
// The ciphersuite is RSA-FDH-VRF-SHA256, suite string can be changed if other hash function is desired
// The step comments refer to the corresponding steps in the IETF pseudocode for comparison with hacspec
use hacspec_lib::*;
use hacspec_sha256::*;
use hacspec_rsa_pkcs1::*;
bytes!(IntByte, 1);
#[rustfmt::skip]
const ONE: IntByte = IntByte(secret_array!(U8, [0x01u8]));
#[rustfmt::skip]
const TWO: IntByte = IntByte(secret_array!(U8, [0x02u8]));
const SUITE_STRING: IntByte = ONE;
// Helper function used by prove and verify to compute mgf1 of alpha
// mgf_salt currently part of cipher suite, could be optional input
fn vrf_mgf1(n: RSAInt, alpha: &ByteSeq) -> ByteSeqResult {
let mgf_salt1 = i2osp(RSAInt::from_literal(BYTE_SIZE as u128), 4u32)?;
let mgf_salt2 = i2osp(n, BYTE_SIZE)?;
let mgf_salt = mgf_salt1.concat(&mgf_salt2);
let mgf_string = SUITE_STRING
.concat(&ONE)
.concat(&mgf_salt)
.concat(alpha);
let mgf = mgf1(&mgf_string, BYTE_SIZE as usize - 1usize)?;
ByteSeqResult::Ok(mgf)
}
// Based on section 4.1 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn | (sk: SK, alpha: &ByteSeq) -> ByteSeqResult {
let (n, _d) = sk.clone();
// STEP 1 and 2
let em = vrf_mgf1(n, alpha)?;
// STEP 3
let m = os2ip(&em);
// STEP 4
let s = rsasp1(sk, m)?;
// STEP 5 and 6
i2osp(s, BYTE_SIZE)
}
// Based on section 4.2 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn proof_to_hash(pi_string: &ByteSeq) -> ByteSeqResult {
// STEP 1 and 2
let hash_string = SUITE_STRING.concat(&TWO.concat(pi_string));
// STEP 3
ByteSeqResult::Ok(sha256(&hash_string).slice(0,32))
}
// Based on section 4.3 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn verify(pk: PK, alpha: &ByteSeq, pi_string: &ByteSeq) -> ByteSeqResult {
let (n, _e) = pk.clone();
// STEP 1
let s = os2ip(pi_string);
// STEP 2
let m = rsavp1(pk, s)?;
// STEP 3 and 4
let em_prime = vrf_mgf1(n, alpha)?;
// STEP 5
let m_prime = os2ip(&em_prime);
// STEP 6
if m == m_prime {
proof_to_hash(pi_string)
} else {
ByteSeqResult::Err(Error::VerificationFailed)
}
}
#[cfg(test)]
mod tests {
use super::*;
use num_bigint::{BigInt,Sign};
use glass_pumpkin::prime;
use quickcheck::*;
// RSA key generation
// Taken from https://asecuritysite.com/rust/rsa01/
fn modinv(a0: BigInt, m0: BigInt) -> BigInt {
if m0 == one() { return one() }
let (mut a, mut m, mut x0, mut inv) =
(a0, m0.clone(), zero(), one());
while a > one() {
inv -= (&a / &m) * &x0;
a = &a % &m;
std::mem::swap(&mut a, &mut m);
std::mem::swap(&mut x0, &mut inv)
}
if inv < zero() { inv += m0 }
inv
}
fn rsa_key_gen() -> Keyp {
let p = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let q = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let n = RSAInt::from(p.clone()* q.clone());
let e = BigInt::parse_bytes(b"65537", 10).unwrap();
let totient = (p - BigInt::one()) * (q - BigInt::one());
let d = modinv(e.clone(), totient.clone());
Keyp { n, d: RSAInt::from(d), e: RSAInt::from(e) }
}
// quickcheck generation
#[derive(Clone, Copy, Debug)]
struct Keyp {n: RSAInt, d: RSAInt, e: RSAInt}
#[derive(Clone, Copy, Debug)]
struct Wrapper(RSAInt);
impl Arbitrary for Wrapper {
fn arbitrary(g: &mut Gen) -> Wrapper {
const NUM_BYTES: u32 = 127;
let mut a: [u8; NUM_BYTES as usize] = [0; NUM_BYTES as usize];
for i in 0..NUM_BYTES as usize {
a[i] = u8::arbitrary(g);
}
Wrapper(RSAInt::from_byte_seq_be(&Seq::<U8>::from_public_slice(&a)))
}
}
impl Arbitrary for Keyp {
fn arbitrary(_g: &mut Gen) -> Keyp {
rsa_key_gen()
}
}
// quickcheck tests
const NUM_TESTS: u64 = 5;
#[test]
fn test_rsafdhvrf() {
fn rsafdhvrf(kp: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
beta_prime == beta
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(rsafdhvrf as fn(Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_rsafdhvrf() {
fn neg_rsafdhvrf(kp: Keyp, fake: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
match verify((fake.n, fake.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_rsafdhvrf as fn(Keyp, Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_alpha_rsafdhvrf() {
fn neg_alpha_rsafdhvrf(
kp: Keyp, alpha: Wrapper, fake_alpha: Wrapper
) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let fake_alpha = i2osp(fake_alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &fake_alpha).unwrap();
match verify((kp.n, kp.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_alpha_rsafdhvrf as
fn(Keyp, Wrapper, Wrapper) -> bool);
}
// Test vector generation
// Strings should be given in hexadecimal
fn generate_test_vector(
alpha: &str, kp: Keyp
) -> Result<(String, String), Error> {
let alpha = ByteSeq::from_hex(&alpha);
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
let n = i2osp(kp.n, BYTE_SIZE)?;
let d = i2osp(kp.d, BYTE_SIZE)?;
let e = i2osp(kp.e, BYTE_SIZE)?;
println!("n:\n{}", ByteSeq::to_hex(&n));
println!("d:\n{}", ByteSeq::to_hex(&d));
println!("e:\n{}", ByteSeq::to_hex(&e));
println!("alpha:\n{}", ByteSeq::to_hex(&alpha));
println!("pi:\n{}", ByteSeq::to_hex(&pi));
println!("beta:\n{}", ByteSeq::to_hex(&beta));
Result::Ok((ByteSeq::to_hex(&pi), ByteSeq::to_hex(&beta)))
}
// Run with cargo test test_vector -- --ignored --nocapture in this crate
#[test]
#[ignore]
fn test_vector() {
// Pass alpha in hexadecimal
let kp = rsa_key_gen();
// let kp = get_test_key();
assert!(!generate_test_vector("af82", kp).is_err());
}
fn get_test_key() -> Keyp {
let n = RSAInt::from_hex("64f70acdc41c0ee7cb4961760368e34889c058ad3c7e578e8e72ed0d2fd1c7cfbb8beffd107204d544919db9d2470669c969e178d4deb8393daec4584ca9f162805c9ba46e617d89d4ab6480b0873b1cb92cf7232c88f013931ffe30f8ddf2cddbff4402bcb721985d2bb2eee5382dd09210b5d1da6b6b8207fe3e526de54efb55b56cd52d97cd77df6315569d5b59823c85ad99c57ad2959ec7d12cdf0c3e66cc57eaa1e644da9b0ca69b0df43945b0bd88ac66903ec98fe0e770b683ca7a332e69cba9229115a5295273aeeb4af2662063a312cbb4b871323f71888fd39557a5f4610ea7a590b021d43e5a89b69de68c728ce147f2743e0b97a5b3eb0d6ab1");
let d = RSAInt::from_hex("39134e9033a488e8900ad3859b37d804519ae2864c04400ade8c2965a2fabc31ba9bc8f70e2ce67e895ca8053bd1dad6427e106ff626518e4a4859c670d0411ca5e3b438a80d84a23e0f05a99a2158514c7d16d8537cb5fadad8e3215c0e5c0bf3a9c210aa0dfc77dd73ae9b4e090c1d33f52e538b5dde508ba43626f2e906546773ba7401aa6b68ab1151da528336ddafc9a6f2995d89ec282bc555fe41e776216576c0aafb66ef00b718e6c62afd51faf82e7b5a1d430591465b2188fa286ce778eb6a1b346b58331c7820b4142fb808e59ec910aa9b6d340dea673ae7be2d9e1fa91494e40372bcfb92da5fe236dc93b30b0a59b20af8edf3a10e3ea6dfe1");
let e = RSAInt::from_hex("010001");
Keyp {n, d, e}
}
// Note that the test vectors have been generated using this code
#[test]
fn test_empty() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("");
let pi = ByteSeq::from_hex("406581e350c601d6d7518ac928f6753929c56a7480a4a3d011ed65e5f61ca033accd45c03cac2dddcd61b909cedd0df517a1bba4705c9d04a2a8c7d735d24bc3e59b263cc8c18d5f6e2712747d809df7868ac720f90ffd3d7c7b78f3d75f14a9755ea8138804806f4739429d1a313b3abaaf89ce97fbdf10bc01d66723b0b38ad5dc51c87e5f852e2c8fc923cf0f9c86bb7bf8ae808532fcb8a981338d5b13278e66e19915e41c6fbd09f1fce3300da422fbf46f706d1c79f298c740926e14069f83dae52a25bad684e420ad5fc8af3b02e0cf3f79782fb6e7e65abe5e1f6b4fe41f20339b2986fe39f7ce4ceb9c2490d5229e9bfda93150d6800880c411daae");
let beta = ByteSeq::from_hex("d065ca3be8716236e99f64139adf481090f0a0c839f86ffda3c4fad948166af0");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_72() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("72");
let pi = ByteSeq::from_hex("3d396dc417bee1975ff63c4e8b43b9417be03a91d5eb47309790d74100271342d6dc11511333ec4bc42aea3e02640dc870665044e85085c3dea43eedeb266d9b2de3824aca18b8de3e4d198bde808d80a2a10f0f4bd73fbc7cc36da44cb68af3161b2264e737dcd2d669252abb29f275c971ff6b8234876b7d1ff3d4d05197fe563d6ae92685dccbbbb689b4837da42fe47433019d9bfc50001b11708bf9f656532febf674119c0d67e27714195722fd977e0fc35d7325b5fb3ecb54df53986e01a809d0e5ec442fdacc3d271e7ab5480b8eb18f25cd3baf6a47abc6bf027e8dedef911f2bec367fa5d65e106f314b64cc1d9534d4f26fa034035a43852be66a");
let beta = ByteSeq::from_hex("a229210b84f0bb43b296075f226dee433cf2727cd6c2e4871afdeb77414f6a47");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_af82() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("af82");
let pi = ByteSeq::from_hex("57b07056abc6851330b21ae890fd43ea53b4435319748cf8dba82148ee381c11d21a8660a8714aa59abaac2b7d0141ac4e85b1113b144328eb11461a7f26086896036fc49579a58a2516cecd274946f8dd82fef31652dfe2e2b495966cd6193a1bd197ef6e3472f30bfe14827dd968ea3bf8310dc002a765a0d54b12c3c9627309800b74701a3f7d07a02db0a6ca3a639e60726059727313818a6b671bebe18f078713ced33e50acbfd1e661ec89c5e82b8e1e07f6293f45474aa57d084da46a2437932491d92a87b3393bb0ec62254a3eca19e1004756867839671f84f7a2378097f334832f4aa0442fc5f8637fb2220bb3f2dca247927f0d49ae1c1b2e7455");
let beta = ByteSeq::from_hex("ebc5582b6aaf23c424ec1c74e1b8250327c957967fa37566284dac8400e62032");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
}
| prove | identifier_name |
rsa-fdh-vrf.rs | // This crate implements RSA-FDH-VRF based on section 4 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
// The ciphersuite is RSA-FDH-VRF-SHA256, suite string can be changed if other hash function is desired
// The step comments refer to the corresponding steps in the IETF pseudocode for comparison with hacspec
use hacspec_lib::*;
use hacspec_sha256::*;
use hacspec_rsa_pkcs1::*;
bytes!(IntByte, 1);
#[rustfmt::skip]
const ONE: IntByte = IntByte(secret_array!(U8, [0x01u8]));
#[rustfmt::skip]
const TWO: IntByte = IntByte(secret_array!(U8, [0x02u8]));
const SUITE_STRING: IntByte = ONE;
// Helper function used by prove and verify to compute mgf1 of alpha
// mgf_salt currently part of cipher suite, could be optional input
fn vrf_mgf1(n: RSAInt, alpha: &ByteSeq) -> ByteSeqResult {
let mgf_salt1 = i2osp(RSAInt::from_literal(BYTE_SIZE as u128), 4u32)?;
let mgf_salt2 = i2osp(n, BYTE_SIZE)?;
let mgf_salt = mgf_salt1.concat(&mgf_salt2);
let mgf_string = SUITE_STRING
.concat(&ONE)
.concat(&mgf_salt)
.concat(alpha);
let mgf = mgf1(&mgf_string, BYTE_SIZE as usize - 1usize)?;
ByteSeqResult::Ok(mgf)
}
// Based on section 4.1 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn prove(sk: SK, alpha: &ByteSeq) -> ByteSeqResult {
let (n, _d) = sk.clone();
// STEP 1 and 2
let em = vrf_mgf1(n, alpha)?;
// STEP 3
let m = os2ip(&em);
// STEP 4
let s = rsasp1(sk, m)?;
// STEP 5 and 6
i2osp(s, BYTE_SIZE)
}
// Based on section 4.2 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn proof_to_hash(pi_string: &ByteSeq) -> ByteSeqResult |
// Based on section 4.3 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn verify(pk: PK, alpha: &ByteSeq, pi_string: &ByteSeq) -> ByteSeqResult {
let (n, _e) = pk.clone();
// STEP 1
let s = os2ip(pi_string);
// STEP 2
let m = rsavp1(pk, s)?;
// STEP 3 and 4
let em_prime = vrf_mgf1(n, alpha)?;
// STEP 5
let m_prime = os2ip(&em_prime);
// STEP 6
if m == m_prime {
proof_to_hash(pi_string)
} else {
ByteSeqResult::Err(Error::VerificationFailed)
}
}
#[cfg(test)]
mod tests {
use super::*;
use num_bigint::{BigInt,Sign};
use glass_pumpkin::prime;
use quickcheck::*;
// RSA key generation
// Taken from https://asecuritysite.com/rust/rsa01/
fn modinv(a0: BigInt, m0: BigInt) -> BigInt {
if m0 == one() { return one() }
let (mut a, mut m, mut x0, mut inv) =
(a0, m0.clone(), zero(), one());
while a > one() {
inv -= (&a / &m) * &x0;
a = &a % &m;
std::mem::swap(&mut a, &mut m);
std::mem::swap(&mut x0, &mut inv)
}
if inv < zero() { inv += m0 }
inv
}
fn rsa_key_gen() -> Keyp {
let p = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let q = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let n = RSAInt::from(p.clone()* q.clone());
let e = BigInt::parse_bytes(b"65537", 10).unwrap();
let totient = (p - BigInt::one()) * (q - BigInt::one());
let d = modinv(e.clone(), totient.clone());
Keyp { n, d: RSAInt::from(d), e: RSAInt::from(e) }
}
// quickcheck generation
#[derive(Clone, Copy, Debug)]
struct Keyp {n: RSAInt, d: RSAInt, e: RSAInt}
#[derive(Clone, Copy, Debug)]
struct Wrapper(RSAInt);
impl Arbitrary for Wrapper {
fn arbitrary(g: &mut Gen) -> Wrapper {
const NUM_BYTES: u32 = 127;
let mut a: [u8; NUM_BYTES as usize] = [0; NUM_BYTES as usize];
for i in 0..NUM_BYTES as usize {
a[i] = u8::arbitrary(g);
}
Wrapper(RSAInt::from_byte_seq_be(&Seq::<U8>::from_public_slice(&a)))
}
}
impl Arbitrary for Keyp {
fn arbitrary(_g: &mut Gen) -> Keyp {
rsa_key_gen()
}
}
// quickcheck tests
const NUM_TESTS: u64 = 5;
#[test]
fn test_rsafdhvrf() {
fn rsafdhvrf(kp: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
beta_prime == beta
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(rsafdhvrf as fn(Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_rsafdhvrf() {
fn neg_rsafdhvrf(kp: Keyp, fake: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
match verify((fake.n, fake.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_rsafdhvrf as fn(Keyp, Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_alpha_rsafdhvrf() {
fn neg_alpha_rsafdhvrf(
kp: Keyp, alpha: Wrapper, fake_alpha: Wrapper
) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let fake_alpha = i2osp(fake_alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &fake_alpha).unwrap();
match verify((kp.n, kp.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_alpha_rsafdhvrf as
fn(Keyp, Wrapper, Wrapper) -> bool);
}
// Test vector generation
// Strings should be given in hexadecimal
fn generate_test_vector(
alpha: &str, kp: Keyp
) -> Result<(String, String), Error> {
let alpha = ByteSeq::from_hex(&alpha);
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
let n = i2osp(kp.n, BYTE_SIZE)?;
let d = i2osp(kp.d, BYTE_SIZE)?;
let e = i2osp(kp.e, BYTE_SIZE)?;
println!("n:\n{}", ByteSeq::to_hex(&n));
println!("d:\n{}", ByteSeq::to_hex(&d));
println!("e:\n{}", ByteSeq::to_hex(&e));
println!("alpha:\n{}", ByteSeq::to_hex(&alpha));
println!("pi:\n{}", ByteSeq::to_hex(&pi));
println!("beta:\n{}", ByteSeq::to_hex(&beta));
Result::Ok((ByteSeq::to_hex(&pi), ByteSeq::to_hex(&beta)))
}
// Run with cargo test test_vector -- --ignored --nocapture in this crate
#[test]
#[ignore]
fn test_vector() {
// Pass alpha in hexadecimal
let kp = rsa_key_gen();
// let kp = get_test_key();
assert!(!generate_test_vector("af82", kp).is_err());
}
fn get_test_key() -> Keyp {
let n = RSAInt::from_hex("64f70acdc41c0ee7cb4961760368e34889c058ad3c7e578e8e72ed0d2fd1c7cfbb8beffd107204d544919db9d2470669c969e178d4deb8393daec4584ca9f162805c9ba46e617d89d4ab6480b0873b1cb92cf7232c88f013931ffe30f8ddf2cddbff4402bcb721985d2bb2eee5382dd09210b5d1da6b6b8207fe3e526de54efb55b56cd52d97cd77df6315569d5b59823c85ad99c57ad2959ec7d12cdf0c3e66cc57eaa1e644da9b0ca69b0df43945b0bd88ac66903ec98fe0e770b683ca7a332e69cba9229115a5295273aeeb4af2662063a312cbb4b871323f71888fd39557a5f4610ea7a590b021d43e5a89b69de68c728ce147f2743e0b97a5b3eb0d6ab1");
let d = RSAInt::from_hex("39134e9033a488e8900ad3859b37d804519ae2864c04400ade8c2965a2fabc31ba9bc8f70e2ce67e895ca8053bd1dad6427e106ff626518e4a4859c670d0411ca5e3b438a80d84a23e0f05a99a2158514c7d16d8537cb5fadad8e3215c0e5c0bf3a9c210aa0dfc77dd73ae9b4e090c1d33f52e538b5dde508ba43626f2e906546773ba7401aa6b68ab1151da528336ddafc9a6f2995d89ec282bc555fe41e776216576c0aafb66ef00b718e6c62afd51faf82e7b5a1d430591465b2188fa286ce778eb6a1b346b58331c7820b4142fb808e59ec910aa9b6d340dea673ae7be2d9e1fa91494e40372bcfb92da5fe236dc93b30b0a59b20af8edf3a10e3ea6dfe1");
let e = RSAInt::from_hex("010001");
Keyp {n, d, e}
}
// Note that the test vectors have been generated using this code
#[test]
fn test_empty() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("");
let pi = ByteSeq::from_hex("406581e350c601d6d7518ac928f6753929c56a7480a4a3d011ed65e5f61ca033accd45c03cac2dddcd61b909cedd0df517a1bba4705c9d04a2a8c7d735d24bc3e59b263cc8c18d5f6e2712747d809df7868ac720f90ffd3d7c7b78f3d75f14a9755ea8138804806f4739429d1a313b3abaaf89ce97fbdf10bc01d66723b0b38ad5dc51c87e5f852e2c8fc923cf0f9c86bb7bf8ae808532fcb8a981338d5b13278e66e19915e41c6fbd09f1fce3300da422fbf46f706d1c79f298c740926e14069f83dae52a25bad684e420ad5fc8af3b02e0cf3f79782fb6e7e65abe5e1f6b4fe41f20339b2986fe39f7ce4ceb9c2490d5229e9bfda93150d6800880c411daae");
let beta = ByteSeq::from_hex("d065ca3be8716236e99f64139adf481090f0a0c839f86ffda3c4fad948166af0");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_72() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("72");
let pi = ByteSeq::from_hex("3d396dc417bee1975ff63c4e8b43b9417be03a91d5eb47309790d74100271342d6dc11511333ec4bc42aea3e02640dc870665044e85085c3dea43eedeb266d9b2de3824aca18b8de3e4d198bde808d80a2a10f0f4bd73fbc7cc36da44cb68af3161b2264e737dcd2d669252abb29f275c971ff6b8234876b7d1ff3d4d05197fe563d6ae92685dccbbbb689b4837da42fe47433019d9bfc50001b11708bf9f656532febf674119c0d67e27714195722fd977e0fc35d7325b5fb3ecb54df53986e01a809d0e5ec442fdacc3d271e7ab5480b8eb18f25cd3baf6a47abc6bf027e8dedef911f2bec367fa5d65e106f314b64cc1d9534d4f26fa034035a43852be66a");
let beta = ByteSeq::from_hex("a229210b84f0bb43b296075f226dee433cf2727cd6c2e4871afdeb77414f6a47");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_af82() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("af82");
let pi = ByteSeq::from_hex("57b07056abc6851330b21ae890fd43ea53b4435319748cf8dba82148ee381c11d21a8660a8714aa59abaac2b7d0141ac4e85b1113b144328eb11461a7f26086896036fc49579a58a2516cecd274946f8dd82fef31652dfe2e2b495966cd6193a1bd197ef6e3472f30bfe14827dd968ea3bf8310dc002a765a0d54b12c3c9627309800b74701a3f7d07a02db0a6ca3a639e60726059727313818a6b671bebe18f078713ced33e50acbfd1e661ec89c5e82b8e1e07f6293f45474aa57d084da46a2437932491d92a87b3393bb0ec62254a3eca19e1004756867839671f84f7a2378097f334832f4aa0442fc5f8637fb2220bb3f2dca247927f0d49ae1c1b2e7455");
let beta = ByteSeq::from_hex("ebc5582b6aaf23c424ec1c74e1b8250327c957967fa37566284dac8400e62032");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
}
| {
// STEP 1 and 2
let hash_string = SUITE_STRING.concat(&TWO.concat(pi_string));
// STEP 3
ByteSeqResult::Ok(sha256(&hash_string).slice(0,32))
} | identifier_body |
rsa-fdh-vrf.rs | // This crate implements RSA-FDH-VRF based on section 4 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
// The ciphersuite is RSA-FDH-VRF-SHA256, suite string can be changed if other hash function is desired
// The step comments refer to the corresponding steps in the IETF pseudocode for comparison with hacspec
use hacspec_lib::*;
use hacspec_sha256::*;
use hacspec_rsa_pkcs1::*;
bytes!(IntByte, 1);
#[rustfmt::skip]
const ONE: IntByte = IntByte(secret_array!(U8, [0x01u8]));
#[rustfmt::skip]
const TWO: IntByte = IntByte(secret_array!(U8, [0x02u8]));
const SUITE_STRING: IntByte = ONE;
// Helper function used by prove and verify to compute mgf1 of alpha
// mgf_salt currently part of cipher suite, could be optional input
fn vrf_mgf1(n: RSAInt, alpha: &ByteSeq) -> ByteSeqResult {
let mgf_salt1 = i2osp(RSAInt::from_literal(BYTE_SIZE as u128), 4u32)?;
let mgf_salt2 = i2osp(n, BYTE_SIZE)?;
let mgf_salt = mgf_salt1.concat(&mgf_salt2);
let mgf_string = SUITE_STRING
.concat(&ONE)
.concat(&mgf_salt)
.concat(alpha);
let mgf = mgf1(&mgf_string, BYTE_SIZE as usize - 1usize)?;
ByteSeqResult::Ok(mgf)
}
// Based on section 4.1 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn prove(sk: SK, alpha: &ByteSeq) -> ByteSeqResult {
let (n, _d) = sk.clone();
// STEP 1 and 2
let em = vrf_mgf1(n, alpha)?;
// STEP 3
let m = os2ip(&em);
// STEP 4
let s = rsasp1(sk, m)?;
// STEP 5 and 6
i2osp(s, BYTE_SIZE)
}
// Based on section 4.2 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn proof_to_hash(pi_string: &ByteSeq) -> ByteSeqResult {
// STEP 1 and 2
let hash_string = SUITE_STRING.concat(&TWO.concat(pi_string));
// STEP 3
ByteSeqResult::Ok(sha256(&hash_string).slice(0,32))
}
// Based on section 4.3 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-vrf/
pub fn verify(pk: PK, alpha: &ByteSeq, pi_string: &ByteSeq) -> ByteSeqResult {
let (n, _e) = pk.clone();
// STEP 1
let s = os2ip(pi_string);
// STEP 2
let m = rsavp1(pk, s)?;
// STEP 3 and 4
let em_prime = vrf_mgf1(n, alpha)?;
// STEP 5
let m_prime = os2ip(&em_prime);
// STEP 6
if m == m_prime | else {
ByteSeqResult::Err(Error::VerificationFailed)
}
}
#[cfg(test)]
mod tests {
use super::*;
use num_bigint::{BigInt,Sign};
use glass_pumpkin::prime;
use quickcheck::*;
// RSA key generation
// Taken from https://asecuritysite.com/rust/rsa01/
fn modinv(a0: BigInt, m0: BigInt) -> BigInt {
if m0 == one() { return one() }
let (mut a, mut m, mut x0, mut inv) =
(a0, m0.clone(), zero(), one());
while a > one() {
inv -= (&a / &m) * &x0;
a = &a % &m;
std::mem::swap(&mut a, &mut m);
std::mem::swap(&mut x0, &mut inv)
}
if inv < zero() { inv += m0 }
inv
}
fn rsa_key_gen() -> Keyp {
let p = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let q = BigInt::from_biguint(Sign::Plus,
prime::new((BIT_SIZE / 2) as usize).unwrap());
let n = RSAInt::from(p.clone()* q.clone());
let e = BigInt::parse_bytes(b"65537", 10).unwrap();
let totient = (p - BigInt::one()) * (q - BigInt::one());
let d = modinv(e.clone(), totient.clone());
Keyp { n, d: RSAInt::from(d), e: RSAInt::from(e) }
}
// quickcheck generation
#[derive(Clone, Copy, Debug)]
struct Keyp {n: RSAInt, d: RSAInt, e: RSAInt}
#[derive(Clone, Copy, Debug)]
struct Wrapper(RSAInt);
impl Arbitrary for Wrapper {
fn arbitrary(g: &mut Gen) -> Wrapper {
const NUM_BYTES: u32 = 127;
let mut a: [u8; NUM_BYTES as usize] = [0; NUM_BYTES as usize];
for i in 0..NUM_BYTES as usize {
a[i] = u8::arbitrary(g);
}
Wrapper(RSAInt::from_byte_seq_be(&Seq::<U8>::from_public_slice(&a)))
}
}
impl Arbitrary for Keyp {
fn arbitrary(_g: &mut Gen) -> Keyp {
rsa_key_gen()
}
}
// quickcheck tests
const NUM_TESTS: u64 = 5;
#[test]
fn test_rsafdhvrf() {
fn rsafdhvrf(kp: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
beta_prime == beta
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(rsafdhvrf as fn(Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_rsafdhvrf() {
fn neg_rsafdhvrf(kp: Keyp, fake: Keyp, alpha: Wrapper) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &alpha).unwrap();
match verify((fake.n, fake.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_rsafdhvrf as fn(Keyp, Keyp, Wrapper) -> bool);
}
#[test]
fn test_neg_alpha_rsafdhvrf() {
fn neg_alpha_rsafdhvrf(
kp: Keyp, alpha: Wrapper, fake_alpha: Wrapper
) -> bool {
let alpha = i2osp(alpha.0, BYTE_SIZE).unwrap();
let fake_alpha = i2osp(fake_alpha.0, BYTE_SIZE).unwrap();
let pi = prove((kp.n, kp.d), &fake_alpha).unwrap();
match verify((kp.n, kp.e), &alpha, &pi) {
Ok(_beta_prime) => false,
Err(e) => matches!(e, Error::VerificationFailed
| Error::MessageTooLarge),
}
}
QuickCheck::new().tests(NUM_TESTS)
.quickcheck(neg_alpha_rsafdhvrf as
fn(Keyp, Wrapper, Wrapper) -> bool);
}
// Test vector generation
// Strings should be given in hexadecimal
fn generate_test_vector(
alpha: &str, kp: Keyp
) -> Result<(String, String), Error> {
let alpha = ByteSeq::from_hex(&alpha);
let pi = prove((kp.n, kp.d), &alpha).unwrap();
let beta = proof_to_hash(&pi).unwrap();
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
let n = i2osp(kp.n, BYTE_SIZE)?;
let d = i2osp(kp.d, BYTE_SIZE)?;
let e = i2osp(kp.e, BYTE_SIZE)?;
println!("n:\n{}", ByteSeq::to_hex(&n));
println!("d:\n{}", ByteSeq::to_hex(&d));
println!("e:\n{}", ByteSeq::to_hex(&e));
println!("alpha:\n{}", ByteSeq::to_hex(&alpha));
println!("pi:\n{}", ByteSeq::to_hex(&pi));
println!("beta:\n{}", ByteSeq::to_hex(&beta));
Result::Ok((ByteSeq::to_hex(&pi), ByteSeq::to_hex(&beta)))
}
// Run with cargo test test_vector -- --ignored --nocapture in this crate
#[test]
#[ignore]
fn test_vector() {
// Pass alpha in hexadecimal
let kp = rsa_key_gen();
// let kp = get_test_key();
assert!(!generate_test_vector("af82", kp).is_err());
}
fn get_test_key() -> Keyp {
let n = RSAInt::from_hex("64f70acdc41c0ee7cb4961760368e34889c058ad3c7e578e8e72ed0d2fd1c7cfbb8beffd107204d544919db9d2470669c969e178d4deb8393daec4584ca9f162805c9ba46e617d89d4ab6480b0873b1cb92cf7232c88f013931ffe30f8ddf2cddbff4402bcb721985d2bb2eee5382dd09210b5d1da6b6b8207fe3e526de54efb55b56cd52d97cd77df6315569d5b59823c85ad99c57ad2959ec7d12cdf0c3e66cc57eaa1e644da9b0ca69b0df43945b0bd88ac66903ec98fe0e770b683ca7a332e69cba9229115a5295273aeeb4af2662063a312cbb4b871323f71888fd39557a5f4610ea7a590b021d43e5a89b69de68c728ce147f2743e0b97a5b3eb0d6ab1");
let d = RSAInt::from_hex("39134e9033a488e8900ad3859b37d804519ae2864c04400ade8c2965a2fabc31ba9bc8f70e2ce67e895ca8053bd1dad6427e106ff626518e4a4859c670d0411ca5e3b438a80d84a23e0f05a99a2158514c7d16d8537cb5fadad8e3215c0e5c0bf3a9c210aa0dfc77dd73ae9b4e090c1d33f52e538b5dde508ba43626f2e906546773ba7401aa6b68ab1151da528336ddafc9a6f2995d89ec282bc555fe41e776216576c0aafb66ef00b718e6c62afd51faf82e7b5a1d430591465b2188fa286ce778eb6a1b346b58331c7820b4142fb808e59ec910aa9b6d340dea673ae7be2d9e1fa91494e40372bcfb92da5fe236dc93b30b0a59b20af8edf3a10e3ea6dfe1");
let e = RSAInt::from_hex("010001");
Keyp {n, d, e}
}
// Note that the test vectors have been generated using this code
#[test]
fn test_empty() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("");
let pi = ByteSeq::from_hex("406581e350c601d6d7518ac928f6753929c56a7480a4a3d011ed65e5f61ca033accd45c03cac2dddcd61b909cedd0df517a1bba4705c9d04a2a8c7d735d24bc3e59b263cc8c18d5f6e2712747d809df7868ac720f90ffd3d7c7b78f3d75f14a9755ea8138804806f4739429d1a313b3abaaf89ce97fbdf10bc01d66723b0b38ad5dc51c87e5f852e2c8fc923cf0f9c86bb7bf8ae808532fcb8a981338d5b13278e66e19915e41c6fbd09f1fce3300da422fbf46f706d1c79f298c740926e14069f83dae52a25bad684e420ad5fc8af3b02e0cf3f79782fb6e7e65abe5e1f6b4fe41f20339b2986fe39f7ce4ceb9c2490d5229e9bfda93150d6800880c411daae");
let beta = ByteSeq::from_hex("d065ca3be8716236e99f64139adf481090f0a0c839f86ffda3c4fad948166af0");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_72() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("72");
let pi = ByteSeq::from_hex("3d396dc417bee1975ff63c4e8b43b9417be03a91d5eb47309790d74100271342d6dc11511333ec4bc42aea3e02640dc870665044e85085c3dea43eedeb266d9b2de3824aca18b8de3e4d198bde808d80a2a10f0f4bd73fbc7cc36da44cb68af3161b2264e737dcd2d669252abb29f275c971ff6b8234876b7d1ff3d4d05197fe563d6ae92685dccbbbb689b4837da42fe47433019d9bfc50001b11708bf9f656532febf674119c0d67e27714195722fd977e0fc35d7325b5fb3ecb54df53986e01a809d0e5ec442fdacc3d271e7ab5480b8eb18f25cd3baf6a47abc6bf027e8dedef911f2bec367fa5d65e106f314b64cc1d9534d4f26fa034035a43852be66a");
let beta = ByteSeq::from_hex("a229210b84f0bb43b296075f226dee433cf2727cd6c2e4871afdeb77414f6a47");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
#[test]
fn test_af82() {
let kp = get_test_key();
let alpha = ByteSeq::from_hex("af82");
let pi = ByteSeq::from_hex("57b07056abc6851330b21ae890fd43ea53b4435319748cf8dba82148ee381c11d21a8660a8714aa59abaac2b7d0141ac4e85b1113b144328eb11461a7f26086896036fc49579a58a2516cecd274946f8dd82fef31652dfe2e2b495966cd6193a1bd197ef6e3472f30bfe14827dd968ea3bf8310dc002a765a0d54b12c3c9627309800b74701a3f7d07a02db0a6ca3a639e60726059727313818a6b671bebe18f078713ced33e50acbfd1e661ec89c5e82b8e1e07f6293f45474aa57d084da46a2437932491d92a87b3393bb0ec62254a3eca19e1004756867839671f84f7a2378097f334832f4aa0442fc5f8637fb2220bb3f2dca247927f0d49ae1c1b2e7455");
let beta = ByteSeq::from_hex("ebc5582b6aaf23c424ec1c74e1b8250327c957967fa37566284dac8400e62032");
let pi_prime = prove((kp.n, kp.d), &alpha).unwrap();
assert_eq!(pi_prime, pi);
let beta_prime = proof_to_hash(&pi).unwrap();
assert_eq!(beta_prime, beta);
let beta_prime = verify((kp.n, kp.e), &alpha, &pi).unwrap();
assert_eq!(beta_prime, beta);
}
}
| {
proof_to_hash(pi_string)
} | conditional_block |
sgt.py | #!/usr/bin/python
# vim: set fileencoding=utf-8 :
"""
Simple good-turing estimation, as described in
W. Gale. Good-Turing smoothing without tears. Journal of
Quantitative Linguistics, 2:217-37, 1995.
"""
from __future__ import division
from collections import defaultdict
import unittest
import collections
import copy
from math import log, exp, fsum, floor
import numpy
from . import averaging_transform
from memo import instancememo
class Estimator(object):
"""
The estimator created by SGT method.
Slope and intercept define the linear estimate. The linear_cutoff parameter
defines the r value strictly below which the estimate will be computed with
the unsmoothed Turing estimate. N is the array of values N_r; N[0] should
be equal to the sum of all N values thereafter.
'b' is used as defined in the paper and is the log-log slope of N[r] with
respect to r. It needs to be < -1 for SGT smoothing to be applicable.
"""
def __init__(self, N, *args, **kwargs):
"""
Create a Simple Good Turing estimator for the given N_r values (as
specified in the 'N' kwarg). N[r] == N_r, so N[0] should be blank.
"""
self.N = copy.copy(N)
if isinstance(self.N, list):
self.max_r = len(self.N)
elif isinstance(self.N, dict) or isinstance(self.N, defaultdict):
self.max_r = max(self.N.keys())
super(Estimator, self).__init__(*args, **kwargs)
assert not self.N[0]
self._precompute()
def _precompute(self):
"""
Do the necessary precomputation to compute estimates
"""
N = self.N
# Store 'N' as used by Gale in N[0] --- this is the total # of
# occurrences.
N[0] = sum(N[r] * r for r in range(1, self.max_r + 1))
self.Z = Z = averaging_transform.transform(N, self.max_r) # Z[r] = Z_r
self.b, self.a = self._regress(Z) # 'a' and 'b' as used in (Gale); a
# is intercept and b is slope.
assert self.b < -1, ("Log-linear slope > -1 (%f); SGT not applicable" %
self.b)
# Find the transition point between linear Good-Turing estimate and the
# Turing estimate.
self.linear_cutoff = self._find_cutoff()
self.norm_constant = self._find_norm_constant()
def _find_norm_constant(self):
N = self.N
return ((1 - self.rstar_unnorm(0)) /
fsum(N[r] * self.p_unnorm(r)
for r in range(1, self.max_r + 1)))
def _regress(self, Z):
"""
Perform linear regression on the given points in loglog space, return
result
"""
# Make a set of the nonempty points in log scale
x, y = zip(*[(log(r), log(Z[r]))
for r in range(1, self.max_r + 1) if Z[r]])
self.x, self.y = x, y
matrix = numpy.array((x, numpy.ones(len(x)))).T
return numpy.linalg.lstsq(matrix, y, rcond=None)[0]
def _find_cutoff(self):
"""
Find first r value s.t. the linear and turing estimates of r* are not
significantly different.
"""
cutoff = 1
while ((self.linear_rstar_unnorm(cutoff) -
self.turing_rstar_unnorm(cutoff))**2
> self.approx_turing_variance(cutoff)):
cutoff += 1
return cutoff
def approx_turing_variance(self, r):
"""
Compute the approximate variance of the turing estimate for r* given r,
using the approximation given in (Gale):
var(r^{*}_T) ≈ (r + 1)^2 (N_{r+1}/N_r^2) (1 + N_{r+1}/N_r)
"""
N = self.N
return (r + 1)**2 * (N[r+1] / N[r]**2) * (1 + N[r+1] / N[r])
def linear_rstar_unnorm(self, r):
"""
Linear Good-Turing estimate of r* for given r:
log(N_r) = a + b log(r)
--> N_r = A * r^b
and r* = (r + 1)(N_{r+1})/N_r
--> r* = (r + 1)(A * (r+1)^b)/(A * r^b)
= (r+1)^{b+1) / r^b
= r (1 + 1/r)^{b+1}
"""
return r * (1 + 1/r)**(self.b + 1) if r > 0 else None
def turing_rstar_unnorm(self, r):
"""
simple Turing estimate of r* for given r (unsmoothed):
r* = (r + 1)(N_{r+1})/N_r
"""
return ((r + 1) * self.N[r + 1] / self.N[r]
if self.N[r + 1] > 0 and self.N[r] > 0
else None)
@instancememo
def rstar_unnorm(self, r):
return (self.linear_rstar_unnorm(r) if r >= self.linear_cutoff
else self.turing_rstar_unnorm(r))
@instancememo
def rstar(self, r):
return (self.rstar_unnorm(0) if r == 0
else self.rstar_unnorm(r) * self.norm_constant)
def p_unnorm(self, r):
return self.rstar_unnorm(r) / self.N[0]
def p(self, r):
return self.rstar(r) / self.N[0]
class TooFlatTest(unittest.TestCase):
input = collections.defaultdict(lambda:0)
input.update([
(1, 1),
(2, 1),
])
max_r = 2
def te | elf):
with self.assertRaises(AssertionError):
estimator = Estimator(N=self.input)
class ChinesePluralsTest(unittest.TestCase):
max_r = 1918
maxDiff = None
input = collections.defaultdict(lambda:0)
input.update([
(1, 268),
(2, 112),
(3, 70),
(4, 41),
(5, 24),
(6, 14),
(7, 15),
(8, 14),
(9, 8),
(10, 11),
(11, 9),
(12, 6),
(13, 6),
(14, 3),
(15, 7),
(16, 9),
(17, 4),
(18, 4),
(19, 8),
(20, 2),
(21, 4),
(22, 2),
(23, 2),
(24, 3),
(25, 4),
(26, 4),
(27, 4),
(28, 1),
(29, 1),
(31, 2),
(33, 1),
(39, 3),
(41, 1),
(46, 1),
(47, 1),
(50, 1),
(52, 2),
(53, 1),
(55, 1),
(57, 1),
(60, 1),
(74, 1),
(84, 1),
(108, 1),
(109, 1),
(177, 1),
(400, 1),
(1918, 1),
])
output = copy.copy(input)
output.update([
(0, 0.04090978),
(1, 0.8414893),
(2, 1.887716),
(3, 2.288452),
(4, 3.247259),
(5, 4.222094),
(6, 5.206074),
(7, 6.195765),
(8, 7.189259),
(9, 8.185414),
(10, 9.183503),
(11, 10.18304),
(12, 11.1837),
(13, 12.18523),
(14, 13.18746),
(15, 14.19026),
(16, 15.19353),
(17, 16.19719),
(18, 17.20118),
(19, 18.20545),
(20, 19.20996),
(21, 20.21467),
(22, 21.21956),
(23, 22.22462),
(24, 23.22981),
(25, 24.23512),
(26, 25.24054),
(27, 26.24606),
(28, 27.25167),
(29, 28.25735),
(31, 30.26893),
(33, 32.28073),
(39, 38.31721),
(41, 40.32964),
(46, 45.36113),
(47, 46.36749),
(50, 49.38667),
(52, 51.39953),
(53, 52.40597),
(55, 54.41891),
(57, 56.43188),
(60, 59.45142),
(74, 73.54344),
(84, 83.60977),
(108, 107.7701),
(109, 108.7768),
(177, 177.2346),
(400, 401.744),
(1918, 1930.037),
])
norm_constant = 1.006782
a, b = (6.683387, -1.964591)
def assertAlmostEqual(self, left, right, msg=None, places=None):
if msg:
msg = msg + (" (%r ≠ %r)" % (left, right))
if places is None:
# Six significant figures
places = 5 - int(floor(log(abs(right)) / log(10)))
unittest.TestCase.assertAlmostEqual(
self, left, right, msg=msg, places=places)
def test_unnorm_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys :
self.assertAlmostEqual(estimator.rstar_unnorm(key),
self.output[key] /
(self.norm_constant if key > 0 else 1),
msg=("%d* (unnormalized)" % (key,)))
def test_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys:
self.assertAlmostEqual(estimator.rstar(key),
self.output[key],
msg=("%d* (normalized)" % (key,)))
def test_constant(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.norm_constant,
self.norm_constant,
msg="Normalization constant")
def test_linear(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.a,
self.a,
msg="Linear regression intercept")
self.assertAlmostEqual(estimator.b,
self.b,
msg="Linear regression slope")
class ProsodyTest(ChinesePluralsTest):
max_r = 7846
input = collections.defaultdict(lambda:0)
input.update([
(1, 120),
(2, 40),
(3, 24),
(4, 13),
(5, 15),
(6, 5),
(7, 11),
(8, 2),
(9, 2),
(10, 1),
(12, 3),
(14, 2),
(15, 1),
(16, 1),
(17, 3),
(19, 1),
(20, 3),
(21, 2),
(23, 3),
(24, 3),
(25, 3),
(26, 2),
(27, 2),
(28, 1),
(31, 2),
(32, 2),
(33, 1),
(34, 2),
(36, 2),
(41, 3),
(43, 1),
(45, 3),
(46, 1),
(47, 1),
(50, 1),
(71, 1),
(84, 1),
(101, 1),
(105, 1),
(121, 1),
(124, 1),
(146, 1),
(162, 1),
(193, 1),
(199, 1),
(224, 1),
(226, 1),
(254, 1),
(257, 1),
(339, 1),
(421, 1),
(456, 1),
(481, 1),
(483, 1),
(1140, 1),
(1256, 1),
(1322, 1),
(1530, 1),
(2131, 1),
(2395, 1),
(6925, 1),
(7846, 1),
])
output = copy.copy(input)
output.update([
(0, 0.003883244),
(1, 0.7628079),
(2, 1.706448),
(3, 2.679796),
(4, 3.663988),
(5, 4.653366),
(6, 5.645628),
(7, 6.63966),
(8, 7.634856),
(9, 8.63086),
(10, 9.627446),
(12, 11.62182),
(14, 13.61725),
(15, 14.61524),
(16, 15.61336),
(17, 16.6116),
(19, 18.60836),
(20, 19.60685),
(21, 20.6054),
(23, 22.60264),
(24, 23.60133),
(25, 24.60005),
(26, 25.5988),
(27, 26.59759),
(28, 27.59639),
(31, 30.59294),
(32, 31.59183),
(33, 32.59073),
(34, 33.58964),
(36, 35.58751),
(41, 40.58235),
(43, 42.58035),
(45, 44.57836),
(46, 45.57738),
(47, 46.57641),
(50, 49.57351),
(71, 70.55399),
(84, 83.54229),
(101, 100.5272),
(105, 104.5237),
(121, 120.5097),
(124, 123.507),
(146, 145.4879),
(162, 161.474),
(193, 192.4472),
(199, 198.4421),
(224, 223.4205),
(226, 225.4188),
(254, 253.3947),
(257, 256.3922),
(339, 338.3218),
(421, 420.2514),
(456, 455.2215),
(481, 480.2),
(483, 482.1983),
(1140, 1138.636),
(1256, 1254.537),
(1322, 1320.48),
(1530, 1528.302),
(2131, 2128.788),
(2395, 2392.562),
(6925, 6918.687),
(7846, 7838.899),
])
norm_constant = 0.9991445
a, b = (4.468558, -1.389374)
class MadeUpTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 200),
(2, 20),
(3, 12),
(4, 5),
(6, 7),
(8, 4),
(9, 2),
(10, 3),
(11, 1),
(13, 2),
(16, 1),
(19, 1),
(30, 1),
])
output = copy.copy(input)
output.update([
(0, 0.3846154),
(1, 0.2069015),
(2, 1.316592),
(3, 2.252004),
(4, 3.226675),
(6, 5.226975),
(8, 7.257669),
(9, 8.27874),
(10, 9.302245),
(11, 10.32758),
(13, 12.38216),
(16, 15.47039),
(19, 18.5632),
(30, 29.92122),
])
a, b = 4.933901, -2.114833
norm_constant = 1.034508
class TrivialTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 4),
(2, 1),
])
max_r = 2
a, b = 1.386294, -2
norm_constant = 0.6
output = copy.copy(input)
output.update([
(0, 2/3),
(1, .3),
(2, .8),
])
| st_failure(s | identifier_name |
sgt.py | #!/usr/bin/python
# vim: set fileencoding=utf-8 :
"""
Simple good-turing estimation, as described in
W. Gale. Good-Turing smoothing without tears. Journal of
Quantitative Linguistics, 2:217-37, 1995.
"""
from __future__ import division
from collections import defaultdict
import unittest
import collections
import copy
from math import log, exp, fsum, floor
import numpy
from . import averaging_transform
from memo import instancememo
class Estimator(object):
"""
The estimator created by SGT method.
Slope and intercept define the linear estimate. The linear_cutoff parameter
defines the r value strictly below which the estimate will be computed with
the unsmoothed Turing estimate. N is the array of values N_r; N[0] should
be equal to the sum of all N values thereafter.
'b' is used as defined in the paper and is the log-log slope of N[r] with
respect to r. It needs to be < -1 for SGT smoothing to be applicable.
"""
def __init__(self, N, *args, **kwargs):
"""
Create a Simple Good Turing estimator for the given N_r values (as
specified in the 'N' kwarg). N[r] == N_r, so N[0] should be blank.
"""
self.N = copy.copy(N)
if isinstance(self.N, list):
self.max_r = len(self.N)
elif isinstance(self.N, dict) or isinstance(self.N, defaultdict):
self.max_r = max(self.N.keys())
super(Estimator, self).__init__(*args, **kwargs)
assert not self.N[0]
self._precompute()
def _precompute(self):
"""
Do the necessary precomputation to compute estimates
"""
N = self.N
# Store 'N' as used by Gale in N[0] --- this is the total # of
# occurrences.
N[0] = sum(N[r] * r for r in range(1, self.max_r + 1))
self.Z = Z = averaging_transform.transform(N, self.max_r) # Z[r] = Z_r
self.b, self.a = self._regress(Z) # 'a' and 'b' as used in (Gale); a
# is intercept and b is slope.
assert self.b < -1, ("Log-linear slope > -1 (%f); SGT not applicable" %
self.b)
# Find the transition point between linear Good-Turing estimate and the
# Turing estimate.
self.linear_cutoff = self._find_cutoff()
self.norm_constant = self._find_norm_constant()
def _find_norm_constant(self):
N = self.N
return ((1 - self.rstar_unnorm(0)) /
fsum(N[r] * self.p_unnorm(r)
for r in range(1, self.max_r + 1)))
def _regress(self, Z):
"""
Perform linear regression on the given points in loglog space, return
result
"""
# Make a set of the nonempty points in log scale
x, y = zip(*[(log(r), log(Z[r]))
for r in range(1, self.max_r + 1) if Z[r]])
self.x, self.y = x, y
matrix = numpy.array((x, numpy.ones(len(x)))).T
return numpy.linalg.lstsq(matrix, y, rcond=None)[0]
def _find_cutoff(self):
"""
Find first r value s.t. the linear and turing estimates of r* are not
significantly different.
"""
cutoff = 1
while ((self.linear_rstar_unnorm(cutoff) -
self.turing_rstar_unnorm(cutoff))**2
> self.approx_turing_variance(cutoff)):
cutoff += 1
return cutoff
def approx_turing_variance(self, r):
"""
Compute the approximate variance of the turing estimate for r* given r,
using the approximation given in (Gale):
var(r^{*}_T) ≈ (r + 1)^2 (N_{r+1}/N_r^2) (1 + N_{r+1}/N_r)
"""
N = self.N
return (r + 1)**2 * (N[r+1] / N[r]**2) * (1 + N[r+1] / N[r])
def linear_rstar_unnorm(self, r):
"""
Linear Good-Turing estimate of r* for given r:
log(N_r) = a + b log(r)
--> N_r = A * r^b
and r* = (r + 1)(N_{r+1})/N_r
--> r* = (r + 1)(A * (r+1)^b)/(A * r^b)
= (r+1)^{b+1) / r^b
= r (1 + 1/r)^{b+1}
"""
return r * (1 + 1/r)**(self.b + 1) if r > 0 else None
def turing_rstar_unnorm(self, r):
"""
simple Turing estimate of r* for given r (unsmoothed):
r* = (r + 1)(N_{r+1})/N_r
"""
return ((r + 1) * self.N[r + 1] / self.N[r]
if self.N[r + 1] > 0 and self.N[r] > 0
else None)
@instancememo
def rstar_unnorm(self, r):
return (self.linear_rstar_unnorm(r) if r >= self.linear_cutoff
else self.turing_rstar_unnorm(r))
@instancememo
def rstar(self, r):
return (self.rstar_unnorm(0) if r == 0
else self.rstar_unnorm(r) * self.norm_constant)
def p_unnorm(self, r):
return self.rstar_unnorm(r) / self.N[0]
def p(self, r):
return self.rstar(r) / self.N[0]
class TooFlatTest(unittest.TestCase):
input = collections.defaultdict(lambda:0)
input.update([
(1, 1),
(2, 1),
])
max_r = 2
def test_failure(self):
with self.assertRaises(AssertionError):
estimator = Estimator(N=self.input)
class ChinesePluralsTest(unittest.TestCase):
max_r = 1918
maxDiff = None
input = collections.defaultdict(lambda:0)
input.update([
(1, 268),
(2, 112),
(3, 70),
(4, 41),
(5, 24),
(6, 14),
(7, 15),
(8, 14),
(9, 8),
(10, 11),
(11, 9),
(12, 6),
(13, 6),
(14, 3),
(15, 7),
(16, 9),
(17, 4),
(18, 4),
(19, 8),
(20, 2),
(21, 4),
(22, 2),
(23, 2),
(24, 3),
(25, 4),
(26, 4),
(27, 4),
(28, 1),
(29, 1),
(31, 2),
(33, 1),
(39, 3),
(41, 1),
(46, 1),
(47, 1),
(50, 1),
(52, 2),
(53, 1),
(55, 1),
(57, 1),
(60, 1),
(74, 1),
(84, 1),
(108, 1),
(109, 1),
(177, 1),
(400, 1),
(1918, 1),
])
output = copy.copy(input)
output.update([
(0, 0.04090978),
(1, 0.8414893),
(2, 1.887716),
(3, 2.288452),
(4, 3.247259),
(5, 4.222094),
(6, 5.206074),
(7, 6.195765),
(8, 7.189259),
(9, 8.185414),
(10, 9.183503),
(11, 10.18304),
(12, 11.1837),
(13, 12.18523),
(14, 13.18746),
(15, 14.19026),
(16, 15.19353),
(17, 16.19719),
(18, 17.20118),
(19, 18.20545),
(20, 19.20996),
(21, 20.21467),
(22, 21.21956),
(23, 22.22462),
(24, 23.22981),
(25, 24.23512),
(26, 25.24054),
(27, 26.24606),
(28, 27.25167),
(29, 28.25735),
(31, 30.26893),
(33, 32.28073),
(39, 38.31721),
(41, 40.32964),
(46, 45.36113),
(47, 46.36749),
(50, 49.38667),
(52, 51.39953),
(53, 52.40597),
(55, 54.41891),
(57, 56.43188),
(60, 59.45142),
(74, 73.54344),
(84, 83.60977),
(108, 107.7701),
(109, 108.7768),
(177, 177.2346),
(400, 401.744),
(1918, 1930.037),
])
norm_constant = 1.006782
a, b = (6.683387, -1.964591)
def assertAlmostEqual(self, left, right, msg=None, places=None):
if msg:
ms | if places is None:
# Six significant figures
places = 5 - int(floor(log(abs(right)) / log(10)))
unittest.TestCase.assertAlmostEqual(
self, left, right, msg=msg, places=places)
def test_unnorm_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys :
self.assertAlmostEqual(estimator.rstar_unnorm(key),
self.output[key] /
(self.norm_constant if key > 0 else 1),
msg=("%d* (unnormalized)" % (key,)))
def test_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys:
self.assertAlmostEqual(estimator.rstar(key),
self.output[key],
msg=("%d* (normalized)" % (key,)))
def test_constant(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.norm_constant,
self.norm_constant,
msg="Normalization constant")
def test_linear(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.a,
self.a,
msg="Linear regression intercept")
self.assertAlmostEqual(estimator.b,
self.b,
msg="Linear regression slope")
class ProsodyTest(ChinesePluralsTest):
max_r = 7846
input = collections.defaultdict(lambda:0)
input.update([
(1, 120),
(2, 40),
(3, 24),
(4, 13),
(5, 15),
(6, 5),
(7, 11),
(8, 2),
(9, 2),
(10, 1),
(12, 3),
(14, 2),
(15, 1),
(16, 1),
(17, 3),
(19, 1),
(20, 3),
(21, 2),
(23, 3),
(24, 3),
(25, 3),
(26, 2),
(27, 2),
(28, 1),
(31, 2),
(32, 2),
(33, 1),
(34, 2),
(36, 2),
(41, 3),
(43, 1),
(45, 3),
(46, 1),
(47, 1),
(50, 1),
(71, 1),
(84, 1),
(101, 1),
(105, 1),
(121, 1),
(124, 1),
(146, 1),
(162, 1),
(193, 1),
(199, 1),
(224, 1),
(226, 1),
(254, 1),
(257, 1),
(339, 1),
(421, 1),
(456, 1),
(481, 1),
(483, 1),
(1140, 1),
(1256, 1),
(1322, 1),
(1530, 1),
(2131, 1),
(2395, 1),
(6925, 1),
(7846, 1),
])
output = copy.copy(input)
output.update([
(0, 0.003883244),
(1, 0.7628079),
(2, 1.706448),
(3, 2.679796),
(4, 3.663988),
(5, 4.653366),
(6, 5.645628),
(7, 6.63966),
(8, 7.634856),
(9, 8.63086),
(10, 9.627446),
(12, 11.62182),
(14, 13.61725),
(15, 14.61524),
(16, 15.61336),
(17, 16.6116),
(19, 18.60836),
(20, 19.60685),
(21, 20.6054),
(23, 22.60264),
(24, 23.60133),
(25, 24.60005),
(26, 25.5988),
(27, 26.59759),
(28, 27.59639),
(31, 30.59294),
(32, 31.59183),
(33, 32.59073),
(34, 33.58964),
(36, 35.58751),
(41, 40.58235),
(43, 42.58035),
(45, 44.57836),
(46, 45.57738),
(47, 46.57641),
(50, 49.57351),
(71, 70.55399),
(84, 83.54229),
(101, 100.5272),
(105, 104.5237),
(121, 120.5097),
(124, 123.507),
(146, 145.4879),
(162, 161.474),
(193, 192.4472),
(199, 198.4421),
(224, 223.4205),
(226, 225.4188),
(254, 253.3947),
(257, 256.3922),
(339, 338.3218),
(421, 420.2514),
(456, 455.2215),
(481, 480.2),
(483, 482.1983),
(1140, 1138.636),
(1256, 1254.537),
(1322, 1320.48),
(1530, 1528.302),
(2131, 2128.788),
(2395, 2392.562),
(6925, 6918.687),
(7846, 7838.899),
])
norm_constant = 0.9991445
a, b = (4.468558, -1.389374)
class MadeUpTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 200),
(2, 20),
(3, 12),
(4, 5),
(6, 7),
(8, 4),
(9, 2),
(10, 3),
(11, 1),
(13, 2),
(16, 1),
(19, 1),
(30, 1),
])
output = copy.copy(input)
output.update([
(0, 0.3846154),
(1, 0.2069015),
(2, 1.316592),
(3, 2.252004),
(4, 3.226675),
(6, 5.226975),
(8, 7.257669),
(9, 8.27874),
(10, 9.302245),
(11, 10.32758),
(13, 12.38216),
(16, 15.47039),
(19, 18.5632),
(30, 29.92122),
])
a, b = 4.933901, -2.114833
norm_constant = 1.034508
class TrivialTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 4),
(2, 1),
])
max_r = 2
a, b = 1.386294, -2
norm_constant = 0.6
output = copy.copy(input)
output.update([
(0, 2/3),
(1, .3),
(2, .8),
])
| g = msg + (" (%r ≠ %r)" % (left, right))
| conditional_block |
sgt.py | #!/usr/bin/python
# vim: set fileencoding=utf-8 :
"""
Simple good-turing estimation, as described in
W. Gale. Good-Turing smoothing without tears. Journal of
Quantitative Linguistics, 2:217-37, 1995.
"""
from __future__ import division
from collections import defaultdict
import unittest
import collections
import copy
from math import log, exp, fsum, floor
import numpy
from . import averaging_transform
from memo import instancememo
class Estimator(object):
"""
The estimator created by SGT method.
Slope and intercept define the linear estimate. The linear_cutoff parameter
defines the r value strictly below which the estimate will be computed with
the unsmoothed Turing estimate. N is the array of values N_r; N[0] should
be equal to the sum of all N values thereafter.
'b' is used as defined in the paper and is the log-log slope of N[r] with
respect to r. It needs to be < -1 for SGT smoothing to be applicable.
"""
def __init__(self, N, *args, **kwargs):
"""
Create a Simple Good Turing estimator for the given N_r values (as
specified in the 'N' kwarg). N[r] == N_r, so N[0] should be blank.
"""
self.N = copy.copy(N)
if isinstance(self.N, list):
self.max_r = len(self.N)
elif isinstance(self.N, dict) or isinstance(self.N, defaultdict):
self.max_r = max(self.N.keys())
super(Estimator, self).__init__(*args, **kwargs)
assert not self.N[0]
self._precompute()
def _precompute(self):
"""
Do the necessary precomputation to compute estimates
"""
N = self.N
# Store 'N' as used by Gale in N[0] --- this is the total # of
# occurrences.
N[0] = sum(N[r] * r for r in range(1, self.max_r + 1))
self.Z = Z = averaging_transform.transform(N, self.max_r) # Z[r] = Z_r
self.b, self.a = self._regress(Z) # 'a' and 'b' as used in (Gale); a
# is intercept and b is slope.
assert self.b < -1, ("Log-linear slope > -1 (%f); SGT not applicable" %
self.b)
# Find the transition point between linear Good-Turing estimate and the
# Turing estimate.
self.linear_cutoff = self._find_cutoff()
self.norm_constant = self._find_norm_constant()
def _find_norm_constant(self):
N = self.N
return ((1 - self.rstar_unnorm(0)) /
fsum(N[r] * self.p_unnorm(r)
for r in range(1, self.max_r + 1)))
def _regress(self, Z):
|
def _find_cutoff(self):
"""
Find first r value s.t. the linear and turing estimates of r* are not
significantly different.
"""
cutoff = 1
while ((self.linear_rstar_unnorm(cutoff) -
self.turing_rstar_unnorm(cutoff))**2
> self.approx_turing_variance(cutoff)):
cutoff += 1
return cutoff
def approx_turing_variance(self, r):
"""
Compute the approximate variance of the turing estimate for r* given r,
using the approximation given in (Gale):
var(r^{*}_T) ≈ (r + 1)^2 (N_{r+1}/N_r^2) (1 + N_{r+1}/N_r)
"""
N = self.N
return (r + 1)**2 * (N[r+1] / N[r]**2) * (1 + N[r+1] / N[r])
def linear_rstar_unnorm(self, r):
"""
Linear Good-Turing estimate of r* for given r:
log(N_r) = a + b log(r)
--> N_r = A * r^b
and r* = (r + 1)(N_{r+1})/N_r
--> r* = (r + 1)(A * (r+1)^b)/(A * r^b)
= (r+1)^{b+1) / r^b
= r (1 + 1/r)^{b+1}
"""
return r * (1 + 1/r)**(self.b + 1) if r > 0 else None
def turing_rstar_unnorm(self, r):
"""
simple Turing estimate of r* for given r (unsmoothed):
r* = (r + 1)(N_{r+1})/N_r
"""
return ((r + 1) * self.N[r + 1] / self.N[r]
if self.N[r + 1] > 0 and self.N[r] > 0
else None)
@instancememo
def rstar_unnorm(self, r):
return (self.linear_rstar_unnorm(r) if r >= self.linear_cutoff
else self.turing_rstar_unnorm(r))
@instancememo
def rstar(self, r):
return (self.rstar_unnorm(0) if r == 0
else self.rstar_unnorm(r) * self.norm_constant)
def p_unnorm(self, r):
return self.rstar_unnorm(r) / self.N[0]
def p(self, r):
return self.rstar(r) / self.N[0]
class TooFlatTest(unittest.TestCase):
input = collections.defaultdict(lambda:0)
input.update([
(1, 1),
(2, 1),
])
max_r = 2
def test_failure(self):
with self.assertRaises(AssertionError):
estimator = Estimator(N=self.input)
class ChinesePluralsTest(unittest.TestCase):
max_r = 1918
maxDiff = None
input = collections.defaultdict(lambda:0)
input.update([
(1, 268),
(2, 112),
(3, 70),
(4, 41),
(5, 24),
(6, 14),
(7, 15),
(8, 14),
(9, 8),
(10, 11),
(11, 9),
(12, 6),
(13, 6),
(14, 3),
(15, 7),
(16, 9),
(17, 4),
(18, 4),
(19, 8),
(20, 2),
(21, 4),
(22, 2),
(23, 2),
(24, 3),
(25, 4),
(26, 4),
(27, 4),
(28, 1),
(29, 1),
(31, 2),
(33, 1),
(39, 3),
(41, 1),
(46, 1),
(47, 1),
(50, 1),
(52, 2),
(53, 1),
(55, 1),
(57, 1),
(60, 1),
(74, 1),
(84, 1),
(108, 1),
(109, 1),
(177, 1),
(400, 1),
(1918, 1),
])
output = copy.copy(input)
output.update([
(0, 0.04090978),
(1, 0.8414893),
(2, 1.887716),
(3, 2.288452),
(4, 3.247259),
(5, 4.222094),
(6, 5.206074),
(7, 6.195765),
(8, 7.189259),
(9, 8.185414),
(10, 9.183503),
(11, 10.18304),
(12, 11.1837),
(13, 12.18523),
(14, 13.18746),
(15, 14.19026),
(16, 15.19353),
(17, 16.19719),
(18, 17.20118),
(19, 18.20545),
(20, 19.20996),
(21, 20.21467),
(22, 21.21956),
(23, 22.22462),
(24, 23.22981),
(25, 24.23512),
(26, 25.24054),
(27, 26.24606),
(28, 27.25167),
(29, 28.25735),
(31, 30.26893),
(33, 32.28073),
(39, 38.31721),
(41, 40.32964),
(46, 45.36113),
(47, 46.36749),
(50, 49.38667),
(52, 51.39953),
(53, 52.40597),
(55, 54.41891),
(57, 56.43188),
(60, 59.45142),
(74, 73.54344),
(84, 83.60977),
(108, 107.7701),
(109, 108.7768),
(177, 177.2346),
(400, 401.744),
(1918, 1930.037),
])
norm_constant = 1.006782
a, b = (6.683387, -1.964591)
def assertAlmostEqual(self, left, right, msg=None, places=None):
if msg:
msg = msg + (" (%r ≠ %r)" % (left, right))
if places is None:
# Six significant figures
places = 5 - int(floor(log(abs(right)) / log(10)))
unittest.TestCase.assertAlmostEqual(
self, left, right, msg=msg, places=places)
def test_unnorm_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys :
self.assertAlmostEqual(estimator.rstar_unnorm(key),
self.output[key] /
(self.norm_constant if key > 0 else 1),
msg=("%d* (unnormalized)" % (key,)))
def test_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys:
self.assertAlmostEqual(estimator.rstar(key),
self.output[key],
msg=("%d* (normalized)" % (key,)))
def test_constant(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.norm_constant,
self.norm_constant,
msg="Normalization constant")
def test_linear(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.a,
self.a,
msg="Linear regression intercept")
self.assertAlmostEqual(estimator.b,
self.b,
msg="Linear regression slope")
class ProsodyTest(ChinesePluralsTest):
max_r = 7846
input = collections.defaultdict(lambda:0)
input.update([
(1, 120),
(2, 40),
(3, 24),
(4, 13),
(5, 15),
(6, 5),
(7, 11),
(8, 2),
(9, 2),
(10, 1),
(12, 3),
(14, 2),
(15, 1),
(16, 1),
(17, 3),
(19, 1),
(20, 3),
(21, 2),
(23, 3),
(24, 3),
(25, 3),
(26, 2),
(27, 2),
(28, 1),
(31, 2),
(32, 2),
(33, 1),
(34, 2),
(36, 2),
(41, 3),
(43, 1),
(45, 3),
(46, 1),
(47, 1),
(50, 1),
(71, 1),
(84, 1),
(101, 1),
(105, 1),
(121, 1),
(124, 1),
(146, 1),
(162, 1),
(193, 1),
(199, 1),
(224, 1),
(226, 1),
(254, 1),
(257, 1),
(339, 1),
(421, 1),
(456, 1),
(481, 1),
(483, 1),
(1140, 1),
(1256, 1),
(1322, 1),
(1530, 1),
(2131, 1),
(2395, 1),
(6925, 1),
(7846, 1),
])
output = copy.copy(input)
output.update([
(0, 0.003883244),
(1, 0.7628079),
(2, 1.706448),
(3, 2.679796),
(4, 3.663988),
(5, 4.653366),
(6, 5.645628),
(7, 6.63966),
(8, 7.634856),
(9, 8.63086),
(10, 9.627446),
(12, 11.62182),
(14, 13.61725),
(15, 14.61524),
(16, 15.61336),
(17, 16.6116),
(19, 18.60836),
(20, 19.60685),
(21, 20.6054),
(23, 22.60264),
(24, 23.60133),
(25, 24.60005),
(26, 25.5988),
(27, 26.59759),
(28, 27.59639),
(31, 30.59294),
(32, 31.59183),
(33, 32.59073),
(34, 33.58964),
(36, 35.58751),
(41, 40.58235),
(43, 42.58035),
(45, 44.57836),
(46, 45.57738),
(47, 46.57641),
(50, 49.57351),
(71, 70.55399),
(84, 83.54229),
(101, 100.5272),
(105, 104.5237),
(121, 120.5097),
(124, 123.507),
(146, 145.4879),
(162, 161.474),
(193, 192.4472),
(199, 198.4421),
(224, 223.4205),
(226, 225.4188),
(254, 253.3947),
(257, 256.3922),
(339, 338.3218),
(421, 420.2514),
(456, 455.2215),
(481, 480.2),
(483, 482.1983),
(1140, 1138.636),
(1256, 1254.537),
(1322, 1320.48),
(1530, 1528.302),
(2131, 2128.788),
(2395, 2392.562),
(6925, 6918.687),
(7846, 7838.899),
])
norm_constant = 0.9991445
a, b = (4.468558, -1.389374)
class MadeUpTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 200),
(2, 20),
(3, 12),
(4, 5),
(6, 7),
(8, 4),
(9, 2),
(10, 3),
(11, 1),
(13, 2),
(16, 1),
(19, 1),
(30, 1),
])
output = copy.copy(input)
output.update([
(0, 0.3846154),
(1, 0.2069015),
(2, 1.316592),
(3, 2.252004),
(4, 3.226675),
(6, 5.226975),
(8, 7.257669),
(9, 8.27874),
(10, 9.302245),
(11, 10.32758),
(13, 12.38216),
(16, 15.47039),
(19, 18.5632),
(30, 29.92122),
])
a, b = 4.933901, -2.114833
norm_constant = 1.034508
class TrivialTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 4),
(2, 1),
])
max_r = 2
a, b = 1.386294, -2
norm_constant = 0.6
output = copy.copy(input)
output.update([
(0, 2/3),
(1, .3),
(2, .8),
])
| """
Perform linear regression on the given points in loglog space, return
result
"""
# Make a set of the nonempty points in log scale
x, y = zip(*[(log(r), log(Z[r]))
for r in range(1, self.max_r + 1) if Z[r]])
self.x, self.y = x, y
matrix = numpy.array((x, numpy.ones(len(x)))).T
return numpy.linalg.lstsq(matrix, y, rcond=None)[0] | identifier_body |
sgt.py | #!/usr/bin/python
# vim: set fileencoding=utf-8 :
"""
Simple good-turing estimation, as described in
W. Gale. Good-Turing smoothing without tears. Journal of
Quantitative Linguistics, 2:217-37, 1995.
"""
from __future__ import division
from collections import defaultdict
import unittest
import collections
import copy
from math import log, exp, fsum, floor
import numpy
from . import averaging_transform
from memo import instancememo
class Estimator(object):
"""
The estimator created by SGT method.
Slope and intercept define the linear estimate. The linear_cutoff parameter
defines the r value strictly below which the estimate will be computed with
the unsmoothed Turing estimate. N is the array of values N_r; N[0] should
be equal to the sum of all N values thereafter.
'b' is used as defined in the paper and is the log-log slope of N[r] with
respect to r. It needs to be < -1 for SGT smoothing to be applicable.
"""
def __init__(self, N, *args, **kwargs):
"""
Create a Simple Good Turing estimator for the given N_r values (as
specified in the 'N' kwarg). N[r] == N_r, so N[0] should be blank.
"""
self.N = copy.copy(N)
if isinstance(self.N, list):
self.max_r = len(self.N)
elif isinstance(self.N, dict) or isinstance(self.N, defaultdict):
self.max_r = max(self.N.keys())
super(Estimator, self).__init__(*args, **kwargs)
assert not self.N[0]
self._precompute()
def _precompute(self):
"""
Do the necessary precomputation to compute estimates
"""
N = self.N
# Store 'N' as used by Gale in N[0] --- this is the total # of
# occurrences.
N[0] = sum(N[r] * r for r in range(1, self.max_r + 1))
self.Z = Z = averaging_transform.transform(N, self.max_r) # Z[r] = Z_r
self.b, self.a = self._regress(Z) # 'a' and 'b' as used in (Gale); a
# is intercept and b is slope.
assert self.b < -1, ("Log-linear slope > -1 (%f); SGT not applicable" %
self.b)
# Find the transition point between linear Good-Turing estimate and the
# Turing estimate.
self.linear_cutoff = self._find_cutoff()
self.norm_constant = self._find_norm_constant()
def _find_norm_constant(self):
N = self.N
return ((1 - self.rstar_unnorm(0)) /
fsum(N[r] * self.p_unnorm(r)
for r in range(1, self.max_r + 1)))
def _regress(self, Z):
"""
Perform linear regression on the given points in loglog space, return
result
"""
# Make a set of the nonempty points in log scale
x, y = zip(*[(log(r), log(Z[r]))
for r in range(1, self.max_r + 1) if Z[r]])
self.x, self.y = x, y
matrix = numpy.array((x, numpy.ones(len(x)))).T
return numpy.linalg.lstsq(matrix, y, rcond=None)[0]
def _find_cutoff(self):
"""
Find first r value s.t. the linear and turing estimates of r* are not
significantly different.
"""
cutoff = 1
while ((self.linear_rstar_unnorm(cutoff) -
self.turing_rstar_unnorm(cutoff))**2
> self.approx_turing_variance(cutoff)):
cutoff += 1 | Compute the approximate variance of the turing estimate for r* given r,
using the approximation given in (Gale):
var(r^{*}_T) ≈ (r + 1)^2 (N_{r+1}/N_r^2) (1 + N_{r+1}/N_r)
"""
N = self.N
return (r + 1)**2 * (N[r+1] / N[r]**2) * (1 + N[r+1] / N[r])
def linear_rstar_unnorm(self, r):
"""
Linear Good-Turing estimate of r* for given r:
log(N_r) = a + b log(r)
--> N_r = A * r^b
and r* = (r + 1)(N_{r+1})/N_r
--> r* = (r + 1)(A * (r+1)^b)/(A * r^b)
= (r+1)^{b+1) / r^b
= r (1 + 1/r)^{b+1}
"""
return r * (1 + 1/r)**(self.b + 1) if r > 0 else None
def turing_rstar_unnorm(self, r):
"""
simple Turing estimate of r* for given r (unsmoothed):
r* = (r + 1)(N_{r+1})/N_r
"""
return ((r + 1) * self.N[r + 1] / self.N[r]
if self.N[r + 1] > 0 and self.N[r] > 0
else None)
@instancememo
def rstar_unnorm(self, r):
return (self.linear_rstar_unnorm(r) if r >= self.linear_cutoff
else self.turing_rstar_unnorm(r))
@instancememo
def rstar(self, r):
return (self.rstar_unnorm(0) if r == 0
else self.rstar_unnorm(r) * self.norm_constant)
def p_unnorm(self, r):
return self.rstar_unnorm(r) / self.N[0]
def p(self, r):
return self.rstar(r) / self.N[0]
class TooFlatTest(unittest.TestCase):
input = collections.defaultdict(lambda:0)
input.update([
(1, 1),
(2, 1),
])
max_r = 2
def test_failure(self):
with self.assertRaises(AssertionError):
estimator = Estimator(N=self.input)
class ChinesePluralsTest(unittest.TestCase):
max_r = 1918
maxDiff = None
input = collections.defaultdict(lambda:0)
input.update([
(1, 268),
(2, 112),
(3, 70),
(4, 41),
(5, 24),
(6, 14),
(7, 15),
(8, 14),
(9, 8),
(10, 11),
(11, 9),
(12, 6),
(13, 6),
(14, 3),
(15, 7),
(16, 9),
(17, 4),
(18, 4),
(19, 8),
(20, 2),
(21, 4),
(22, 2),
(23, 2),
(24, 3),
(25, 4),
(26, 4),
(27, 4),
(28, 1),
(29, 1),
(31, 2),
(33, 1),
(39, 3),
(41, 1),
(46, 1),
(47, 1),
(50, 1),
(52, 2),
(53, 1),
(55, 1),
(57, 1),
(60, 1),
(74, 1),
(84, 1),
(108, 1),
(109, 1),
(177, 1),
(400, 1),
(1918, 1),
])
output = copy.copy(input)
output.update([
(0, 0.04090978),
(1, 0.8414893),
(2, 1.887716),
(3, 2.288452),
(4, 3.247259),
(5, 4.222094),
(6, 5.206074),
(7, 6.195765),
(8, 7.189259),
(9, 8.185414),
(10, 9.183503),
(11, 10.18304),
(12, 11.1837),
(13, 12.18523),
(14, 13.18746),
(15, 14.19026),
(16, 15.19353),
(17, 16.19719),
(18, 17.20118),
(19, 18.20545),
(20, 19.20996),
(21, 20.21467),
(22, 21.21956),
(23, 22.22462),
(24, 23.22981),
(25, 24.23512),
(26, 25.24054),
(27, 26.24606),
(28, 27.25167),
(29, 28.25735),
(31, 30.26893),
(33, 32.28073),
(39, 38.31721),
(41, 40.32964),
(46, 45.36113),
(47, 46.36749),
(50, 49.38667),
(52, 51.39953),
(53, 52.40597),
(55, 54.41891),
(57, 56.43188),
(60, 59.45142),
(74, 73.54344),
(84, 83.60977),
(108, 107.7701),
(109, 108.7768),
(177, 177.2346),
(400, 401.744),
(1918, 1930.037),
])
norm_constant = 1.006782
a, b = (6.683387, -1.964591)
def assertAlmostEqual(self, left, right, msg=None, places=None):
if msg:
msg = msg + (" (%r ≠ %r)" % (left, right))
if places is None:
# Six significant figures
places = 5 - int(floor(log(abs(right)) / log(10)))
unittest.TestCase.assertAlmostEqual(
self, left, right, msg=msg, places=places)
def test_unnorm_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys :
self.assertAlmostEqual(estimator.rstar_unnorm(key),
self.output[key] /
(self.norm_constant if key > 0 else 1),
msg=("%d* (unnormalized)" % (key,)))
def test_output(self):
estimator = Estimator(N=self.input)
keys = sorted(self.output.keys())
for key in keys:
self.assertAlmostEqual(estimator.rstar(key),
self.output[key],
msg=("%d* (normalized)" % (key,)))
def test_constant(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.norm_constant,
self.norm_constant,
msg="Normalization constant")
def test_linear(self):
estimator = Estimator(N=self.input)
self.assertAlmostEqual(estimator.a,
self.a,
msg="Linear regression intercept")
self.assertAlmostEqual(estimator.b,
self.b,
msg="Linear regression slope")
class ProsodyTest(ChinesePluralsTest):
max_r = 7846
input = collections.defaultdict(lambda:0)
input.update([
(1, 120),
(2, 40),
(3, 24),
(4, 13),
(5, 15),
(6, 5),
(7, 11),
(8, 2),
(9, 2),
(10, 1),
(12, 3),
(14, 2),
(15, 1),
(16, 1),
(17, 3),
(19, 1),
(20, 3),
(21, 2),
(23, 3),
(24, 3),
(25, 3),
(26, 2),
(27, 2),
(28, 1),
(31, 2),
(32, 2),
(33, 1),
(34, 2),
(36, 2),
(41, 3),
(43, 1),
(45, 3),
(46, 1),
(47, 1),
(50, 1),
(71, 1),
(84, 1),
(101, 1),
(105, 1),
(121, 1),
(124, 1),
(146, 1),
(162, 1),
(193, 1),
(199, 1),
(224, 1),
(226, 1),
(254, 1),
(257, 1),
(339, 1),
(421, 1),
(456, 1),
(481, 1),
(483, 1),
(1140, 1),
(1256, 1),
(1322, 1),
(1530, 1),
(2131, 1),
(2395, 1),
(6925, 1),
(7846, 1),
])
output = copy.copy(input)
output.update([
(0, 0.003883244),
(1, 0.7628079),
(2, 1.706448),
(3, 2.679796),
(4, 3.663988),
(5, 4.653366),
(6, 5.645628),
(7, 6.63966),
(8, 7.634856),
(9, 8.63086),
(10, 9.627446),
(12, 11.62182),
(14, 13.61725),
(15, 14.61524),
(16, 15.61336),
(17, 16.6116),
(19, 18.60836),
(20, 19.60685),
(21, 20.6054),
(23, 22.60264),
(24, 23.60133),
(25, 24.60005),
(26, 25.5988),
(27, 26.59759),
(28, 27.59639),
(31, 30.59294),
(32, 31.59183),
(33, 32.59073),
(34, 33.58964),
(36, 35.58751),
(41, 40.58235),
(43, 42.58035),
(45, 44.57836),
(46, 45.57738),
(47, 46.57641),
(50, 49.57351),
(71, 70.55399),
(84, 83.54229),
(101, 100.5272),
(105, 104.5237),
(121, 120.5097),
(124, 123.507),
(146, 145.4879),
(162, 161.474),
(193, 192.4472),
(199, 198.4421),
(224, 223.4205),
(226, 225.4188),
(254, 253.3947),
(257, 256.3922),
(339, 338.3218),
(421, 420.2514),
(456, 455.2215),
(481, 480.2),
(483, 482.1983),
(1140, 1138.636),
(1256, 1254.537),
(1322, 1320.48),
(1530, 1528.302),
(2131, 2128.788),
(2395, 2392.562),
(6925, 6918.687),
(7846, 7838.899),
])
norm_constant = 0.9991445
a, b = (4.468558, -1.389374)
class MadeUpTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 200),
(2, 20),
(3, 12),
(4, 5),
(6, 7),
(8, 4),
(9, 2),
(10, 3),
(11, 1),
(13, 2),
(16, 1),
(19, 1),
(30, 1),
])
output = copy.copy(input)
output.update([
(0, 0.3846154),
(1, 0.2069015),
(2, 1.316592),
(3, 2.252004),
(4, 3.226675),
(6, 5.226975),
(8, 7.257669),
(9, 8.27874),
(10, 9.302245),
(11, 10.32758),
(13, 12.38216),
(16, 15.47039),
(19, 18.5632),
(30, 29.92122),
])
a, b = 4.933901, -2.114833
norm_constant = 1.034508
class TrivialTest(ChinesePluralsTest):
input = collections.defaultdict(lambda:0)
input.update([
(1, 4),
(2, 1),
])
max_r = 2
a, b = 1.386294, -2
norm_constant = 0.6
output = copy.copy(input)
output.update([
(0, 2/3),
(1, .3),
(2, .8),
]) | return cutoff
def approx_turing_variance(self, r):
""" | random_line_split |
trab05.py | # Nomes: Eduardo de Sousa Siqueira nUSP: 9278299
# Igor Barbosa Grécia Lúcio nUSP: 9778821
# SCC0251 - Image Processing 1 semestre 2021
# Trabalho 05. Image Descriptors
import numpy as np
import imageio as io
from numpy.lib import stride_tricks
import sys
def luminance_preprocessing(image):
red_channel = image[:, :, 0]
green_channel = image[:, :, 1]
blue_channel = image[:, :, 2]
new_image = np.floor(
(0.299 * red_channel) +
(0.587 * green_channel) +
(0.114 * blue_channel)
)
return new_image.astype(image.dtype)
def quantize(image, bits):
return image >> (8-bits)
#Função que constrói o descritor do histograma de cores normalizado
def normalized_histogram_descriptor(image, bits):
hist, | nção que constrói a matriz de co-ocorrências de intensidade diagonalmente conectadas
def diag_coocurrence_mat(image, b):
mat_size = 2 ** b #O tamanho da matriz é igual à quantidade de intensidades diferentes da imagem
mat = np.zeros((mat_size, mat_size), dtype=int)
#Essa será a lista de todas as co-ocorrências diagonais da imagem
#Para cada pixel x, y, co_occurrences[x, y] será um array de 2 posições, onde
#co_occurrences[x, y, 0] é o valor do pixel e co_occurrences[x, y, 1] é o pixel na diagonal direita
co_occurrences = stride_tricks.as_strided(
x=image,
shape=(image.shape[0]-1, image.shape[1]-1, 2), #Não consideramos as últimas linha e coluna
strides=(*image.strides, image.strides[0] + image.strides[1]), #Os primeiros passos são iguais o da imagem, o terceiro valor guia ao pixel na diagonal (+1 linha +1 coluna)
writeable=False #Evita escritas de memória, pois essa não é uma função segura
)
#Para cada valor de intensidade, contamos todas as suas co_ocorrências usando numpy fancy indexing, e preenchemos essa linha da matriz
for intensity in np.unique(image):
counts, _ = np.histogram(co_occurrences[co_occurrences[:, :, 0] == intensity, 1], bins=mat_size)
mat[intensity] = counts
return mat
#Função que constrói o descritor a partir das 5 métricas da matriz de co-ocorrências
def haralick_texture_descriptor(image, b):
c = diag_coocurrence_mat(image, b)
c = c / np.sum(c) #Normalizando a matriz
descriptors = []
#Energy
descriptors.append(
np.sum(np.square(c))
)
#Entropy
descriptors.append(
np.multiply(
np.sum(c * np.log10(c + 0.001)),
-1 #Multiplica por -1, conforme descrito
)
)
#Contrast
#cálculo de (i - j)²
ii, jj = np.indices(c.shape)
factors = np.square(ii - jj)
#cálculo do contraste
descriptors.append(
np.sum(
c * factors
) / c.size
) #c.size == N² == número de elementos da matriz de co-ocorrencias
#Correlation
#Para o cálculo vetorizado, computamos todos os valores separadamente para cada linha e coluna
#O resultado final será uma matriz, em que cada elemento (i, j) é o valor da correlação para esse pixel
#Calculando somas parciais das linhas e colunas, transformando-as em vetores linha e coluna para possibilitar
#o broadcasting
sum_rows = np.sum(c, axis=1)[np.newaxis, :] #Transforma em um vetor linha de 1 dimensão
sum_cols = np.sum(c, axis=0)[:, np.newaxis] #Transforma em um vetor coluna
#Cálculo das médias direcionais. Será um vetor em que cada valor dir_mean[x] corresponde à média direcional da linha/coluna x
dir_mean_i = np.sum(sum_rows * ii, axis=1, keepdims=True)
dir_mean_j = np.sum(sum_cols * jj, axis=0, keepdims=True)
#Cálculo dos desvios padrões, equivalente ao cálculo anterior
std_dev_i = np.sum(np.square(ii - dir_mean_i) * sum_rows, axis=1, keepdims=True)
std_dev_j = np.sum(np.square(jj - dir_mean_j) * sum_cols, axis=0, keepdims=True)
#Inicializamos a matriz de correlações com zeros, para os casos em que os desvios padrões são 0
corr = np.zeros(c.shape, dtype=np.double)
#Cálculo vetorizado da correlação. Por causa do broadcasting de numpy e as conversões anteriores para vetores linha e coluna,
#a multiplicação de dir_mean_i e dir_mean_j resulta em uma matriz de tamanho igual ao da matriz de co-ocorrências, onde o valor [i, j]
#é igual à multiplicação de dir_mean_i[i] * dir_mean_j[j]. A multiplicação dos desvios ocorre de maneira equivalente
corr = np.divide(
(ii * jj * c) - (dir_mean_i * dir_mean_j),
(std_dev_i * std_dev_j),
out=corr,
where=np.logical_and(std_dev_i != 0, std_dev_j != 0) #O cálculo é feito apenas nas posições em que os desvios são acima de 0
)
#Fazendo a soma dos elementos da matriz anterior, obtemos o valor de correlação geral
descriptors.append(np.sum(corr))
#Homogeneity
descriptors.append(
np.sum(
c / (1 + np.abs(ii - jj))
)
)
#Evitando divisão por 0
norm = np.linalg.norm(descriptors)
return descriptors / norm if norm != 0 else descriptors
#Taken from: https://gist.github.com/arifqodari/dfd734cf61b0a4d01fde48f0024d1dc9
#Caso run.codes não aceite scipy convolve
def strided_convolution(image, weight, stride):
im_h, im_w = image.shape
f_h, f_w = weight.shape
out_shape = (1 + (im_h - f_h) // stride, 1 + (im_w - f_w) // stride, f_h, f_w)
out_strides = (image.strides[0] * stride, image.strides[1] * stride, image.strides[0], image.strides[1])
windows = stride_tricks.as_strided(image, shape=out_shape, strides=out_strides)
return np.tensordot(windows, weight, axes=((2, 3), (0, 1)))
#Função que faz o cálculo do descritor de histograma dos gradientes orientados
def oriented_gradients_histogram(image):
sobel_x = np.array([
[-1, -2, -1],
[0, 0, 0],
[1, 2, 1]
])
sobel_y = np.array([
[-1, 0, 1],
[-2, 0, 2],
[-1, 0, 1]
])
#Try-except para o run.codes caso scipy esteja quebrada
try:
from scipy import ndimage
grad_x = ndimage.convolve(image.astype(np.double), sobel_x)
grad_y = ndimage.convolve(image.astype(np.double), sobel_y)
except ImportError:
grad_x = strided_convolution(image.astype(np.double), sobel_x, 1)
grad_y = strided_convolution(image.astype(np.double), sobel_x, 1)
#Cálculo da matriz de magnitude
magnitude_num = np.sqrt(np.square(grad_x) + np.square(grad_y))
magnitude_mat = magnitude_num / np.sum(magnitude_num)
#Ignora os erros de divisão por 0
np.seterr(divide='ignore', invalid='ignore')
#Algumas divisões de 0/0 resultam em NaN, mas isso é tratado automaticamente pela função np.digitize
angles = np.arctan(grad_y / grad_x)
#Fazendo as conversões
angles = angles + (np.pi / 2)
angles = np.degrees(angles)
#Construindo as bins
bins = np.arange(0, 180, 20)
angle_bins = np.digitize(angles, bins, right=False)
#Preenchendo as bins
descriptor = [np.sum(magnitude_mat[angle_bins == i]) for i in range(9)]
#Evitando divisão por 0
norm = np.linalg.norm(descriptor)
return descriptor / norm if norm != 0 else descriptor
#Função auxiliar que calcula cada descritor e os retorna já concatenados
def compute_descriptor(image, b):
dc = normalized_histogram_descriptor(image, b)
dt = haralick_texture_descriptor(image, b)
dg = oriented_gradients_histogram(image)
return np.concatenate((dc, dt, dg))
#Função que faz a procura de um objeto na imagem à partir dos seus descritores
def find_object(image, b, object_descriptor):
#Fazendo o pré-processamento
quantized_graylevel_image = quantize(luminance_preprocessing(image), b)
#Calculando a quantidade de janelas 32x32 que cabem na imagem
window_coords = ((quantized_graylevel_image.shape[0] // 16)- 1, (quantized_graylevel_image.shape[1] // 16 )- 1) #Nessa conta, a última janela 32x32 seria metade fora do vetor, portanto é ignorada
#Cada passo pula 16 posições, para a próxima janela 32x32
window_strides = (quantized_graylevel_image.strides[0] * 16, quantized_graylevel_image.strides[1] * 16)
#Esse vetor, de 4 dimensões, computa automaticamente todas as janelas 32x32 para a imagem
#windows[x, y] é a matriz 32x32 correspondente à janela y da linha x
windows = stride_tricks.as_strided(
quantized_graylevel_image,
shape=(*window_coords, 32, 32),
#strides[0, 1] faz o cálculo dos pulos para cada janela. strides[2, 3] faz o cálculo do pixel dentro da janela
strides=(*window_strides, *quantized_graylevel_image.strides),
writeable=False
)
distances = np.zeros(window_coords)
for i in range(window_coords[0]):
for j in range(window_coords[1]):
ld = compute_descriptor(windows[i, j], b)
distances[i, j] = np.sqrt(
np.sum(
np.square(object_descriptor - ld)
)
)
coords = np.where(distances == distances.min()) #Distância mínima é onde assumimos que o programa encontrou o objeto
return (coords[0][0], coords[1][0])
def main(opt):
#Lendo entrada do programa
f = input().rstrip()
g = input().rstrip()
b = int(input().rstrip())
#Computando descritor do objeto d
object_image = np.asarray(io.imread(f))
quantized_graylevel_object = quantize(luminance_preprocessing(object_image), b)
d = compute_descriptor(quantized_graylevel_object, b)
#Fazendo a busca baseado no descritor d obtido
large_image = np.asarray(io.imread(g))
i, j = find_object(large_image, b, d)
print(i, j) #Impressão na tela dos índices da janela
#Passando arg 1 na linha de comando, faz a impressão da imagem e o resultado da busca
if opt:
import matplotlib.pyplot as plt
import matplotlib.patches as patches
fig, ax = plt.subplots()
ax . imshow ( large_image )
rect = patches.Rectangle((j * 16 , i * 16 ) , 32 , 32 ,
linewidth =1, edgecolor='r' , facecolor='none')
ax.add_patch ( rect )
plt.show ()
if __name__ == '__main__':
#Conveniência para correção.
opt = False
if len(sys.argv) == 2:
opt = True
main(opt)
| _ = np.histogram(image, bins=2 ** bits)
norm_hist = hist / np.sum(hist)
return norm_hist / np.linalg.norm(norm_hist)
#Fu | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.