Dataset schema:

file_name : large_string, lengths 4–140
prefix    : large_string, lengths 0–12.1k
suffix    : large_string, lengths 0–12k
middle    : large_string, lengths 0–7.51k
fim_type  : large_string, 4 classes (random_line_split, conditional_block, identifier_body, identifier_name)
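Each row pairs a file_name with a fill-in-the-middle (FIM) split of that file: prefix and suffix surround a held-out middle span, and fim_type records how the span was chosen. A minimal sketch of how one record reassembles and how it might be formatted for FIM training; the FimExample class and the SantaCoder-style sentinel tokens are illustrative assumptions, not part of the dataset.

```python
from dataclasses import dataclass

@dataclass
class FimExample:
    file_name: str
    prefix: str
    middle: str
    suffix: str
    fim_type: str  # random_line_split | conditional_block | identifier_body | identifier_name

    def source(self) -> str:
        # The three spans concatenate back into the original file text.
        return self.prefix + self.middle + self.suffix

    def psm_prompt(self) -> str:
        # Prefix-Suffix-Middle ordering: the model is asked to produce
        # `middle` after the <fim-middle> sentinel (sentinels assumed).
        return f"<fim-prefix>{self.prefix}<fim-suffix>{self.suffix}<fim-middle>"

# The first row below is an identifier_name example: the masked span is
# just the function name sitting between "def " and "(args):".
ex = FimExample("infer_lst.py", "def ", "main", "(args):", "identifier_name")
assert ex.source() == "def main(args):"
```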
infer_lst.py
Enhance import torchvision.transforms as T import numpy as np import torch from torch.utils.data import DataLoader import datasets import util.misc as utils from util import box_ops import datasets.samplers as samplers from datasets import build_dataset, get_coco_api_from_dataset from engine import evaluate, train_one...
(args): utils.init_distributed_mode(args) print("git:\n {}\n".format(utils.get_sha())) if args.frozen_weights is not None: assert args.masks, "Frozen training is meant for segmentation only" print(args) device = torch.device(args.device) # fix the seed for reproducibility seed = ...
main
identifier_name
server.rs
// NewService, // Service, // }; // use actix_server::{ServerConfig}; // use actix_web::dev::Server use actix::prelude::*; // use bytes::{BytesMut, Bytes}; // use futures::{ // future::{ // ok, // join_all, // Future, // }, // Async, Poll, // }; // use serde_json::to_writer; // use actix_web::{ // App, // web, // middl...
// StatusCode, // }, // Error, Request, Response, // }; // use actix_service::{
random_line_split
server.rs
req.path(); // match path { // "/db" => { // let h_srv = self.hdr_srv.clone(); // let h_ct = self.hdr_ctjson.clone(); // Box::new(self.db.get("mydomain:one") // .map(|v:String| { // let mut body = BytesMut::new(); // ...
{ // if ss[i] == msg.addr { // if v == msg.addr { s = i; f = true; break; // } }
conditional_block
server.rs
resp::{RespValue}, // }; use crate::ws::{Close as WsClose, WsSession}; // use super::db::{RedisConnection}; // pub struct App { // // db: PgConnection, // db: RedisConnection, // // db: Arc<PairedConnection>, // hdr_srv: HeaderValue, // hdr_ctjson: HeaderValue, // hdr_cthtml: HeaderValue, // ...
{ // println!("{:?} joined wsserver", msg.addr); // let mut s = &mut *self.sessions.get_mut().unwrap(); let s = &mut self.sessions; s.push(msg.addr); //.downgrade()); println!( "new web socket added to server : {} sockets opened", s.len() ); }
identifier_body
server.rs
Connection, // // db: Arc<PairedConnection>, // hdr_srv: HeaderValue, // hdr_ctjson: HeaderValue, // hdr_cthtml: HeaderValue, // } // impl Service for App { // type Request = Request; // type Response = Response; // type Error = Error; // type Future = Box<dyn Future<Item = Response, Er...
handle
identifier_name
rt_network_v3_2_x.py
', 'CurrentThrottle', 'CurrentWheel', 'Distance3D', 'MPH', 'ManualBrake', 'ManualThrottle', 'ManualWheel', 'RangeW', 'RightLaneDist', 'RightLaneType', 'LeftLaneDist', 'LeftLaneType', 'ReactionTime'] raw_df = pd.read_csv(dataset_file, usecols=raw_columns) raw_df.set_index(...
def upsample_minority_TOTs(X_train, y_train, tot_labels, random_state=27): # contat the training data together. X = pd.concat([X_train, y_train], axis=1) # separate majority and minority classes buckets = {l: X[X.TOT == l] for l in tot_labels} maj_label, majority = reduce(lambda a,b: b if b[1].shape...
all_users = df.copy() df = df.loc[df['Name'].isin(chunk_users)]
random_line_split
rt_network_v3_2_x.py
# delete sample data !rm -rf sample_data # setup nnet tools (for converting model to Stanford's nnet format) setup_nnet_tools(nnet_tools_path) # used for conversion to NNet format from NNet.utils.writeNNet import writeNNet """## Download Dataset""" # GDrive ID of csv file (AllData_ReactionTime.csv) # https://drive...
if not os.path.exists(nnet_tools_path): install_nnet_tools(nnet_tools_path) # add folder to PYTHONPATH & JUPYTER_PATH update_path_vars([nnet_tools_path])
identifier_body
rt_network_v3_2_x.py
)) def display_processed_data(feature_names, unencoded=True, encoded=True, describe=True): if unencoded: print_heading('Unencoded Data') display(pd.concat([X_train, y_train], axis=1).describe()) if encoded: enc_tot_labels = onehot.get_feature_names(input_features=['TOT']) ...
save_encoders
identifier_name
rt_network_v3_2_x.py
', 'CurrentThrottle', 'CurrentWheel', 'Distance3D', 'MPH', 'ManualBrake', 'ManualThrottle', 'ManualWheel', 'RangeW', 'RightLaneDist', 'RightLaneType', 'LeftLaneDist', 'LeftLaneType', 'ReactionTime'] raw_df = pd.read_csv(dataset_file, usecols=raw_columns) raw_df.set_index(...
if encoded: enc_tot_labels = onehot.get_feature_names(input_features=['TOT']) print_heading('Encoded Data') display(pd.concat([pd.DataFrame(X_train_enc, columns=feature_names), pd.DataFrame(y_train_enc, columns=enc_tot_labels)], ...
print_heading('Unencoded Data') display(pd.concat([X_train, y_train], axis=1).describe())
conditional_block
cdb.py
[data] [hash_lookup_table] <- there's 256 of these; they're full of babies ... each one has [hash][absolute offset] ... each is (2*entries) long for hash searches [hash_lookup_table] Usage: >>> (build a cdb) >>> read = reader("test.cdb") >>> print 'read["a key"] =', read[...
random_line_split
cdb.py
a die badly flag (throw key error) found = False # loop through the number of cells in the hash range for step in range(ncells): self.filep.seek(pos_bucket + ((offset + step) % ncells) * 8) # grab the hash and position in the data stream (hash, pointer) = unpack('<LL', self.filep.read(8)) # throw ...
starttime = time.time() print "TEST: Massive stress test for large databases (%d entries)" % massive a = builder(fname) for i in xrange(massive): a[testlist[i%len(testlist)][0]] = testlist[i%len(testlist)][1] if not i % (massive / 37): print '.', #print "%3.1f%% complete" % (float(i) / (5*(10**6))*100) a....
identifier_body
cdb.py
<- 256 pairs of uint32 structures [absolute offset][length] ... positioning works like this: header[hash & 0xff] [header] [data] <- we're jumping over this; ... each data node consists of [key_length][value_length][key][value] [data] [hash_lookup_table] <- there's 256 of these; they're full of babi...
(self,index,single=True): return_value = [] hash_prime = calc_hash(index) # pull data from the cached header headhash = hash_prime % 256 pos_bucket= self.header[headhash + headhash] ncells = self.header[headhash + headhash + 1] # since the 256*8 bytes are all zeroed, this means the hash # was invalid ...
__get
identifier_name
cdb.py
<- 256 pairs of uint32 structures [absolute offset][length] ... positioning works like this: header[hash & 0xff] [header] [data] <- we're jumping over this; ... each data node consists of [key_length][value_length][key][value] [data] [hash_lookup_table] <- there's 256 of these; they're full of babi...
else: self.filep = infile # attempt to read file from the start self.filep.seek(0) self.start = self.filep.tell() # track pointers and hash table data self.hashbucket = [ array.array('L') for i in range(256) ] # skip past header storage (file header + 2048) self.position_hash = self.start + 2048 s...
self.filep = open(infile, "w+b")
conditional_block
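The cdb.py rows above quote the file layout directly: a 2048-byte header of 256 (offset, cell-count) uint32 pairs selected by hash & 0xff, data nodes of [key_length][value_length][key][value], and per-bucket tables of (hash, pointer) pairs. A hedged sketch of the probe loop those snippets describe; function and argument names are illustrative, and the start-cell formula is the classic cdb convention rather than something shown in the rows.

```python
from struct import unpack

def cdb_probe(filep, header, key, calc_hash):
    """Return the data-stream position for `key`, following the layout
    quoted above: header[hash % 256] gives (bucket_pos, ncells), then
    8-byte (hash, pointer) cells are scanned in a circular probe."""
    h = calc_hash(key)
    slot = h % 256
    pos_bucket = header[slot * 2]        # absolute offset of this bucket
    ncells = header[slot * 2 + 1]        # number of (hash, pointer) cells
    if ncells == 0:
        raise KeyError(key)              # zeroed slot: hash never stored
    offset = (h >> 8) % ncells           # assumed classic-cdb start cell
    for step in range(ncells):
        filep.seek(pos_bucket + ((offset + step) % ncells) * 8)
        cell_hash, pointer = unpack('<LL', filep.read(8))
        if pointer == 0:
            raise KeyError(key)          # empty cell terminates the chain
        if cell_hash == h:
            return pointer               # caller reads key/value here
    raise KeyError(key)
```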
gimli.rs
seg.filesize(endian) > 0 { text_fileoff_zero = true; } } segments.push(LibrarySegment { len: seg.vmsize(endian).try_into().ok()?, stated_virtual_memory_address: seg.vmaddr(endian)...
{ // A very small, very simple LRU cache for debug info mappings. // // The hit rate should be very high, since the typical stack doesn't cross // between many shared libraries. // // The `addr2line::Context` structures are pretty expensive to create. Its // cost ...
identifier_body
gimli.rs
let name = libc::_dyld_get_image_name(i); if name.is_null() { return None; } CStr::from_ptr(name) }; // Load the image header of this library and delegate to `object` to // parse all the load command...
/// Note that this is basically an LRU cache and we'll be shifting things /// around in here as we symbolize addresses. mappings: Vec<(usize, Mapping)>,
random_line_split
gimli.rs
incorrect. For native executables, however, it // appears correct. Lifting some logic from LLDB's source it has // some special-casing for the first `__TEXT` section loaded from // file offset 0 with a nonzero size. For whatever reason when this // is present it appears ...
mapping_for_lib
identifier_name
main.py
logging.critical('Please, rename config.py.template to config.py and edit it.\nOr specify a config to load on the command line: py scgb.py <config file>') sys.exit(1) # Init database db = Database(config.stats_database) # Init banlist load_banlist() # Init soundcloud clie...
def process_comment(comment): """Process a single comment.""" if not comment.body: logging.info('Empty URL detected.') return 'Your comment is empty.' if comment.user_id in banlist['user']: logging.info('Banned user id: %d', comment.user_id) return 'You...
if config.use_advanced_description and should_update_description: update_description()
random_line_split
main.py
# Load secrets file if os.path.exists(config.token_cache): with open(config.token_cache, 'r', encoding='utf-8') as f: secrets = json.load(f) else: secrets = {} # Try to reuse the cached access token if secrets\ and secrets['version'] == SECRETS_VERS...
group_repost
identifier_name
main.py
# Init banlist load_banlist() # Init soundcloud client init_api() def init_api(): """Authenticate with SoundCloud API. Cache access token in the secrets file.""" global soundcloud import json SECRETS_VERSION = 1 # Load secrets file if os.path.exists(conf...
global db global config # Init log logging.basicConfig(stream=sys.stdout, level=logging.INFO, datefmt='[%Y-%m-%d %H:%M:%S]', format='%(asctime)s %(levelname)s %(message)s') logging.getLogger("requests").setLevel(logging.WARNING) logging.getLogger("urllib3").setLevel(logging.WARNING) # ...
identifier_body
main.py
logging.critical('Please, rename config.py.template to config.py and edit it.\nOr specify a config to load on the command line: py scgb.py <config file>') sys.exit(1) # Init database db = Database(config.stats_database) # Init banlist load_banlist() # Init soundcloud clie...
def process_comment(comment): """Process a single comment.""" if not comment.body: logging.info('Empty URL detected.') return 'Your comment is empty.' if comment.user_id in banlist['user']: logging.info('Banned user id: %d', comment.user_id) return 'Yo...
update_description()
conditional_block
sigma-form-upload-file.component.ts
el usuario */ object = Object; /** Listado de errores personalizados permitidos por el componente */ @Input('errors') errors: [] = []; /** Propiedad Placeholder asociado al campo del formulario */ @Input('placeholder') placeholder: string = ''; /** Cadena de texto con los tipos de archivos aceptados por e...
* * Método que permite validar el tipo de archivo con los permitidos * * @param control Control de formulario al cual se le asociará el mensaje de falla o éxito */ acceptValid(control: FormControl) { if (this.accept) { let validate = this.setValidateFile(control.validator, InputFileAcceptsValidat...
equired = true; } } } /*
conditional_block
sigma-form-upload-file.component.ts
izados por el usuario */ object = Object; /** Listado de errores personalizados permitidos por el componente */ @Input('errors') errors: [] = []; /** Propiedad Placeholder asociado al campo del formulario */ @Input('placeholder') placeholder: string = ''; /** Cadena de texto con los tipos de archivos acept...
addErrors() { if (this.errors.length > 0) { this.errors.map(item => { this.basicErrors.push(item); }); } } validateShowAttachFile(): Boolean { if (this.control.disabled) { return false; } if (this.maxUpload === 0 ) { return true; } if ( this.files && (...
*/
random_line_split
sigma-form-upload-file.component.ts
por el usuario */ object = Object; /** Listado de errores personalizados permitidos por el componente */ @Input('errors') errors: [] = []; /** Propiedad Placeholder asociado al campo del formulario */ @Input('placeholder') placeholder: string = ''; /** Cadena de texto con los tipos de archivos aceptados p...
l.disabled) { return false; } if (this.control.errors) { if (Object.keys(this.control.errors).length > 0) { return false; } } if (!this.autoUpdate) { return false; } return true; } /** * Método encargado de adicionar los archivos cargados por el cliente a...
(this.contro
identifier_name
sigma-form-upload-file.component.ts
por el usuario */ object = Object; /** Listado de errores personalizados permitidos por el componente */ @Input('errors') errors: [] = []; /** Propiedad Placeholder asociado al campo del formulario */ @Input('placeholder') placeholder: string = ''; /** Cadena de texto con los tipos de archivos aceptados p...
is.onTouch(); this.response(); this.status = 'reseting'; this.detectChange(); this.action = 'edit'; this.ngOnInit(); } /** * Método que permite la asignación de los errores de tipos de archivo * a la sección de errores del formulario */ setMensajeErrorTipoArchivo() { this.errorTi...
.response(); this.status = 'rewrite'; this.detectChange(); this.clearInputHiden(); } resetFormConditions() { th
identifier_body
types.rs
macro_rules! retain_release { ($name:ident, $ffi_ref:ty, $retain_fn:tt, $drop_fn:tt) => { impl Drop for $name { fn drop(&mut self) { unsafe { $drop_fn(self.0) }; } } impl Clone for $name { fn clone(&self) -> $name { let x =...
use std::ffi::CString; use std::ops::Deref; use std::ptr::{null, null_mut};
random_line_split
types.rs
f(&self) -> &$ffi_ref { &self.0 } } }; } unsafe impl Send for GlobalContext {} unsafe impl Sync for GlobalContext {} unsafe impl Send for Context {} unsafe impl Sync for Context {} unsafe impl Send for String {} unsafe impl Sync for String {} unsafe impl Send for Object {} unsaf...
} impl fmt::Debug for Exception { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Exception") .field("stack", &self.stack()) .field("message", &self.message()) .finish() } } impl fmt::Display for Exception { fn fmt(&self, f: &mut fmt::For...
{ let mut s = f.debug_struct("Object"); unsafe { let array = JSObjectCopyPropertyNames(*self.0, self.1); let size = JSPropertyNameArrayGetCount(array); for i in 0..size { let js_ref = JSPropertyNameArrayGetNameAtIndex(array, i); let pr...
identifier_body
types.rs
f(&self) -> &$ffi_ref { &self.0 } } }; } unsafe impl Send for GlobalContext {} unsafe impl Sync for GlobalContext {} unsafe impl Send for Context {} unsafe impl Sync for Context {} unsafe impl Send for String {} unsafe impl Sync for String {} unsafe impl Send for Object {} unsaf...
<'a>(&'a self, script: &'a String) -> Result<Value, Exception> { self.evaluate_script_sync(script) } pub fn add_function( &self, name: &str, callback: JsCallback, ) -> Result<(), Box<dyn std::error::Error>> { let name = String::new(name).unwrap(); let obj = s...
evaluate_script
identifier_name
setting.go
用户名: %s</br> 密码: %s</br>邮箱: %s</body></html>`, r.Referer(), r.Referer(), getuser.Nickname, getuser.Password, getuser.Email), getuser.Email) // // 验证组和职位不能为空 // if getuser.StatusGroup == "" || getuser.RoleGroup == "" || getuser.Position == "" { // w.Write(errorcode.Error("验证组和职位不能为空")) // return // } // //1,先要...
db.Mconn.GetRows(getallsql, uid) if err != nil { golog.Error(err) w.Write(errorcode.ErrorE(err)) return } for adminrows.Next() { ul := &user.User{} err = adminrows.Scan(&ul.Id, &ul.Createtime, &ul.Realname, &ul.Nickname, &ul.Email, &ul.Disable, &ul.RoleGroup, &ul.StatusGroup, &ul.Position) ...
rite(send) return } else { getallsql := `select u.id,createtime,realname,nickname,email,disable, j.name from user as u join jobs as j on u.jid in (select id from jobs where hypo=(select jid from user where id=?))` adminrows, err :=
conditional_block
setting.go
用户名: %s</br> 密码: %s</br>邮箱: %s</body></html>`, r.Referer(), r.Referer(), getuser.Nickname, getuser.Password, getuser.Email), getuser.Email) // // 验证组和职位不能为空 // if getuser.StatusGroup == "" || getuser.RoleGroup == "" || getuser.Position == "" { // w.Write(errorcode.Error("验证组和职位不能为空")) // return // } // //1,先要...
r)) return } _, err = db.Mconn.Update("update bugs set dustbin=ABS(dustbin-1) where uid=?", id) if err != nil { golog.Error(err) w.Write(errorcode.ErrorE(err)) return } send, _ := json.Marshal(errorcode) w.Write(send) return } // 显示自己能管理的权限,不显示自己的 func UserList(w http.ResponseWriter, r *http.Request) ...
e.ErrorE(er
identifier_name
setting.go
code.IsUse()) return } // 查看用户组是否存在此用户 userrows, err := db.Mconn.GetRows("select ids from usergroup") if err != nil { golog.Error(err) w.Write(errorcode.ErrorE(err)) return } var hasgroup bool for userrows.Next() { var ids string userrows.Scan(&ids) for _, v := range strings.Split(ids, ",") { if...
Marshal(ts) w.Write(send) return } func ResetPwd(w http.ResponseWriter, r *http.Request) { errorcode := &response.Response{} rp := xmux.GetInstance(r).Data.(*user.ResetPassword) newpassword := encrypt.PwdEncrypt(rp.Password, cache.Salt) updatepwdsql := "update user set password=? where id=?" _, err := db.M...
identifier_body
setting.go
"github.com/hyahm/golog" "github.com/hyahm/xmux" ) func CreateUser(w http.ResponseWriter, r *http.Request) { errorcode := &response.Response{} // nickname := xmux.GetInstance(r).Get("nickname").(string) uid := xmux.GetInstance(r).Get("uid").(int64) createTime := time.Now().Unix() getuser := xmux.GetInstance(r...
"itflow/internal/user" "net/http" "strings" "time"
random_line_split
util.js
dragleave", function (event) { // 当拖动元素离开可放置目标节点,重置其背景 if (event.target.className == "dropzone") { event.target.style.background = ""; } }, false); document.addEventListener("drop", function (event) { // 阻止默认动作(如打开一些元素的链接) event.preventDefault(); // 移动拖动的元素到所选择的放置目标节...
aram dictMap 数据字典Map */ initConditionData(arr, dictMap) { for (let i = 0; i < arr.length; i++) { var key = arr[i].dict || arr[i].key; if (key && dictMap[key]) { this.attachDataMap(arr[i], dictMap[key]); } } }, /** * 获取字典表Map。 * @param callback 字典获取之后回调执行函数 * */ ...
数组 * @p
identifier_name
util.js
); let src = CryptoJS.enc.Base64.stringify(base64); var decrypt = CryptoJS.AES.decrypt(src, key, { iv: key, mode: CryptoJS.mode.CBC, padding: CryptoJS.pad.ZeroPadding }); var decryptedStr = decrypt.toString(CryptoJS.enc.Utf8); return decryptedStr.toString(); }, /* 数组对象排序,arr...
(tDSL[k])); } else { ret = !(v === tDSL[k]); } if (ret) { break; } } return ret; } /* function getInervalHour(startDate, endDate) { var ms = endDate.getTime() - startDate.getTime(); if (ms < 0) return 0; return Math.floor(ms/1000/60/60); } console.log("...
identifier_body
util.js
dragleave", function (event) { // 当拖动元素离开可放置目标节点,重置其背景 if (event.target.className == "dropzone") { event.target.style.background = ""; } }, false); document.addEventListener("drop", function (event) { // 阻止默认动作(如打开一些元素的链接) event.preventDefault(); // 移动拖动的元素到所选择的放置目标节...
查询条件的选项标签添加对应值映射和key。 * @param arr 查询条件数组 * @param dictMap 数据字典Map */ initConditionData(arr, dictMap) { for (let i = 0; i < arr.length; i++) { var key = arr[i].dict || arr[i].key; if (key && dictMap[key]) { this.attachDataMap(arr[i], dictMap[key]); } } }, /** * 获取字典表M...
ion (item, index) { map[item[key]] = item[value] }) } return map }, /** * 给多个
conditional_block
util.js
("dragleave", function (event) { // 当拖动元素离开可放置目标节点,重置其背景 if (event.target.className == "dropzone") { event.target.style.background = ""; } }, false); document.addEventListener("drop", function (event) { // 阻止默认动作(如打开一些元素的链接) event.preventDefault(); // 移动拖动的元素到所选择的放置目...
var decryptedStr = decrypt.toString(CryptoJS.enc.Utf8); return decryptedStr.toString(); }, /* 数组对象排序,array-数组对象,key-排序字段,status=0-升序,status=1-降序 */ sortByKey(array, key, status) { return array.sort(function (a, b) { var x = a[key]; var y = b[key]; if (status === 0) { return ...
});
random_line_split
Assembler.py
each translation process. numOfVariables = 0 def translate_c_command(command): command_array = command.split("=") if len(command_array) == 1: # no destination command_and_jump_array = command_array[0].split(";") destination_command = "" else: # if length = 2 destinatio...
if command.startswith('('): marker_dictionary[command[1:-1]] = line_counter continue commands_list.append(command) line_counter += 1 return commands_list, marker_dictionary def assemble_file(assembly_file_name, hack_file_name): """ ...
""" This function process an assembly file before it's translation to machine code. It creates a dictionary, and places into it all markers in the code, and assigns each one of them it's location in code, allowing to use it as a reference in future. While doing so, it deletes each marker's decl...
identifier_body
Assembler.py
RAM_RESERVE_END = 16 # A constant representing the first place in RAM available for variables VAR_FIRST_MEM = 16 """ Global variables""" # A global variable representing the number of variables created in the # supplied assembly code. When translating multiple files, this variable is # set to 0 at the beginning...
SCREEN = 16384 KBD = 24576 # The last number of RAM to be reserved
random_line_split
Assembler.py
each translation process. numOfVariables = 0 def translate_c_command(command): command_array = command.split("=") if len(command_array) == 1: # no destination command_and_jump_array = command_array[0].split(";") destination_command = "" else: # if length = 2 destinatio...
# A only commands elif compute_command == "0": suffix = "101010" elif compute_command == "1": suffix = "111111" elif compute_command == "-1": suffix = "111010" elif compute_command == "D": suffix = "001100" elif compute_command == "!D": suffi...
suffix = "010101"
conditional_block
Assembler.py
each translation process. numOfVariables = 0 def translate_c_command(command): command_array = command.split("=") if len(command_array) == 1: # no destination command_and_jump_array = command_array[0].split(";") destination_command = "" else: # if length = 2 destinatio...
(hack_file, marker_dictionary, cmd): """ This function writes a translated assembly name as hack machine code into the supplied .hack file. The function uses helper functions to translate code according to the type of code. :param hack_file: a .hack file (destination for hack machine code) ...
write_cmd
identifier_name
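The Assembler.py rows describe a two-pass scheme: a first pass collects (LABEL) markers into a dictionary keyed by instruction address and drops the declarations, so later passes can resolve jumps. A short sketch of that first pass as the snippets describe it; comment and whitespace handling is an assumption.

```python
def first_pass(lines):
    """Collect (LABEL) markers -> instruction addresses; keep real commands."""
    commands_list, marker_dictionary = [], {}
    line_counter = 0
    for raw in lines:
        command = raw.split('//')[0].strip()  # assumed comment stripping
        if not command:
            continue
        if command.startswith('('):
            # Marker declaration: record its address, emit no instruction.
            marker_dictionary[command[1:-1]] = line_counter
            continue
        commands_list.append(command)
        line_counter += 1
    return commands_list, marker_dictionary

cmds, markers = first_pass(["@i", "(LOOP)", "D=M", "@LOOP", "0;JMP"])
assert markers == {"LOOP": 1} and len(cmds) == 4
```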
unqfy.js
ArtistNotFound') const { Artist, Album, Track, User, Playlist, Listening } = require('./entities/all') // esto hace falta para el framework de persistencia const {ArtistCreation, TrackCreation, UserCreation} = require('./entities-creation/all') // Method objects const PlaylistGenerator = require('./PlaylistGenerator.j...
() { return this._nextId } ///////////////////// addUser({name, email}) { const newUser = new UserCreation(this, {name, email}).handle() this._entitiesRepository.add('user', newUser) return newUser } verifyId(id){ return this._nextId >= id } registerListening(userId, trackId) { ...
id
identifier_name
unqfy.js
ArtistNotFound') const { Artist, Album, Track, User, Playlist, Listening } = require('./entities/all') // esto hace falta para el framework de persistencia const {ArtistCreation, TrackCreation, UserCreation} = require('./entities-creation/all') // Method objects const PlaylistGenerator = require('./PlaylistGenerator.j...
getPlaylistById(id) { return this._entitiesRepository.findBy('playlist', {prop: 'id', value: id}) } getUserById(id) { return this._entitiesRepository.findBy('user' , {prop: 'id', value: id}) } getArtistByName(aName) { return this._entitiesRepository.findBy('artist', { prop: 'name', value: aName }) ...
{ return this._entitiesRepository.findBy('track' , {prop: 'id', value: id}) }
identifier_body
unqfy.js
/ArtistNotFound') const { Artist, Album, Track, User, Playlist, Listening } = require('./entities/all') // esto hace falta para el framework de persistencia const {ArtistCreation, TrackCreation, UserCreation} = require('./entities-creation/all') // Method objects const PlaylistGenerator = require('./PlaylistGenerator....
const artist = this._getAuthorOfAlbum(album) this._removeFromAllPlaylists(album.tracks) artist.removeAlbum(album) } /* TRACK */ addTrack(albumId, {name, duration, genres}) { const lyricsProvider = this.lyricsProvider; const newTrack = new TrackCreation(this, {name, duration, genres, lyricsPro...
removeAlbum(albumId) { const album = this.getAlbumById(albumId)
random_line_split
unqfy.js
ArtistNotFound') const { Artist, Album, Track, User, Playlist, Listening } = require('./entities/all') // esto hace falta para el framework de persistencia const {ArtistCreation, TrackCreation, UserCreation} = require('./entities-creation/all') // Method objects const PlaylistGenerator = require('./PlaylistGenerator.j...
} updateArtist(artistId, artistData) { const artist = this.getArtistById(artistId) artist.update(artistData)
{ throw new ArtistNotFound(artistName) }
conditional_block
tag.rs
0, 0, pos_bytes[0], pos_bytes[1], ]); tag_fst_items.push((key, value)); } } } tag_fst_items.sort_by(|(a, _), (b, _)| a.cmp(b)); le...
( &self, word: &str, add_lower: bool, add_lower_if_empty: bool, ) -> Vec<WordData> { let mut tags = self.get_raw(&word); let lower = word.to_lowercase(); if (add_lower || (add_lower_if_empty && tags.is_empty())) && (word != lower &...
get_strict_tags
identifier_name
tag.rs
PosIdInt>, word_store: BiMap<String, WordIdInt>, groups: DefaultHashMap<WordIdInt, Vec<WordIdInt>>, } impl Tagger { fn get_lines<S1: AsRef<Path>, S2: AsRef<Path>>( paths: &[S1], remove_paths: &[S2], ) -> std::io::Result<Vec<(String, String, String)>> { let mut output = Vec::new...
{ let indices = word .char_indices() .take(std::cmp::max(n_chars - 4, 0) as usize) .skip(1) .map(|x| x.0); // the word always has at least one char if the above condition is satisfied // but s...
conditional_block
tag.rs
0, 0, pos_bytes[0], pos_bytes[1], ]); tag_fst_items.push((key, value)); } } } tag_fst_items.sort_by(|(a, _), (b, _)| a.cmp(b)); le...
let reader = std::io::BufReader::new(file); for line in reader.lines() { let line = line?; if line.starts_with('#') { continue; } if disallowed.contains(&line) { continue; } ...
random_line_split
tag.rs
} disallowed.push(line.to_string()); } } for path in paths { let file = File::open(path.as_ref())?; let reader = std::io::BufReader::new(file); for line in reader.lines() { let line = line?; if line.start...
{ self.word_store .get_by_left(lemma) .and_then(|x| self.groups.get(x)) .map(|vec| vec.iter().map(|x| self.str_for_word_id(x)).collect()) .unwrap_or_else(Vec::new) }
identifier_body
base.py
{} # Map of reverse relation self.reverse_rel = {} # Map of related classes and the field associated self.rel_class = {} def add_field(self, field): """ Add a field to the class. It makes sure all related variables are up to date """ if fie...
""" Save a row """ # For each field get the value to insert values = {key : self._meta.fields[key].insert_format(value) for key, value in self.dictValues.items()} if self._meta.primary_key: # If an id exist then we should update if self.id: ...
identifier_body
base.py
{} # Map of reverse relation self.reverse_rel = {} # Map of related classes and the field associated self.rel_class = {} def add_field(self, field): """ Add a field to the class. It makes sure all related variables are up to date """ if fie...
if self.id: pk = yield self.update(values) if self._meta.propagate: self._meta.database.propagate(self) # Else it means we should create the row else: # XXX To Do: What happen if insert failed. What should we return ...
conditional_block
base.py
import inlineCallbacks, returnValue from fields import PrimaryKeyField from query import SelectQuery, \ InsertQuery, \ AddQuery, \ RemoveQuery, \ UpdateQuery, \ DeleteQuery """ Metaclass enables to have a set of variable for ea...
(self, name, value): """ Overide __setattr__ to update dict value and field value at once """ object.__setattr__(self, name, value) if name in self.dictValues: # If updating a field value if self._meta.fields[name].salt: # field is salt # If field is ...
__setattr__
identifier_name
base.py
up to date """ if field.name in self.fields: print("WARNING: Field {0} already in model {1}" .format(field.name, self.table_name)) return self.fields[field.name] = field self.sorted_fields.append(field) self.sorted_fields_names....
def propagate_update(self, dictValues):
random_line_split
rca.rs
is rolled up by aggregate fn (e.g. sum) // - 2nd drill is rolled up by groupArray, which just collects all the values into an array in // order. // - the original measure is also rolled up by groupArray. // // Then the pivoted table is melted using Array Join on the 2nd drill and the original measure // (which would b...
( table: &TableSql, cuts: &[CutSql], drills: &[DrilldownSql], meas: &[MeasureSql], rca: &RcaSql, ) -> (String, String) { // append the correct rca drill to drilldowns // for a, both // for b, d2 // for c, d1 // for d, none let mut a_drills = drills.to_vec(); let mut b...
calculate
identifier_name
rca.rs
is rolled up by aggregate fn (e.g. sum) // - 2nd drill is rolled up by groupArray, which just collects all the values into an array in // order. // - the original measure is also rolled up by groupArray. // // Then the pivoted table is melted using Array Join on the 2nd drill and the original measure // (which would b...
else { format!("groupArray({col}_{alias_postfix}) as {col}_{alias_postfix}_s", col=l.key_column, alias_postfix=alias_postfix) } }) }); let group_array_rca_drill_2 = join(group_array_rca_drill_2, ", "); let join_array_rca_drill_2 = rca.drill_2.iter() ...
{ format!("groupArray({key_col}_{alias_postfix}) as {key_col}_{alias_postfix}_s, groupArray({name_col}_{alias_postfix}) as {name_col}_{alias_postfix}_s", key_col=l.key_column, name_col=name_col, alias_postfix=alias_postfix) }
conditional_block
rca.rs
// // The optimization is to derive the c and d aggregates from a and b. Since cuts are allowed on the // first drill in the rca, both a and b have to be scanned (b cannot be cut on the first drill). // // In clickhouse there is no partition, so it's trickier to do what looks like two different group // by. // // The g...
// drill dim).
random_line_split
rca.rs
measure is rolled up by aggregate fn (e.g. sum) // - 2nd drill is rolled up by groupArray, which just collects all the values into an array in // order. // - the original measure is also rolled up by groupArray. // // Then the pivoted table is melted using Array Join on the 2nd drill and the original measure // (which...
println!("c: {:?}", c_drills); println!("d: {:?}", d_drills); // prepend the rca sql to meas let all_meas = { let mut temp = vec![rca.mea.clone()]; temp.extend_from_slice(meas); temp }; // for cuts, // - a can be cut on d1 and ext // - b cannot be int cut, only ...
{ // append the correct rca drill to drilldowns // for a, both // for b, d2 // for c, d1 // for d, none let mut a_drills = drills.to_vec(); let mut b_drills = drills.to_vec(); let mut c_drills = drills.to_vec(); let d_drills = drills.to_vec(); a_drills.extend_from_slice(&rca...
identifier_body
acss.go
, secret) if err != nil { return a.broadcastImplicate(err, msgs) } a.outS = myShare a.tryOutput() // Maybe the READY messages are already received. return a.handleImplicateRecoverPending(a.broadcastVote(msgVoteOK, msgs)) } // > on receiving <OK> from n-f parties: // > send <READY> to all parties func (a *acss...
} } fu
identifier_name
acss.go
It can be provided by the dealer only. func (a *acssImpl) Input(input gpa.Input) gpa.OutMessages { if a.me != a.dealer { panic(errors.New("only dealer can initiate the sharing")) } if input == nil { panic(errors.New("we expect kyber.Scalar as input")) } return a.handleInput(input.(kyber.Scalar)) } // Receive...
// NOTE: We assume `if out == true:` stands for a wait for such condition. func (a *acssImpl) handleImplicate(msg *msgImplicateRecover) gpa.OutMessages { peerIndex := a.peerIndex(msg.sender) if peerIndex == -1 {
random_line_split
acss.go
dealer. voteOKRecv map[gpa.NodeID]bool // A set of received OK votes. voteREADYRecv map[gpa.NodeID]bool // A set of received READY votes. voteREADYSent bool // Have we sent our READY vote? pendingIRMsgs []*msgImplicateRecover // I/R messages are buffered, ...
if !a.voteREADYSent && count >= (a.n-a.f) { a.voteREADYSent = true return a.broadcastVote(msgVoteREADY, gpa.NoMessages()) } return nil } // > on receiving <READY> from f+1 parties: // > send <READY
identifier_body
acss.go
github.com/tyurek/hbACSS // // A PoC implementation: <https://github.com/Wollac/async.go>. // // The Crypto part shown the pseudo-code above is replaced in the implementation with the // scheme allowing to keep the private keys secret. The scheme implementation is taken // from the PoC mentioned above. It is described ...
r the algorithm is the secret to share. // It can be provided by the dealer only. func (a *acssImpl) Input(input gpa.Input) gpa.OutMessages { if a.me != a.dealer { panic(errors.New("only dealer can initiate the sharing")) } if input == nil { panic(errors.New("we expect kyber.Scalar as input")) } return a.handl...
gpa.NewOwnHandler(me, &a) } // Input fo
conditional_block
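The acss.go rows quote the vote rules of the ACSS protocol: send READY after OK votes from n-f parties, and also after READY votes from f+1 parties. A toy sketch of that threshold logic only; this is a standalone illustration, not the hbACSS implementation referenced in the rows.

```python
class VoteTracker:
    """Toy READY-vote logic for n parties with up to f faulty."""
    def __init__(self, n: int, f: int):
        self.n, self.f = n, f
        self.ok_recv: set[str] = set()
        self.ready_recv: set[str] = set()
        self.ready_sent = False

    def _maybe_send_ready(self) -> bool:
        # > on receiving <OK> from n-f parties: send <READY>
        # > on receiving <READY> from f+1 parties: send <READY> (once)
        if not self.ready_sent and (
            len(self.ok_recv) >= self.n - self.f
            or len(self.ready_recv) >= self.f + 1
        ):
            self.ready_sent = True
            return True
        return False

    def on_ok(self, sender: str) -> bool:
        self.ok_recv.add(sender)
        return self._maybe_send_ready()

    def on_ready(self, sender: str) -> bool:
        self.ready_recv.add(sender)
        return self._maybe_send_ready()

t = VoteTracker(n=4, f=1)
assert not t.on_ok("p1") and not t.on_ok("p2")
assert t.on_ok("p3")  # n - f = 3 OK votes reached -> send READY once
```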
discord.go
bot.state.Open(); discordErr != nil { return nil, discordErr } // Get our Discord user self, discordErr := bot.state.Me() if discordErr != nil { return nil, discordErr } // Set our data and create the Minecraft watcher bot.id = self.ID bot.name = self.Username bot.avatarURL = self.AvatarURL() if Confi...
bot.channel = discord.ChannelID(snowflake) } else { return nil, errors.New("no channel ID configured") } bot.watcher = NewWatcher(self.Username, *Config.Minecraft.CustomDeathKeywords) return bot, discordErr } // Close cleans up the watcher and closes the Discord session. func (bot *DiscordBot) Close() error...
{ return nil, discordErr }
conditional_block
discord.go
bot.state.Open(); discordErr != nil { return nil, discordErr } // Get our Discord user self, discordErr := bot.state.Me() if discordErr != nil { return nil, discordErr } // Set our data and create the Minecraft watcher bot.id = self.ID bot.name = self.Username bot.avatarURL = self.AvatarURL() if Confi...
() { // Make our messages channel mc := make(chan *MinecraftMessage) // Start our Minecraft watcher go bot.watcher.Watch(mc) for { // Read message from the channel msg := <-mc Log.Debugf("Received a line from Minecraft: Username='%s', Text='%s'\n", msg.Username, msg.Message) // Don't send messages that a...
WaitForMessages
identifier_name
discord.go
(bot *DiscordBot) onGuildCreate(e *gateway.GuildCreateEvent) { // Make sure the guild is available if e.Unavailable { Log.Warnf("Attempted to join Guild '%s', but it was unavailable\n", e.Guild.Name) return } if bot.guildID.String() != "" { Log.Warnf("Received a Guild join event for '%s', but we've already ...
{ // Format command to send to the Minecraft server command := fmt.Sprintf("tellraw @a %s", Config.Minecraft.TellrawTemplate) command = strings.Replace(command, "%username%", username, -1) command = strings.Replace(command, "%message%", content, -1) // Create RCON connection conn, err := rcon.Dial(Config.Minecra...
identifier_body
discord.go
ancementMessage: { if !Config.Discord.MessageOptions.ShowAdvancements { continue } } case DeathMessage: { if !Config.Discord.MessageOptions.ShowDeaths { continue } } case JoinLeaveMessage: { if !Config.Discord.MessageOptions.ShowJoinsLeaves { continue } } }...
content = strings.Replace(content, fmt.Sprintf("<#%s>", id), fmt.Sprintf("#%s", channel.Name), -1) }
random_line_split
mod.rs
32, pub h: u32, } impl From<Rect> for AtlasRect { fn from(rect: Rect) -> AtlasRect { AtlasRect { x: rect.x, y: rect.y, w: rect.w, h: rect.h, } } } pub type AtlasTextureRegion = (f32, f32, f32, f32); pub enum TileShape { Static, Autot...
fn texture_size(&self, texture_idx: usize) -> (u32, u32) { self.textures[texture_idx].dimensions() } fn get_frame(&self, tile_type: &str) -> &AtlasFrame { let tex_name = &self.config.locations[tile_type]; &self.config.frames[tex_name] } pub fn get_tile_texture_idx(&self, ...
{ self.texture_size(frame.texture_idx) }
identifier_body
mod.rs
: texture_idx, rect: AtlasRect::from(rect), offsets: HashMap::new(), } } } pub type TilePacker<'a> = TexturePacker<'a, DynamicImage, SkylinePacker<Rgba<u8>>>; pub struct TileAtlas { config: TileAtlasConfig, textures: Vec<Texture2d>, indices: Vec<String>, } pub struct T...
get_texture_offset_indexed
identifier_name
mod.rs
32, pub h: u32, } impl From<Rect> for AtlasRect { fn from(rect: Rect) -> AtlasRect { AtlasRect { x: rect.x, y: rect.y, w: rect.w, h: rect.h, } } } pub type AtlasTextureRegion = (f32, f32, f32, f32); pub enum TileShape { Static, Autot...
pub fn new(texture_idx: usize, rect: Rect, tile_size: (u32, u32)) -> Self { AtlasFrame { tile_size: tile_size, texture_idx: texture_idx, rect: AtlasRect::from(rect), offsets: HashMap::new(), } } } pub type TilePacker<'a> = TexturePacker<'a, Dynami...
offsets: HashMap<String, AtlasTile>, } impl AtlasFrame {
random_line_split
compensation_disp.py
(max_v_gr, signal_duration, factor=0.9): # delta k powinno być = 1/(n(delta_x) i mniejsze niż 1/(m*delta_t*v_gr_max) m*delta_t jest równe długości trwania sygnału :) if signal_duration <= 0: print("Długość sygnału musi być większa od 0") exit(0) if factor >= 1: print("Współczynnik mu...
w, freq_sampling_kHz, omega): value = -1 for ind in range(len(freq_sampling_kHz)-1): if freq_sampling_kHz[ind] == omega: value = G_w[ind] break elif freq_sampling_kHz[ind] < omega and freq_sampling_kHz[ind + 1] > omega: a = (G_w[ind] - G_w[ind+1])/(freq_sampli...
ct[ind] < temp_k and k_vect[ind + 1] > temp_k: omega = mode.findPointWithGivenK_rad_s([mode.points[ind], mode.points[ind+1]], temp_k) break return omega def find_value_by_omega_in_G_w(G_
conditional_block
compensation_disp.py
omega = mode.findPointWithGivenK_rad_s([mode.points[-2], mode.points[-1]], temp_k) elif temp_k < k_vect[0]: if mode.points[0].w < 5: temp_point = selectMode.Point() omega = mode.findPointWithGivenK_rad_s([temp_point, mode.points[0]], temp_k) else: omega = mode.po...
[0] dt = time[-1]/len(time) frequency_from_numpy = np.fft.rfftfreq(len(signal[1]), d=dt)*1e-3 G_w = np.sqrt(signal_after_fft.real**2 + signal_after_fft.imag**2) #znalezienie najsilniejszej/średniej omegi max_g = G_w[0] max_ind = 0 for ind, g in enumerate(G_w): if g>max_g: ...
identifier_body
compensation_disp.py
k_temp = Anim_dyspersji.curve_sampling(mode.all_omega_khz, dispercion_curves.k_v, [f_Nyq_kHz]) if k_temp > max_k_Nyq: max_k_Nyq = k_temp return factor*max_k_Nyq[0] # Zwracana wartość jest w rad/m def calculate_delta_k(max_v_gr, signal_duration, factor=0.9): # delta k powinno być = ...
random_line_split
compensation_disp.py
temp_k > k_vect[-1]: omega = mode.findPointWithGivenK([mode.points[-2], mode.points[-1]], temp_k) elif temp_k < k_vect[0]: if mode.points[0].w < 5: temp_point = selectMode.Point() omega = mode.findPointWithGivenK([temp_point, mode.points[0]], temp_k) else: ...
urves): signal_after_f
identifier_name
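The Polish comment in the compensation_disp.py rows states the wavenumber-resolution constraint in words: delta k should equal 1/(N·Δx) and stay below 1/(M·Δt·v_gr,max), where M·Δt is the signal duration. In symbols, writing T = M·Δt for the duration:

```latex
\Delta k = \frac{1}{N\,\Delta x}, \qquad
\Delta k < \frac{1}{M\,\Delta t\; v_{\mathrm{gr},\max}} = \frac{1}{T\, v_{\mathrm{gr},\max}}
```

The snippet's factor=0.9 (rejected when factor >= 1) appears to act as the safety margin applied to this bound.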
channelamqp.go
AMQPChannelStatus", 0, err) return err } // Given a PCF response message, parse it to extract the desired statistics func parseAMQPChlData(instanceType int32, cfh *ibmmq.MQCFH, buf []byte) string { var elem *ibmmq.PCFParameter traceEntry("parseAMQPChlData") ci := getConnection(GetConnectionKey()) //os := &ci.ob...
case ibmmq.MQIACH_CHANNEL_TYPE:
random_line_split
channelamqp.go
Attribute(attr, "Channel Status", ibmmq.MQIACH_CHANNEL_STATUS) attr = ATTR_CHL_SINCE_MSG st.Attributes[attr] = newStatusAttribute(attr, "Time Since Msg", -1) // These are not really monitoring metrics but it may enable calculations to be made such as %used for // the channel instance availability. It's extracted ...
for k := range amqpInfoMap { amqpInfoMap[k].AttrCurInst = 0 } channelPatterns := strings.Split(patterns, ",") if len(channelPatterns) == 0 { traceExit("CollectAMQPChannelStatus", 1) return nil } for _, pattern := range channelPatterns { pattern = strings.TrimSpace(pattern) if len(pattern) == 0 { ...
{ st.Attributes[k].Values = make(map[string]*StatusValue) }
conditional_block
channelamqp.go
Attribute(attr, "Channel Status", ibmmq.MQIACH_CHANNEL_STATUS) attr = ATTR_CHL_SINCE_MSG st.Attributes[attr] = newStatusAttribute(attr, "Time Since Msg", -1) // These are not really monitoring metrics but it may enable calculations to be made such as %used for // the channel instance availability. It's extracted ...
} channelPatterns := strings.Split(patterns, ",") if len(channelPatterns) == 0 { traceExit("CollectAMQPChannelStatus", 1) return nil } for _, pattern := range channelPatterns { pattern = strings.TrimSpace(pattern) if len(pattern) == 0 { continue } // This would allow us to extract SAVED informati...
{ var err error traceEntry("CollectAMQPChannelStatus") ci := getConnection(GetConnectionKey()) os := &ci.objectStatus[OT_CHANNEL_AMQP] st := GetObjectStatus(GetConnectionKey(), OT_CHANNEL_AMQP) os.objectSeen = make(map[string]bool) // Record which channels have been seen in this period ChannelAMQPInitAttribu...
identifier_body
channelamqp.go
(patterns string) error { var err error traceEntry("CollectAMQPChannelStatus") ci := getConnection(GetConnectionKey()) os := &ci.objectStatus[OT_CHANNEL_AMQP] st := GetObjectStatus(GetConnectionKey(), OT_CHANNEL_AMQP) os.objectSeen = make(map[string]bool) // Record which channels have been seen in this period ...
inquireAMQPChannelAttributes
identifier_name
btckey.go
G */ Q := secp256k1.ScalarBaseMult(priv.D) /* Check that Q is on the curve */ if !secp256k1.IsOnCurve(Q) { panic("Catastrophic math logic failure in public key derivation.") } priv.X = Q.X priv.Y = Q.Y return &priv.PublicKey } // GenerateKey generates a public and private key pair using random source rand....
b = append([]byte{0x3f}, b...) } /* Verify checksum */ if len(b) < 5 { return 0, nil, fmt.Errorf("Invalid base-58 check string: missing checksum.") } /* Create a new SHA256 context */ sha256_h := sha256.New() /* SHA256 Hash #1 */ sha256_h.Reset() sha256_h.Write(b[:len(b)-4]) hash1 := sha256_h.Sum(nil) ...
break }
conditional_block
btckey.go
bcpy = append(bcpy, hash2[0:4]...) /* Encode base58 string */ s = b58encode(bcpy) /* For number of leading 0's in bytes, prepend 1 */ for _, v := range bcpy { if v != 0 { break } s = "1" + s } return s } // b58checkdecode decodes base-58 check encoded string s into a version ver and byte slice b. fu...
P, err := secp256k1.Decompress(new(big.Int).SetBytes(b[1:33]), uint(b[0]&0x1)) if err != nil {
random_line_split
btckey.go
G */ Q := secp256k1.ScalarBaseMult(priv.D) /* Check that Q is on the curve */ if !secp256k1.IsOnCurve(Q) { panic("Catastrophic math logic failure in public key derivation.") } priv.X = Q.X priv.Y = Q.Y return &priv.PublicKey } // GenerateKey generates a public and private key pair using random source rand....
return false, fmt.Errorf("Invalid private key bytes, unknown suffix byte 0x%02x.", priv_bytes[len(priv_bytes)-1]) } return true, nil } // ToBytes converts a Bitcoin private key to a 32-byte byte slice. func (priv *PrivateKey) ToBytes() (b []byte) { d := priv.D.Bytes() /* Pad D to 32 bytes */ padded_d := appen...
/* See https://en.bitcoin.it/wiki/Wallet_import_format */ /* Base58 Check Decode the WIF string */ ver, priv_bytes, err := b58checkdecode(wif) if err != nil { return false, err } /* Check that the version byte is 0x80 */ if ver != 0x80 { return false, fmt.Errorf("Invalid WIF version 0x%02x, expected 0x80."...
identifier_body
btckey.go
string wif is a valid Wallet Import Format or Wallet Import Format Compressed string. If it is not, err is populated with the reason. func CheckWIF(wif string) (valid bool, err error) { /* See https://en.bitcoin.it/wiki/Wallet_import_format */ /* Base58 Check Decode the WIF string */ ver, priv_bytes, err := b58che...
AddressUncompressed()
identifier_name
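The btckey.go rows spell out the base58check rules: the checksum is the first four bytes of a double SHA-256 over the payload, a decoded string must be at least five bytes long, and each leading zero byte encodes as a leading '1'. A small sketch of the checksum half, using hashlib; names are illustrative.

```python
import hashlib

def b58check_checksum(payload: bytes) -> bytes:
    """First 4 bytes of SHA256(SHA256(payload)), per the rows above."""
    hash1 = hashlib.sha256(payload).digest()
    hash2 = hashlib.sha256(hash1).digest()
    return hash2[:4]

def b58check_verify(decoded: bytes) -> bool:
    # A decoded base-58 check string must carry at least a 4-byte checksum.
    if len(decoded) < 5:
        return False
    return b58check_checksum(decoded[:-4]) == decoded[-4:]
```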
unbond.rs
let max_peg_fee = amount * recovery_fee; let required_peg_fee = ((total_supply + current_batch.requested_with_fee) - state.total_bond_amount)?; let peg_fee = Uint128::min(max_peg_fee, required_peg_fee); amount_with_fee = (amount - peg_fee)?; } else { amount_with_f...
{ // Read params let params = read_parameters(&deps.storage).load()?; let epoch_period = params.epoch_period; let threshold = params.er_threshold; let recovery_fee = params.peg_recovery_fee; let mut current_batch = read_current_batch(&deps.storage).load()?; // Check slashing, update state,...
identifier_body
unbond.rs
<S: Storage, A: Api, Q: Querier>( deps: &mut Extern<S, A, Q>, env: Env, amount: Uint128, sender: HumanAddr, ) -> StdResult<HandleResponse> { // Read params let params = read_parameters(&deps.storage).load()?; let epoch_period = params.epoch_period; let threshold = params.er_threshold; ...
handle_unbond
identifier_name
unbond.rs
params.epoch_period; let threshold = params.er_threshold; let recovery_fee = params.peg_recovery_fee; let mut current_batch = read_current_batch(&deps.storage).load()?; // Check slashing, update state, and calculate the new exchange rate. slashing(deps, env.clone())?; let mut state = read_st...
})?; // Send the money to the user let msgs = vec![BankMsg::Send { from_address: contract_address.clone(), to_address: sender_human, amount: coins(withdraw_amount.u128(), &*coin_denom), } .into()]; let res = HandleResponse { messages: msgs, log: vec![ ...
Ok(last_state)
random_line_split
main.rs
(std::time::Duration::from_secs(15)); // After 8 probes go unacknowledged treat the connection as dead. options.keepalive_count = Some(8); options } struct RedirectInfo { url: Option<hyper::Uri>, referrer: Option<hyper::Uri>, method: hyper::Method, } fn redirect_info( old_uri: &hyper::Uri...
(mut self, loader_client: net_http::LoaderClientProxy) -> Result<(), zx::Status> { let client = fhyper::new_https_client_from_tcp_options(tcp_options()); loop { break match client.request(self.build_request()).await { Ok(hyper_response) => { let redirect =...
start
identifier_name
main.rs
(std::time::Duration::from_secs(15)); // After 8 probes go unacknowledged treat the connection as dead. options.keepalive_count = Some(8); options } struct RedirectInfo { url: Option<hyper::Uri>, referrer: Option<hyper::Uri>, method: hyper::Method, } fn redirect_info( old_uri: &hyper::Uri...
Err(e) => { debug!("Not redirecting because: {}", e); break Ok(()); } }; trace!("Redirect allowed to {} {}", self.method, self.u...
{ let client = fhyper::new_https_client_from_tcp_options(tcp_options()); loop { break match client.request(self.build_request()).await { Ok(hyper_response) => { let redirect = redirect_info(&self.url, &self.method, &hyper_response); if ...
identifier_body
main.rs
(std::time::Duration::from_secs(15)); // After 8 probes go unacknowledged treat the connection as dead. options.keepalive_count = Some(8); options } struct RedirectInfo { url: Option<hyper::Uri>, referrer: Option<hyper::Uri>, method: hyper::Method, } fn redirect_info( old_uri: &hyper::Uri...
} fn to_error_response(error: net_http::Error) -> net_http::Response { net_http::Response { error: Some(error), body: None, final_url: None, status_code: None, status_line: None, headers: None, redirect: None, ..net_http::Response::EMPTY } } stru...
random_line_split
spritecfg.rs
let fst = chunks.next(); let snd = chunks.next(); match fst { Some("MAIN") => match snd { Some(ofs) => main = usize::from_str_radix(ofs, 16).unwrap(), _ => return single_error("No offset after \"MAIN\" declaration"), }, Some("INIT") => match snd { Some(ofs) => init = usize::f...
{ let mut it = buf.split_whitespace().skip(1); let mut d = [0u8; 9]; for output_byte in &mut d { if let Some(s) = it.next() { *output_byte = try!(read_byte(s)); } else { return Err(CfgErr{ explain: String::from("Old-style CFG too short") }); } }; let (name, name_set) = default_name(path, gen, id); le...
identifier_body
spritecfg.rs
0, 0, 0], dys_option_bytes: [0, 0], acts_like: 0, extra_bytes: 0, name: "".to_string(), desc: "".to_string(), name_set: None, desc_set: None, source_path: PathBuf::from(""), } } pub fn needs_init(&self) -> bool { match self.genus {...
if init == 0 && self.needs_init() { return single_error("No init routine"); } if drop == 0 && self.needs_drop() { return single_error("Drop routine required by dys_opts, but not provided"); } if drop != 0 && !self.needs_drop() { return single_error("Sprite...
{ return single_error("No main routine"); }
conditional_block
spritecfg.rs
0, 0, 0], dys_option_bytes: [0, 0], acts_like: 0, extra_bytes: 0, name: "".to_string(), desc: "".to_string(), name_set: None, desc_set: None, source_path: PathBuf::from(""), } } pub fn needs_init(&self) -> bool { match self.genus {...
(&self, ebit: bool) -> &String { if ebit && self.name_set.is_some() { self.name_set.as_ref().unwrap() } else { &self.name } } pub fn desc(&self, ebit: bool) -> &String { if ebit && self.desc_set.is_some() { self.desc_set.as_ref().unwrap() } else { &self.desc } } pub fn uses_ebit(&self) -> ...
name
identifier_name
spritecfg.rs
, 0, 0, 0], dys_option_bytes: [0, 0], acts_like: 0, extra_bytes: 0, name: "".to_string(), desc: "".to_string(), name_set: None, desc_set: None, source_path: PathBuf::from(""), } } pub fn needs_init(&self) -> bool { match self.genus ...
{ let mut tempasm = OpenOptions::new() .write(true) .truncate(true) .create(true) .open(temp) .unwrap(); tempasm.write_all(prelude.as_bytes()).unwrap(); let mut source_buf = Vec::<u8>::with_capacity(8 ...
random_line_split
models.py
SIZE_BIG = 4 SIZE_HOUSE = 10 # required_bohnen are per abo, not per person AboTyp = namedtuple('AboTyp', ['size', 'name_short', 'name_long', 'description', 'min_anteilsscheine', 'visible', 'required_bohnen', 'cost']); abo_types = { SIZE_NONE: AboTyp( size=SIZE_NONE, name_short='Keins...
: name = models.CharField("Name", max_length=100, unique=True) description = models.TextField("Beschreibung", max_length=1000, default="") core = models.BooleanField("Kernbereich", default=False) hidden = models.BooleanField("versteckt", default=False) coordinator = models.ForeignKey(Loco, on_delete...
bose_name_plural = "Anteilscheine" class Taetigkeitsbereich(models.Model)
identifier_body
models.py
print 'get_abos_by_size', self, result return result """ def small_abos(self): return len(self.active_abos().filter(Q(groesse=1) | Q(groesse=3))) def big_abos(self): return len(self.active_abos().filter(Q(groesse=2) | Q(groesse=3) | Q(groesse=4))) + len(self.act...
if abo_size is not Abo.SIZE_NONE: result[abo_size] = len(self.active_abos().filter(groesse=abo_size))
conditional_block
models.py
SIZE_BIG = 4 SIZE_HOUSE = 10 # required_bohnen are per abo, not per person AboTyp = namedtuple('AboTyp', ['size', 'name_short', 'name_long', 'description', 'min_anteilsscheine', 'visible', 'required_bohnen', 'cost']); abo_types = { SIZE_NONE: AboTyp( size=SIZE_NONE, name_short='Keins...
# user class is only used for logins, permissions, and other builtin django stuff # all user information should be stored in the Loco model user = models.OneToOneField(User, related_name='loco', null=True, blank=True) first_name = models.CharField("Vorname", max_length=30) last_name = models.CharFi...
random_line_split
models.py
SIZE_BIG = 4 SIZE_HOUSE = 10 # required_bohnen are per abo, not per person AboTyp = namedtuple('AboTyp', ['size', 'name_short', 'name_long', 'description', 'min_anteilsscheine', 'visible', 'required_bohnen', 'cost']); abo_types = { SIZE_NONE: AboTyp( size=SIZE_NONE, name_short='Keins...
): """ Callback to create corresponding loco when new user is created. """ if created: username = helpers.make_username(instance.first_name, instance.last_name, instance.email) user = User(username=username) user.save() user = User.objects....
**kdws
identifier_name
jwt.rs
; pub static JWT_AUDIENCE_FIRESTORE: &str = "https://firestore.googleapis.com/google.firestore.v1.Firestore"; pub static JWT_AUDIENCE_IDENTITY: &str = "https://identitytoolkit.googleapis.com/google.identity.identitytoolkit.v1.IdentityToolkit"; pub trait PrivateClaims where Self: Serialize + DeserializeOwn...
.get(&format!( "https://www.googleapis.com/service_accounts/v1/jwk/{}", account_mail )) .send() .await?; let jwk_set: JWKSetDTO = resp.json().await?; Ok(jwk_set) } /// Returns true if the access token (assumed to be a jwt) has expired /// /// An error is ...
pub async fn download_google_jwks_async(account_mail: &str) -> Result<JWKSetDTO, Error> { let resp = reqwest::Client::new()
random_line_split
jwt.rs
pub static JWT_AUDIENCE_FIRESTORE: &str = "https://firestore.googleapis.com/google.firestore.v1.Firestore"; pub static JWT_AUDIENCE_IDENTITY: &str = "https://identitytoolkit.googleapis.com/google.identity.identitytoolkit.v1.IdentityToolkit"; pub trait PrivateClaims where Self: Serialize + DeserializeOwned ...
{ #[serde(flatten)] pub(crate) headers: biscuit::jws::RegisteredHeader, #[serde(flatten)] pub(crate) ne: biscuit::jwk::RSAKeyParameters, } #[derive(Serialize, Deserialize)] pub struct JWKSetDTO { pub keys: Vec<JWSEntry>, } /// Download the Google JWK Set for a given service account. /// The resul...
JWSEntry
identifier_name
jwt.rs
pub static JWT_AUDIENCE_FIRESTORE: &str = "https://firestore.googleapis.com/google.firestore.v1.Firestore"; pub static JWT_AUDIENCE_IDENTITY: &str = "https://identitytoolkit.googleapis.com/google.identity.identitytoolkit.v1.IdentityToolkit"; pub trait PrivateClaims where Self: Serialize + DeserializeOwned ...
} pub(crate) fn verify_access_token( credentials: &Credentials, access_token: &str, ) -> Result<TokenValidationResult, Error> { verify_access_token_with_claims(credentials, access_token) } pub fn verify_access_token_with_claims<T: Private
{ self.claims.get_scopes() }
identifier_body
jwt.rs
pub static JWT_AUDIENCE_FIRESTORE: &str = "https://firestore.googleapis.com/google.firestore.v1.Firestore"; pub static JWT_AUDIENCE_IDENTITY: &str = "https://identitytoolkit.googleapis.com/google.identity.identitytoolkit.v1.IdentityToolkit"; pub trait PrivateClaims where Self: Serialize + DeserializeOwned ...
Ok(true) } /// Returns true if the jwt was updated and needs signing pub(crate) fn jwt_update_expiry_if(jwt: &mut AuthClaimsJWT, expire_in_minutes: i64) -> bool { let ref mut claims = jwt.payload_mut().unwrap().registered; let now = biscuit::Timestamp::from(Utc::now()); if let Some(issued_at) = clai...
{ let diff: Duration = Utc::now().signed_duration_since(expiry.deref().clone()); return Ok(diff.num_minutes() - tolerance_in_minutes > 0); }
conditional_block
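The jwt.rs rows include an expiry test with a tolerance: the token counts as expired once now minus the exp claim exceeds tolerance_in_minutes. The same check in a few lines; names are illustrative.

```python
from datetime import datetime, timezone

def has_expired(expiry: datetime, tolerance_in_minutes: int = 0) -> bool:
    # Mirrors the Rust: diff = now - expiry; diff.num_minutes() - tolerance > 0
    diff_minutes = (datetime.now(timezone.utc) - expiry).total_seconds() / 60
    return diff_minutes - tolerance_in_minutes > 0
```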
spy.rs
Html { match col.index { // timestamp 0 => render_timestamp(&self.0), // device id 1 => self.device().into(), // payload 2 => render_data_short(&self.0), // ignore _ => html! {}, } } fn render_detai...
let on_error = Closure::wrap(Box::new(move || { link.send_message(Msg::Failed); }) as Box<dyn FnMut()>); source.set_onerror(Some(&on_error.into_js_value().into())); // store result self.running = true; self.source = Some(source); } fn stop(&mut self...
// setup onerror let link = self.link.clone();
random_line_split
spy.rs
type Properties = (); fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self { Self { events: Default::default(), link, source: None, running: false, total_received: 0, application: String::new(), } } ...
ibuteEntry(pub
identifier_name
service.rs
_cpu_usage: f64, pub max_instances: i64, pub instances: i64, pub tasks: HashMap<String, String>, } #[derive(Debug)] pub struct Statistic { pub timestamp: f64, pub cpu_time: f64, pub cpu_usage: f64, pub mem_usage: f64, } #[derive(Debug, Deserialize)] struct TaskStatistic { cpus_limit: f...
if task.is_none() { continue; } let task = task.unwrap(); timestamp = task.timestamp; cpus.push(task.cpus_user_time_secs + task.cpus_system_time_secs); mems.push(100.0 * task.mem_rss_bytes as f64 / ...
let mut timestamp: f64 = 0.0; for task in tasks {
random_line_split
service.rs
_usage: f64, pub max_instances: i64, pub instances: i64, pub tasks: HashMap<String, String>, } #[derive(Debug)] pub struct Statistic { pub timestamp: f64, pub cpu_time: f64, pub cpu_usage: f64, pub mem_usage: f64, } #[derive(Debug, Deserialize)] struct TaskStatistic { cpus_limit: f64, ...
for task in tasks { if task.is_none() { continue; } let task = task.unwrap(); timestamp = task.timestamp; cpus.push(task.cpus_user_time_secs + task.cpus_system_time_secs); mems.push(100.0 * ...
{ let mut futs = Vec::new(); for (id, slave_id) in &app.tasks { let url = slaves.get::<String>(&slave_id).unwrap().to_string(); futs.push(self.get_task_statistic(url, id)); } let mut prev_timestamp = 0.0; let mut prev_cpu_time = 0.0; if let Some...
identifier_body
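Finally, a hedged sketch of consuming a local copy of this table with the datasets library; the parquet file name is hypothetical, and the column names come from the schema at the top.

```python
from collections import Counter
from datasets import load_dataset

# Hypothetical local file holding the rows shown above.
ds = load_dataset("parquet", data_files="fim_examples.parquet", split="train")

# fim_type has 4 classes: random_line_split, conditional_block,
# identifier_body, identifier_name.
print(Counter(ds["fim_type"]))
```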