| file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes) |
|---|---|---|---|---|
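Each row is one fill-in-the-middle (FIM) example: `prefix` and `suffix` are the code surrounding a masked span, `middle` is the masked span itself, and `fim_type` records how that span was chosen. A minimal sketch of how the columns fit back together, assuming only the column names above (the preview ships no loader code, so the dict-per-row shape is an assumption):

```python
# Reassemble the original source span from one row of the table.
# Assumption: a row is a plain dict keyed by the column names above.

def reconstruct(example: dict) -> str:
    """Concatenate the three string fields back into the original code."""
    return example["prefix"] + example["middle"] + example["suffix"]

# Tiny synthetic row (illustrative, not taken from the dataset):
row = {
    "file_name": "demo.py",
    "prefix": "def answer():\n",
    "middle": "    return 42\n",
    "suffix": "",
    "fim_type": "random_line_split",
}
assert reconstruct(row) == "def answer():\n    return 42\n"
```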
instances.go | .New("embedded instance is not from this independent instance")
}
//if inst.attrs == nil {
// inst.attrs = make(map[string]*data.Attribute)
//}
//
//for _, attr := range startAttrs {
// inst.attrs[attr.Name()] = attr
//}
inst.startInstance(embedded)
return nil
}
func (inst *IndependentInstance) Start(start... |
//inst.scheduleEval(host)
}
} else {
inst.returnError = err | random_line_split | |
instances.go | .taskInsts = make(map[string]*TaskInst)
embeddedInst.linkInsts = make(map[int]*LinkInst)
embeddedInst.flowURI = flowURI
if inst.subFlows == nil {
inst.subFlows = make(map[int]*Instance)
}
inst.subFlows[embeddedInst.subFlowId] = embeddedInst
inst.ChangeTracker.SubFlowChange(taskInst.flowInst.subFlowId, CtAdd, ... |
func (inst *IndependentInstance) DoStep() bool {
hasNext := false
inst.ResetChanges()
inst.stepID++
if inst.status == model.FlowStatusActive {
// get item to be worked on
item, ok := inst.workItemQueue.Pop()
if ok {
logger.Debug("Retrieved item from Flow Instance work queue")
workItem := item.(... | {
return inst.stepID
} | identifier_body |
instances.go | .taskInsts = make(map[string]*TaskInst)
embeddedInst.linkInsts = make(map[int]*LinkInst)
embeddedInst.flowURI = flowURI
if inst.subFlows == nil {
inst.subFlows = make(map[int]*Instance)
}
inst.subFlows[embeddedInst.subFlowId] = embeddedInst
inst.ChangeTracker.SubFlowChange(taskInst.flowInst.subFlowId, CtAdd, ... |
inst.scheduleEval(host)
}
//if containerInst.isHandlingError {
// //was the error handler, so directly under instance
// host,ok := containerInst.host.(*EmbeddedInstance)
// if ok {
// host.SetStatus(model.FlowStatusCompleted)
// host.returnData = containerInst.returnData
// host.retur... | {
host.SetOutput(value.Name(), value.Value())
} | conditional_block |
instances.go | = inst.flowModel.GetTaskBehavior(typeID)
}
// track the fact that the work item was removed from the queue
inst.ChangeTracker.trackWorkItem(&WorkItemQueueChange{ChgType: CtDel, ID: workItem.ID, WorkItem: workItem})
inst.execTask(behavior, workItem.taskInst)
hasNext = true
} else {
logger.Debug("... | NewActivityEvalError | identifier_name | |
ffi.rs | Vec<_>>();
SvmProblem {
nodes: nodes,
node_ptrs: node_ptrs,
y: y.data().iter().map(|&x| x as f64).collect::<Vec<_>>(),
}
}
/// Returns the unsafe object that can be passed into `libsvm`.
fn build_problem(&self) -> LibsvmProblem {
LibsvmProblem {
... | {
Err(CStr::from_ptr(message).to_str().unwrap().to_owned())
} | conditional_block | |
ffi.rs | svm_node: *const *const LibsvmNode,
}
/// Safe version of `LibsvmProblem`.
pub struct SvmProblem {
nodes: Vec<Vec<LibsvmNode>>,
node_ptrs: Vec<*const LibsvmNode>,
y: Vec<f64>,
}
/// Conert a row of the X matrix to its Libsvm representation.
fn row_to_nodes<T: NonzeroIterable>(row: T) -> Vec<LibsvmNode... | cache_size: f64,
eps: f64,
C: f64,
nr_weight: i32,
weight_label: *const i32,
weight: *const f64,
nu: f64,
p: f64,
shrinking: i32,
probability: i32,
}
/// Safe representation of `LibsvmParameter`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SvmParameter {
svm_t... | kernel_type: KernelType,
degree: i32,
gamma: f64,
coef0: f64, | random_line_split |
ffi.rs | svm_node: *const *const LibsvmNode,
}
/// Safe version of `LibsvmProblem`.
pub struct SvmProblem {
nodes: Vec<Vec<LibsvmNode>>,
node_ptrs: Vec<*const LibsvmNode>,
y: Vec<f64>,
}
/// Conert a row of the X matrix to its Libsvm representation.
fn | <T: NonzeroIterable>(row: T) -> Vec<LibsvmNode> {
let mut nodes = Vec::new();
for (index, value) in row.iter_nonzero() {
nodes.push(LibsvmNode::new(index as i32, value as f64));
}
// Sentinel value for end of row
nodes.push(LibsvmNode::new(-1, 0.0));
nodes
}
impl SvmProblem {
///... | row_to_nodes | identifier_name |
ffi.rs | svm_node: *const *const LibsvmNode,
}
/// Safe version of `LibsvmProblem`.
pub struct SvmProblem {
nodes: Vec<Vec<LibsvmNode>>,
node_ptrs: Vec<*const LibsvmNode>,
y: Vec<f64>,
}
/// Conert a row of the X matrix to its Libsvm representation.
fn row_to_nodes<T: NonzeroIterable>(row: T) -> Vec<LibsvmNode> {... |
/// Returns the unsafe object that can be passed into `libsvm`.
fn build_problem(&self) -> LibsvmProblem {
LibsvmProblem {
l: self.nodes.len() as i32,
y: self.y.as_ptr(),
svm_node: self.node_ptrs.as_ptr(),
}
}
}
/// `libsvm` representation of training p... | {
let mut nodes = Vec::with_capacity(X.rows());
for row in X.iter_rows() {
let row_nodes = row_to_nodes(row);
nodes.push(row_nodes)
}
let node_ptrs = nodes.iter().map(|x| x.as_ptr()).collect::<Vec<_>>();
SvmProblem {
nodes: nodes,
... | identifier_body |
auxiliary_plots.py | of country of input df
pc = the columnnumber of the column that we want to plot
Returns:
---------
The return is a formated plot
"""
# define the columns of input
# cc = data.columns[checkcol]
#pc = data.columns[plotcol]
plt.rcParams['font.size'] = 18
... |
###
def quali_descriptive_plots(data, liste):
sns.set_theme(style="whitegrid")
fig, axes = plt.subplots(3, 2, figsize=(14, 16))
# Use the axes for plotting
axes[0,0].set_title(liste[0])
sns.violinplot(x=liste[0], y="OFn_all", data=data, ax=axes[0,0], inner = "quartiles");
axes[... | sns.set_theme(style="whitegrid")
f, axs = plt.subplots(2,1, figsize = (12,10))
plt.subplots_adjust(wspace=0.25)
plt.subplot(211)
ODA_like = data[data.flow_class == "ODA-like"].flow.value_counts()
ax = sns.barplot(y=ODA_like.index, x=ODA_like.values)
ax.set_xlabel("Number of projects")
... | identifier_body |
auxiliary_plots.py | country of input df
pc = the columnnumber of the column that we want to plot
Returns:
---------
The return is a formated plot
"""
# define the columns of input
# cc = data.columns[checkcol]
#pc = data.columns[plotcol]
plt.rcParams['font.size'] = 18
# ... |
###
def flow_class_plot(data):
sns.set_theme(style="whitegrid")
fig, ax = plt.subplots(1,2, figsize = (14,6))
plt.subplots_adjust(wspace=0.5)
plotting = data.flow_class.value_counts(1)
plt.subplot(121)
ax = sns.barplot(x=plotting.index, y=plotting.values)
ax.set_ylabel("share")... | fig.suptitle('Chinese Development Finance (probability)', fontsize=25)
world_df.plot(column=pc, ax = ax, legend=True, cmap='jet', legend_kwds={"label":"\n Probability of receiving Chinese Development Finance (2000-2014)",###ADDDDJUST!!!!!
... | conditional_block |
auxiliary_plots.py | 2f}, \nOOF-like:{plotting2[1]/1e6:.2f}, \nVague OF:{plotting2[2]/1e6:.2f}")
#print((plotting2[0]/1e6)/(plotting2.values.sum()/1e6))
return df
###
def year_plot(data):
sns.set_theme(style="whitegrid")
year = np.unique(data.year)
total_projects_year = data.year.value_counts().sort_index()
... | ax3.set_xticklabels(["2002","2003","2004","2005","2006","2007","2008","2009","2010","2011","2012","2013","2014"]); | random_line_split | |
auxiliary_plots.py | (data, cc, pc):
""" Function to plot a custom colored worldmap with help of a standart GeoPandas dataframe. I used the iso3 number of the countries
in order to clearly identify the countries and assign the choosen value (financial amount or project count) to the
specific country
For plotting, we h... | worldplot_2 | identifier_name | |
binge-watch.mock.ts | Ophüls",
description:
"A reluctant Hobbit, Bilbo Baggins, sets out to the Lonely Mountain with a spirited group of dwarves to reclaim their mountain home, and the gold within it from the dragon Smaug.",
image:
"https://m.media-amazon.com/images/M/MV5BMTcwNTE4MTUxMl5BMl5BanBnXkFtZTcwMDIyODM4OA@@... | noSeasons: 18,
director: "Marcel Ophüls",
description:
"In a wacky Rhode Island town, a dysfunctional family strive to cope with everyday life as they are thrown from one crazy scenario to another.",
image:
| random_line_split | |
lib.rs | "아나타".into());
let mut foo = "あなた당신あなた".into();
item.apply(&mut foo);
assert_eq!(foo, "아나타당신아나타");
}
#[test]
#[should_panic]
fn dict_item_empty_key_test() {
let _item = EzDictItem::new("".into(), "123".into());
}
#[test]
fn dict_item_empty_value_test() {
let item = EzDictItem::new("123".into(), "... | if !self.sort {
return;
}
self.after_dict
.sort_unstable_by(|l, r| l.key().cmp(r.key()));
}
pub fn sort(&mut self) {
self.sort_after_dict();
self.sort_before_dict();
}
}
mod dict_items {
use super::EzDictItem;
use serde::de::{MapAccess... | ;
}
self.before_dict
.sort_unstable_by(|l, r| l.key().cmp(r.key()));
}
pub fn sort_after_dict(&mut self) {
| identifier_body |
lib.rs | "아나타".into());
let mut foo = "あなた당신あなた".into();
item.apply(&mut foo);
assert_eq!(foo, "아나타당신아나타");
}
#[test]
#[should_panic]
fn dict_item_empty_key_test() {
let _item = EzDictItem::new("".into(), "123".into());
}
#[test]
fn dict_item_empty_value_test() {
let item = EzDictItem::new("123".into(), "... | impl EzDict {
pub fn sort_before_dict(&mut self) {
if !self.sort {
return;
}
self.before_dict
.sort_unstable_by(|l, r| l.key().cmp(r.key()));
}
pub fn sort_after_dict(&mut self) {
if !self.sort {
return;
}
self.after_dict... | random_line_split | |
lib.rs | "아나타".into());
let mut foo = "あなた당신あなた".into();
item.apply(&mut foo);
assert_eq!(foo, "아나타당신아나타");
}
#[test]
#[should_panic]
fn dict_item_empty_key_test() {
let _item = EzDictItem::new("".into(), "123".into());
}
#[test]
fn dict_item_empty_value_test() {
let item = EzDictItem::new("123".into(), "... | t dict = &mut self.dict;
let lib = &self.lib;
let buf = &mut self.encode_buffer;
let str_buf = &mut self.string_buffer;
self.cache.entry(text.into()).or_insert_with(move || {
str_buf.push_str(text);
let mut encoder = SHIFT_JIS.new_encoder();
let mut ... | r {
le | identifier_name |
bksv.go | "},
"comments": {c.Description},
"responserequired": {"N"},
"enquirytype": {"C"},
"submit": {"Submit complaint"},
//"submitkey": {submitkey},
"nowebtrak": {"1"},
"defaulttime": {"0"},
"webtraklinkback": {""},
"title": {""},
"homephone": {""},
"workphone": {""},
"c... |
return vals
}
// }}}
// {{{ PostComplaint
// https://complaints-staging.bksv.com/sfo2?json=1&resp=json
// {"result":"1",
// "title":"Complaint Received",
// "body":"Thank you. We have received your complaint."}
func PostComplaint(client *http.Client, c complaintdb.Complaint) (*complaintdb.Submission, error) {
... | {
vals.Add("acid", c.AircraftOverhead.Callsign)
vals.Add("aacode", c.AircraftOverhead.Id2)
vals.Add("tailnumber", c.AircraftOverhead.Registration)
//vals.Add("adflag", "??") // Operation type (A, D or O for Arr, Dept or Overflight)
//vals.Add("beacon", "??") // Squawk SSR code (eg 2100)
} | conditional_block |
bksv.go | (c complaintdb.Complaint, submitkey string) url.Values {
first,last := c.Profile.SplitName()
if c.Activity == "" { c.Activity = "Loud noise" }
address1 := ""
addr := c.Profile.GetStructuredAddress()
if addr.Street == "" {
address1 = c.Profile.Address // default to the raw string, if we don't have a structured o... | PopulateForm | identifier_name | |
bksv.go | U"},
"comments": {c.Description},
"responserequired": {"N"},
"enquirytype": {"C"},
"submit": {"Submit complaint"},
//"submitkey": {submitkey},
"nowebtrak": {"1"},
"defaulttime": {"0"},
"webtraklinkback": {""},
"title": {""},
"homephone": {""},
"workphone": {""},
"... | }
indentedBytes,_ := json.MarshalIndent(jsonMap, "", " ")
s.Log += "\n-- JsonMap:-\n"+string(indentedBytes)+"\n--\n"
/* on success ...
-- JsonMap:-
{
"body": "Thank you, your submission has been received. Would you like to save these details for next time?",
"receipt_key": "adasdsdadsdasds786dsa87d6as87d6as",... | return debug,fmt.Errorf("Returned response did not say 'received your complaint'")
} else {
debug += "Success !\n"+string(body)
}
*/ | random_line_split |
bksv.go | "},
"comments": {c.Description},
"responserequired": {"N"},
"enquirytype": {"C"},
"submit": {"Submit complaint"},
//"submitkey": {submitkey},
"nowebtrak": {"1"},
"defaulttime": {"0"},
"webtraklinkback": {""},
"title": {""},
"homephone": {""},
"workphone": {""},
"c... | for k,v := range vals { s.Log += fmt.Sprintf(" * %-20.20s: %v\n", k, v) }
s.Log += "\n"
// resp,err := client.PostForm("https://"+bksvHost+bksvPath, vals)
req,_ := http.NewRequest("POST", "https://"+bksvHost+bksvPath, strings.NewReader(vals.Encode()))
req.Header.Set("Content-Type", "application/x-www-form-urlenco... | {
// Initialize a new submission object, inheriting from previous
s := complaintdb.Submission{
Attempts: c.Submission.Attempts + 1,
Log: c.Submission.Log+fmt.Sprintf("\n--------=={ PostComplaint @ %s }==-\n", time.Now()),
Key: c.Submission.Key, // We're now keyless, should prob strip this out
T:... | identifier_body |
write.go | image in an accompanying .sig file:
sigFn := target
ext := filepath.Ext(sigFn)
if ext == "" {
return "", fmt.Errorf("BUG: cannot derive signature file name from matches[0]=%q", matches[0])
}
sigFn = strings.TrimSuffix(sigFn, ext) + ".sig"
w, err = fw.File(sigFn, st.ModTime())
if err != nil {
return... | writeRoot | identifier_name | |
write.go | string(b)
if p.Cfg.SerialConsoleOrDefault() != "off" {
config = strings.ReplaceAll(config, "enable_uart=0", "enable_uart=1")
}
w, err := fw.File("/config.txt", time.Now())
if err != nil {
return err
}
_, err = w.Write([]byte(config))
return err
}
func shortenSHA256(sum []byte) string {
hash := fmt.Sprintf... | if ent.Filename == ent2.Filename {
f = ent
break
} | random_line_split | |
write.go | b",
}
)
func (p *Pack) writeBoot(f io.Writer, mbrfilename string) error {
fmt.Printf("\n")
fmt.Printf("Creating boot file system\n")
done := measure.Interactively("creating boot file system")
fragment := ""
defer func() {
done(fragment)
}()
globs := make([]string, 0, len(firmwareGlobs)+len(kernelGlobs))
if... | {
for _, ent := range fi.Dirents {
// TODO: split path into components and compare piecemeal
if ent.Filename == path {
return ent
}
}
log.Panicf("mustFindDirent(%q) did not find directory entry", path)
return nil
} | identifier_body | |
write.go | err != nil {
return err
}
if err := copyFile(fw, "/"+filepath.Base(m), src); err != nil {
return err
}
}
}
// EEPROM update procedure. See also:
// https://news.ycombinator.com/item?id=21674550
writeEepromUpdateFile := func(globPattern, target string) (sig string, _ error) {
matches, err := f... | {
initMainPkgs, err := buildEnv.MainPackages([]string{cfg.InternalCompatibilityFlags.InitPkg})
if err != nil {
return nil, err
}
for _, pkg := range initMainPkgs {
if got, want := pkg.Basename(), "init"; got != want {
log.Printf("Error: -init_pkg=%q produced unexpected binary name: got %q, want %q", c... | conditional_block | |
circular.menu.helpers.ts | vector v such that OP1 + OP2 = v
*/
const sumCoords = (p1, p2) => ({x: p1.x + p2.x, y: p1.y + p2.y})
/**
* Comptute vector v such that k * OP1 = v where k is a scalar (aka scalar multiplication)
*/
const scalarByCoords = (p1, k) => ({x: k * p1.x, y: k * p1.y})
/**
* Compute new rectangle with same dimensions but ... | (){
// Constants to regulate the positioning algorithm
const angularSpace = Math.PI / 2;
const angularAnchor = Math.PI;
const menuExpansionSteps = 5;
// Node items involved
const navs = Array.from(document.querySelectorAll(".nav__item"));
const menu = document.querySelector(".hamburger-men... | positionMenuItem | identifier_name |
circular.menu.helpers.ts | ({x, y, width, height}, { deltaX, deltaY }) => ({x: x + deltaX, y: y + deltaY, width, height});
/**
* Set left and top style properies of node to x and y effectively moving the node to the coordinates {x,y}
*/
const moveNodeToCoords = (node, { x, y }) => ((node.style.left = `${x}px`) && (node.style.top = `${y}px`));... | {
// Constants to regulate the positioning algorithm
const angularSpace = Math.PI / 2;
const angularAnchor = Math.PI;
const menuExpansionSteps = 5;
// Node items involved
const navs = Array.from(document.querySelectorAll(".nav__item"));
const menu = document.querySelector(".hamburger-menu"... | identifier_body | |
circular.menu.helpers.ts | */
const getDistance = (p1, p2 = {x:0,y:0}) => Math.sqrt((p2.x - p1.x)*(p2.x - p1.x) + (p2.y - p1.y)*(p2.y - p1.y));
/**
* Comptute vector v such that OP1 + v = OP2
*/
const getTranslator = (p1, p2 = {x:0,y:0}) => ({deltaX: p2.x - p1.x, deltaY: p2.y - p1.y});
/**
* Comptute vector v such that OP1 + OP2 = v
*/
co... | * Distance between two points p1 and p2 | random_line_split | |
DiffStreamOplogFilter.js | OplogFilter');
const HTTP_TEST_PORT = 9090;
class MockRaftOplogStream extends stream.Readable {
constructor(entriesToEmit, refreshPeriodMs) {
super({ objectMode: true });
this.entriesToEmit = entriesToEmit;
this.refreshPeriodMs = refreshPeriodMs;
}
_read() {
if (this.entri... | }
}, 10);
} else {
setTimeout(() => {
this.push({ entry: null });
}, this.refreshPeriodMs);
}
}
}
describe('DiffStreamOplogFilter', () => {
let httpServer;
let reqCount = 0;
beforeAll(done => {
const handleGetBu... | if (this.entriesToEmit.length === 0) {
this.push({ entry: null }); | random_line_split |
DiffStreamOplogFilter.js | OplogFilter');
const HTTP_TEST_PORT = 9090;
class MockRaftOplogStream extends stream.Readable {
| (entriesToEmit, refreshPeriodMs) {
super({ objectMode: true });
this.entriesToEmit = entriesToEmit;
this.refreshPeriodMs = refreshPeriodMs;
}
_read() {
if (this.entriesToEmit.length > 0) {
// introduce a little delay between events to make sure
// the fil... | constructor | identifier_name |
DiffStreamOplogFilter.js | logFilter');
const HTTP_TEST_PORT = 9090;
class MockRaftOplogStream extends stream.Readable {
constructor(entriesToEmit, refreshPeriodMs) {
super({ objectMode: true });
this.entriesToEmit = entriesToEmit;
this.refreshPeriodMs = refreshPeriodMs;
}
_read() {
if (this.entries... |
}
// check that no other entry than what was expected has been output
expect(filteredDiffEntries.size).toEqual(0);
done();
})
.on('error', err => {
fail(`an error occurred during filtering: ${err}`);
});... | {
for (let k = 1; k <= 5; ++k) {
const bucketName = `bucket${b}-on-rs${rs}`;
const key = `key${k}`;
const outputDiffEntry = [{
key: `${bucketName}/${key}`,
... | conditional_block |
lib.rs | ::",
"core::",
"backtrace::backtrace::",
"_rust_begin_unwind",
"color_traceback::",
"__rust_",
"___rust_",
"__pthread",
"_main",
"main",
"__scrt_common_main_seh",
"BaseThreadInitThunk",
... | self, i: usize, out: &mut impl WriteColor, s: &BacktracePrinter) -> IOResult {
let is_dependency_code = self.is_dependency_code();
// Print frame index.
write!(out, "{:>2}: ", i)?;
if s.should_print_addresses() {
if let Some((module_name, module_base)) = self.module_info() ... | int(& | identifier_name |
lib.rs | ::",
"core::",
"backtrace::backtrace::",
"_rust_begin_unwind",
"color_traceback::",
"__rust_", | "___rust_",
"__pthread",
"_main",
"main",
"__scrt_common_main_seh",
"BaseThreadInitThunk",
"_start",
"__libc_start_main",
"start_thread",
];
// Inspect name.
if let Some(ref name) = self.... | random_line_split | |
lib.rs | ::",
"core::",
"backtrace::backtrace::",
"_rust_begin_unwind",
"color_traceback::",
"__rust_",
"___rust_",
"__pthread",
"_main",
"main",
"__scrt_common_main_seh",
"BaseThreadInitThunk",
... | lse {
&s.colors.crate_code
})?;
if has_hash_suffix {
write!(out, "{}", &name[..name.len() - 19])?;
if s.strip_function_hash {
writeln!(out)?;
} else {
out.set_color(if is_dependency_code {
&s.colors.depe... | &s.colors.dependency_code
} e | conditional_block |
lib.rs | ?;
out.reset()?;
} else {
writeln!(out, "{:>8} │ {}", cur_line_no, line?)?;
}
}
Ok(())
}
/// Get the module's name by walking /proc/self/maps
#[cfg(all(
feature = "resolve-modules",
unix,
not(any(target_os = "m... | Self::default()
}
| identifier_body | |
u2eve.py | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFT... | LOG.warn("WARNING: No alert message map entries loaded.") | conditional_block | |
u2eve.py | OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUR... | (self, protocol):
return proto_map.get(protocol, str(protocol))
class OutputWrapper(object):
def __init__(self, filename, fileobj=None):
self.filename = filename
self.fileobj = fileobj
if self.fileobj is None:
self.reopen()
self.isfile = True
else:
... | getprotobynumber | identifier_name |
u2eve.py | OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUR... |
def load_from_snort_conf(snort_conf, classmap, msgmap):
snort_etc = os.path.dirname(os.path.expanduser(snort_conf))
classification_config = os.path.join(snort_etc, "classification.config")
if os.path.exists(classification_config):
LOG.debug("Loading %s.", classification_config)
classmap.l... | if self.isfile:
if not os.path.exists(self.filename):
self.reopen()
self.fileobj.write(buf)
self.fileobj.write("\n")
self.fileobj.flush() | identifier_body |
u2eve.py | # Copyright (c) 2015 Jason Ish
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the ... | #! /usr/bin/env python
# | random_line_split | |
executor.go | (k *KubernetesExecutor) Registered(driver bindings.ExecutorDriver,
executorInfo *mesos.ExecutorInfo, frameworkInfo *mesos.FrameworkInfo, slaveInfo *mesos.SlaveInfo) {
if k.isDone() {
return
}
log.Infof("Executor %v of framework %v registered with slave %v\n",
executorInfo, frameworkInfo, slaveInfo)
if !k.swap... |
k.updateChan <- update
statusUpdate := &mesos.TaskStatus{
TaskId: mutil.NewTaskID(taskId),
State: mesos.TaskState_TASK_STARTING.Enum(),
Message: proto.String(messages.CreateBindingSuccess),
Data: data,
}
k.sendStatus(driver, statusUpdate)
// Delay reporting 'task running' until container is up.
g... | {
update.Pods = append(update.Pods, *p)
} | conditional_block |
executor.go | .GetData(), &pod); err != nil {
log.Warningf("Failed to extract yaml data from the taskInfo.data %v\n", err)
k.sendStatus(driver, newStatus(taskInfo.GetTaskId(), mesos.TaskState_TASK_FAILED,
messages.UnmarshalTaskDataFailure))
return
}
k.lock.Lock()
defer k.lock.Unlock()
taskId := taskInfo.GetTaskId().Ge... | } else {
log.V(2).Infof("Task %v no longer registered, stop monitoring for lost pods", taskId)
}
return true | random_line_split | |
executor.go | (from, to stateType) bool {
return atomic.CompareAndSwapInt32((*int32)(&k.state), int32(from), int32(to))
}
// New creates a new kubernetes executor.
func New(kl *kubelet.Kubelet, ch chan<- interface{}, ns string, cl *client.Client, w watch.Interface, dc dockertools.DockerInterface) *KubernetesExecutor {
//TODO(jdef... | swapState | identifier_name | |
executor.go |
func (k *KubernetesExecutor) swapState(from, to stateType) bool {
return atomic.CompareAndSwapInt32((*int32)(&k.state), int32(from), int32(to))
}
// New creates a new kubernetes executor.
func New(kl *kubelet.Kubelet, ch chan<- interface{}, ns string, cl *client.Client, w watch.Interface, dc dockertools.DockerInter... | {
return connectedState == k.getState()
} | identifier_body | |
IoManager.py | urls...
"""
df = pd.read_csv(IoManager.CARD_INFOS_FILE_PATH)
for list_feature in ["colors", "color_identity"]:
df[list_feature] = df[list_feature].apply(lambda e: e if type(e) != float else "[]")
df[list_feature] = df[list_feature].apply(ast.literal_eval)
return ... |
@staticmethod
def get_arch_presence():
return pd.read_csv(IoManager.ARCH_PRESENCE_PATH, index_col=[0])
def save_arch_presence(self, arch_presence_entries):
"""
Adds the new entries to the db
:param arch_presence_entries: [[1, 0, 0, 1, 1, 0], [1, 0, 1...]...] 1 for archetyp... | print("Initialising arch presence")
df = pd.DataFrame(columns=self.archetypes.get_archetype_names(as_feature_names=True))
df.to_csv(IoManager.ARCH_PRESENCE_PATH)
return df | identifier_body |
IoManager.py | n_cards = len(ratings["monored"])
for arch_col in archetype_cols:
ratings[arch_col] = ratings[arch_col] / (ratings[arch_col].sum() / n_cards)
return ratings
def add_ratings_sum(self, ratings):
"""
Adds a column to the ratings DataFrame: 'general'
:return:... |
@staticmethod | random_line_split | |
IoManager.py | ():
"""
:return: list of card names, cube list
"""
f = open(IoManager.CUBE_LIST_FILE_PATH, "r")
lines = f.readlines()
f.close()
# removing '\n' at then end of each name
lines = [card_name[:-1] for card_name in lines]
return lines
@staticmetho... | get_cube_list | identifier_name | |
IoManager.py | IoManager.SPRITE_DIR_PATH + sprite_name
@staticmethod
def arch_presence_exists():
return os.path.isfile(IoManager.ARCH_PRESENCE_PATH)
def init_arch_presence(self):
print("Initialising arch presence")
df = pd.DataFrame(columns=self.archetypes.get_archetype_names(as_feature_names=T... | tingsInitializer.save_csv(new_ratings)
exit()
| conditional_block | |
clean_summaries.py | 'novelguide', 'thebestnotes']
for ix, source in tqdm(enumerate(sources)):
print ("Cleaning source: ", source)
source_summary_dir_base = "../cleaning_phase/"
dest_dir_base = "../finished_summaries/"
source_summary_dir = os.path.join(source_summary_dir_base, source)
dest_dir = os.path.join(dest_... | summary = summary.replace(to_remove, "")
pat_prefix = '((.*?){}) (.*$)'.format(name)
if re.search(pat_prefix, summary, re.IGNORECASE):
matched_str = re.match(pat_prefix, summary, re.IGNORECASE)
print (matched_str.groups())
to_remove = matched_str.group(2... | if re.search(pat_suffix, summary, re.IGNORECASE):
matched_str = re.match(pat_suffix, summary, re.IGNORECASE)
to_remove = matched_str.group(2) # Everything after the Commentary keyword | random_line_split |
clean_summaries.py | 'novelguide', 'thebestnotes']
for ix, source in tqdm(enumerate(sources)):
print ("Cleaning source: ", source)
source_summary_dir_base = "../cleaning_phase/"
dest_dir_base = "../finished_summaries/"
source_summary_dir = os.path.join(source_summary_dir_base, source)
dest_dir = os.path.join(dest_... | (line):
pat = '^((.*?)summary|analysis|summary and analysis|summary & analysis)[ ]{0,}[-:]?'
if re.search(pat, line, re.IGNORECASE):
to_replace = re.match(pat, line, re.IGNORECASE).group(0)
line = line.replace(to_replace,"")
return line.strip()
def remove_chapter_p... | remove_summary_analysis_prefix | identifier_name |
clean_summaries.py | novelguide', 'thebestnotes']
for ix, source in tqdm(enumerate(sources)):
print ("Cleaning source: ", source)
source_summary_dir_base = "../cleaning_phase/"
dest_dir_base = "../finished_summaries/"
source_summary_dir = os.path.join(source_summary_dir_base, source)
dest_dir = os.path.join(dest_di... |
if 'analysis' in summary_json and summary_json['analysis'] is not None and summary_json['analysis'].strip() != "":
# print ("Analysis already present")
analysis_already_present = 1
for line in summary_json['analysis'].split("<PARAGRAPH>"):
... | ary_path = os.path.join(item_dir, section)
fp = open(summary_path,"r")
try:
summary_json = json.loads(fp.readlines()[0])
except:
print (item_dir, "=Error reading json==", section)
# continue
new_json_dict = {}... | conditional_block |
clean_summaries.py | 'novelguide', 'thebestnotes']
for ix, source in tqdm(enumerate(sources)):
print ("Cleaning source: ", source)
source_summary_dir_base = "../cleaning_phase/"
dest_dir_base = "../finished_summaries/"
source_summary_dir = os.path.join(source_summary_dir_base, source)
dest_dir = os.path.join(dest_... |
def remove_summary_analysis_prefix(line):
pat = '^((.*?)summary|analysis|summary and analysis|summary & analysis)[ ]{0,}[-:]?'
if re.search(pat, line, re.IGNORECASE):
to_replace = re.match(pat, line, re.IGNORECASE).group(0)
line = line.replace(to_replace,"")
retu... | pat_suffix = '(.*)(Commentary (.*))'
if re.search(pat_suffix, summary, re.IGNORECASE):
matched_str = re.match(pat_suffix, summary, re.IGNORECASE)
to_remove = matched_str.group(2) # Everything after the Commentary keyword
summary = summary.replace(to_remove, "")
pat_... | identifier_body |
Util.js | m = m.substring(1, m.length - 1);
}
path = path + m;
let propertyReference;
try {
propertyReference = ref[m];
} catch (e) {
// TODO: proxy has been revoked
}
if (typeof propertyReference !== 'undefined') {
ref = propertyReference;
} else {
... | getShadowRoot | identifier_name | |
Util.js | */
'use strict';
/**
* @typedef QuerySelectors
* @method {function(): string} getAll
* @method {function(i: number): string} get
*/
/**
* Common utility functions.
* @namespace Utils
*/
const Utils = {
/**
* Returns true only if object is null || undefined
* @param {object} obj The object to test.
... | ,
/**
* @namespace dom
* @memberof Utils
*/
dom: {
/**
* Gets CSS query for matching the given value in a list of specified attributes.
* @param {string} name Comma separated list of attribute names.
* @param {string} value The value to match.
* @param {QuerySelectors} appendValue... | {
try {
fn();
} catch (err) {
ctx._error = err;
if (errorCallback) errorCallback(err);
if (err && ctx.options().error) {
(ctx.options().error)
.call(ctx, err, ctx);
} else {
console.error(err);
}
}
} | identifier_body |
Util.js |
*/
'use strict';
/**
* @typedef QuerySelectors
* @method {function(): string} getAll
* @method {function(i: number): string} get
*/
/**
* Common utility functions.
* @namespace Utils
*/
const Utils = {
/**
* Returns true only if object is null || undefined
* @param {object} obj The object to test.
... | // TODO: should warn when clone is not possible
}
return temp;
},
// TODO: deprecate `hasPassiveEvents`
/**
* Returns true if browser supports passive events.
* @return {boolean} True if supported, otherwise false.
* @memberOf Utils
*/
hasPassiveEvents() {
let supportsPassive = fa... | random_line_split | |
lib.rs | mounts().unwrap() {
/// println!("{}", mount.unwrap());
/// }
pub struct Mounts {
buf_reader: std::io::BufReader<std::fs::File>
}
impl Mounts {
/// Returns a new Mounts instance. You can also call [mounts()] for convenience.
pub fn new() -> std::result::Result<Mounts, std::io::Error> {
let file = std::fs::File... | (&'a mut self) -> MountsIteratorMut<'a> {
self.into_iter()
}
}
// Encapsulate individual nom parsers in a private submodule. The `pub(self)` keyword allows the inner method [parsers::parse_line()] to be called by code within this module, but not my users of our crate.
pub(self) mod parsers {
use super::Mount;
... | iter_mut | identifier_name |
lib.rs | defined above. Let's break it down parser by parser:
// # `nom::combinator::all_consuming` generates an error if there is any leftover input. This will force nom to generate an error if there is unexpected input at the end of a line in `/proc/mounts`, for example:
// ```ignore
// /dev/sda1 /mnt/disk ext4 defaults... | Mounts::new() | random_line_split | |
lib.rs | . Let's break it down parser by parser:
// # `nom::combinator::all_consuming` generates an error if there is any leftover input. This will force nom to generate an error if there is unexpected input at the end of a line in `/proc/mounts`, for example:
// ```ignore
// /dev/sda1 /mnt/disk ext4 defaults 0 0 this_last... | {
Mounts::new()
} | identifier_body | |
stringutils.go | "mns:eventdelivery": "string",
"mns:eventdescription": "string",
"mns:eventsubscription": "string",
"mns:embeddedserviceliveagent": "string",
"mns: flow": "string",
"https://developer.salesforce.com/docs/atlas.en-us.api_meta... | "mns:embeddedservicefieldservice": "string",
"mns: customobject": "string", | random_line_split | |
stringutils.go | json.RawMessage",
"searchlayoutbuttonsdisplayed": "json.RawMessage",
"standardvalue[]": "json.RawMessage",
"subscriberpackagecsptrustedsites": "json.RawMessage",
"properties": "json.RawMessage",
"array of constructor": "json.RawMessage",
"authprovider": ... | toCamelCase | identifier_name | |
stringutils.go | ",
"mns: managedcontenttype": "json.RawMessage",
"mns:moderationrule": "json.RawMessage",
"operationparameters": "json.RawMessage",
"mns:outboundnetworkconnection": "json.RawMessage",
"subscriberpackageinstallerrors": "json.RawMessage",
"msn:pathassistant": ... | {
if vIsLow {
v += 'A'
v -= 'a'
}
} | conditional_block | |
stringutils.go | apexcomponentmetadata": "json.RawMessage",
"entitydefinition": "json.RawMessage",
"fielddefinition": "json.RawMessage",
"apexresult": "json.RawMessage",
"heapdump": "json.RawMessage",
"soqlresult": ... | {
for i := 0; i < len(commonInitialisms); i++ {
s = strings.ReplaceAll(s, commonInitialisms[i][0], commonInitialisms[i][1])
}
return s
} | identifier_body | |
deepracer_racetrack_env.py | ING_JOB_ARN')
self.target_number_of_episodes = rospy.get_param('NUMBER_OF_EPISODES')
self.target_reward_score = rospy.get_param('TARGET_REWARD_SCORE')
else:
from markov.defaults import reward_function
self.reward_function = reward_function
... | (self, action):
if node_type == SAGEMAKER_TRAINING_WORKER:
return self.observation_space.sample(), 0, False, {}
# Initialize next state, reward, done flag
self.next_state = None
self.reward = None
self.done = False
# Send this action to Gazebo and increment ... | step | identifier_name |
deepracer_racetrack_env.py | _state.pose.orientation.y,
model_state.pose.orientation.z,
model_state.pose.orientation.w])
model_location = np.array([
model_state.pose.position.x,
model_state.pose.position.y,
model_state.pose.position.z]) + \
model_orientation.apply(RELA... | self.is_simulation_done = True | conditional_block | |
deepracer_racetrack_env.py | TRAINING_JOB_ARN')
self.target_number_of_episodes = rospy.get_param('NUMBER_OF_EPISODES')
self.target_reward_score = rospy.get_param('TARGET_REWARD_SCORE')
else:
from markov.defaults import reward_function
self.reward_function = reward_function... |
# Reset the car and record the simulation start time
self.send_action(0, 0)
self.racecar_reset()
time.sleep(SLEEP_AFTER_RESET_TIME_IN_SECOND)
self.steps = 0
self.simulation_start_time = time.time()
# Compute the initial state
self.infer_reward_state(0, 0... | if node_type == SAGEMAKER_TRAINING_WORKER:
return self.observation_space.sample()
# Simulation is done - so RoboMaker will start to shut down the app.
# Till RoboMaker shuts down the app, do nothing more else metrics may show unexpected data.
if (node_type == SIMULATION_WORKER) and ... | identifier_body |
deepracer_racetrack_env.py | _wheel_state.link_state.pose.position.y),
Point(right_rear_wheel_state.link_state.pose.position.x,
right_rear_wheel_state.link_state.pose.position.y),
Point(right_front_wheel_state.link_state.pose.position.x,
right_front_wheel_state.link_state.pose.position.y)... | ], | random_line_split | |
rabbitmq_server_relations.py | deal with data being
# removed from peer storage
relation_clear(relation_id)
# Each unit needs to set the db information otherwise if the unit
# with the info dies the settings die with it Bug# 1355848
exc_list = ['hostname', 'private-address']
for rel_id... |
if is_leader():
log('Leader peer_storing cookie', level=INFO)
cookie = open(rabbit.COOKIE_PATH, 'r').read().strip()
peer_store('cookie', cookie)
peer_store('leader_node_ip', unit_private_ip())
peer_store('leader_node_hostname', rabbit.get_unit_hostname())
@hooks.hook('clu... | log('erlang cookie missing from %s' % rabbit.COOKIE_PATH,
level=ERROR)
return | conditional_block |
rabbitmq_server_relations.py |
def update_clients():
"""Update amqp client relation hooks
IFF leader node is ready. Client nodes are considered ready once the leader
has already run amqp_changed.
"""
if rabbit.leader_node_is_ready() or rabbit.client_node_is_ready():
for rid in relation_ids('amqp'):
for uni... | password = rabbit.get_rabbit_password(username)
# update vhost
rabbit.create_vhost(vhost)
rabbit.create_user(username, password, admin)
rabbit.grant_permissions(username, vhost)
# NOTE(freyes): after rabbitmq-server 3.0 the method to define HA in the
# queues is different
# http://www.rabb... | identifier_body | |
rabbitmq_server_relations.py | rabbit.get_unit_ip(config_override=rabbit.CLUSTER_OVERRIDE_CONFIG,
interface=rabbit.CLUSTER_INTERFACE),
}
relation_set(relation_id=relation_id,
relation_settings=relation_settings)
if is_relation_made('ha') and \
config('ha-vip-only') is False:
... | update_nrpe_checks | identifier_name | |
rabbitmq_server_relations.py | ceph_changed():
log('Start Ceph Relation Changed')
auth = relation_get('auth')
key = relation_get('key')
use_syslog = str(config('use-syslog')).lower()
if None in [auth, key]:
log('Missing key or auth in relation')
sys.exit(0)
ceph.configure(service=SERVICE_NAME, key=key, auth=... | hooks.execute(sys.argv)
except UnregisteredHookError as e: | random_line_split | |
preference_aggregation_featureless_online.py | 0), key=key)(None)
def set_subtract(self, key='loss'):
self.to_subtract = self.current_loss(key=key)['metrics']
def set_minibatch(self, mb):
for key, val in mb.items():
assert isinstance(key, str), key
assert isinstance(val, np.ndarray), val
self.miniba... | hst = [initial] + hst
plt.axhline(initial)
if np.min(hst) > 0:
plt.yscale('log')
plt.plot(hst)
plt.show()
def lstdct2dctlst(lst):
"""List of dictionaries -> dictionary of lists."""
keys = lst[0].keys()
res = {ke... | initial = initial_value[ind]['metrics'][key]
| random_line_split |
preference_aggregation_featureless_online.py | 0), key=key)(None)
def set_subtract(self, key='loss'):
self.to_subtract = self.current_loss(key=key)['metrics']
def set_minibatch(self, mb):
for key, val in mb.items():
assert isinstance(key, str), key
assert isinstance(val, np.ndarray), val
self.miniba... |
# setting data
online.set_minibatch(mb_np_copy)
online.set_model_tensor(model_tensor_copy)
online.set_subtract()
online.silent = True
# CONFIGURATION FOR INDICES
indices_lst = []
for i in range(model_tensor_orig.shape[0]):
indices_lst.append((i, obj1, 0))
... | online.golden_params[key] = value | conditional_block |
preference_aggregation_featureless_online.py | (object):
"""Update weights online."""
def __init__(self, hypers=None, golden_params=None):
if golden_params is None:
golden_params = {}
self.golden_params = golden_params
self.hypers = hypers
self.model_tensor = None
self.minibatch = None
s... | FeaturelessOnlineUpdater | identifier_name | |
preference_aggregation_featureless_online.py | 0), key=key)(None)
def set_subtract(self, key='loss'):
self.to_subtract = self.current_loss(key=key)['metrics']
def set_minibatch(self, mb):
for key, val in mb.items():
assert isinstance(key, str), key
assert isinstance(val, np.ndarray), val
self.miniba... | plt.show()
def lstdct2dctlst(lst):
"""List of dictionaries -> dictionary of lists."""
keys = lst[0].keys()
res = {key: [x[key] for x in lst] for key in keys}
return res
def compute_online_update(rating_value, mb_np_orig,
model_tensor_orig,
... | for ind in set(indices_lst):
res = get_history(result, ind)
res_dct = lstdct2dctlst(res)
plt.figure(figsize=(13, 3))
for i, key in enumerate(sorted(res_dct.keys()), 1):
hst = res_dct[key]
plt.subplot(1, len(res_dct), i)
plt.title(key + ' ' +... | identifier_body |
smd.rs | ::open(&fbx).unwrap());
let fbx = SimpleFbx::from_raw(&RawFbx::parse(file).unwrap()).unwrap();
let fbx_tree = ObjectTreeNode::from_simple(&fbx);
// Go over all FBX root nodes and turn them into SMD data
let mut smd = Smd::new();
process_fbx_node(
&fbx,
&fbx_tree, &mut smd,
&... | }
fn calculate_parent_after_rot_translation(fbx: &SimpleFbx, obj: &Object) -> Vector3<f32> {
// First actually get the parent's model data
let parent_obj = if let Some(v) = fbx.parent_of(obj.id) {
if v == 0 {
// At root, no extra translation
return Vector3::new(0.0, 0.0, 0.0)
... | (translation, rotation) | random_line_split |
smd.rs | filter(|&(_, o)| o.class.type_name() == "Model") {
// For this model, look up the matching BoneId in the reference SMD
if let Some(bone_id) = ref_smd.id_of_bone(&id_name(&model.name).unwrap()) {
// Now that we have a model and a bone, we need the current translation and rotation
... | euler_rotation_to_matrix | identifier_name | |
smd.rs | ::open(&fbx).unwrap());
let fbx = SimpleFbx::from_raw(&RawFbx::parse(file).unwrap()).unwrap();
let fbx_tree = ObjectTreeNode::from_simple(&fbx);
// Go over all FBX root nodes and turn them into SMD data
let mut smd = Smd::new();
process_fbx_node(
&fbx,
&fbx_tree, &mut smd,
&... | ));
let post_rotation = Quaternion::from(Euler::new(
Deg(properties.post_rotation[0]), Deg(properties.post_rotation[1]), Deg(properties.post_rotation[2])
));
let total_rotation = if !flip {
Euler::from(post_rotation.invert() * rotation * pre_rotation)
} else {
Euler::from(po... | {
let properties = ModelProperties::from_generic(&obj.properties);
// Get the bone's translation
let parent_after_rot_translation = calculate_parent_after_rot_translation(fbx, obj);
let prop_translation: Vector3<_> = properties.translation.into();
let prop_rot_offset: Vector3<_> = properties.rotati... | identifier_body |
smd.rs | ::open(&fbx).unwrap());
let fbx = SimpleFbx::from_raw(&RawFbx::parse(file).unwrap()).unwrap();
let fbx_tree = ObjectTreeNode::from_simple(&fbx);
// Go over all FBX root nodes and turn them into SMD data
let mut smd = Smd::new();
process_fbx_node(
&fbx,
&fbx_tree, &mut smd,
&... | else {
Euler::from(post_rotation.invert() * rotation.invert() * pre_rotation)
};
let rotation = Vector3::new(
total_rotation.x.0,
total_rotation.y.0,
total_rotation.z.0,
);
(translation, rotation)
}
fn calculate_parent_after_rot_translation(fbx: &SimpleFbx, obj: &Objec... | {
Euler::from(post_rotation.invert() * rotation * pre_rotation)
} | conditional_block |
tf_linear_reg.py | == "adadelta":
return tf.train.AdadeltaOptimizer(learning_rate)
elif optimizer == "adagrad":
return tf.train.AdagradOptimizer(learning_rate)
elif optimizer == "adam":
return tf.train.AdamOptimizer(learning_rate)
elif optimizer == "ftrl":
return tf.train.FtrlOptimizer(learning_rate)
elif optimiz... | (filenames, batch_size):
# Define a `tf.contrib.data.Dataset` for iterating over one epoch of the data.
dataset = (tf.data.TFRecordDataset(filenames).
shuffle(buffer_size=MIN_AFTER_DEQUEUE).
batch(batch_size))
return dataset.make_initializable_iterator()
def get_features... | input_pipeline | identifier_name |
tf_linear_reg.py | elif optimizer == "adadelta":
return tf.train.AdadeltaOptimizer(learning_rate)
elif optimizer == "adagrad":
return tf.train.AdagradOptimizer(learning_rate)
elif optimizer == "adam":
return tf.train.AdamOptimizer(learning_rate)
elif optimizer == "ftrl":
return tf.train.FtrlOptimizer(learning_rate)... | tfr_vald_filenames = [loaddir+"SynDataset2.tfrecords",loaddir+"SynDataset4.tfrecords"]
tf.reset_default_graph()
# Get a batch of y and X in tr_features
train_iterator, train_features = get_features(tfr_tr_filenames, BATCH_SIZE)
batch_labels = train_features["label"]
batch_ids = train_features[... | # Get all FileNames
tfr_tr_filenames = [loaddir+"SynDataset1.tfrecords",loaddir+"SynDataset3.tfrecords"] | random_line_split |
tf_linear_reg.py | optimizer == "adadelta":
return tf.train.AdadeltaOptimizer(learning_rate)
elif optimizer == "adagrad":
return tf.train.AdagradOptimizer(learning_rate)
elif optimizer == "adam":
return tf.train.AdamOptimizer(learning_rate)
elif optimizer == "ftrl":
return tf.train.FtrlOptimizer(learning_rate)
el... |
def get_features(tfrecords_file,batch_size):
iterator = input_pipeline(tfrecords_file, batch_size)
features_obj = iterator.get_next()
features = tf.parse_example(
features_obj,
# Defaults are not specified since both keys are required.
features={
... | dataset = (tf.data.TFRecordDataset(filenames).
shuffle(buffer_size=MIN_AFTER_DEQUEUE).
batch(batch_size))
return dataset.make_initializable_iterator() | identifier_body |
tf_linear_reg.py | == "adadelta":
return tf.train.AdadeltaOptimizer(learning_rate)
elif optimizer == "adagrad":
return tf.train.AdagradOptimizer(learning_rate)
elif optimizer == "adam":
return tf.train.AdamOptimizer(learning_rate)
elif optimizer == "ftrl":
return tf.train.FtrlOptimizer(learning_rate)
elif optimiz... |
def linear_reg_inference(sparse_ids,sparse_values,hidden_nodes,num_layers):
# train_sz = np.shape(Xtrain)[0]
W_ls = []
Bias_ls = []
# Reset the graph
# tf.reset_default_graph()
with tf.variable_scope("linear_reg"):
W_ls.append(tf.get_variable(
"weights_0", ... | print("Unknow optimizer, exit now")
exit(1) | conditional_block |
lexer.go | l *Lexer) NextToken() token.Token {
// Ignore any number of sequential whitespace.
l.consumeWhitespace()
switch {
case l.char == 0:
l.assignToken(token.EOF, "")
case l.char == '=':
switch l.peek() {
case '=': // ==
l.advance()
l.assignToken(token.EQ, "==")
case '>': // =>
l.advance()
l.assignT... |
// Check characters ahead but don't move the cursor.
func (l *Lexer) peek() rune {
rn, err := l.reader.Peek()
if err != nil {
l.reportError(fmt.Sprintf("Invalid '%s' character in source file", string(rn)))
}
return rn
}
// Move the cursor to the previous character.
func (l *Lexer) rewind() {
if err := l.read... | {
rn, err := l.reader.Advance()
if err != nil {
l.reportError(fmt.Sprintf("Invalid '%s' character in source file", string(rn)))
}
// Don't move the location if it was a
// rewind, or it will report an incorrect
// line and column.
if !l.rewinded {
l.moveLocation()
}
l.rewinded = false
l.char = rn
} | identifier_body |
lexer.go | l *Lexer) NextToken() token.Token {
// Ignore any number of sequential whitespace.
l.consumeWhitespace()
switch {
case l.char == 0:
l.assignToken(token.EOF, "")
case l.char == '=':
switch l.peek() {
case '=': // ==
l.advance()
l.assignToken(token.EQ, "==")
case '>': // =>
l.advance()
l.assignT... | () {
switch l.char {
case '\n':
l.row += 1
l.col = 2
default:
l.col += 1
}
}
// Pass a token to the active token cursor.
func (l *Lexer) assignToken(toktype token.TokenType, value string) {
l.token = token.Token{
Type: toktype,
Lexeme: value,
Location: token.Location{Row: l.row, Col: l.col},
}
... | moveLocation | identifier_name |
lexer.go | ANGE, "..")
}
default: // .
l.assignToken(token.DOT, string(l.char))
}
case l.char == '|':
switch l.peek() {
case '|': // ||
l.advance()
l.assignToken(token.OR, "||")
case '>': // |>
l.advance()
l.assignToken(token.PIPE, "|>")
default: // |
l.assignToken(token.BITOR, string(l.char))
... | break loop
default:
l.reportError("Unexpected comment line ending")
break loop | random_line_split | |
lexer.go | l *Lexer) NextToken() token.Token {
// Ignore any number of sequential whitespace.
l.consumeWhitespace()
switch {
case l.char == 0:
l.assignToken(token.EOF, "")
case l.char == '=':
switch l.peek() {
case '=': // ==
l.advance()
l.assignToken(token.EQ, "==")
case '>': // =>
l.advance()
l.assignT... |
l.rewinded = false
l.char = rn
}
// Check characters ahead but don't move the cursor.
func (l *Lexer) peek() rune {
rn, err := l.reader.Peek()
if err != nil {
l.reportError(fmt.Sprintf("Invalid '%s' character in source file", string(rn)))
}
return rn
}
// Move the cursor to the previous character.
func (l *... | {
l.moveLocation()
} | conditional_block |
watcher.go | the QueryOptions
Namespace string
//TLSConfig TLSConfig
}
type service struct {
name string
instances []*api.ServiceEntry
intentions []*api.Intention
gatewayService *api.GatewayService
leaf *certLeaf
ready sync.WaitGroup
done bool
}
type certLeaf struct {
Cert []byte
Key [... | lastIndex = meta.LastIndex
if changed {
log.Infof("linked services changed for gateway %s", w.name)
if first && len(gwServices) == 0 {
log.Infof("no linked services defined for gateway %s", w.name)
continue
}
w.handleProxyChange(first, &gwServices)
}
if first {
log.Infof("linked services... | {
var lastIndex uint64
first := true
for {
gwServices, meta, err := w.consul.Catalog().GatewayServices(w.name, &api.QueryOptions{
WaitTime: 10 * time.Minute,
WaitIndex: lastIndex,
})
if err != nil {
log.Errorf("error fetching linked services for gateway %s: %s", w.name, err)
time.Sleep(errorWaitTi... | identifier_body |
watcher.go | the QueryOptions
Namespace string
//TLSConfig TLSConfig
}
type service struct {
name string
instances []*api.ServiceEntry
intentions []*api.Intention
gatewayService *api.GatewayService
leaf *certLeaf
ready sync.WaitGroup
done bool
}
type certLeaf struct {
Cert []byte
Key [... |
log.Infof("initializing Consul watcher for gateway: %+v", gatewayName)
w.name = gatewayName
w.namespace = namespace
w.settings = *api.DefaultConfig()
w.settings.Address = c.Address
w.settings.Scheme = c.Scheme
w.settings.Token = c.Token
w.settings.Namespace = c.Namespace
w.consul, err = api.NewClient(&w.setti... | var err error | random_line_split |
watcher.go | Leaf
ready sync.WaitGroup
done bool
}
type certLeaf struct {
Cert []byte
Key []byte
done bool
}
//Watcher struct for TG config
type Watcher struct {
settings api.Config
id string
name string
namespace string
address string
port int
consul *api.Client
token string
C ... | {
for _, down := range *gwServices {
keep[down.Service.Name] = true
w.lock.Lock()
_, ok := w.services[down.Service.Name]
w.lock.Unlock()
if !ok {
if first {
w.ready.Add(3)
}
w.startService(down, first)
}
}
} | conditional_block | |
watcher.go | the QueryOptions
Namespace string
//TLSConfig TLSConfig
}
type service struct {
name string
instances []*api.ServiceEntry
intentions []*api.Intention
gatewayService *api.GatewayService
leaf *certLeaf
ready sync.WaitGroup
done bool
}
type certLeaf struct {
Cert []byte
Key [... | () {
w.C <- w.genCfg()
}
func (w *Watcher) watchLeaf(service string, first bool) {
log.Debugf("watching leaf cert for %s", service)
dFirst := true
var lastIndex uint64
for {
if w.services[service] == nil {
return
} else if w.services[service].done {
return
}
cert, meta, err := w.consul.Agent().Conne... | Reload | identifier_name |
practice.js | overlayLayer); // Add layers
// Detection
createjs.Touch.enable(stage);
// Initialize global variables for layout and sizing
initializeVariables(canvas.width, canvas.height);
// Preload all assets (crucial for first rendering)
preload.loadManifest(manifest);
}
function initGame() {
... | console.log("Ques x: " + questions[q].x + " y: " + questions[q].y );
}
}
function initializeAnswerPositions() {
for (a = 0; a < 5; a++) {
// x and y of the CENTER of the container. (not top left)
answers[a].x = (properties.ANS_SIZE / 2) + (a)*(properties.ANS_SIZE);
console.log... | {
for (q=0; q<3; q++) {
switch (q) {
case 0:
questions[q].y = layout.MID3; // Lowest
questions[q].scaleY = 1.66;
questions[q].txt.scaleY = 1.00;
questions[q].txt.scaleX = 1.66;
break;
case 1:
... | identifier_body |
practice.js | ();
updateCurrentAnswer();
// Initial positions and sizing
initializeAnswerPositions();
initializeQuestionPositions();
// Looper
createjs.Ticker.setFPS(60);
// Handles all the update logic
createjs.Ticker.on("tick", handleTick);
// Achievements
// No condition
checkAchiev... | {
var numA = getRandomInt(1,10);
} | conditional_block | |
practice.js | 0;
questions[q].txt.scaleX = 1.66;
break;
case 1:
questions[q].y = layout.MID2;
questions[q].txt.scaleX = questions[q].txt.scaleY = 1.00;
break;
case 2:
questions[q].y = layout.MID1; // Most upper
... | checkAnswer | identifier_name | |
practice.js | overlayLayer); // Add layers
// Detection
createjs.Touch.enable(stage);
// Initialize global variables for layout and sizing
initializeVariables(canvas.width, canvas.height);
// Preload all assets (crucial for first rendering)
preload.loadManifest(manifest);
}
function initGame() {
... | function prepareNextQuestion() {
// Obtain information about the current board
var availableArray = [];
// Note: foreach loop not working very well
for (a=0; a<answers.length; a++) {
if (answers[a].available == true) {
availableArray.push(answers[a]);
}
}
// Select o... | // Gathers are all the necessary info before generating the next answer | random_line_split |
in_memory.rs | ) fn new<T>(consumer: T) -> Self
where
T: Into<Option<Consumer<HashId>>>,
{
Self {
hashes: IndexMap::new(),
values: IndexMap::new(),
free_ids: consumer.into(),
new_ids: Vec::with_capacity(1024),
values_bytes: 0,
}
}
pub... | self.free_ids.as_mut()?.pop().ok()
}
pub(crate) fn insert_value_at(
&mut self,
hash_id: HashId,
value: Arc<[u8]>,
) -> Result<(), HashIdError> {
self.values_bytes = self.values_bytes.saturating_add(value.len());
if let Some(old) = self.values.insert_at(hash_i... |
fn get_free_id(&mut self) -> Option<HashId> { | random_line_split |
in_memory.rs | (old_value.len());
}
(free_id, self.hashes.get_mut(free_id)?.ok_or(HashIdError)?)
} else {
self.hashes.get_vacant_entry()?
};
self.new_ids.push(hash_id);
Ok(VacantObjectHash {
entry: Some(entry),
hash_id,
})
}
... | {
let mut hasher = DefaultHasher::new();
hasher.write(context_hash.as_ref());
let hashed = hasher.finish();
self.context_hashes.get(&hashed).cloned()
} | identifier_body | |
in_memory.rs | ) fn new<T>(consumer: T) -> Self
where
T: Into<Option<Consumer<HashId>>>,
{
Self {
hashes: IndexMap::new(),
values: IndexMap::new(),
free_ids: consumer.into(),
new_ids: Vec::with_capacity(1024),
values_bytes: 0,
}
}
pub... | (&self) -> RepositoryMemoryUsage {
let values_bytes = self.values_bytes;
let values_capacity = self.values.capacity();
let hashes_capacity = self.hashes.capacity();
let total_bytes = values_bytes
.saturating_add(values_capacity * size_of::<Option<Arc<[u8]>>>())
.s... | get_memory_usage | identifier_name |
ghttp_server_router.go | &Router {
Uri : uri,
Domain : domain,
Method : method,
Priority : strings.Count(uri[1:], "/"),
}
router.RegRule, router.RegNames = s.patternToRegRule(uri)
// 注册对象
registerItem := &handlerRegisterItem {
handler : handler,
hooks : make(map[strin... | 如:/user/*acti | identifier_name | |
ghttp_server_router.go | " {
if v, ok := p.(map[string]interface{})["*list"]; !ok {
p.(map[string]interface{})["*list"] = list.New()
lists = append(lists, p.(map[string]interface{})["*list"].(*list.List))
} else {
lists = append(lists, v.(*list.List))
}
... | default:
s, _ := gregex.ReplaceStringFunc(`{[\w\.\-]+}`, v, func(s string) string {
names = append(names, s[1 : len(s) - 1])
return `([\w\.\-]+)` | random_line_split | |
ghttp_server_router.go | }
if array, err := gregex.MatchString(`(.+)@([\w\.\-]+)`, uri); len(array) > 1 && err == nil {
uri = array[1]
domain = array[2]
}
if uri == "" {
err = errors.New("invalid pattern")
}
// 去掉末尾的"/"符号,与路由匹配时处理一直
if uri != "/" {
uri = strings.TrimRight(uri, "/")
... | pushed = true
break
}
}
if pushed {
if len(address) > 0 {
pushedItemSet.Add(address)
}
} else {
l.PushBack(registerItem)
}
}
}
//gutil.Dump(s.han... | if s.compareRouterPriority(router, item.router) {
l.InsertBefore(registerItem, e)
| conditional_block |
ghttp_server_router.go | // 当前节点的规则链表
lists := make([]*list.List, 0)
array := ([]string)(nil)
if strings.EqualFold("/", uri) {
array = []string{"/"}
} else {
array = strings.Split(uri[1:], "/")
}
// 键名"*fuzz"代表模糊匹配节点,其下会有一个链表;
// 键名"*list"代表链表,叶子节点和模糊匹配节点都有该属性;
for k, v := range array {
... | 1 {
if _, ok := p.(map[string]interface{})["*fuzz"]; ok {
p = p.(map[string]interface{})["*fuzz"]
}
if _, ok := p.(map[string]interface{})["*list"]; ok {
lists = append(lists, p.(map[string]interface{})["*list"].(*list.List))
... | identifier_body | |
licensePlateDetectorOptimized.py | Contours(edged.copy(), cv.RETR_EXTERNAL, cv.CHAIN_APPROX_NONE) #Get the contours of the Canny image [remember that this will return more contours than we need
#Because Canny just returns lines]
contours = imutils.grab... | print("VIDEO PAUSED @ FRAME {}".format(cap.get(cv.CAP_PROP_POS_FRAMES)))
while True:
key = cv.waitKey(25) & 0xFF
if key == ord('p'): #unpause
break
elif key == ord('q'): #quit the program bu... | conditional_block |
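The `fim_type` column takes exactly four values in the rows above, matching the "4 classes" in the header: `random_line_split` (judging by the rows, the middle is an arbitrary span of the file), `identifier_name` (the middle is a single identifier, e.g. `row_to_nodes` or `writeRoot`), `identifier_body` (the middle is the body of a definition, e.g. `{ return inst.stepID }`), and `conditional_block` (the middle is a conditional branch). A hypothetical illustration of an `identifier_name` split, using an invented snippet rather than a real row:

```python
# Invented example of an "identifier_name" split; the masked middle is
# just the function's name, with everything else left as context.
source = "func (inst *Instance) StepID() int {\n\treturn inst.stepID\n}\n"

example = {
    "file_name": "instances.go",   # a file name that does appear above
    "prefix": "func (inst *Instance) ",
    "middle": "StepID",            # the masked identifier
    "suffix": "() int {\n\treturn inst.stepID\n}\n",
    "fim_type": "identifier_name",
}

# prefix + middle + suffix round-trips to the original snippet.
assert example["prefix"] + example["middle"] + example["suffix"] == source
```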