text
stringlengths 27
775k
|
|---|
const _ = require('lodash');
const helper = require('./helper');
const node = require('./node');
module.exports = {
one(schema, driver, criteria, doc, options, cb) {
const updates = _.merge(_.pick(doc, _.keys(criteria)), criteria);
let match_clause = `MATCH (doc:${helper.getLabel(doc)} {m_id: '${doc._id}'})`;
let set_clause = '';
const neo_updates = node.convertToNeoUpdates(schema, updates, doc);
set_clause += this.writePropertiesToCypher(neo_updates.props, '', false);
const subdocs_query = this.writeSubDocumentsToCypher(neo_updates.subdocs, doc);
match_clause += subdocs_query.match_clause;
set_clause += subdocs_query.set_clause;
set_clause = set_clause === '' ? '' : ' SET' + set_clause.substring(1);
// const doc_query = + ;
const { relationships_query, relationships_del_query } = this.writeRelationshipsToCypher(neo_updates.rels, doc);
const neo_query = match_clause + relationships_query + set_clause;
const session = driver.session();
if (relationships_del_query !== '') {
return session.run(relationships_del_query).then((del_result) => {
return Promise.all([
doc.updateOne(criteria, options, cb),
session.run(neo_query),
del_result
]);
}).then((results) => {
session.close();
return results;
});
} else {
return Promise.all([
doc.updateOne(criteria, options, cb),
session.run(neo_query)
]).then((results) => {
session.close();
return results;
});
}
},
writePropertiesToCypher(properties, doc_identifier, is_subdoc) {
let neo_query = "";
let i = 0;
const prefix = is_subdoc ? 'sub' : '';
_.forEach(properties, (value, key) => {
if(key == 'm_id') {
return;
} else {
neo_query += `, ${prefix}doc${doc_identifier}.${key} = ${helper.valueToPlainString(value)}`;
}
i++;
});
return neo_query;
},
writeSubDocumentsToCypher(subdocs) {
let match_clause = "";
let set_clause = "";
_.forEach(subdocs, (subdoc, index) => {
const doc_identifier = helper.romanize(index+1);
match_clause += ` MATCH (subdoc${doc_identifier}:${subdoc.label} {m_id: '${subdoc.properties.m_id}'})`;
set_clause += this.writePropertiesToCypher(subdoc.properties, doc_identifier, true);
});
return { match_clause, set_clause };
},
writeRelationshipsToCypher(relationships, doc) {
let relationships_query = '';
let relationships_del_query = '';
if(!_.isEmpty(relationships)) {
let relationLabels = [];
let match_clause = [];
let create_clause = [];
_.forEach(relationships, (rel, index) => {
const identifier = helper.romanize(index+1);
const properties = rel.rel_props ? ` ${helper.toPlainString(rel.rel_props)}` : '';
relationLabels.push(rel.rel_name);
match_clause.push(`(${identifier}:${rel.rel_label} {m_id: '${rel.m_id}'})`);
create_clause.push(`(doc)-[:${rel.rel_name}${properties}]->(${identifier})`);
});
relationships_query = `, ${_.join(match_clause, ', ')} CREATE ${_.join(create_clause, ', ')} `;
relationLabels = JSON.stringify(_.uniq(relationLabels));
relationships_del_query = `MATCH (doc)-[r]->(n) WHERE type(r) IN ${relationLabels} DELETE r`;
}
return { relationships_query, relationships_del_query };
}
};
|
# CraftCMS Locations Locator Plugin
> still in development...
A CraftCMS plugin which allows you to add a location finder to your pages.
## Instructions coming soon...
## License
MIT License - Created by [jamescgarrett](https://github.com/jamescgarrett/craftcms-locations)
|
use num::*;
use sound::*;
use std::fmt;
use std::ops::{Div, Mul};
/// unison (1:1)
/// The unison interval (`1:1`): both pitches share the same frequency,
/// so the ratio and its reciprocal are both `1.0`.
pub const INTERVAL_UNISON: Interval = Interval {
    numerator: 1,
    denominator: 1,
    ratio: 1.0,
    reciprocal: 1.0,
};
/// Harmonic musical interval (of frequencies), represented by a rational number.
/// Harmonic musical interval (of frequencies), represented by a rational number.
///
/// The cached `ratio` and `reciprocal` fields are kept in sync with the
/// fraction by `set()` and the arithmetic impls, so conversions to
/// `SampleCalc` never need a division at use time.
#[derive(Debug, Copy, Clone)]
pub struct Interval {
    /// Upper part of the frequency ratio (never zero after `set`).
    numerator: u16,
    /// Lower part of the frequency ratio (never zero after `set`).
    denominator: u16,
    /// Cached `numerator / denominator` as a sample-calculation float.
    ratio: SampleCalc,
    /// Cached `denominator / numerator`, used to step down in frequency.
    reciprocal: SampleCalc,
}
impl Default for Interval {
    /// The default interval is the unison (`1:1`).
    fn default() -> Interval {
        Interval {
            numerator: 1,
            denominator: 1,
            ratio: 1.0,
            reciprocal: 1.0,
        }
    }
}
impl Interval {
    /// Custom constructor: builds an interval from a fraction, validating
    /// that neither part is zero and reducing it to lowest terms.
    pub fn new(numerator: u16, denominator: u16) -> SoundResult<Interval> {
        let mut interval = Interval::default();
        interval.set(numerator, denominator)?;
        Ok(interval)
    }
    /// Reduces to lowest terms with dividing by the greatest common divisor.
    fn reduce(&mut self) {
        let d = self.numerator.gcd(&self.denominator);
        self.numerator /= d;
        self.denominator /= d;
    }
    /// Changes the interval.
    ///
    /// Returns `NumeratorInvalid` / `DenominatorInvalid` for a zero part.
    /// The cached `ratio`/`reciprocal` are computed from the raw inputs;
    /// reducing afterwards does not change their value, only the stored
    /// fraction.
    pub fn set(&mut self, numerator: u16, denominator: u16) -> SoundResult<()> {
        if numerator == 0 {
            return Err(Error::NumeratorInvalid);
        };
        if denominator == 0 {
            return Err(Error::DenominatorInvalid);
        };
        self.numerator = numerator;
        self.denominator = denominator;
        self.ratio = numerator as SampleCalc / denominator as SampleCalc;
        self.reciprocal = denominator as SampleCalc / numerator as SampleCalc;
        self.reduce();
        Ok(())
    }
    /// Returns the ratio of the frequency interval.
    pub fn get_ratio(&self) -> SampleCalc {
        self.ratio
    }
    /// Returns the reciprocal of the frequency interval.
    pub fn get_recip(&self) -> SampleCalc {
        self.reciprocal
    }
    /// True, if the interval is `1:1`, aka. unison.
    pub fn is_unison(&self) -> bool {
        self.numerator == self.denominator
    }
    /// Gives the common name of the interval (if there is any).
    ///
    /// The larger part is compared first, so an interval and its inversion
    /// direction (e.g. `3:2` and `2:3`) share the same name. Unknown
    /// ratios yield an empty string.
    pub fn get_name(&self) -> &str {
        let ratio = if self.numerator > self.denominator {
            (self.numerator, self.denominator)
        } else {
            (self.denominator, self.numerator)
        };
        // https://en.wikipedia.org/wiki/List_of_pitch_intervals
        // https://gist.github.com/endolith/3098720
        match ratio {
            (1, 1) => "unison",
            (2, 1) => "octave",
            (3, 2) => "perfect fifth",
            (4, 3) => "perfect fourth",
            (5, 4) => "major third",
            (5, 3) => "major sixth",
            (6, 5) => "minor third",
            (7, 6) => "septimal minor third",
            (7, 5) => "lesser septimal tritone",
            (7, 4) => "augmented sixth", // "harmonic seventh", "septimal minor seventh" too
            (8, 7) => "septimal major second",
            (8, 5) => "minor sixth",
            (9, 8) => "major second", // "major tone" too
            (9, 7) => "septimal major third",
            (9, 5) => "minor seventh",
            (10, 9) => "minor tone",
            // (10, 8) => "",
            (10, 7) => "greater septimal tritone",
            // (11, 6) => "major seventh",
            (11, 8) => "lesser undecimal tritone",
            // (12, 11) => "minor second",
            (13, 8) => "acute minor sixth",
            (15, 8) => "major seventh",
            (16, 15) => "semitone", // "minor second" too
            (16, 9) => "grave minor seventh",
            // (29, 16) => "minor seventh", // "twenty-ninth harmonic"
            (31, 16) => "augmented seventh",
            (45, 32) => "augmented fourth",
            (64, 45) => "diminished fifth",
            _ => "",
        }
    }
    /// Change a frequency according to the interval.
    ///
    /// Fails when the transposed frequency leaves the audible range
    /// (`TONE_FREQUENCY_MIN` .. `TONE_FREQUENCY_MAX`).
    pub fn change_frequency(&self, frequency: SampleCalc) -> SoundResult<SampleCalc> {
        let new_frequency = frequency * self.ratio;
        if new_frequency < TONE_FREQUENCY_MIN {
            return Err(Error::FrequencyTooLow);
        };
        if new_frequency > TONE_FREQUENCY_MAX {
            return Err(Error::FrequencyTooHigh);
        };
        Ok(new_frequency)
    }
    /// Change a frequency according to the interval's reciprocal
    /// (i.e. transpose in the opposite direction), with the same
    /// range checks as `change_frequency`.
    pub fn reverse_frequency(&self, frequency: SampleCalc) -> SoundResult<SampleCalc> {
        let new_frequency = frequency * self.reciprocal;
        if new_frequency < TONE_FREQUENCY_MIN {
            return Err(Error::FrequencyTooLow);
        };
        if new_frequency > TONE_FREQUENCY_MAX {
            return Err(Error::FrequencyTooHigh);
        };
        Ok(new_frequency)
    }
    /// Change a whole buffer of frequencies according to the interval.
    ///
    /// `base_frequency` and `result` must have the same length; every
    /// transposed value is range-checked, and the first out-of-range value
    /// aborts with an error (leaving `result` partially written).
    pub fn transpose(&self,
                     base_frequency: &[SampleCalc],
                     result: &mut [SampleCalc])
                     -> SoundResult<()> {
        if base_frequency.len() != result.len() {
            return Err(Error::BufferSize);
        }
        for (new_frequency, frequency) in result.iter_mut().zip(base_frequency) {
            *new_frequency = *frequency * self.ratio;
            if *new_frequency < TONE_FREQUENCY_MIN {
                return Err(Error::FrequencyTooLow);
            };
            if *new_frequency > TONE_FREQUENCY_MAX {
                return Err(Error::FrequencyTooHigh);
            };
        }
        Ok(())
    }
}
impl Mul for Interval {
    type Output = Interval;
    /// Stacks two intervals by multiplying their fractions.
    ///
    /// Cross-cancels with the gcd *before* multiplying, so the `u16`
    /// intermediate products can only overflow when the reduced result
    /// itself would not fit (the original multiplied first, which could
    /// overflow even for representable results, e.g. `256:255 * 255:256`).
    fn mul(self, rhs: Interval) -> Interval {
        let mut interval = Interval::default();
        let g1 = self.numerator.gcd(&rhs.denominator);
        let g2 = rhs.numerator.gcd(&self.denominator);
        interval.numerator = (self.numerator / g1) * (rhs.numerator / g2);
        interval.denominator = (self.denominator / g2) * (rhs.denominator / g1);
        interval.reduce();
        interval.ratio = interval.numerator as SampleCalc / interval.denominator as SampleCalc;
        interval.reciprocal = interval.denominator as SampleCalc / interval.numerator as SampleCalc;
        interval
    }
}
impl Div for Interval {
    type Output = Interval;
    /// Divides two intervals (multiplies by the reciprocal of `rhs`).
    ///
    /// Cross-cancels with the gcd *before* multiplying, so the `u16`
    /// intermediate products can only overflow when the reduced result
    /// itself would not fit (the original multiplied first, which could
    /// overflow even for representable results).
    fn div(self, rhs: Interval) -> Interval {
        let mut interval = Interval::default();
        let g1 = self.numerator.gcd(&rhs.numerator);
        let g2 = rhs.denominator.gcd(&self.denominator);
        interval.numerator = (self.numerator / g1) * (rhs.denominator / g2);
        interval.denominator = (self.denominator / g2) * (rhs.numerator / g1);
        interval.reduce();
        interval.ratio = interval.numerator as SampleCalc / interval.denominator as SampleCalc;
        interval.reciprocal = interval.denominator as SampleCalc / interval.numerator as SampleCalc;
        interval
    }
}
impl From<Interval> for SampleCalc {
fn from(interval: Interval) -> Self {
interval.ratio
}
}
impl fmt::Display for Interval {
    /// Formats the interval as `numerator:denominator`, e.g. `3:2`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_fmt(format_args!("{}:{}", self.numerator, self.denominator))
    }
}
|
import styled from 'styled-components';
// Full-screen, semi-transparent green overlay that centers its content
// (position: fixed so it covers the viewport regardless of scroll).
export const Container = styled.div`
align-items: center;
background: rgba( 136, 237, 196, 0.6 );
display: flex;
height: 100%;
justify-content: center;
left: 0;
position: fixed;
top: 0;
width: 100%;
@media (max-width: 500px) {
box-sizing: border-box;
padding: 10px;
}
`;
// The modal card itself: fixed 500px wide on desktop, edge-to-edge on
// narrow screens.
export const Content = styled.div`
background-color: var(--green-lightest-most);
border-radius: 10px;
box-shadow: 0 0 5px 1px gray;
box-sizing: border-box;
padding: 10px;
width: 500px;
@media (max-width: 500px) {
box-sizing: border-box;
padding: 0;
}
`;
// Modal header row: title on the left, close icon on the right.
export const Header = styled.div`
align-items: center;
box-sizing: border-box;
padding: 10px;
display: flex;
justify-content: space-between;
`;
// Header title with browser default margins/paddings removed.
export const Title = styled.h3`
margin: 0;
padding: 0;
`;
// 20x20 "add" icon image.
export const IconAddImg = styled.img`
height: 20px;
width: 20px;
`;
// 20x20 "close" icon image.
export const IconCloseImg = styled.img`
height: 20px;
width: 20px;
`;
// Vertical form layout holding the labelled inputs below.
export const Form = styled.form`
align-items: center;
box-sizing: border-box;
display: flex;
flex-direction: column;
padding: 30px;
`;
// Field label; column direction stacks the label text above its input.
export const Label = styled.label`
color: gray;
display: flex;
flex-direction: column;
font-size: 16px;
font-weight: bold;
margin: 5px 0;
width: 100%;
`;
// Single-line text input for the name field.
export const InputName = styled.input`
border: 0.5px solid gray;
border-radius: 5px;
line-height: 25px;
padding-left: 10px;
`;
// Single-line text input for the link/URL field (same look as InputName).
export const InputLink = styled.input`
border: 0.5px solid gray;
border-radius: 5px;
line-height: 25px;
padding-left: 10px;
`;
// Single-line text input for the tags field (same look as InputName).
export const InputTags = styled.input`
border: 0.5px solid gray;
border-radius: 5px;
line-height: 25px;
padding-left: 10px;
`;
// Multi-line description field; user may resize it vertically only.
export const TextareaDescription = styled.textarea`
border: 0.5px solid gray;
border-radius: 5px;
box-sizing: border-box;
height: 100%;
padding: 10px;
resize: vertical;
`;
// Primary submit button using the theme's dark green.
export const ButtonSubmit = styled.button`
background-color: var(--green-dark);
border-radius: 5px;
color: var(--white);
font-weight: bolder;
line-height: 20px;
width: 100px;
`;
|
<?php declare(strict_types=1);
namespace PHPLLVM\Type;
use PHPLLVM\Type;
/**
 * A type that wraps another type, e.g. a pointer, array, or vector type.
 */
interface Element extends Type {
    /**
     * Returns the type of the contained element.
     */
    public function getElementType(): Type;
}
|
<?php
/**
* Created by PhpStorm.
* User: YingQuan-han
* Date: 2018/11/1
* Time: 10:06
*/
namespace App\Models;
use Laravel\Passport\HasApiTokens;
use Illuminate\Notifications\Notifiable;
use Illuminate\Foundation\Auth\User as Authenticatable;
class User extends Authenticatable
{
    use HasApiTokens, Notifiable;

    /** @var string The backing database table. */
    protected $table = 'sys_users';

    /** @var string The primary key column. */
    protected $primaryKey = 'id';

    /**
     * Locate the user record for a Passport password-grant request.
     *
     * Allows clients to authenticate with either their e-mail address or
     * their name in the `username` field of the token request.
     *
     * @param string $username The e-mail address or name supplied by the client.
     * @return mixed The matching user model, or null when none exists.
     */
    public static function findForPassport($username)
    {
        // `static::` (instead of `User::`) keeps this working for
        // subclasses; a leading `where` reads clearer than starting
        // the builder with `orWhere` (which behaves the same here).
        return static::where('email', $username)
            ->orWhere('name', $username)
            ->first();
    }
}
|
---
layout: post
title: web.dev LIVE wrap-up
subhead: >
A summary of the major news and updates that were announced during our
3-day online community event, and a reminder about upcoming regional events.
description: >
A summary of the major news and updates that were announced during our
3-day online community event, and a reminder about upcoming regional events.
date: 2020-07-06
hero: image/admin/H60ns6FN1VtNrlx8e3EU.png
thumbnail: image/admin/d2asSQy3UHgqRF8sx1Xk.png
alt: The web.dev LIVE logo.
tags:
- blog
- web-vitals
- security
- capabilities
- accessibility
- metrics
- devtools
- lighthouse
- privacy
- progressive-web-apps
- install
---
We just wrapped up our three-day event, [web.dev LIVE](/live), where some of the web
community came together online to talk about the state of web development. We kicked off each day in
a different regional timezone, and Googlers shared a round of updates, news, and tips in the spirit
of helping developers with tools and guidance to keep the web stable, powerful, and accessible.
If you missed some of the live stream, all of the
[sessions are recorded](/live) and available for you to watch on YouTube.
We've also got upcoming [regional events](/live/#regional-events) all around
the world which are organized by Google Developer Groups and will provide
deep-dive sessions on topics related to what we covered during web.dev LIVE.
Let's dive into some of the news and updates that were shared over the three days.
## Web Vitals
The Chrome team
[announced](https://blog.chromium.org/2020/05/introducing-web-vitals-essential-metrics.html) the
[Web Vitals](/vitals) initiative to provide unified guidance, metrics, and tools to
help developers deliver great user experiences on the web. The Google Search team also [recently
announced](https://webmasters.googleblog.com/2020/05/evaluating-page-experience.html) that they will
be evaluating page experience as a ranking criteria, and will include [Core Web
Vitals](/vitals/#core-web-vitals) metrics as its foundation.
The three pillars of the 2020 Core Web Vitals are loading, interactivity, and visual stability of
page content, which are captured by the following metrics:
<figure class="w-figure">
{% Img src="image/admin/kzOdl2pRyEEPEQI0U2lQ.png", alt="An illustration of the Core Web Vitals.", width="800", height="232" %}
</figure>
+ [Largest Contentful Paint](/lcp/) measures perceived load speed and
marks the point in the page load timeline when a page's main content has likely loaded.
+ [First Input Delay](/fid/) measures responsiveness and quantifies the
experience users feel when trying to first interact with a page.
+ [Cumulative Layout Shift](/cls/) measures visual stability and quantifies
the amount of unexpected movement of page content.
At web.dev LIVE, we shared best practices on how to [optimize for Core Web
Vitals](https://youtu.be/AQqFZ5t8uNc) and how to use [Chrome DevTools to explore your site's
vitals values](https://youtu.be/OHb3xZIqUeU). We also shared plenty of other performance-related
talks that you can find at [web.dev/live](/live) in the Day 1 schedule.
## tooling.report
Developing for a platform as broad as the web can be challenging. Build tools are often at the heart
of your web development project, taking on a key role in handling your developer and product
lifecycle.
We have all seen unwieldy build config files, so to help web developers _and_ tooling authors
conquer the complexity of the web, we built [tooling.report](/introducing-tooling-report). It's a website
that helps you choose the right build tool for your next project, decide if migrating from one tool
to another is worth it, or figure out how to incorporate best practices into your tooling
configuration and code base.
We designed a suite of tests to determine which build tools allow you to follow
web development best practices. We worked with the build tool authors to
make sure we used their tools correctly and represented them fairly.
<figure class="w-figure">
{% Img src="image/admin/awFmvfMqFv3gvbpIICJY.png", alt="A screenshot of the tooling.report UI.", width="800", height="316", class="w-screenshot" %}
</figure>
The initial release of tooling.report covers webpack v4, Rollup v2, Parcel v2, and Browserify with Gulp,
which appear to be the most popular build tools right now. We built tooling.report with the
flexibility of adding more build tools and additional tests with help from the community.
If we're missing a best practice that should be tested, please [propose it in a GitHub
issue](https://github.com/GoogleChromeLabs/tooling.report/issues/new). If you're up for writing a
test or adding a new tool we did not include in the initial set, we welcome you to
[contribute](https://github.com/GoogleChromeLabs/tooling.report/blob/dev/CONTRIBUTING.md)!
In the meantime, you can read more about our [approach towards building
tooling.report](/introducing-tooling-report) and watch our [session from web.dev
LIVE](https://youtu.be/vsMJiNtQWvw).
## Privacy and security on the web
Chrome believes in an open web that is respectful of users' privacy and maintains key use cases that
keep the web working for everyone.
In 2019, Chrome
[proposed](https://blog.chromium.org/2019/05/improving-privacy-and-security-on-web.html) an update
to the cookie standard to restrict cookies to first-party contexts by default and require cookies
for third-party contexts to be explicitly marked as such. Specifically, this provides a line of defense
against Cross-Site Request Forgery attacks. The proposal is now being adopted by Chrome, Firefox,
Edge, and other browsers.
While Chrome decided to [temporarily
roll back](https://blog.chromium.org/2020/04/temporarily-rolling-back-samesite.html) these changes
in light of COVID-19, sadly, during a crisis when people are most vulnerable, you also see these
kinds of attacks increase. So, with the Chrome 84 Stable release (mid-July 2020), the changes will
[start to roll out again](https://www.chromium.org/updates/same-site?pli=1#20200528) across all
Chrome versions from 80 and up. Check out the [SameSite cookies
guidance](/samesite-cookies-explained/) as well as the [web.dev LIVE
session](https://youtu.be/Fet6-IiX69E) to learn more.
Moreover, under the banner of the [Privacy
Sandbox](https://blog.chromium.org/2020/01/building-more-private-web-path-towards.html) Chrome is
introducing a number of standards proposals that aim to support the use cases that let people make
their living using the web platform, but do it in a way that better respects user privacy. Chrome is
actively seeking feedback on these proposals, and is participating within the open forums of the W3C
to discuss the proposals as well as those submitted by other parties. Learn more about this
initiative in the [Security and privacy for the open web](https://youtu.be/8Tl0uQdVpxU) session.
Finally, looking at user security, [Spectre](https://meltdownattack.com/) was a vulnerability that
meant malicious code running in one browser process might be able to read any data associated with
that process even if it's from a different origin. One of the browser mitigations for this is site
isolation, i.e. putting each site into a separate process. Watch the web.dev LIVE session on the
[new Cross-Origin Opener and Embedder Policies](https://youtu.be/XLNJYhjA-0c) (COOP and COEP) to
learn more.
## Building a web with powerful capabilities
Chrome wants you to be free to create the highest quality web apps that give you the biggest reach
to users across devices. Combining the installability and reliability of PWAs, with the
[capabilities project](/fugu-status/) (Project Fugu), Chrome is focusing on three
things to close the gap between platform-specific apps and the web, to help you build and deliver great
experiences.
First, Chrome teams have been working hard to give web developers and users [more control over the install
experience](/customize-install/), [adding an install promotion to the
omnibox](/install-criteria/), and
[more](/promote-install/#browser-promotion). Despite the web's ubiquity, it's still
important for some businesses to have their app in the store. To help, Chrome launched
[Bubblewrap](https://github.com/GoogleChromeLabs/bubblewrap), a library and CLI that makes it
trivial to get your PWA into the Play Store. In fact, [PWABuilder.com](http://PWABuilder.com) now
uses Bubblewrap under the hood. In just a few mouse clicks, you can generate an APK and upload your
PWA to the Play Store, as long as you meet the
[criteria](https://blog.chromium.org/2020/06/changes-to-quality-criteria-for-pwas.html).
Second, Chrome is providing tighter integration with the operating system, such as the ability to
share a photo, song, or whatever by invoking the system-level share service with the [Web Share
API](/web-share/), or the ability to [receive content when shared from a different
installed app](/web-share-target/). You can keep users up-to-date, or subtly notify
them of new activity with [app badging](/badging-api/). Also, it's now easier for
users to quickly start an action using [App Shortcuts](/app-shortcuts/), which will
land in Chrome 84 (mid-July 2020).
And finally, Chrome has been working on new capabilities that enable new scenarios that weren't
possible before, like editors that [read and write to files on the user's local file
system](/file-system-access/), or get the list of locally installed fonts so that
users can use them in their designs.
During web.dev LIVE, we spoke about [lots of other capabilities and
features](https://youtu.be/NXCT3htg9nk) that can enable you to deliver the same kind of experience,
with the same capabilities, as platform-specific apps. See all sessions at [web.dev/live](/live)
in the Day 2 schedule.
## What's new in Chrome DevTools and Lighthouse 6.0
### Chrome DevTools: new Issues tab, color deficiencies emulator, and Web Vitals support
One of the most powerful features of Chrome DevTools is its ability to spot issues on a web page and
bring them to the developer's attention. This is most pertinent as we move into the next phase of a
[privacy-first web](https://blog.chromium.org/2020/01/building-more-private-web-path-towards.html).
To reduce notification fatigue and clutter in the Console, Chrome DevTools launched the [Issues
tab](https://developers.google.com/web/tools/chrome-devtools/issues) which focuses on three types of
critical issues to start with: [cookie problems](/samesite-cookies-explained),
[mixed content](https://developers.google.com/web/fundamentals/security/prevent-mixed-content/what-is-mixed-content),
and [COEP issues](/coop-coep/). Watch the web.dev LIVE session on [finding and fixing
problems with the Issues tab](https://youtu.be/1TbkSxQb4bI) to get started.
<figure class="w-figure">
{% Img src="image/admin/G7AmzK1btOMBUPEhnFhV.png", alt="A screenshot of the Issues tab.", width="800", height="535", class="w-screenshot w-screenshot--filled" %}
</figure>
Moreover, with the [Core Web Vitals](/vitals/#core-web-vitals) becoming one of the
most critical sets of metrics for web developers to track and measure, DevTools wants to ensure
developers are able to easily track how they perform against these thresholds. So these three
metrics are now in the Chrome DevTools Performance panel.
And finally, with an increasing number of developers focusing on accessibility, DevTools also
introduced a [color vision deficiencies
emulator](https://twitter.com/mathias/status/1237393102635012101) that allows developers to
emulate blurred vision and other types of vision deficiencies. You
can learn more about this and many other features in the [What's new in
DevTools](https://youtu.be/6yrJZHqJe2k) session.
<figure class="w-figure">
{% Img src="image/tcFciHGuF3MxnTr1y5ue01OGLBn2/3gTvVdPvTN3IUyhSN8gn.png", alt="A screenshot of the vision deficiencies emulator.", width="800", height="509", class="w-screenshot w-screenshot--filled" %}
</figure>
### Lighthouse 6.0: New metrics, Core Web Vitals lab measurements, an updated Performance score, and new audits
[Lighthouse](https://developers.google.com/web/tools/lighthouse) is an open-source automated tool
that helps developers improve their site's performance. In its latest version, the Lighthouse team
focused on providing insights based on metrics that give you a balanced view of the quality of your
user experience against critical dimensions.
To ensure consistency, Lighthouse added support for the Core Web Vitals:
[LCP](/lcp/), [TBT](/tbt/) (a proxy for
[FID](/fid/) since Lighthouse is a lab tool and FID can only be
measured in the field) and [CLS](/cls/). Lighthouse also removed
three old metrics: [First Meaningful
Paint](/first-meaningful-paint/), [First CPU
Idle](/first-cpu-idle/), and [Max Potential
FID](/lighthouse-max-potential-fid/). These removals are due to
considerations like metric variability and newer metrics offering better
reflections of the part of user experience that Lighthouse is trying to measure.
Additionally, Lighthouse also made some adjustments to how much each metric
factors into the overall Performance score based on user feedback.
Lighthouse also added a [scoring
calculator](https://googlechrome.github.io/lighthouse/scorecalc/) to help you explore your
performance scoring, by providing a comparison between version 5 and 6 scores. When you run an audit
with Lighthouse 6.0, the report comes with a link to the calculator with your results populated.
And finally, Lighthouse added a bunch of [new
audits](/lighthouse-whats-new-6.0/#new-audits), with a focus on JavaScript analysis
and accessibility.
<figure class="w-figure">
{% Img src="image/tcFciHGuF3MxnTr1y5ue01OGLBn2/qAVFUVHR7Ad0tm05J1d3.png", alt="A list of the new audits.", width="800", height="450", class="w-screenshot" %}
</figure>
Learn more by watching the [What's new in speed tooling](https://youtu.be/yDHfrhCGFQw)
session.
## Learn more
Thank you to everyone in the community who joined us to discuss the web
platform's opportunities and challenges.
This post summarized some of the highlights of the event, but there was so much
more. Make sure to check out all the [sessions](/live) and
[subscribe to the web.dev newsletter](/newsletter) if you'd like
more content straight to your inbox. And visit the [Regional
Events](/live#regional-events) section on web.dev/live to find an upcoming community
event in your timezone!
|
mod instructions;
use rand::SeedableRng;
use rand::rngs::SmallRng;
use crate::apu::Apu;
use crate::gpu::Gpu;
use crate::rom::Rom;
use crate::util::*;
use instructions::*;
/// The emulated CPU: registers plus the attached memory, GPU, and APU.
pub struct Cpu {
    /// CPU registers
    regs: Registers,
    /// The `ROM` / `RAM` memory, it has a size of 64 KB
    memory: Vec<u8>,
    /// The GPU
    gpu: Gpu,
    /// The APU
    apu: Apu,
    /// The random number generator
    rng: SmallRng,
    /// Flag to signal that the cpu is waiting for `VBLNK`
    wait_vblank: bool,
}
/// The CPU register file.
struct Registers {
    /// program counter `PC`
    pc: u16,
    /// stack pointer `SP` (start at 0xFDF0/512 B)
    sp: u16,
    /// general purpose registers (`R0` .. `RF`)
    r: [i16; 16],
    /// flag register `FLAGS` (`carry`, `zero`, `overflow`, `negative`)
    flags: u8,
}
impl Cpu {
    /// Creates a CPU with the ROM loaded at the start of memory, `PC` at
    /// the ROM's entry point, and `SP` at its initial value (0xFDF0).
    pub fn new(gpu: Gpu, apu: Apu, rom: &Rom) -> Cpu {
        let regs = Registers {
            pc: rom.start(),
            sp: 0xFDF0,
            r: [0; 16],
            flags: 0,
        };
        // 64 KB address space, zero-initialized, with the ROM copied in front.
        let mut memory = vec![0; 2usize.pow(16)];
        memory[..rom.size() as usize].copy_from_slice(rom.rom());
        Cpu {
            regs,
            memory,
            gpu,
            apu,
            rng: SmallRng::from_entropy(),
            wait_vblank: false,
        }
    }
    /// Execute one CPU cycle
    pub fn step(&mut self) {
        self.wait_vblank = false;
        // Fetch `pc`, increase `pc` and run instruction at `pc`
        // (the instruction may itself rewrite `pc`, e.g. jumps/calls).
        let pc = self.regs.pc;
        self.regs.pc += 4;
        if let Err(e) = run_instruction(self, pc as usize) {
            panic!("Invalid instruction at 0x{:02X} ({})", pc, e);
        }
        self.gpu.set_vblank(false);
    }
    /// Disassembles the instruction at `addr` into a human-readable string.
    pub fn format_instruction(&self, addr: u16) -> Result<String, String> {
        format_instruction(self, addr as usize)
    }
    /// Read one value from the memory at the specified address
    pub fn read<T: Copy>(&self, addr: u16) -> T {
        *deserialize(&self.memory[addr as usize..])
    }
    /// Write one value to the memory at the specified address
    pub fn write<T>(&mut self, addr: u16, val: T) {
        let addr = addr as usize;
        let buf = serialize(&val);
        self.memory[addr..addr + buf.len()].copy_from_slice(buf);
    }
    /// Renders the current frame into the caller-provided pixel buffer.
    pub fn render(&mut self, buffer: &mut [u32]) {
        self.gpu.render(buffer);
    }
    /// Get the carry flag
    pub fn carry(&self) -> bool {
        bitflag(self.regs.flags, 1)
    }
    /// Set the carry flag
    pub fn set_carry(&mut self, val: bool) {
        set_bitflag(&mut self.regs.flags, 1, val);
    }
    /// Get the zero flag
    pub fn zero(&self) -> bool {
        bitflag(self.regs.flags, 2)
    }
    /// Set the zero flag
    pub fn set_zero(&mut self, val: bool) {
        set_bitflag(&mut self.regs.flags, 2, val);
    }
    /// Get the overflow flag
    pub fn overflow(&self) -> bool {
        bitflag(self.regs.flags, 6)
    }
    /// Set the overflow flag
    pub fn set_overflow(&mut self, val: bool) {
        set_bitflag(&mut self.regs.flags, 6, val);
    }
    /// Get the negative flag
    pub fn negative(&self) -> bool {
        bitflag(self.regs.flags, 7)
    }
    /// Set the negative flag
    pub fn set_negative(&mut self, val: bool) {
        set_bitflag(&mut self.regs.flags, 7, val);
    }
    /// Current program counter.
    pub fn pc(&self) -> u16 {
        self.regs.pc
    }
    /// Current stack pointer.
    pub fn sp(&self) -> u16 {
        self.regs.sp
    }
    /// Reads general-purpose register `R<index>`.
    pub fn r(&self, index: u8) -> i16 {
        self.regs.r[index as usize]
    }
    /// Writes general-purpose register `R<index>`.
    pub fn set_r(&mut self, index: u8, value: i16) {
        self.regs.r[index as usize] = value;
    }
    /// Stores the two controllers' input states into their
    /// memory-mapped I/O slots (0xFFF0 and 0xFFF2).
    pub fn set_input(&mut self, (one, two): (u8, u8)) {
        self.memory[0xFFF0] = one;
        self.memory[0xFFF2] = two;
    }
    /// True when the last instruction asked to wait for `VBLNK`.
    pub fn wait_vblank(&self) -> bool {
        self.wait_vblank
    }
}
|
import 'dart:async';
import 'dart:html' as html;
import 'dart:js_util' as jsutil;
import 'package:flutter/services.dart';
import '../interface/media_stream.dart';
import '../interface/rtc_video_renderer.dart';
import 'media_stream_impl.dart';
import 'ui_fake.dart' if (dart.library.html) 'dart:ui' as ui;
// An error code value to error name Map.
// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
const Map<int, String> _kErrorValueToErrorName = {
1: 'MEDIA_ERR_ABORTED',
2: 'MEDIA_ERR_NETWORK',
3: 'MEDIA_ERR_DECODE',
4: 'MEDIA_ERR_SRC_NOT_SUPPORTED',
};
// An error code value to description Map.
// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
const Map<int, String> _kErrorValueToErrorDescription = {
1: 'The user canceled the fetching of the video.',
2: 'A network error occurred while fetching the video, despite having previously been available.',
3: 'An error occurred while trying to decode the video, despite having previously been determined to be usable.',
4: 'The video has been found to be unsuitable (missing or in a format not supported by your browser).',
};
// The default error message, when the error is an empty string
// See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/message
const String _kDefaultErrorMessage =
'No further diagnostic information can be determined or provided.';
// Web implementation of VideoRenderer: drives an html <video> element for
// video tracks and a detached, hidden <audio> element for audio tracks.
class RTCVideoRendererWeb extends VideoRenderer {
// Hidden audio element; created lazily when a stream with audio is attached.
html.AudioElement? _audioElement;
RTCVideoRendererWeb() : _textureId = _textureCounter++;
// Monotonic counter handing each renderer instance a unique texture id.
static int _textureCounter = 1;
// Video-only / audio-only views of the attached stream's tracks.
html.MediaStream? _videoStream;
html.MediaStream? _audioStream;
MediaStreamWeb? _srcObject;
final int _textureId;
bool _mirror = false;
// Stream subscriptions owned by this renderer; cancelled on dispose.
final _subscriptions = <StreamSubscription>[];
// CSS object-fit applied to the video element ('contain' by default).
String _objectFit = 'contain';
bool _muted = false;
// Updates both the cached value and the live element's CSS, when it exists.
set objectFit(String fit) =>
findHtmlView()?.style?.objectFit = _objectFit = fit;
bool get mirror => _mirror;
// NOTE(review): only stores the flag — the element's transform is not
// updated here; presumably applied elsewhere. Confirm against callers.
set mirror(bool mirror) {
_mirror = mirror;
}
@override
int get videoWidth => value.width.toInt();
@override
int get videoHeight => value.height.toInt();
@override
int get textureId => _textureId;
@override
bool get muted => _muted;
@override
// Forwards muting to the audio element and caches the flag.
set muted(bool mute) => _audioElement?.muted = _muted = mute;
@override
bool get renderVideo => _srcObject != null;
// Refreshes the published RendererValue from the live <video> element's
// current dimensions (0.0 when no element is attached yet).
void _updateAllValues() {
  final videoElement = findHtmlView();
  value = value.copyWith(
    rotation: 0,
    width: videoElement?.videoWidth?.toDouble() ?? 0.0,
    height: videoElement?.videoHeight?.toDouble() ?? 0.0,
    renderVideo: renderVideo,
  );
}
@override
MediaStream? get srcObject => _srcObject;
@override
set srcObject(MediaStream? stream) {
_srcObject = stream as MediaStreamWeb;
if (null != _srcObject) {
if (stream.getVideoTracks().isNotEmpty) {
_videoStream = html.MediaStream();
for (var track in _srcObject!.jsStream.getVideoTracks()) {
_videoStream!.addTrack(track);
}
}
if (stream.getAudioTracks().isNotEmpty) {
_audioStream = html.MediaStream();
for (var track in _srcObject!.jsStream.getAudioTracks()) {
_audioStream!.addTrack(track);
}
}
} else {
_videoStream = null;
_audioStream = null;
}
if (null != _audioStream) {
if (null == _audioElement) {
_audioElement = html.AudioElement()
..id = 'audio_RTCVideoRenderer-$textureId'
..muted = stream.ownerTag == 'local'
..autoplay = true;
getAudioManageDiv().append(_audioElement!);
}
_audioElement?.srcObject = _audioStream;
}
findHtmlView()?.srcObject = _videoStream;
value = value.copyWith(renderVideo: renderVideo);
}
// Returns the hidden container that hosts every renderer's audio element,
// creating and attaching it to the document body on first use.
html.DivElement getAudioManageDiv() {
  final existing = html.document.getElementById('html_webrtc_audio_manage_list');
  if (existing != null) {
    return existing as html.DivElement;
  }
  final container = html.DivElement()
    ..id = 'html_webrtc_audio_manage_list'
    ..style.display = 'none';
  html.document.body!.append(container);
  return container;
}
/// Locates this renderer's backing <video> element.
///
/// Tries a direct DOM id lookup first; when the element lives inside a
/// flutter platform view's shadow root, walks that root's children instead.
/// Returns null when the element has not been created yet.
html.VideoElement? findHtmlView() {
var video =
html.document.getElementById('video_RTCVideoRenderer-$textureId');
if (null != video) {
return video as html.VideoElement;
}
final fltPv = html.document.getElementsByTagName('flt-platform-view');
if (fltPv.isEmpty) return null;
var child = (fltPv.first as html.Element).shadowRoot!.childNodes;
// NOTE(review): childNodes may contain non-Element nodes (e.g. text nodes);
// the unconditional cast below would throw on those — confirm the shadow
// root only ever holds elements.
for (var item in child) {
if ((item as html.Element).id == 'video_RTCVideoRenderer-$textureId') {
return item as html.VideoElement;
}
}
return null;
}
/// Tears down the renderer: releases the attached stream, cancels event
/// subscriptions, and detaches the media element's source.
@override
Future<void> dispose() async {
await _srcObject?.dispose();
_srcObject = null;
_subscriptions.forEach((s) => s.cancel());
var element = findHtmlView();
element?.removeAttribute('src');
element?.load();
// NOTE(review): this removes the *shared* audio container div, which also
// detaches the audio elements of any other live renderers — confirm this
// is intentional rather than removing only this renderer's _audioElement.
getAudioManageDiv().remove();
return super.dispose();
}
/// Routes this renderer's audio to the output device [deviceId] via the
/// `setSinkId` API; returns false when unsupported or on failure.
@override
Future<bool> audioOutput(String deviceId) async {
  try {
    final mediaElement = findHtmlView();
    if (mediaElement != null && jsutil.hasProperty(mediaElement, 'setSinkId')) {
      await jsutil.promiseToFuture<void>(
          jsutil.callMethod(mediaElement, 'setSinkId', [deviceId]));
      return true;
    }
  } catch (e) {
    print('Unable to setSinkId: ${e.toString()}');
  }
  return false;
}
/// Registers the platform view factory that creates the <video> element for
/// this renderer and wires up its lifecycle event listeners.
@override
Future<void> initialize() async {
var id = 'RTCVideoRenderer-$textureId';
// // ignore: undefined_prefixed_name
ui.platformViewRegistry.registerViewFactory(id, (int viewId) {
// The factory may be re-invoked; drop listeners from any previous element.
_subscriptions.forEach((s) => s.cancel());
_subscriptions.clear();
var element = html.VideoElement()
..autoplay = true
..muted = true
..controls = false
..style.objectFit = _objectFit
..style.border = 'none'
..srcObject = _videoStream
..id = "video_$id"
..setAttribute('playsinline', 'true');
// Keep the reported width/height in sync with the element.
_subscriptions.add(
element.onCanPlay.listen((dynamic _) {
_updateAllValues();
// print('RTCVideoRenderer: videoElement.onCanPlay ${value.toString()}');
}),
);
_subscriptions.add(
element.onResize.listen((dynamic _) {
_updateAllValues();
onResize?.call();
// print('RTCVideoRenderer: videoElement.onResize ${value.toString()}');
}),
);
// The error event fires when some form of error occurs while attempting to load or perform the media.
_subscriptions.add(
element.onError.listen((html.Event _) {
// The Event itself (_) doesn't contain info about the actual error.
// We need to look at the HTMLMediaElement.error.
// See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error
var error = element.error;
print('RTCVideoRenderer: videoElement.onError, ${error.toString()}');
throw PlatformException(
code: _kErrorValueToErrorName[error!.code]!,
message:
error.message != '' ? error.message : _kDefaultErrorMessage,
details: _kErrorValueToErrorDescription[error.code],
);
}),
);
_subscriptions.add(
element.onEnded.listen((dynamic _) {
// print('RTCVideoRenderer: videoElement.onEnded');
}),
);
return element;
});
}
}
|
package react.redux
import react.*
import redux.*
/**
 * Renders a react-redux `Provider` wired to [store], applying [handler]
 * to configure its children.
 */
fun RBuilder.provider(store: Store<*, *, *>, handler: RHandler<ProviderProps>) =
child<ProviderProps, Provider> {
attrs.store = store
handler()
}
|
import { useStaticQuery, graphql } from 'gatsby';
let cachedImages: any = null;
// Looks up Gatsby-processed asset images by base name (without extension).
// Results of the static query are cached at module level so repeated calls
// reuse the same node list.
// NOTE(review): because of the cache, useStaticQuery is only invoked on the
// first call — if this is ever called outside a component render, or the
// conditional hook call trips React's rules-of-hooks, revisit this pattern.
export const queryImages = (...names: string[]) => {
  const images = (cachedImages =
    cachedImages ||
    useStaticQuery(graphql`
      query {
        allFile(filter: { relativePath: { glob: "assets/*.(jpg|JPG)" } }) {
          nodes {
            base
            name
            childImageSharp {
              fluid {
                ...GatsbyImageSharpFluid
              }
            }
          }
        }
      }
    `));
  // Preserve the caller's requested order; missing names yield undefined.
  return names.map(name =>
    images.allFile.nodes.find((n: any) => n.name === name)
  );
};
|
---
layout: post
title: testing things
---
## This
This is very nice!
* one
* two
* three
|
;;;; Package definition for the mariko example programs; exports the three
;;;; demo entry points.
(defpackage #:mariko-examples
(:use #:cl)
(:export #:tile-map-test
#:basic-sprite-test
#:anim-test))
|
# WarnableModels
require File.dirname(__FILE__) + '/warnings'
module Freegenie
  # Mixes warning collection into ActiveRecord-style models. Models call
  # acts_as_warnable and implement a `run_warnings` method that populates
  # the Warnings object exposed via #warnings.
  module WarnableModels
    module ClassMethods
      # Declares the model as warnable. Options:
      #   :store_count - attribute that receives the number of warnings
      #   :store_yaml  - attribute that receives the warnings as YAML
      def acts_as_warnable(options = {})
        class_eval("@@warnable_options = #{options.inspect}")
        send :include, InstanceMethods
        before_save :load_warnings
      end

      # Reads back the class-level options set by acts_as_warnable.
      def warnable_options
        class_eval("@@warnable_options")
      end
    end

    module InstanceMethods
      # Memoized warnings for this instance; computed on first access.
      def warnings
        @warnings ||= load_warnings
      end

      # Drops the memoized warnings so the next access recomputes them.
      def clear_warnings!
        @warnings = nil
      end

      protected

      # Builds a fresh Warnings object, runs the model's own checks, and
      # optionally stores the count / YAML dump on configured attributes.
      def load_warnings
        @warnings = Freegenie::WarnableModels::Warnings.new
        raise "warnings should be empty" if !@warnings.empty?
        # BUG FIX: `methods.include? 'run_warnings'` always fails on
        # Ruby >= 1.9 where #methods returns symbols; use respond_to?
        # (including private/protected methods) instead.
        unless respond_to?(:run_warnings, true)
          raise "You must implement a 'run_warnings' method on your model."
        end
        self.run_warnings
        if !self.class.warnable_options[:store_count].nil?
          instance_eval("self.#{self.class.warnable_options[:store_count]} = #{@warnings.size}")
        end
        if !self.class.warnable_options[:store_yaml].nil?
          if @warnings.size > 0
            instance_eval("self.#{self.class.warnable_options[:store_yaml]} = \"#{@warnings.to_yaml }\" ")
          else
            instance_eval("self.#{self.class.warnable_options[:store_yaml]} = nil ")
          end
        end
        @warnings
      end
    end

    # Wires both mixin halves into the including model class.
    def self.included(receiver)
      receiver.extend ClassMethods
      receiver.send :include, InstanceMethods
    end
  end
end
|
#![allow(unused)]
use std::{borrow::Cow, cmp::Ordering};
use proc_macro2::{Ident, TokenStream};
use syn::{punctuated::Punctuated, token::Comma, Attribute, FnArg, ItemFn};
use crate::path::PathOperation;
#[cfg(feature = "actix_extras")]
pub mod actix;
#[cfg(feature = "rocket_extras")]
pub mod rocket;
/// A resolved operation argument: either a structured value or a raw token
/// stream to be spliced into generated code as-is.
#[cfg_attr(feature = "debug", derive(Debug))]
pub enum Argument<'a> {
Value(ArgumentValue<'a>),
TokenStream(TokenStream),
}
/// Metadata for a single argument extracted from a handler function.
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct ArgumentValue<'a> {
pub name: Option<Cow<'a, str>>,
pub argument_in: ArgumentIn,
pub ident: Option<&'a Ident>,
pub is_array: bool,
pub is_option: bool,
}
/// Where an argument is carried: in the URL path or the query string.
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(PartialEq)]
pub enum ArgumentIn {
Path,
Query,
}
/// A route path string plus the arguments resolved from it.
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct ResolvedPath {
pub path: String,
pub args: Vec<ResolvedArg>,
}
/// A single argument resolved from a route, tagged by its location.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "debug", derive(Debug))]
pub enum ResolvedArg {
Path(ArgValue),
Query(ArgValue),
}
impl ResolvedArg {
    /// Ordering helper: compares two resolved arguments by their value name.
    fn by_name(a: &ResolvedArg, b: &ResolvedArg) -> Ordering {
        Ord::cmp(&a.get_value().name, &b.get_value().name)
    }

    /// Borrows the inner [`ArgValue`] regardless of variant.
    fn get_value(&self) -> &ArgValue {
        match *self {
            ResolvedArg::Path(ref value) | ResolvedArg::Query(ref value) => value,
        }
    }
}
/// An argument's resolved name along with its original spelling.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct ArgValue {
pub name: String,
pub original_name: String,
}
/// The HTTP operation and path resolved from a handler's attributes.
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct ResolvedOperation {
pub path_operation: PathOperation,
pub path: String,
}
/// Resolves typed arguments from a handler's signature; the default
/// implementation resolves nothing (used when no framework extra is enabled).
pub trait ArgumentResolver {
fn resolve_path_arguments(
_: &Punctuated<FnArg, Comma>,
_: Option<Vec<ResolvedArg>>,
) -> Option<Vec<Argument<'_>>> {
None
}
}
/// Resolves a route path string into a [`ResolvedPath`]; defaults to none.
pub trait PathResolver {
fn resolve_path(_: &Option<String>) -> Option<ResolvedPath> {
None
}
}
/// Resolves the HTTP operation from a handler `fn` item; defaults to none.
pub trait PathOperationResolver {
fn resolve_operation(_: &ItemFn) -> Option<ResolvedOperation> {
None
}
}
/// Marker type carrying the framework-specific resolver implementations.
pub struct PathOperations;
// Fallback no-op resolver impls used when neither framework extra is enabled.
#[cfg(not(any(feature = "actix_extras", feature = "rocket_extras")))]
impl ArgumentResolver for PathOperations {}
#[cfg(not(any(feature = "actix_extras", feature = "rocket_extras")))]
impl PathResolver for PathOperations {}
#[cfg(not(any(feature = "actix_extras", feature = "rocket_extras")))]
impl PathOperationResolver for PathOperations {}
|
{-# LANGUAGE FlexibleContexts #-}
import Control.Monad.Memo
-- Counts monotone lattice paths on a 30x30 grid using memoization.
main = print $ startEvalMemo (getNumberOfPaths 30 30)
-- | Number of right/down paths from (x, y) to the origin, memoized on the
-- (x, y) pair to avoid the exponential naive recursion.
getNumberOfPaths :: (MonadMemo (Integer, Integer) Integer m) => Integer -> Integer -> m Integer
getNumberOfPaths 0 _ = return 1
getNumberOfPaths _ 0 = return 1
getNumberOfPaths x y = do
n1 <- for2 memo getNumberOfPaths (x-1) y
n2 <- for2 memo getNumberOfPaths x (y-1)
return (n1 + n2)
|
#!/bin/bash
# Launches the three Symform services in the background, waits until they
# are all running, then keeps the container alive while they stay up.
/opt/symform/SymformNode.sh service contrib 2> /dev/null &
/opt/symform/SymformNode.sh service sync 2> /dev/null &
/opt/symform/SymformNode.sh service web 2> /dev/null &
C="0"
# Returns 0 (success) when all three service processes are present,
# i.e. more than two matching pids are found.
symform_pid () {
PID=$(ps aux | egrep -i "symform(sync|web|contrib)" | grep -v exe| awk '{ print $2 }')
if [[ $(echo $PID | wc -w) -gt 2 ]]; then
return 0
else
return 1
fi
}
# Poll once per second until startup completes; C toggles the spinner text.
until symform_pid; do
if [[ $C -eq "0" ]]; then
echo -ne "Waiting for Symform to start..\r"
C="1"
else
echo -ne "Waiting for Symform to start. \r"
C="0"
fi
sleep 1
done
echo "================ "
echo "Container written by Joshi Friberg"
echo "Symform is running with pids $(echo $PID)"
echo "Start time was: $(date)"
# Keep-alive loop: exits (ending the container) once any service dies.
while symform_pid; do
sleep 60
echo -ne "Still running: $(date)\r"
done
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using VideoRentalApp.Models;
using VideoRentalApp.ViewModels;
using System.Data.Entity;
namespace VideoRentalApp.Controllers
{
public class MoviesController : Controller
{
    // EF database context shared by all actions; disposed with the controller.
    private ApplicationDbContext _context;

    public MoviesController()
    {
        _context = new ApplicationDbContext();
    }

    /// <summary>
    /// Disposes the database context and chains to the base implementation.
    /// </summary>
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _context.Dispose();
        }
        // BUG FIX: base.Dispose was never called, so base controller
        // resources were leaked.
        base.Dispose(disposing);
    }

    /// <summary>Shows the details page for one movie (genre eagerly loaded).</summary>
    public ActionResult Details(int id)
    {
        var movie = _context.movies.Include(m => m.genre).SingleOrDefault(m => m.Id == id);
        if (movie == null)
        {
            return HttpNotFound();
        }
        return View(movie);
    }

    /// <summary>
    /// Creates a new movie (Id == 0) or updates an existing one, then
    /// redirects to the movie list.
    /// </summary>
    [ValidateAntiForgeryToken]
    [HttpPost]
    public ActionResult Save(Movie movie)
    {
        if (!ModelState.IsValid)
        {
            // Re-render the form with validation messages and genre choices.
            var viewModel = new MovieFormViewModel(movie) { Genre = _context.genre.ToList() };
            return View("EditOrAddMovie", viewModel);
        }
        if (movie.Id == 0)
        {
            _context.movies.Add(movie);
        }
        else
        {
            var movieInDb = _context.movies.Single(c => c.Id == movie.Id);
            movieInDb.Name = movie.Name;
            movieInDb.DateAdded = DateTime.Today;
            movieInDb.ReleseDate = movie.ReleseDate;
            movieInDb.GenreId = movie.GenreId;
            movieInDb.NumberInStuck = movie.NumberInStuck;
        }
        _context.SaveChanges();
        return RedirectToAction("Random", "Movies");
    }

    /// <summary>Shows the edit form pre-filled for the given movie.</summary>
    public ActionResult Edit(int id)
    {
        var movie = _context.movies.SingleOrDefault(c => c.Id == id);
        if (movie == null)
        {
            return HttpNotFound();
        }
        var viewModel = new MovieFormViewModel(movie)
        {
            Genre = _context.genre.ToList()
        };
        return View("EditOrAddMovie", viewModel);
    }

    // GET: Movies
    /// <summary>Lists all movies (action name kept for backward compatibility).</summary>
    public ActionResult Random()
    {
        var movies = _context.movies.Include(m => m.genre).ToList();
        return View(movies);
    }

    /// <summary>Shows an empty form for adding a new movie.</summary>
    public ActionResult EditOrAddMovie()
    {
        var Genre = _context.genre;
        var ViewModel = new MovieFormViewModel { Genre = Genre };
        return View("EditOrAddMovie", ViewModel);
    }
}
}
|
//! STUN client and server structs and functions.
/// Async STUN client module.
pub mod async_stun_client;
/// Async STUN server module.
pub mod async_stun_server;
|
# gridijkstra
Python package wrapping scipy's dijkstra with a grid-based interface
```python
>>> import gridijkstra
>>> import numpy as np
>>> costs = np.ones((50, 60))
>>> costs[10:15, :20] = 1e30 # np.inf also works, but is less convenient for plotting
>>> costs[20:25, 25:55] = 1e30
>>> costs[30:40, 30:40] = 1e30
>>> start = (2, 2)
>>> target = (48, 58)
>>> total_cost, path = gridijkstra.plan(costs, start, target, return_path=True)
>>> print(f'Full path length: {total_cost}')
'Full path length: 102.0'
```
Three use cases are shown below. See scripts/examples.ipynb for a notebook with examples



|
# Connect-AzAccount #dont worry if already logedin
#check the sandbox subcriptions ID
# Get-AzSubscription
# Look up the Learn sandbox ("Concierge") subscription and remember its id.
$conciergesub = Get-AzSubscription -SubscriptionName "Concierge Subscription"
$conciergesub.Id
# set the context sandbox
# Switch the current Az session to that subscription.
$context = Get-AzSubscription -SubscriptionId $conciergesub.Id
Set-AzContext $context
# set default group to the one automatically created in sandbox
# Set-AzDefault -ResourceGroupName learn-8966c842-dd73-4d69-a4a3-66739dbee752
# Set-AzDefault -ResourceGroupName learn-be51576f-624f-4fb6-9010-6fd70f3906c1
# Set-AzDefault -ResourceGroupName learn-7a9863cf-0ad4-48e7-9ea3-c5aa470bcc09
# NOTE(review): this resource-group name changes with every new sandbox;
# update it (or parameterize it) before reuse.
Set-AzDefault -ResourceGroupName learn-944f12c8-d2d7-45e2-a59a-27686b2e7c59
|
export {hasViewportRelativeCoordinates} from './hasViewportRelativeCoordinates';
export {isTouchEvent} from './isTouchEvent';
|
/*
* Copyright (C) Hao Feng
*/
import sbt.Credentials
import sbt.Keys.{ credentials, publishTo }
lazy val common = Seq(
organization := "io.0ops",
version := "2.4.4",
scalaVersion := "2.11.12",
/* BuildPaths.defaultGlobalBase => ~/.sbt */
credentials += Credentials(BuildPaths.defaultGlobalBase / ".credentials")
)
/*
* default builds:
* => core
* => httputils => http
* => filesystem
* => kafka
*/
lazy val distribution = Project(
id = "atiesh-distribution",
base = file(".")
).aggregate(core, httputils, filesystem, kafka, http)
.settings(common)
/* atiesh core project */
// Shared dependency list for the core framework and the plain-jvm modules.
// Exclusions strip transitively duplicated kamon/config/slf4j artifacts so
// the versions pinned here win.
lazy val dependencies = Seq(
// akka-actor
"com.typesafe.akka" %% "akka-actor" % "2.5.32",
// typesafe config
"com.typesafe" % "config" % "1.4.1",
// kamon
"io.kamon" %% "kamon-core" % "2.0.5",
"io.kamon" %% "kamon-system-metrics" % "2.0.1",
"io.kamon" %% "kamon-prometheus" % "2.0.1",
// logger
"org.slf4j" % "slf4j-api" % "1.7.32",
"ch.qos.logback" % "logback-classic" % "1.2.3", // scalalogging docs
"com.typesafe.scala-logging" %% "scala-logging" % "3.9.4"
).map(_.excludeAll(ExclusionRule("io.kamon", "kamon-core_2.11"),
ExclusionRule("com.typesafe", "config"),
ExclusionRule("org.slf4j", "slf4j-api")))
/* atiesh core framework */
lazy val core = (project in file("core"))
.settings(
common,
name := "atiesh",
libraryDependencies ++= dependencies
)
/* atiesh utils - http */
lazy val httputils = (project in file("utils/http"))
.settings(
common,
name := "atiesh-utils-http",
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-stream" % "2.5.32",
"com.typesafe.akka" %% "akka-http" % "10.1.15",
)
).dependsOn(core)
/* atiesh semantics - http */
lazy val http = (project in file("semantics-http"))
  .settings(
    common,
    name := "atiesh-semantics-http",
    libraryDependencies ++= Seq(
      // CONSISTENCY FIX: align akka versions with the httputils project this
      // module depends on (2.5.26/10.1.10 previously conflicted with the
      // 2.5.32/10.1.15 pulled in transitively).
      "com.typesafe.akka" %% "akka-stream" % "2.5.32",
      "com.typesafe.akka" %% "akka-http" % "10.1.15"
    )
  ).dependsOn(httputils)
/* atiesh semantics - files */
lazy val filesystem = (project in file("semantics-filesystem"))
.settings(
common,
name := "atiesh-semantics-filesystem",
libraryDependencies ++= dependencies
).dependsOn(core)
/* atiesh semantics - kafka */
lazy val kafka = (project in file("semantics-kafka"))
.settings(
common,
name := "atiesh-semantics-kafka",
libraryDependencies ++= Seq(
"org.apache.kafka" % "kafka-clients" % "2.4.1"
exclude("org.slf4j", "slf4j-api"),
)
).dependsOn(core)
/*
* atiesh semantics - syslog (experiment),
* we don't build this by default
*
* you can build it manual via sbt shell:
* sbt> project syslog
* sbt:atiesh-semantics-syslog> clean
* sbt:atiesh-semantics-syslog> package (or publish)
*/
lazy val syslog = (project in file("semantics-syslog"))
.settings(
common,
name := "atiesh-semantics-syslog",
libraryDependencies ++= Seq(
"com.cloudbees" % "syslog-java-client" % "1.1.7"
)
).dependsOn(core)
/*
* atiesh semantics - aliyun,
* too much deps with conflicts warns,
* we don't build this by default
*
* you can build it manual via sbt shell:
* sbt> project aliyun
* sbt:atiesh-semantics-aliyun> clean
* sbt:atiesh-semantics-aliyun> package (or publish)
*/
lazy val aliyun = (project in file("semantics-aliyun"))
.settings(
common,
name := "atiesh-semantics-aliyun",
libraryDependencies ++= Seq(
"com.aliyun.openservices" % "aliyun-log-producer" % "0.2.0"
exclude("org.slf4j", "slf4j-api") exclude("org.slf4j", "slf4j-log4j12")
exclude("ch.qos.logback", "logback-core") exclude("ch.qos.logback", "logback-classic"),
)
).dependsOn(core)
|
use std::prelude::v1::*;
use teaclave_attestation::verifier;
use teaclave_config::RuntimeConfig;
use teaclave_config::BUILD_CONFIG;
use teaclave_proto::teaclave_access_control_service::*;
use teaclave_rpc::config::SgxTrustedTlsClientConfig;
use teaclave_rpc::endpoint::Endpoint;
use teaclave_types::EnclaveInfo;
/// Entry point for the enclave test runner; returns true when every listed
/// test function passes.
pub fn run_tests() -> bool {
use teaclave_test_utils::*;
run_tests!(
test_authorize_data_success,
test_authorize_data_fail,
test_authorize_function_success,
test_authorize_function_fail,
test_authorize_task_success,
test_authorize_task_fail,
test_authorize_staged_task_success,
test_authorize_staged_task_fail,
test_concurrency,
)
}
/// Builds a TLS client for the access control service from the runtime
/// config, verifying the remote enclave via SGX attestation before use.
fn get_client() -> TeaclaveAccessControlClient {
let runtime_config = RuntimeConfig::from_toml("runtime.config.toml").expect("runtime");
// Expected enclave measurement used to validate the attestation report.
let enclave_info =
EnclaveInfo::from_bytes(&runtime_config.audit.enclave_info_bytes.as_ref().unwrap());
let enclave_attr = enclave_info
.get_enclave_attr("teaclave_access_control_service")
.expect("access_control");
let config = SgxTrustedTlsClientConfig::new().attestation_report_verifier(
vec![enclave_attr],
BUILD_CONFIG.as_root_ca_cert,
verifier::universal_quote_verifier,
);
// Connect to the service's internally advertised address.
let channel = Endpoint::new(
&runtime_config
.internal_endpoints
.access_control
.advertised_address,
)
.config(config)
.connect()
.unwrap();
TeaclaveAccessControlClient::new(channel).unwrap()
}
/// A data owner is granted access to their own data.
fn test_authorize_data_success() {
    let mut client = get_client();
    let response = client
        .authorize_data(AuthorizeDataRequest::new("mock_user_a", "mock_data"))
        .expect("authorize_data should return a response");
    assert!(response.accept);
}
/// Denied data-access cases: an unknown user, and a user on other data.
fn test_authorize_data_fail() {
    let mut client = get_client();
    for &(user, data) in &[("mock_user_d", "mock_data"), ("mock_user_a", "mock_data_b")] {
        let response = client
            .authorize_data(AuthorizeDataRequest::new(user, data))
            .expect("authorize_data should return a response");
        assert!(!response.accept);
    }
}
/// Accepted function-access cases: owner of a public function, owner of a
/// private function, and any user on a public function.
fn test_authorize_function_success() {
let mut client = get_client();
let request =
AuthorizeFunctionRequest::new("mock_public_function_owner", "mock_public_function");
let response_result = client.authorize_function(request);
assert!(response_result.is_ok());
assert!(response_result.unwrap().accept);
let request =
AuthorizeFunctionRequest::new("mock_private_function_owner", "mock_private_function");
let response_result = client.authorize_function(request);
assert!(response_result.is_ok());
assert!(response_result.unwrap().accept);
let request =
AuthorizeFunctionRequest::new("mock_private_function_owner", "mock_public_function");
let response_result = client.authorize_function(request);
assert!(response_result.is_ok());
assert!(response_result.unwrap().accept);
}
/// A non-owner must be denied access to someone else's private function.
fn test_authorize_function_fail() {
    let mut client = get_client();
    let response = client
        .authorize_function(AuthorizeFunctionRequest::new(
            "mock_public_function_owner",
            "mock_private_function",
        ))
        .expect("authorize_function should return a response");
    assert!(!response.accept);
}
/// Both registered participants of the mock task are accepted.
fn test_authorize_task_success() {
let mut client = get_client();
let request = AuthorizeTaskRequest::new("mock_participant_a", "mock_task");
let response_result = client.authorize_task(request);
assert!(response_result.is_ok());
assert!(response_result.unwrap().accept);
let request = AuthorizeTaskRequest::new("mock_participant_b", "mock_task");
let response_result = client.authorize_task(request);
assert!(response_result.is_ok());
assert!(response_result.unwrap().accept);
}
/// A user who is not a participant of the task must be denied.
fn test_authorize_task_fail() {
    let mut client = get_client();
    let response = client
        .authorize_task(AuthorizeTaskRequest::new("mock_participant_c", "mock_task"))
        .expect("authorize_task should return a response");
    assert!(!response.accept);
}
/// A staged task referencing an allowed function and only allowed input and
/// output data ids is accepted.
fn test_authorize_staged_task_success() {
let mut client = get_client();
let request = AuthorizeStagedTaskRequest {
subject_task_id: "mock_staged_task".to_string(),
object_function_id: "mock_staged_allowed_private_function".to_string(),
object_input_data_id_list: vec![
"mock_staged_allowed_data1".to_string(),
"mock_staged_allowed_data2".to_string(),
"mock_staged_allowed_data3".to_string(),
],
object_output_data_id_list: vec![
"mock_staged_allowed_data1".to_string(),
"mock_staged_allowed_data2".to_string(),
"mock_staged_allowed_data3".to_string(),
],
};
let response_result = client.authorize_staged_task(request);
assert!(response_result.is_ok());
assert!(response_result.unwrap().accept);
}
/// A staged task is rejected when any referenced object is disallowed:
/// the function, an input data id, or an output data id.
fn test_authorize_staged_task_fail() {
let mut client = get_client();
// Disallowed function.
let request = AuthorizeStagedTaskRequest {
subject_task_id: "mock_staged_task".to_string(),
object_function_id: "mock_staged_disallowed_private_function".to_string(),
object_input_data_id_list: vec![],
object_output_data_id_list: vec![],
};
let response_result = client.authorize_staged_task(request);
assert!(response_result.is_ok());
assert!(!response_result.unwrap().accept);
// Disallowed input data id.
let request = AuthorizeStagedTaskRequest {
subject_task_id: "mock_staged_task".to_string(),
object_function_id: "mock_staged_allowed_private_function".to_string(),
object_input_data_id_list: vec!["mock_staged_disallowed_data1".to_string()],
object_output_data_id_list: vec![],
};
let response_result = client.authorize_staged_task(request);
assert!(response_result.is_ok());
assert!(!response_result.unwrap().accept);
// Disallowed output data id.
let request = AuthorizeStagedTaskRequest {
subject_task_id: "mock_staged_task".to_string(),
object_function_id: "mock_staged_allowed_private_function".to_string(),
object_input_data_id_list: vec![],
object_output_data_id_list: vec!["mock_staged_disallowed_data2".to_string()],
};
let response_result = client.authorize_staged_task(request);
assert!(response_result.is_ok());
assert!(!response_result.unwrap().accept);
}
/// Hammers the service from ten threads, each running the core scenarios
/// ten times, to exercise thread-safety of the access control service.
fn test_concurrency() {
    let handles: Vec<_> = (0..10)
        .map(|_| {
            std::thread::spawn(move || {
                for _ in 0..10 {
                    test_authorize_data_fail();
                    test_authorize_function_fail();
                    test_authorize_task_success();
                    test_authorize_staged_task_fail();
                }
            })
        })
        .collect();
    for handle in handles {
        assert!(handle.join().is_ok());
    }
}
|
# frozen_string_literal: true
require File.expand_path("base", File.dirname(__FILE__))
# Daemon loop: repeatedly cancels stale reservations, sleeping one minute
# between passes. AutoCanceler and Daemons::Base come from the required files.
Daemons::Base.new("auto_cancel").start do
canceler = AutoCanceler.new
canceler.cancel_reservations
sleep 1.minute.to_i
end
|
package planit.planitPages.loginPage;
import org.openqa.selenium.By;
/** Selenium locators and test credentials for the login page. */
public class LoginPageLocators {
// NOTE(review): credentials are hard-coded in source; consider moving them
// to a config file or environment variables.
protected String releaseManagerUser = "RMP_bot_testing_Manager";
protected String releaseManagerPassword = "RMP121PMP2000";
// Form fields and submit button, located by their HTML name attributes.
By loginField = By.name("login");
By passwordField = By.name("password");
By signInButton = By.name("commit");
}
|
//import "angular";
//import "angular-route";
//import "angular-aria";
//import "angular-animate";
//import "../../deps/angular-material.min.js";
import "../../tmp/templates/templates"
import RouteConfig from "./routes/routeConfig";
import "./constants/constants.module";
import "./persistence/persistence.module";
import "./api/api.module";
import "./files/files.module";
import "./alerts/alerts.module";
import "./login/login.module";
import "./upload/upload.module";
// Bootstraps the root Angular module, declaring the feature modules
// (imported above for their registration side effects) as dependencies and
// applying the route configuration.
(() => {
"use strict";
angular.module("modwatchuploader", [
"ngRoute",
"modwatchuploader.constants",
"modwatchuploader.template",
"modwatchuploader.persistence",
"modwatchuploader.api",
"modwatchuploader.files",
"modwatchuploader.alerts",
"modwatchuploader.login",
"modwatchuploader.upload"
])
.config(RouteConfig);
})();
|
module Eternity.Foldl.General
where
import Eternity.Prelude hiding (maximum)
import Control.Foldl
import qualified Data.Attoparsec.Text as B
-- | Runs an attoparsec parser over each incoming chunk of text, feeding the
-- parse results (or error messages as Left values) to the wrapped fold.
parsing :: B.Parser parsed -> Fold (Either Text parsed) folded -> Fold Text folded
parsing parser =
premap (bimap fromString id . B.parseOnly parser)
-- | Adapts a fold over Right values to Either input: Left values leave the
-- accumulator untouched; Right values are folded as usual.
ignoringLeft :: Fold right folded -> Fold (Either left right) folded
ignoringLeft (Fold rightProgress rightStart rightStop) =
Fold progress rightStart rightStop
where
-- Strict in the accumulator to match the wrapped fold's evaluation.
progress !state =
\ case
Right right -> rightProgress state right
Left _ -> state
|
# todo-list lab
Project for COMP4711 lab 5, lab 6, lab 7, lab 8, Fall 2017
Start date:
12, Oct, 2017 -- lab5 Todo List (Working With Models & Views)
19, Oct, 2017 -- lab6 Building Out Our TODO List Manager
02, Nov, 2017 -- lab7 Unit Testing for Our TODO List Manager
16, Nov, 2017 -- lab8 A Taste of XML
03, Dec, 2017 -- lab9 RESTful server
Members: Yuheng Song (CAPTAIN), Haihua Tan, Junnan Tang
|
package com.team9.motors.controller;
import java.util.List;
import javax.validation.Valid;
import com.team9.motors.interfacemethods.InventoryInterface;
import com.team9.motors.interfacemethods.ProductInterface;
import com.team9.motors.interfacemethods.StockUsageInventoryInterface;
import com.team9.motors.interfacemethods.SupplierInterface;
import com.team9.motors.model.Inventory;
import com.team9.motors.model.Product;
import com.team9.motors.model.StockUsageInventory;
import com.team9.motors.model.Supplier;
import com.team9.motors.service.InventoryImplementation;
import com.team9.motors.service.ProductImplementation;
import com.team9.motors.service.StockUsageInventoryImplementation;
import com.team9.motors.service.SupplierImplementation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
@Controller
@RequestMapping("")
@SessionAttributes("userdetails")
public class SupplierController {

    @Autowired
    private SupplierInterface sservice;

    @Autowired
    public void setSupplierInterface(SupplierImplementation simpl) {
        this.sservice = simpl;
    }

    @Autowired
    private ProductInterface pservice;

    @Autowired
    public void setProductService(ProductImplementation pserviceImpl) {
        this.pservice = pserviceImpl;
    }

    @Autowired
    private InventoryInterface iservice;

    @Autowired
    public void setInventoryService(InventoryImplementation iserviceImpl) {
        this.iservice = iserviceImpl;
    }

    @Autowired
    private StockUsageInventoryInterface suiservice;

    @Autowired
    public void setStockUsageInventoryService(StockUsageInventoryImplementation suiserviceImpl) {
        this.suiservice = suiserviceImpl;
    }

    /** Entry point for the supplier list: first page, sorted by id ascending. */
    @RequestMapping(value = "/all/supplier/list")
    public String listusers(Model model) {
        return listByPage(model, 1, "id", "asc");
    }

    /**
     * Renders one page of the supplier list.
     *
     * @param currentPage 1-based page number from the URL
     * @param sortField   property to sort by; defaults to "id" when absent
     * @param sortDir     "asc" or "desc"; defaults to "asc" when absent
     */
    @GetMapping("all/supplier/page/{pageNumber}")
    public String listByPage(Model model, @PathVariable("pageNumber") int currentPage,
            @Param("sortField") String sortField,
            @Param("sortDir") String sortDir) {
        // BUG FIX: guard against missing sort parameters, which previously
        // caused a NullPointerException on sortDir.equals(...) when the page
        // URL was opened without them.
        if (sortField == null) {
            sortField = "id";
        }
        if (sortDir == null) {
            sortDir = "asc";
        }
        Page<Supplier> page = sservice.listAllSuppliers(currentPage, sortField, sortDir);
        long total = page.getTotalElements();
        int totalPages = page.getTotalPages();
        List<Supplier> list = page.getContent();
        model.addAttribute("currentPage", currentPage);
        model.addAttribute("total", total);
        model.addAttribute("totalPages", totalPages);
        model.addAttribute("list", list);
        model.addAttribute("sortField", sortField);
        model.addAttribute("sortDir", sortDir);
        String reverseSortDir = sortDir.equals("asc") ? "desc" : "asc";
        model.addAttribute("reverseSortDir", reverseSortDir);
        // PERF FIX: reuse the page already fetched instead of issuing the
        // identical query a second time.
        model.addAttribute("supplierlist", page);
        return "supplierlist";
    }

    /** Shows one supplier together with the products it supplies. */
    @RequestMapping(value = "/all/supplier/detail/{id}")
    public String showSupplier(@PathVariable("id") Integer id, ModelMap model) {
        model.addAttribute("supplier", sservice.findSupplierById(id));
        model.addAttribute("products", pservice.findBySupplierId(id));
        return "showsupplier";
    }

    /** Shows the edit form pre-filled for the given supplier. */
    @RequestMapping(value = "/admin/supplier/edit/{id}")
    public String editForm(@PathVariable("id") Integer id, Model model) {
        model.addAttribute("supplier", sservice.findSupplierById(id));
        return "EditSupplierDetails";
    }

    /** Shows an empty form for creating a new supplier. */
    @GetMapping(value = "/admin/supplier/add")
    public String addForm(Model model) {
        model.addAttribute("supplier", new Supplier());
        return "addsupplier";
    }

    /** Validates and saves a supplier, then forwards to its detail page. */
    @RequestMapping(value = "/admin/supplier/save")
    public String saveSupplier(@ModelAttribute("supplier") @Valid Supplier supplier, BindingResult bindingResult,
            Model model) {
        if (bindingResult.hasErrors()) {
            return "addsupplier";
        }
        sservice.createSupplier(supplier);
        return "forward:/all/supplier/detail/" + supplier.getId();
    }

    /**
     * Deletes a supplier, cascading manually through its products, their
     * inventories, and the inventories' stock-usage records.
     */
    @RequestMapping(value = "/admin/supplier/delete/{id}")
    public String deleteSupplier(@PathVariable("id") Integer id) {
        Supplier supplier = sservice.findSupplierById(id);
        List<Product> supplierProducts = supplier.getProductList();
        for (Product p : supplierProducts) {
            Inventory productInventory = p.getInventory();
            List<StockUsageInventory> inventoryRecords = productInventory.getStockUsageInventory();
            for (StockUsageInventory record : inventoryRecords) {
                suiservice.deleteStockUsageInventory(record);
            }
            iservice.deleteInventory(productInventory);
            pservice.deleteProduct(p);
        }
        sservice.deleteSupplier(supplier);
        return "forward:/all/supplier/list";
    }
}
|
import 'dotenv/config';
// Sequelize connection settings, sourced from the environment via dotenv.
export = {
host: process.env.DB_HOST,
username: process.env.POSTGRES_USER,
password: process.env.POSTGRES_PASSWORD,
// NOTE(review): database name is hard-coded — consider an env var.
database: 'discord-bot',
dialect: 'postgres',
// Silence per-query SQL logging.
logging: false,
// Up to 40 pooled connections; fail acquisition after 20s, recycle idle after 5s.
pool: { max: 40, min: 2, acquire: 20000, idle: 5000 },
retry: { max: 10 }
};
|
package org.cqfn.save.api
/**
 * Authorization data
 *
 * @property userInformation user source and name, separated by `@`
 * @property token authentication token; null when not provided
 * (presumably meaning unauthenticated access — TODO confirm with callers)
 */
data class Authorization(
val userInformation: String,
val token: String? = null
)
|
#
# Cookbook Name:: bacula-server
# Recipe:: storage
#
# Copyright 2015 Pavel Yudin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Secrets (storage-daemon password) come from the configured data bag.
databag = data_bag_item(node['bacula']['databag_name'], node['bacula']['databag_item'])
# Install the storage-daemon package, pinning the version when configured.
package node['bacula']['storage_package'] do
action :install
version node['bacula']['version'] if node['bacula']['version']
end
# Backup volume directory, private to the bacula storage user.
directory node['bacula']['storage']['volumes_dir'] do
owner node['bacula']['storage']['user']
group node['bacula']['storage']['group']
mode '0700'
action :create
end
# Daemon environment/sysconfig file; changes restart the service.
template '/etc/sysconfig/bacula-sd' do
source 'bacula-sd.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, 'service[bacula-sd]'
end
# Main storage-daemon config, templated with the data-bag password.
template '/etc/bacula/bacula-sd.conf' do
source 'bacula-sd.conf.erb'
owner 'root'
group 'root'
mode '0644'
variables(sd_password: databag['sd_password'])
notifies :restart, 'service[bacula-sd]'
end
# Enable at boot and start now.
service 'bacula-sd' do
supports status: true, restart: true, reload: true
action [:enable, :start]
end
|
# https://github.com/kingreza/quantization
# Produce reduced-precision variants of a CoreML model: every supported
# quantization mode is applied at each bit depth and saved as its own file.
import coremltools

model_basename = "MegaNic50"
base_model = coremltools.models.MLModel(model_basename + ".mlmodel")

quantization_modes = ["linear", "linear_lut", "kmeans"]
bit_depths = [16, 8, 7, 6, 5, 4, 3, 2, 1]

for mode in quantization_modes:
    for nbits in bit_depths:
        print("processing ", mode, " on ", nbits, ".")
        quantized = coremltools.models.neural_network.quantization_utils.quantize_weights(base_model, nbits, mode)
        quantized.short_description = str(nbits) + " bit per quantized weight, using " + mode + "."
        quantized.save(model_basename + "_" + mode + "_" + str(nbits) + ".mlmodel")
|
# mosn.io
Source for mosn.io site <https://mosn.io>.
Powered by [hugo](https://gohugo.io) with [docsy theme](https://github.com/google/docsy).
## Notice
This website is built with Hugo version v0.55.5-A83256B9C/extended. Unknown errors may occur when compiling it with other versions.
|
using ClassificationTree.Infraestructure.Base;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Wrapper.Spacy;
using static ClassificationTree.Infraestructure.Base.IFunction;
using static ClassificationTree.Infraestructure.Base.IMoviments;
namespace ClassificationTree.Nodes.Base
{
    /// <summary>
    /// Base class for classification-tree nodes: accumulates (function, string)
    /// results and can locate a token's position relative to a root token.
    /// </summary>
    public abstract class ANodes :INodes
    {
        // Matches collected to the left/right of the current token.
        // NOTE(review): not referenced in this class -- presumably populated by
        // subclasses; verify against the concrete node implementations.
        protected IList<(function, string)> left = new List<(function, string)>();
        protected IList<(function, string)> right = new List<(function, string)>();

        public ANodes() {
            Result = new List<(function, string)>();
        }

        // Grammatical function assigned to this node.
        public function Function { get; set; }
        // Accumulated (function, string) pairs produced while processing tokens.
        public IList<(function, string)> Result { get; set; }
        public string AntecedentPos { get; set; }
        public string AntecedentLemma { get; set; }

        /// <summary>Processes <paramref name="token"/> within its sentence;
        /// implemented by concrete node types.</summary>
        public abstract IList<(function, string)> Action(Token token,in IList<Token> sentence);

        /// <summary>
        /// Returns Left/Right/None depending on whether tokenRelative's index in
        /// the sentence is before, after, or equal to root's index.
        /// NOTE(review): IndexOf returns -1 for a token not in the sentence,
        /// which silently maps to Left -- confirm tokens are always present.
        /// </summary>
        protected static moviment RelativePosition(Token root, Token tokenRelative,in IList<Token> sentence) {
            int rootLoc = sentence.IndexOf(root);
            int position = sentence.IndexOf(tokenRelative);
            if (position < rootLoc)
                return moviment.Left;
            if (position > rootLoc)
                return moviment.Right;
            return moviment.None;
        }
    }
}
|
/*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#define TBB_PREVIEW_MUTEXES 1
#include "../conformance/conformance_mutex.h"
#include "test_mutex.h"
#include "oneapi/tbb/mutex.h"
#include "oneapi/tbb/rw_mutex.h"
//! \file test_adaptive_mutex.cpp
//! \brief Test for [preview] functionality
//! \brief \ref error_guessing
//! \brief \ref error_guessing
TEST_CASE("test upgrade/downgrade with rw_mutex") {
    test_rwm_upgrade_downgrade<tbb::rw_mutex>();
}

//! \brief \ref error_guessing
TEST_CASE("test mutex with native threads") {
    test_with_native_threads::test<tbb::mutex>();
}

//! \brief \ref error_guessing
TEST_CASE("test rw_mutex with native threads") {
    test_with_native_threads::test<tbb::rw_mutex>();
    test_with_native_threads::test_rw<tbb::rw_mutex>();
}

//! Testing Mutex requirements
//! \brief \ref interface \ref requirement
// Fixed typo in the test name: "Locable" -> "Lockable".
TEST_CASE("Basic Lockable requirement test") {
    // BasicLockable
    GeneralTest<oneapi::tbb::mutex>("Adaptive Mutex");
    GeneralTest<oneapi::tbb::rw_mutex>("Adaptive RW Mutex");
}

//! \brief \ref interface \ref requirement
TEST_CASE("Lockable requirement test") {
    // Lockable - single threaded try_acquire operations
    TestTryAcquire<oneapi::tbb::mutex>("Adaptive Mutex");
    TestTryAcquire<oneapi::tbb::rw_mutex>("Adaptive RW Mutex");
}

//! Testing ReaderWriterMutex requirements
//! \brief \ref interface \ref requirement
TEST_CASE("Shared mutexes (reader/writer) test") {
    // General reader writer capabilities + upgrade/downgrade
    TestReaderWriterLock<oneapi::tbb::rw_mutex>("Adaptive RW Mutex");
    TestRWStateMultipleChange<oneapi::tbb::rw_mutex>("Adaptive RW Mutex");
}

//! Testing ISO C++ Mutex and Shared Mutex requirements.
//! Compatibility with the standard
//! \brief \ref interface \ref requirement
TEST_CASE("ISO interface test") {
    // Fixed typo in the mutex label: "Adaprive" -> "Adaptive".
    GeneralTest<TBB_MutexFromISO_Mutex<oneapi::tbb::mutex> >("ISO Adaptive Mutex");
    GeneralTest<TBB_MutexFromISO_Mutex<oneapi::tbb::rw_mutex>>("ISO Adaptive RW Mutex");
    TestTryAcquire<TBB_MutexFromISO_Mutex<oneapi::tbb::mutex> >("ISO Adaptive Mutex");
    TestTryAcquire<TBB_MutexFromISO_Mutex<oneapi::tbb::rw_mutex>>("ISO Adaptive RW Mutex");
    TestTryAcquireReader<TBB_MutexFromISO_Mutex<oneapi::tbb::rw_mutex>>("ISO Adaptive RW Mutex");
    TestReaderWriterLock<TBB_MutexFromISO_Mutex<oneapi::tbb::rw_mutex>>("ISO Adaptive RW Mutex");
}
|
package munit
import sbt.testing.Event
// Framework tests needs to be manually added to FrameworkSuite.tests
/** Declarative description of one framework integration test.
  *
  * @param cls       the suite class under test
  * @param expected  the expected runner output for the suite
  * @param tags      tags attached to this test
  * @param format    how output is rendered (sbt-style or stdout-style)
  * @param arguments extra arguments passed to the test framework
  * @param onEvent   callback applied to each sbt test event; its returned
  *                  string's use is defined by the suite runner -- verify there
  */
class FrameworkTest(
    val cls: Class[_ <: FunSuite],
    val expected: String,
    val tags: Set[Tag] = Set.empty,
    val format: FrameworkTestFormat = SbtFormat,
    val arguments: Array[String] = Array(),
    val onEvent: Event => String = _ => ""
)(implicit val location: Location)

/** Marker for how a framework test's output is formatted. */
sealed abstract class FrameworkTestFormat
case object SbtFormat extends FrameworkTestFormat
case object StdoutFormat extends FrameworkTestFormat
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import json
import os
from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest
from pants.util.contextutil import temporary_dir
class TestJvmDependencyUsageIntegration(PantsRunIntegrationTest):
  """Integration tests for the `dep-usage` goal, run against a real pants process."""

  def _run_dep_usage(self, workdir, target, clean_all=False, extra_args=None, cachedir=None):
    """Run `dep-usage` on `target` and return the parsed JSON report.

    When `cachedir` is given, the dep-usage cache reads from and writes to it.
    `clean_all` prepends the `clean-all` goal (goal order matters to the CLI).
    """
    if cachedir:
      config = {'cache.dep-usage.jvm': {'write_to': [cachedir], 'read_from': [cachedir]}}
    else:
      config = {}
    with temporary_dir() as outdir:
      outfile = os.path.join(outdir, 'out.json')
      args = [
        # Enable the on-line equivalent of this check, to confirm consistency.
        '--lint-jvm-dep-check-unnecessary-deps=fatal',
        'dep-usage',
        target,
        '--dep-usage-jvm-output-file={}'.format(outfile),
      ] + (extra_args if extra_args else [])
      if clean_all:
        args.insert(0, 'clean-all')
      # Run, and then parse the report from json.
      self.assert_success(self.run_pants_with_workdir(args, workdir, config))
      with open(outfile, 'r') as f:
        return json.load(f)

  def _assert_non_zero_usage(self, dep_usage_json):
    """Assert that every report entry shows max_usage > 0."""
    for entry in dep_usage_json:
      self.assertGreater(entry['max_usage'], 0.0, 'Usage was 0.0 in: `{}`'.format(entry))

  def test_dep_usage(self):
    """A clean run and a warm run must produce identical, non-zero reports."""
    target = 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
    with self.temporary_workdir() as workdir:
      # Run twice.
      run_one = self._run_dep_usage(workdir, target, clean_all=True)
      run_two = self._run_dep_usage(workdir, target, clean_all=False)
      # Confirm that usage is non-zero, and that the reports match.
      self._assert_non_zero_usage(run_two)
      self.assertEqual(run_one, run_two)

  def test_use_cached_without_cache_not_fail(self):
    """--dep-usage-jvm-use-cached with no cache configured must not fail."""
    target = 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
    with self.temporary_workdir() as workdir:
      # Should be successful.
      run_one = self._run_dep_usage(workdir, target, clean_all=True, extra_args=['--dep-usage-jvm-use-cached'])
      run_two = self._run_dep_usage(workdir, target, clean_all=False)
      self.assertNotEqual(run_one, run_two)

  def test_use_cached_results_should_be_equal_to_direct(self):
    """A cached re-run in a fresh workdir must match the direct run."""
    target = 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
    with self.temporary_cachedir() as cachedir:
      with self.temporary_workdir() as workdir:
        run_one = self._run_dep_usage(workdir, target, clean_all=True, cachedir=cachedir)
      with self.temporary_workdir() as workdir:
        run_two = self._run_dep_usage(workdir, target, clean_all=True, cachedir=cachedir,
                                      extra_args=['--dep-usage-jvm-use-cached'])
      # Confirm that usage is non-zero, and that the reports match.
      self._assert_non_zero_usage(run_two)
      self.assertEqual(run_one, run_two)

  def test_no_summary_works(self):
    """--no-dep-usage-jvm-summary must not break the goal."""
    target = 'testprojects/src/java/org/pantsbuild/testproject/unicode/main'
    with self.temporary_cachedir() as cachedir, \
      self.temporary_workdir() as workdir:
      for compiler in ['rsc']:
        self._run_dep_usage(workdir, target, clean_all=True, cachedir=cachedir,
                            extra_args=['--no-dep-usage-jvm-summary', '--jvm-platform-compiler={}'.format(compiler)])

  def test_dep_usage_target_with_no_deps(self):
    """dep-usage must handle a target with no dependencies."""
    target = 'testprojects/src/java/org/pantsbuild/testproject/nocache'
    with self.temporary_cachedir() as cachedir, \
      self.temporary_workdir() as workdir:
      self._run_dep_usage(workdir, target, clean_all=True, cachedir=cachedir,
                          extra_args=['--no-dep-usage-jvm-summary'])
|
<?php
/**
*
* Abstract base class for all SMTP-based transports
*
* @author Ashley Schroder (aschroder.com)
* @copyright Copyright (c) 2014 Ashley Schroder
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
class Aschroder_SMTPPro_Model_Transports_Basesmtp {

    /**
     * Assemble a Zend SMTP transport from the store-scoped SMTP settings
     * (host, port, auth scheme, ssl mode, credentials).
     *
     * @param  mixed $storeId store scope to read configuration from
     * @return Zend_Mail_Transport_Smtp
     */
    public function getTransport($storeId) {
        $helper   = Mage::helper('smtppro');

        $name     = $this->getName($storeId);
        $email    = $this->getEmail($storeId);
        $password = $this->getPassword($storeId);
        $host     = $this->getHost($storeId);
        $port     = $this->getPort($storeId);
        $auth     = $this->getAuth($storeId);
        $ssl      = $this->getSsl($storeId);

        $helper->log("Using $name Transport.");

        $options = array();

        // Only attach credentials when an auth scheme is configured.
        if ($auth != "none") {
            $options['auth']     = $auth;
            $options['username'] = $email;
            $options['password'] = $password;
        }

        // Optional overrides: explicit port and SSL mode.
        if ($port) {
            $options['port'] = $port;
        }
        if ($ssl != "none") {
            $options['ssl'] = $ssl;
        }

        return new Zend_Mail_Transport_Smtp($host, $options);
    }
}
|
package com.anujandankit.foodrunner.adapter
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import androidx.recyclerview.widget.DefaultItemAnimator
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
import com.anujandankit.foodrunner.R
import com.anujandankit.foodrunner.model.FoodItem
import com.anujandankit.foodrunner.model.OrderDetails
import java.text.SimpleDateFormat
import java.util.*
import kotlin.collections.ArrayList
/**
 * RecyclerView adapter that shows one card per past order; each card embeds a
 * nested RecyclerView listing the order's food items.
 */
class OrderHistoryRecyclerViewAdapter(
    val context: Context,
    private val orderHistoryList: ArrayList<OrderDetails>
) :
    RecyclerView.Adapter<OrderHistoryRecyclerViewAdapter.OrderHistoryViewHolder>() {

    /** Views of a single order-history row. */
    class OrderHistoryViewHolder(view: View) : RecyclerView.ViewHolder(view) {
        val restaurantName: TextView = view.findViewById(R.id.restaurantName)
        val orderPlaced: TextView = view.findViewById(R.id.orderTime)
        val orderTotal: TextView = view.findViewById(R.id.totalPrice)
        val recyclerResHistory: RecyclerView = view.findViewById(R.id.recyclerResHistoryItems)
    }

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): OrderHistoryViewHolder {
        val view = LayoutInflater.from(context)
            .inflate(R.layout.recycler_view_order_history_row, parent, false)
        return OrderHistoryViewHolder(view)
    }

    override fun getItemCount(): Int {
        return orderHistoryList.size
    }

    override fun onBindViewHolder(holder: OrderHistoryViewHolder, position: Int) {
        val orderHistory = orderHistoryList[position]
        holder.orderTotal.text = "₹${orderHistory.totalCost}"
        holder.restaurantName.text = orderHistory.restaurantName
        holder.orderPlaced.text = formatDate(orderHistory.orderPlacedAt)
        setUpRecyclerView(holder.recyclerResHistory, orderHistory)
    }

    /** Builds the nested food-item list for one order from its JSON payload. */
    private fun setUpRecyclerView(
        recyclerResHistory: RecyclerView,
        orderHistoryList: OrderDetails
    ) {
        val foodItemsList = ArrayList<FoodItem>()
        for (i in 0 until orderHistoryList.foodItems.length()) {
            val foodJson = orderHistoryList.foodItems.getJSONObject(i)
            foodItemsList.add(
                FoodItem(
                    foodJson.getString("food_item_id"),
                    foodJson.getString("name"),
                    foodJson.getString("cost").toInt()
                )
            )
        }
        val cartItemAdapter = CartItemRecyclerViewAdapter(foodItemsList, context)
        recyclerResHistory.layoutManager = LinearLayoutManager(context)
        recyclerResHistory.itemAnimator = DefaultItemAnimator()
        recyclerResHistory.adapter = cartItemAdapter
    }

    /**
     * Re-formats "dd-MM-yy HH:mm:ss" into "MMM dd, yyyy hh:mm a".
     *
     * Fix: the previous `parse(...) as Date` crashed binding with a
     * ParseException/TypeCastException on malformed input; now the raw
     * input string is returned unchanged when parsing fails.
     */
    private fun formatDate(dateString: String): String? {
        val inputFormatter = SimpleDateFormat("dd-MM-yy HH:mm:ss", Locale.ENGLISH)
        val date: Date = runCatching { inputFormatter.parse(dateString) }.getOrNull()
            ?: return dateString
        val outputFormatter = SimpleDateFormat("MMM dd, yyyy hh:mm a", Locale.ENGLISH)
        return outputFormatter.format(date)
    }
}
|
(function() {
  // Modal dialog for switching the organization's data center.
  Vue.component("data-center-modal", {
    template: Kooboo.getTemplate(
      "/_Admin/View/Market/Scripts/components/DataCenterModal.html"
    ),
    props: {
      isShow: Boolean, // is-show.sync
      available: Array // available data centers; entries expose `value` and `displayName` (see usages below)
    },
    data: function() {
      return {
        dataCenter: "" // currently selected data-center value
      };
    },
    methods: {
      // Confirm with the user, persist the new data center, broadcast the
      // change, then close the modal and redirect (the change apparently
      // requires re-login -- falls back to the login page).
      onSave: function() {
        var self = this;
        if (confirm(Kooboo.text.confirm.changeDataCenter)) {
          Kooboo.Organization.updateDataCenter({
            datacenter: self.dataCenter
          }).then(function(res) {
            if (res.success) {
              window.info.done(Kooboo.text.info.update.success);
              var loc = self.available.find(function(dc) {
                return dc.value == self.dataCenter;
              });
              Kooboo.EventBus.publish("kb/market/datacenter/updated", {
                loc: loc.displayName
              });
              self.onHide();
              setTimeout(function() {
                window.location.href =
                  res.model.redirectUrl || Kooboo.Route.User.LoginPage;
              }, 300);
            }
          });
        }
      },
      // Reset selection and notify the parent to close (is-show.sync).
      onHide: function() {
        this.dataCenter = "";
        this.$emit("update:isShow", false);
      }
    },
    watch: {
      // Pre-select the first available data center whenever the modal closes
      // or the list of available data centers changes.
      isShow: function(val) {
        if (!val && this.available && this.available[0]) {
          this.dataCenter = this.available[0].value;
        }
      },
      available: function() {
        if (this.available && this.available[0]) {
          this.dataCenter = this.available[0].value;
        }
      }
    }
  });
})();
|
import 'dart:developer';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
import 'package:star_citizen_app/Screens/data_screen.dart';
import 'package:star_citizen_app/Screens/widgets/component_selection.dart';
import 'package:star_citizen_app/Screens/widgets/stats_dashboard.dart';
import 'package:star_citizen_app/Services/providers/backdrop_provider.dart';
import 'package:star_citizen_app/Services/providers/content_provider.dart';
import '../constants.dart';
import 'backdrop.dart';
/// Root mobile scaffold hosting the backdrop-based calculator UI.
class MobileFramework extends StatefulWidget {
  MobileFramework({Key? key}) : super(key: key);

  @override
  _MobileFrameworkState createState() => _MobileFrameworkState();
}
class _MobileFrameworkState extends State<MobileFramework>
    with SingleTickerProviderStateMixin {
  // Drives the backdrop front/back layer transition; starts fully revealed.
  late AnimationController controller;

  @override
  void initState() {
    controller = AnimationController(
        duration: Duration(milliseconds: 300), value: 1.0, vsync: this);
    super.initState();
  }

  @override
  void dispose() {
    controller.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    // ContentProvider owns page selection and the backdrop toggle; it is
    // created here so the whole subtree can read it.
    return ChangeNotifierProvider(
        create: (context) =>
            ContentProvider(controller: controller, velocity: kFlingVelocity),
        builder: (context, child) {
          ContentProvider contentState = Provider.of<ContentProvider>(context);
          return SafeArea(
            child: Scaffold(
              backgroundColor: Theme.of(context).colorScheme.background,
              appBar: AppBar(
                // The Calculator page gets the backdrop toggle title; every
                // other page shows an icon plus its name.
                title: contentState.pageName == 'Calculator'
                    ? BackdropTitle(
                        onPress: contentState.toggleBackdropLayerVisibility,
                        frontTitle: 'Calculator',
                        backTitle: 'Build',
                        listenable: controller.view)
                    : Row(
                        children: [
                          SizedBox(
                            width: 72.0,
                            child: Icon(Icons.data_usage_sharp),
                          ),
                          Text(contentState.pageName)
                        ],
                      ),
              ),
              body:
                  contentState.getCurrentPage(),
              endDrawer: NavigationDrawer(),
            ),
          );
        });
  }
}
/// End drawer listing all app pages plus a PTU/LIVE data-source switch.
class NavigationDrawer extends StatelessWidget {
  NavigationDrawer({Key? key}) : super(key: key);

  /// Builds the page list; indices 3 and 14 are rendered as section dividers.
  Widget buildMenu(BuildContext context) {
    ContentProvider contentState = Provider.of<ContentProvider>(context);
    return ListView.builder(
        itemCount: contentState.drawerList.length,
        itemBuilder: (BuildContext context, int index) {
          // String routeName = '/${drawerList[index]}';
          if (index == 3 || index == 14) {
            return Divider(color: kGreyOnSurface, thickness: 2.0);
          } else {
            return ListTile(
                selected: index == contentState.currentPage,
                selectedTileColor: kPrimaryNavyVariant,
                leading: Icon(Icons.data_usage_sharp),
                title: Text(contentState.drawerList[index]),
                onTap: () {
                  // Close the drawer before switching pages.
                  Navigator.pop(context);
                  contentState.changePage(index);
                });
          }
        });
  }

  @override
  Widget build(BuildContext context) {
    ContentProvider contentProvider = Provider.of<ContentProvider>(context);
    ThemeData themeData = Theme.of(context);
    return Drawer(
        child: Column(mainAxisSize: MainAxisSize.max, children: [
      // Header: styled app title plus the PTU/LIVE version toggle.
      DrawerHeader(
          margin: EdgeInsets.all(0.0),
          decoration:
              BoxDecoration(color: Theme.of(context).colorScheme.primary),
          child: Column(
            mainAxisSize: MainAxisSize.max,
            mainAxisAlignment: MainAxisAlignment.spaceAround,
            children: [
              RichText(
                text: TextSpan(
                    text: '#DPS',
                    style: themeData.textTheme.headline5!
                        .copyWith(color: themeData.colorScheme.secondary),
                    children: [
                      TextSpan(
                          text: 'Calculator',
                          style: themeData.textTheme.headline5),
                      TextSpan(
                        text: 'LIVE',
                        style: themeData.textTheme.headline5!
                            .copyWith(color: themeData.colorScheme.secondary),
                      )
                    ]),
              ),
              SwitchListTile(
                  title: Text('3.13.1-LIVE.7491200'),
                  value: contentProvider.ptuLive,
                  activeColor: themeData.colorScheme.secondary,
                  activeTrackColor:
                      themeData.colorScheme.secondary.withOpacity(0.5),
                  onChanged: (bool value) =>
                      contentProvider.changePTULive(value))
            ],
          )),
      Expanded(child: buildMenu(context))
    ]));
  }
}
|
import 'package:arbor/api/responses/base_response.dart';
import 'package:arbor/core/constants/arbor_constants.dart';
import 'package:arbor/core/constants/hive_constants.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter_secure_storage/flutter_secure_storage.dart';
import 'package:hive/hive.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'package:url_launcher/url_launcher.dart';
/// Settings-screen actions: opening external URLs and wiping all local data.
class SettingsProvider extends ChangeNotifier {
  /// Opens [url] externally; throws a String message when it cannot launch.
  launchURL({required String url}) async {
    await canLaunch(url) ? await launch(url) : throw 'Could not launch $url';
  }

  /// Deletes all locally stored Arbor state: the first-time-user flag,
  /// everything in secure storage, and the wallet/transaction Hive boxes.
  /// Returns a BaseResponse whose `error` carries the user-facing message
  /// (set even on success -- used as the notification text).
  Future<BaseResponse> deleteArborData() async {
    try {
      final prefs = await SharedPreferences.getInstance();
      prefs.remove(ArborConstants.IS_FIRST_TIME_USER_KEY);

      final FlutterSecureStorage secureStorage = const FlutterSecureStorage();
      await secureStorage.deleteAll();

      await Hive.deleteBoxFromDisk(HiveConstants.walletBox);
      await Hive.deleteBoxFromDisk(HiveConstants.transactionsBox);

      return BaseResponse(
          success: true,
          error:
              "Your Arbor data was deleted. Please restart/reinstall the app.");
    } catch (error) {
      print('Error: ${error.toString()}');
      return BaseResponse(
          success: false,
          error: "We couldn't delete the data. Error: ${error.toString()}");
    }
  }
}
|
require 'cipisek/model'
module Cipisek
  # Container for API response models.
  class Response < Model
    # User account payload returned by the API.
    # NOTE(review): attribute names mirror the remote API verbatim
    # (e.g. `walletCreditWithWat`); do not "correct" their spelling here.
    class User < Cipisek::Model
      attributes :userId, :username, :agencyStatus, :walletCredit,
        :walletCreditWithWat, :walletVerified, :dayBudgetSum, :accountLimit
    end
  end
end
|
#!/bin/bash
# Back up the JRE's bundled trusted CA store and stage a copy for the GA build.
# Expects JRE_SECURITY_FOLDER, TRUSTED_CA and JRE_VERSION in the environment.

BACKUP_NAME=cacerts-GA-backup.original
SYS_TRUSTED_CA="${JRE_SECURITY_FOLDER}/${TRUSTED_CA}"

# Quote all expansions so paths containing spaces survive word splitting
# (the original left $SYS_TRUSTED_CA and the destinations unquoted).
cp -v "${SYS_TRUSTED_CA}" "${JRE_SECURITY_FOLDER}/${BACKUP_NAME}"
cp -v "${SYS_TRUSTED_CA}" "../${JRE_VERSION}/GA/lib/security/cacerts.GA"
|
require 'mongo'

# Empty every collection (except system.indexes) in all databases whose name
# starts with "m_" on the local MongoDB instance.
connection = Mongo::MongoClient.new("localhost", 27017)

connection.database_names.each do |name|
  next unless name.start_with? 'm_'
  connection[name].collections.each do |collection|
    # Fix: the original `collection.remove {}` passed `{}` as a *block*, not
    # as the intended empty-selector hash; make the selector explicit.
    collection.remove({}) if collection.name != "system.indexes"
  end
end
|
package iog
import (
"math"
"runtime"
"sync"
"sync/atomic"
)
// HandlePanic is invoked with the value recovered from a panicking task.
type HandlePanic func(rcv interface{})

// IOG is a goroutine pool that dispatches submitted tasks to pooled workers.
type IOG struct {
	tasksPending *int32 // tasks queued but not yet handed to a worker
	tasksChan chan task // task queue; created lazily in start()
	workersPool sync.Pool // reusable workers; New yields nil while Idle()
	waitGroup sync.WaitGroup // tracks submitted-but-unfinished tasks
	firstTimeCallGo sync.Once // ensures the dispatcher starts exactly once
	cap int // buffer size of tasksChan
}
// New creates a pool whose task queue buffers up to cap tasks.
// cap == 0 selects an effectively unbounded queue (math.MaxInt32).
func New(cap int) *IOG {
	if cap == 0 {
		cap = math.MaxInt32
	}
	wp := &IOG{
		tasksPending: new(int32),
		cap: cap,
	}
	// Only mint a new worker while tasks are pending; otherwise return nil to
	// signal the dispatcher that no worker is needed right now.
	wp.workersPool.New = func() interface{} {
		if wp.Idle() {
			return nil
		}
		return newWorker(wp)
	}
	return wp
}
// submit task
// just use panicHandler[0]
// Go submits fn for asynchronous execution on the pool.
// Only panicHandler[0] is used when one or more handlers are provided.
func (wp *IOG) Go(fn func(), panicHandler ...func(rcv interface{})) {
	wp.firstTimeCallGo.Do(wp.start)
	t := task{job: fn}
	if panicHandler != nil {
		t.panicHandler = panicHandler[0]
	}
	// Fix: register the task with the wait group and pending counter *before*
	// handing it to the dispatcher. Once the task is on tasksChan a worker may
	// finish it (calling waitGroup.Done) at any moment, so the original
	// post-send Add(1) could race with Done — letting Wait return early or
	// panicking with a negative WaitGroup counter.
	wp.waitGroup.Add(1)
	wp.incrementPending()
	wp.tasksChan <- t
}
// ForceClose closes the task queue without waiting for in-flight tasks.
func (wp *IOG) ForceClose() {
	close(wp.tasksChan)
}

// Wait blocks until every submitted task has finished.
func (wp *IOG) Wait() {
	wp.waitGroup.Wait()
}

// Close stops accepting new tasks and waits for the remaining ones to finish.
func (wp *IOG) Close() {
	wp.ForceClose()
	wp.Wait()
}

// TaskPending reports the number of queued tasks awaiting dispatch.
func (wp *IOG) TaskPending() int32 {
	return atomic.LoadInt32(wp.tasksPending)
}

// Idle reports whether no tasks are currently awaiting dispatch.
func (wp *IOG) Idle() bool {
	return wp.TaskPending() <= 0
}
// start lazily creates the task queue and launches the dispatcher goroutine,
// which pairs each queued task with a worker from the pool.
func (wp *IOG) start() {
	wp.tasksChan = make(chan task, wp.cap)
	go func() {
		for t := range wp.tasksChan {
			// Fix: retry (yielding) until the pool yields a worker. The
			// original code advanced to the next queued task when Get
			// returned nil, silently dropping t and leaving its WaitGroup
			// count forever undone (deadlocking Wait).
			var w *worker
			for w == nil {
				w, _ = wp.workersPool.Get().(*worker)
				if w == nil {
					runtime.Gosched()
				}
			}
			wp.decrementPending()
			w.taskChan <- t
		}
	}()
}
// taskDone marks one submitted task as finished.
func (wp *IOG) taskDone() {
	wp.waitGroup.Done()
}

// incrementPending atomically raises the queued-task counter.
func (wp *IOG) incrementPending() {
	atomic.AddInt32(wp.tasksPending, 1)
}

// decrementPending atomically lowers the queued-task counter.
func (wp *IOG) decrementPending() {
	atomic.AddInt32(wp.tasksPending, -1)
}
|
import { Image } from '@graphcommerce/image'
import { SxProps, Theme } from '@mui/material'
import { extractImageBackgroundProps } from './extractImageBackgroundProps'
import { ImageBackgroundProps } from './getImageBackgroundProps'
type ImageBackgroundComponentProps = ImageBackgroundProps & {
sx?: SxProps<Theme>
}
export function ImageBackground(props: ImageBackgroundComponentProps) {
const { sx = [] } = props
const [
{
desktopImage,
mobileImage,
backgroundSize,
backgroundRepeat, // @todo implement backgroundRepeat
backgroundPosition,
backgroundAttachment, // @todo implement backgroundAttachment
},
] = extractImageBackgroundProps(props)
const objectFit = backgroundSize as React.CSSProperties['objectFit']
const objectPosition = backgroundPosition as React.CSSProperties['objectPosition']
return (
<>
{desktopImage && (
<Image
src={desktopImage}
layout='fill'
sizes='100vw'
sx={{ objectFit, objectPosition }}
pictureProps={{ sx: [...(Array.isArray(sx) ? sx : [sx])] }}
/>
)}
{mobileImage && (
<Image
src={mobileImage}
layout='fill'
sizes='100vw'
sx={{ objectFit, objectPosition }}
pictureProps={{ sx: [...(Array.isArray(sx) ? sx : [sx])] }}
/>
)}
</>
)
}
|
module M2yFast
  require 'savon'
  require 'digest/md5' # NOTE(review): not used in this file -- presumably needed by subclasses; confirm before removing
  require 'active_support/core_ext'

  # Shared HTTP/SOAP plumbing for M2yFast API clients.
  class Base
    # Savon SOAP client configured from M2yFast.configuration (proxy, WSDL,
    # log level depending on environment).
    def self.get_client
      Savon.client(
        wsdl: M2yFast.configuration.wsdl,
        log: true,
        proxy: M2yFast.configuration.proxy,
        log_level: M2yFast.configuration.production? ? :info : :debug,
        pretty_print_xml: true,
        open_timeout: 15,
        read_timeout: 15
      )
    end

    # Parsed proxy URI (host/port/credentials) used for outgoing requests.
    def self.fixie
      URI.parse M2yFast.configuration.proxy
    end

    # Default JSON request headers.
    def self.base_headers
      headers = {}
      headers['Content-Type'] = 'application/json'
      headers
    end

    # POST a raw SOAP envelope to the service endpoint (the WSDL URL with
    # "?wsdl" stripped), with XML headers.
    def self.soap_post(body)
      url = M2yFast.configuration.wsdl.gsub('?wsdl', '')
      xml_headers = {}
      xml_headers['Content-Type'] = 'text/xml'
      xml_headers['charset'] = 'utf-8'
      post(url, body, xml_headers)
    end

    # POST through the configured proxy.
    # NOTE(review): HTTParty is referenced but not required here -- presumably
    # required elsewhere in the gem; verify.
    def self.post(url, body, headers = nil)
      if headers.nil?
        headers = base_headers
      end
      puts "Sending POST request to URL: #{url}"
      puts body
      HTTParty.post(url, headers: headers, body: body,
        http_proxyaddr: fixie.host,
        http_proxyport: fixie.port,
        http_proxyuser: fixie.user,
        http_proxypass: fixie.password)
    end

    # Random six-digit trace id for request correlation.
    def self.trace
      rand(100000..999999)
    end

    # Normalize a parsed response into a Hash enriched with the original
    # request/response payloads and the endpoint URL.
    def self.full_response(parsed_response, request, original_response, endpoint = nil)
      parsed_response = { body: parsed_response } if parsed_response.is_a?(Array)
      parsed_response = {} unless parsed_response.is_a?(Hash)
      parsed_response[:original_response] = original_response.to_json
      parsed_response[:original_request] = request
      parsed_response[:url] = endpoint.to_s
      parsed_response
    end
  end
end
|
package visualisation.graph
import calculations.CoordinationNeedsMatrixCalculation
import dataProcessor.AssignmentMatrixDataProcessor
import dataProcessor.FileDependencyMatrixDataProcessor
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json
import miners.gitMiners.FileDependencyMatrixMiner
import miners.gitMiners.UserChangedFilesMiner
import util.HeapNStorage
import util.HelpFunctionsUtil
import visualisation.entity.EdgeInfo
import visualisation.entity.EdgeThreeJS
import visualisation.entity.GraphDataThreeJS
import visualisation.entity.NodeThreeJS
import java.io.File
/**
 * Three.js graph of pairwise coordination-need weights between users.
 *
 * @param data matrix of weights indexed as data[i][j]; assumed square
 *             (rows at least data.size long) — TODO confirm upstream.
 * @param idToUser maps a matrix index to a user name; missing ids fall back
 *                 to the label "user: <i>".
 */
class CoordinationNeedsGraph(
    val data: Array<out FloatArray>,
    val idToUser: Map<Int, String> = HashMap(),
) :
    GraphThreeJS("graphCN.js") {

    /**
     * Collects the [size] strongest (or weakest, when [descending] is false)
     * edges from the strict upper triangle of [data] and returns them together
     * with their endpoint nodes.
     */
    override fun generateData(size: Int, descending: Boolean): GraphDataThreeJS {
        val comparator = if (descending) compareByDescending<EdgeInfo> { it.weight } else compareBy { it.weight }
        val edgeStorage = HeapNStorage(size, comparator)

        // Fix: the original outer loop used the inclusive range `0..data.size`,
        // an off-by-one whose out-of-range final index only survived because
        // the inner loop happened to be empty there; `indices` is the correct bound.
        for (i in data.indices) {
            for (j in i + 1 until data.size) {
                val user1 = idToUser[i] ?: "user: $i"
                val user2 = idToUser[j] ?: "user: $j"
                edgeStorage.add(
                    EdgeInfo(
                        user1,
                        user2,
                        data[i][j]
                    )
                )
            }
        }

        // Only nodes that appear on a kept edge are emitted.
        val nodes = mutableSetOf<NodeThreeJS>()
        for (edge in edgeStorage) {
            nodes.add(NodeThreeJS(edge.source))
            nodes.add(NodeThreeJS(edge.target))
        }

        return GraphDataThreeJS(
            nodes.toList(),
            edgeStorage.map {
                EdgeThreeJS(
                    it.source,
                    it.target,
                    it.weight,
                    value = it.weight,
                    edgeColor(it.weight)
                )
            })
    }
}
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
module Day12 where
import Control.Lens (Lens', makeLenses, over, (^.), _1, _2)
import Data.Monoid (Endo (Endo, appEndo))
import Test.QuickCheck (Property, ioProperty, property)
import Test.QuickCheck.All (quickCheckAll)
-- | Compass directions for absolute moves.
data Cardinal = N | S | E | W
  deriving (Show, Eq)

-- | Rotation directions.
data Direction = L | R
  deriving (Show, Eq)

-- | One puzzle instruction: an absolute move, a turn (degrees), or forward.
data Instr = Move Cardinal Int | Turn Direction Int | Forward Int
  deriving (Show, Eq)

-- | Ship state: its position plus the waypoint offset relative to the ship.
data Ferry = Ferry {_ferryPos :: (Int, Int), _waypointDelta :: (Int, Int)}
  deriving (Show, Eq)

-- Generates the ferryPos / waypointDelta lenses used below.
makeLenses ''Ferry

-- | Manhattan distance of the ferry from the origin.
manhattan :: Ferry -> Int
manhattan f = abs (f ^. ferryPos . _1) + abs (f ^. ferryPos . _2)
-- | A state transition of the ferry.
type Delta = Ferry -> Ferry

-- | Move along a cardinal direction, applied to whichever coordinate pair the
-- lens selects (the ship position in part 1, the waypoint in part 2).
move :: Lens' Ferry (Int, Int) -> Cardinal -> Int -> Delta
move l N i = over (l . _2) (+ i)
move l S i = over (l . _2) (+ (- i))
move l E i = over (l . _1) (+ i)
move l W i = over (l . _1) (+ (- i))

-- | Advance the ferry i steps along its waypoint delta.
forward :: Int -> Delta
forward i (Ferry (fx, fy) w@(wx, wy)) = Ferry (fx + i * wx, fy + i * wy) w

-- | Rotate the waypoint delta about the ferry in 90-degree increments.
turn :: Direction -> Int -> Delta
turn d i = (!! div i 90) . iterate once
  where
    once (Ferry f (wx, wy)) =
      case d of
        R -> Ferry f (wy, - wx)
        L -> Ferry f (- wy, wx)

-- | Interpret one instruction; the lens decides which pair Move affects.
eval :: Lens' Ferry (Int, Int) -> Instr -> Delta
eval l (Move c i) = move l c i
eval _ (Turn d i) = turn d i
eval _ (Forward i) = forward i
-- | Part 1: Move acts on the ship itself; it starts facing east, i.e. with
-- delta (1, 0). `reverse` is needed because Endo composes right-to-left.
simulate1 :: [Instr] -> Ferry
simulate1 = (`appEndo` Ferry (0, 0) (1, 0)) . foldMap (Endo . eval ferryPos) . reverse

-- | Part 2: Move adjusts the waypoint, which starts at (10, 1).
simulate2 :: [Instr] -> Ferry
simulate2 = (`appEndo` Ferry (0, 0) (10, 1)) . foldMap (Endo . eval waypointDelta) . reverse

-- | Print the part-1 answer for the real input.
part1 :: IO ()
part1 = print . manhattan . simulate1 =<< input

-- | Print the part-2 answer for the real input.
part2 :: IO ()
part2 = print . manhattan . simulate2 =<< input
-- Input handling
-- | Parse one instruction line, e.g. "F10", "N3", "R90".
readInstr :: String -> Instr
readInstr ('N' : xs) = Move N $ read xs
readInstr ('S' : xs) = Move S $ read xs
readInstr ('E' : xs) = Move E $ read xs
readInstr ('W' : xs) = Move W $ read xs
readInstr ('L' : xs) = Turn L $ read xs
readInstr ('R' : xs) = Turn R $ read xs
readInstr ('F' : xs) = Forward $ read xs
readInstr xs = error $ "cannot parse '" ++ xs ++ "' as instruction"

-- | Parse one instruction per line.
readInstrs :: String -> [Instr]
readInstrs = map readInstr . lines

-- | Load the puzzle input from disk.
input :: IO [Instr]
input = readInstrs <$> readFile "data/Day12.txt"
-- Testing
-- | Pin the known-correct answers for the real input (regression guard).
prop_regression :: Property
prop_regression =
  ioProperty $
    ( \xs ->
        (manhattan . simulate1) xs == 1186
          && (manhattan . simulate2) xs == 47806
    )
      <$> input

-- | The worked example from the puzzle statement.
testInstrs :: [Instr]
testInstrs =
  readInstrs
    "F10\n\
    \N3\n\
    \F7\n\
    \R90\n\
    \F11"

prop_simulate1Unit :: Property
prop_simulate1Unit =
  property $
    let f = simulate1 testInstrs
     in f == Ferry (17, -8) (0, -1)
          && manhattan f == 25

prop_simulate2Unit :: Property
prop_simulate2Unit =
  property $
    let f = simulate2 testInstrs
     in f == Ferry (214, -72) (4, -10)
          && manhattan f == 286

prop_forwardUnit :: Property
prop_forwardUnit =
  property $
    forward 3 (Ferry (0, 0) (1, 0)) == Ferry (3, 0) (1, 0)
      && forward 2 (Ferry (0, 0) (-5, -1)) == Ferry (-10, -2) (-5, -1)

prop_turnUnit :: Property
prop_turnUnit =
  property $
    turn R 90 (Ferry (0, 0) (3, 2)) == Ferry (0, 0) (2, -3)
      && turn R 180 (Ferry (0, 0) (3, 2)) == Ferry (0, 0) (-3, -2)
      && turn L 90 (Ferry (0, 0) (3, 2)) == Ferry (0, 0) (-2, 3)
      && turn L 180 (Ferry (0, 0) (3, 2)) == Ferry (0, 0) (-3, -2)

prop_moveUnit :: Property
prop_moveUnit =
  property $
    move ferryPos N 4 (Ferry (0, 0) (1, 0)) == Ferry (0, 4) (1, 0)
      && move ferryPos W 3 (Ferry (0, 0) (1, 0)) == Ferry (-3, 0) (1, 0)

-- Template Haskell marker so quickCheckAll can see the prop_ declarations above.
return []

runTests :: IO Bool
runTests = $quickCheckAll
|
#!/bin/sh
# Release helper run from Travis CI: configures git credentials, publishes a
# canary lerna release, and pushes the resulting commit and tags upstream.

setup_git() {
  # Set the user name and email to match the API token holder
  # This will make sure the git commits will have the correct photo
  # and the user gets the credit for a checkin
  git config --global user.email "travis@noreply.spring-media.de"
  git config --global user.name "Travis"
  git config --global push.default matching
  # Get the credentials from a file
  git config credential.helper "store --file=.git/credentials"
  # This associates the API Key with the account
  echo "https://${GITHUB_TOKEN}:@github.com" > .git/credentials
}

make_version() {
  # Make sure that the workspace is clean
  # It could be "dirty" if
  # 1. package-lock.json is not aligned with package.json
  # 2. npm install is run
  git checkout -- .
  # Echo the status to the log so that we can see it is OK
  git status
  # Run the deploy build and increment the package versions
  # %s is the placeholder for the created tag
  lerna publish --canary --yes --cd-version patch -m "chore: release version %s"
}

upload_files() {
  # Push the current work area to the tip of the current branch.
  # Fix: quote $TRAVIS_BRANCH so an empty/odd value cannot be word-split.
  git push origin HEAD:"$TRAVIS_BRANCH"
  # This pushes the new tag
  git push --tags
}

setup_git
make_version
upload_files
|
package main
import (
"bytes"
"flag"
"io"
"log"
"net"
"time"
)
// Command-line configuration shared by server and client modes.
var (
	proxyAddr = flag.String("proxy-addr", ":8080", "Reverse Proxy Address")
	registerAddr = flag.String("register-addr", ":8081", "Register Address")
	clientMode = flag.Bool("client", false, "Start client mode")
	clientPreConnect = flag.Int("client-preconnect", 2, "Pre-connect connection")
)
// main starts either the client (reverse connector) or the server side
// (proxy listener + register listener) and then blocks forever.
func main() {
	flag.Parse()
	if *clientMode {
		go startClientService()
	} else {
		go startProxyService()
		go startRegisterService()
	}
	select {}
}
var (
	// connList hands registered reverse connections to proxy handlers.
	// Unbuffered on purpose: a registration is only accepted while a proxy
	// handler is actively waiting for a connection.
	connList = make(chan net.Conn) // unbuffered channel, only accept connection when there are any waiting client
)
// startProxyService accepts inbound connections on proxyAddr and tunnels each
// one through a previously registered reverse connection.
func startProxyService() {
	log.Printf("start proxy service on %s", *proxyAddr)
	lis, err := net.Listen("tcp", *proxyAddr)
	if err != nil {
		log.Fatalf("can not start proxy service; %v", err)
	}
	defer lis.Close()
	h := func(conn net.Conn) {
		var srvConn net.Conn
		for {
			srvConn = <-connList
			// Probe with a zero-byte read to skip registered connections
			// that have already failed. NOTE(review): a zero-length read may
			// not detect every dead connection — confirm against the target
			// platform's net stack.
			_, err := srvConn.Read([]byte{})
			if err == nil {
				break
			}
			// Fix: %s on a net.Conn (no Stringer) printed reflection noise;
			// log the remote address instead. Also close the stale
			// connection, which was previously leaked.
			log.Printf("connection timed out from %s", srvConn.RemoteAddr())
			srvConn.Close()
		}
		defer conn.Close()
		defer srvConn.Close()
		log.Printf("tunneling %s <=> %s", conn.RemoteAddr(), srvConn.RemoteAddr())
		// Copy in both directions; tear down when either side finishes.
		done := make(chan struct{})
		go func() {
			io.Copy(conn, srvConn)
			done <- struct{}{}
		}()
		go func() {
			io.Copy(srvConn, conn)
			done <- struct{}{}
		}()
		<-done
	}
	for {
		conn, err := lis.Accept()
		if err != nil {
			// Back off briefly on temporary accept errors; bail out otherwise.
			if ne, ok := err.(net.Error); ok && ne.Temporary() {
				time.Sleep(5 * time.Millisecond)
				continue
			}
			return
		}
		log.Printf("received proxy connection from %s", conn.RemoteAddr())
		go h(conn)
	}
}
// startRegisterService accepts reverse connections from clients and queues
// them on connList for the proxy side to consume.
func startRegisterService() {
	log.Printf("start register service on %s", *registerAddr)
	lis, err := net.Listen("tcp", *registerAddr)
	if err != nil {
		log.Fatalf("can not start register service; %v", err)
	}
	defer lis.Close()
	for {
		conn, err := lis.Accept()
		if err != nil {
			// Back off briefly on temporary accept errors; bail out otherwise.
			if ne, ok := err.(net.Error); ok && ne.Temporary() {
				time.Sleep(5 * time.Millisecond)
				continue
			}
			return
		}
		log.Printf("received register connection from %s", conn.RemoteAddr())
		// Blocks until a proxy handler is ready to take this connection.
		connList <- conn
	}
}
// startClientService keeps up to clientPreConnect registrations open against
// the register service; when one receives data it dials the local proxy
// target and tunnels traffic between the two.
func startClientService() {
	log.Printf("start client service")
	log.Printf("proxy address: %s", *proxyAddr)
	log.Printf("register address=%s", *registerAddr)
	if *clientPreConnect <= 0 {
		*clientPreConnect = 1
	}
	// Semaphore bounding the number of concurrent pre-connections.
	sem := make(chan struct{}, *clientPreConnect)
	h := func() {
		log.Printf("dialing register service...")
		regConn, err := net.Dial("tcp", *registerAddr)
		if err != nil {
			log.Printf("dial error retrying...; err=%v", err)
			time.Sleep(2 * time.Second)
			<-sem
			return
		}
		defer regConn.Close()
		// Block until the server forwards the first byte of a real session.
		log.Printf("waiting first byte...")
		var buf [1]byte
		_, err = regConn.Read(buf[:])
		if err != nil {
			<-sem
			return
		}
		// Release the semaphore so a replacement pre-connection can start.
		<-sem
		log.Printf("dialing proxy...")
		proxyConn, err := net.Dial("tcp", *proxyAddr)
		if err != nil {
			return
		}
		defer proxyConn.Close()
		log.Printf("tunnel %s <=> %s", regConn.RemoteAddr(), proxyConn.RemoteAddr())
		// Replay the consumed first byte so no data is lost, then pipe both ways.
		io.Copy(proxyConn, bytes.NewReader(buf[:]))
		done := make(chan struct{})
		go func() {
			io.Copy(proxyConn, regConn)
			done <- struct{}{}
		}()
		go func() {
			io.Copy(regConn, proxyConn)
			done <- struct{}{}
		}()
		<-done
	}
	for {
		sem <- struct{}{}
		go h()
	}
}
|
from sklearn.metrics.scorer import _BaseScorer
from xgboostextension.xgbranker import XGBRanker, _preprare_data_in_groups
from xgboostextension.scorer.util import _make_grouped_metric
class RankingScorer(_BaseScorer):
    """Scorer that applies a grouped ranking metric to an estimator's
    predictions, for use with estimators such as ``XGBRanker`` whose input
    carries a group indicator in the first column."""

    def __init__(self, score_func, sign=1):
        """
        Base class for applying scoring functions to ranking problems.
        This class transforms a ranking metric into a scoring function
        that can be applied to estimations that take a group indicator in
        their first column.

        Parameters
        ----------
        score_func : callable
            A ranking metric from ``xgboostextension.scorer.metrics`` with
            signature ``score_func(sizes, y_true, y_predicted)``; other
            callables are rejected.
        sign : int, optional (default=1)
            1 if greater scores are better, -1 if lower scores are better.

        Raises
        ------
        ValueError
            If ``score_func`` does not come from
            ``xgboostextension.scorer.metrics``.
        """
        if not score_func.__module__ == 'xgboostextension.scorer.metrics':
            raise ValueError(
                'Only score functions included with this package are supported'
            )
        super(RankingScorer, self).__init__(
            _make_grouped_metric(score_func),
            sign,
            {}
        )
        # Keep the raw (ungrouped) metric only for __repr__.
        self._ungrouped_score_func = score_func

    def __call__(self, estimator, X, y, sample_weight=None):
        """Score ``estimator`` on grouped data.

        ``sample_weight`` is accepted for scorer-API compatibility but is
        not used by the grouped metrics.
        """
        # Sort rows into contiguous groups before predicting so the grouped
        # metric receives per-group sizes aligned with y_sorted.
        sizes, X_sorted, _, y_sorted, _ = _preprare_data_in_groups(X, y)
        y_predicted = estimator.predict(X_sorted)
        return self._sign * self._score_func(sizes, y_sorted, y_predicted)

    def __repr__(self):
        if hasattr(self._ungrouped_score_func, '__name__'):
            return "RankingScorer({0})".format(
                self._ungrouped_score_func.__name__
            )
        elif hasattr(self._ungrouped_score_func, '__class__'):
            return "RankingScorer({0})".format(
                self._ungrouped_score_func.__class__.__name__
            )
        else:
            # Fix: was the misspelled 'unkown'.
            return "RankingScorer({0})".format('unknown')
|
package be.artisjaap.polyglot.core.model;
import be.artisjaap.common.model.AbstractDocument;
import org.bson.types.ObjectId;
import org.springframework.data.mongodb.core.mapping.Document;
import java.time.LocalDateTime;
/**
 * Mongo document recording a user's practice state for one translation in a
 * language pair. Every statistic exists in two flavours: the plain field for
 * the normal direction and a {@code ...Reverse} twin — presumably for
 * practising the translation in the opposite direction (TODO confirm).
 * Instances are created through the nested {@link Builder}.
 */
@Document(collection = "PolTranslationPractice")
public class TranslationPractice extends AbstractDocument {
    // References to the translation, language pair, and owning user.
    private ObjectId translationId;
    private ObjectId languagePairId;
    private ObjectId userId;
    private ProgressStatus progressStatus;
    private KnowledgeStatus knowledgeStatus;
    private KnowledgeStatus knowledgeStatusReverse;
    // Success/miss counters and timestamps per direction.
    private Integer knowledgeCounterSuccess;
    private Integer knowledgeCounterSuccessReverse;
    private Integer knowledgeCounterMiss;
    private Integer knowledgeCounterMissReverse;
    private LocalDateTime lastSuccess;
    private LocalDateTime lastSuccessReverse;
    private LocalDateTime lastMiss;
    private LocalDateTime lastMissReverse;
    private Integer answerChecked;
    private Integer answerCheckedReverse;

    // No-args constructor required by the MongoDB mapping layer.
    private TranslationPractice(){}

    private TranslationPractice(Builder builder) {
        buildCommon(builder);
        translationId = builder.translationId;
        languagePairId = builder.languagePairId;
        userId = builder.userId;
        progressStatus = builder.progressStatus;
        knowledgeStatus = builder.knowledgeStatus;
        knowledgeStatusReverse = builder.knowledgeStatusReverse;
        knowledgeCounterSuccess = builder.knowledgeCounterSuccess;
        knowledgeCounterSuccessReverse = builder.knowledgeCounterSuccessReverse;
        knowledgeCounterMiss = builder.knowledgeCounterMiss;
        knowledgeCounterMissReverse = builder.knowledgeCounterMissReverse;
        lastSuccess = builder.lastSuccess;
        lastSuccessReverse = builder.lastSuccessReverse;
        lastMiss = builder.lastMiss;
        lastMissReverse = builder.lastMissReverse;
        answerChecked = builder.answerChecked;
        answerCheckedReverse = builder.answerCheckedReverse;
    }

    public static Builder newBuilder() {
        return new Builder();
    }

    public ObjectId getTranslationId() {
        return translationId;
    }

    public ObjectId getUserId() {
        return userId;
    }

    public KnowledgeStatus getKnowledgeStatus() {
        return knowledgeStatus;
    }

    public Integer getKnowledgeCounterSuccess() {
        return knowledgeCounterSuccess;
    }

    public Integer getKnowledgeCounterMiss() {
        return knowledgeCounterMiss;
    }

    public LocalDateTime getLastSuccess() {
        return lastSuccess;
    }

    public LocalDateTime getLastMiss() {
        return lastMiss;
    }

    public ObjectId getLanguagePairId() {
        return languagePairId;
    }

    public Integer getAnswerChecked() {
        return answerChecked;
    }

    public KnowledgeStatus getKnowledgeStatusReverse() {
        return knowledgeStatusReverse;
    }

    public Integer getKnowledgeCounterSuccessReverse() {
        return knowledgeCounterSuccessReverse;
    }

    public Integer getKnowledgeCounterMissReverse() {
        return knowledgeCounterMissReverse;
    }

    public LocalDateTime getLastSuccessReverse() {
        return lastSuccessReverse;
    }

    public LocalDateTime getLastMissReverse() {
        return lastMissReverse;
    }

    public Integer getAnswerCheckedReverse() {
        return answerCheckedReverse;
    }

    public ProgressStatus getProgressStatus() {
        return progressStatus;
    }

    public void setProgressStatus(ProgressStatus progressStatus) {
        this.progressStatus = progressStatus;
    }

    // NOTE(review): the mutators below auto-unbox Integer counters and will
    // throw NullPointerException if a counter was never initialized —
    // confirm that all creation paths set the counters (e.g. via the builder).
    public void increaseAnswerChecked(){
        answerChecked++;
    }

    public void increaseAnswerCheckedReverse(){
        answerCheckedReverse++;
    }

    // Record a correct answer (normal direction): bump counter, stamp time.
    public void answerCorrect() {
        knowledgeCounterSuccess++;
        lastSuccess = LocalDateTime.now();
    }

    public void answerCorrectReverse() {
        knowledgeCounterSuccessReverse++;
        lastSuccessReverse = LocalDateTime.now();
    }

    // Record an incorrect answer (normal direction): bump counter, stamp time.
    public void answerIncorrect() {
        knowledgeCounterMiss++;
        lastMiss = LocalDateTime.now();
    }

    public void answerIncorrectReverse() {
        knowledgeCounterMissReverse++;
        lastMissReverse = LocalDateTime.now();
    }

    /** Fluent builder for {@link TranslationPractice}; common fields are
     * handled by the inherited {@code AbstractBuilder}. */
    public static final class Builder extends AbstractBuilder<Builder> {
        private ObjectId translationId;
        private ObjectId userId;
        private ProgressStatus progressStatus;
        private KnowledgeStatus knowledgeStatus;
        private KnowledgeStatus knowledgeStatusReverse;
        private ObjectId languagePairId;
        private Integer knowledgeCounterSuccess;
        private Integer knowledgeCounterSuccessReverse;
        private Integer knowledgeCounterMiss;
        private Integer knowledgeCounterMissReverse;
        private LocalDateTime lastSuccess;
        private LocalDateTime lastSuccessReverse;
        private LocalDateTime lastMiss;
        private LocalDateTime lastMissReverse;
        private Integer answerChecked;
        private Integer answerCheckedReverse;

        private Builder() {
        }

        public Builder withTranslationId(ObjectId translationId) {
            this.translationId = translationId;
            return this;
        }

        public Builder withUserId(ObjectId userId) {
            this.userId = userId;
            return this;
        }

        public Builder withProgressStatus(ProgressStatus progressStatus) {
            this.progressStatus = progressStatus;
            return this;
        }

        public Builder withKnowledgeStatus(KnowledgeStatus knowledgeStatus) {
            this.knowledgeStatus = knowledgeStatus;
            return this;
        }

        public Builder withKnowledgeStatusReverse(KnowledgeStatus knowledgeStatusReverse) {
            this.knowledgeStatusReverse = knowledgeStatusReverse;
            return this;
        }

        public Builder withKnowledgeCounterSuccess(Integer knowledgeCounterSuccess) {
            this.knowledgeCounterSuccess = knowledgeCounterSuccess;
            return this;
        }

        public Builder withKnowledgeCounterSuccessReverse(Integer knowledgeCounterSuccessReverse) {
            this.knowledgeCounterSuccessReverse = knowledgeCounterSuccessReverse;
            return this;
        }

        public Builder withKnowledgeCounterMiss(Integer knowledgeCounterMiss) {
            this.knowledgeCounterMiss = knowledgeCounterMiss;
            return this;
        }

        public Builder withKnowledgeCounterMissReverse(Integer knowledgeCounterMissReverse) {
            this.knowledgeCounterMissReverse = knowledgeCounterMissReverse;
            return this;
        }

        public Builder withLastSuccess(LocalDateTime lastSuccess) {
            this.lastSuccess = lastSuccess;
            return this;
        }

        public Builder withLastSuccessReverse(LocalDateTime lastSuccessReverse) {
            this.lastSuccessReverse = lastSuccessReverse;
            return this;
        }

        public Builder withLastMiss(LocalDateTime lastMiss) {
            this.lastMiss = lastMiss;
            return this;
        }

        public Builder withLastMissReverse(LocalDateTime lastMissReverse) {
            this.lastMissReverse = lastMissReverse;
            return this;
        }

        public Builder withLanguagePairId(ObjectId languagePairId) {
            this.languagePairId = languagePairId;
            return this;
        }

        public Builder withAnswerChecked(Integer answerChecked) {
            this.answerChecked = answerChecked;
            return this;
        }

        public Builder withAnswerCheckedReverse(Integer answerCheckedReverse) {
            this.answerCheckedReverse = answerCheckedReverse;
            return this;
        }

        public TranslationPractice build() {
            return new TranslationPractice(this);
        }
    }
}
|
package typingsSlinky.stormReactDiagrams
import typingsSlinky.stormReactDiagrams.abstractLabelFactoryMod.AbstractLabelFactory
import typingsSlinky.stormReactDiagrams.defaultLabelModelMod.DefaultLabelModel
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
// Generated ScalablyTyped facade for the storm-react-diagrams
// DefaultLabelFactory module — do not edit by hand.
object defaultLabelFactoryMod {

  // JS-native binding to the DefaultLabelFactory class exported by
  // "storm-react-diagrams/dist/src/defaults/factories/DefaultLabelFactory".
  @JSImport("storm-react-diagrams/dist/src/defaults/factories/DefaultLabelFactory", "DefaultLabelFactory")
  @js.native
  class DefaultLabelFactory () extends AbstractLabelFactory[DefaultLabelModel]
}
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module HaskellCodeExplorer.Types where
import Control.DeepSeq (NFData)
import qualified Data.Aeson as A
import Data.Aeson.Types (Options, defaultOptions, omitNothingFields)
import Data.Generics
( Constr
, Data(..)
, DataType
, Fixity(..)
, constrIndex
, gcast2
, mkConstr
, mkDataType
)
import qualified Data.HashMap.Strict as HM
import Data.Hashable (Hashable)
import qualified Data.IntMap.Strict as IM
import qualified Data.IntervalMap.Strict as IVM
import qualified Data.List as L
import Data.Maybe (fromMaybe, isJust)
import Data.Serialize (Get, Serialize(..))
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Text.Lazy (toStrict)
import qualified Data.Vector as V
import Data.Version (Version(..),showVersion)
import Documentation.Haddock.Types
( DocH(..)
, Example(..)
, Header(..)
, Hyperlink(..)
, Picture(..)
#if MIN_VERSION_GLASGOW_HASKELL(8,4,3,0)
, Table(..)
, TableCell(..)
, TableRow(..)
#endif
)
import GHC.Generics (Generic)
import Prelude hiding (id)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import qualified Text.Blaze.Html5 as Html
import qualified Text.Blaze.Html5.Attributes as Attr
--------------------------------------------------------------------------------
-- Package info
--------------------------------------------------------------------------------
-- | Everything known about one package, parameterized by the type of
-- per-module information kept in 'moduleMap'.
data PackageInfo modInfo = PackageInfo
  { id :: PackageId
  , moduleMap :: HM.HashMap HaskellModulePath modInfo
  , moduleNameMap :: HM.HashMap HaskellModuleName (HM.HashMap ComponentId HaskellModulePath)
    -- ^ Module name -> component -> module path (a name may occur in
    -- several components)
  , directoryTree :: DirTree
  , externalIdInfoMap :: Trie Char ExternalIdentifierInfo
    -- ^ All external identifiers defined in the package
  , externalIdOccMap :: HM.HashMap ExternalId (S.Set IdentifierSrcSpan)
    -- ^ All occurrences of each external identifier in the package
  } deriving (Show, Eq, Generic, Data)

-- | Package name plus version.
data PackageId = PackageId
  { name :: T.Text
  , version :: Data.Version.Version
  } deriving (Show, Eq, Ord, Generic, Data)
-- | Render a package identifier as @name-version@ (e.g. @"base-4.12.0.0"@).
packageIdToText :: PackageId -> T.Text
packageIdToText (PackageId pkgName pkgVersion) =
  T.concat [pkgName, "-", T.pack (showVersion pkgVersion)]
-- | Name of the package. The explicit type annotations pick the intended
-- record selectors under DuplicateRecordFields ('id' of PackageInfo, then
-- 'name' of PackageId).
packageName :: PackageInfo a -> T.Text
packageName =
  (name :: (PackageId -> T.Text)) . (id :: PackageInfo a -> PackageId)
-- | Position of a single identifier occurrence inside a module (1-based
-- line and columns).
data IdentifierSrcSpan = IdentifierSrcSpan
  { modulePath :: HaskellModulePath
  , line :: Int
  , startColumn :: Int
  , endColumn :: Int
  } deriving (Show, Eq, Ord, Generic, Data)

-- | Directory tree of a package's files.
data DirTree
  = Dir { name :: FilePath
        , contents :: [DirTree] }
  | File { name :: FilePath
         , path :: FilePath
         , isHaskellModule :: Bool }
  deriving (Show, Eq, Generic, Data)

-- | Identifier of a Cabal component.
newtype ComponentId = ComponentId
  { getComponentId :: T.Text
  } deriving (Show, Eq, Ord, Generic, A.ToJSONKey, Data, Hashable)

-- | Kind of a Cabal component; named variants carry the component's name.
data ComponentType
  = Setup
  | Lib
  | SubLib T.Text
  | FLib T.Text
  | Exe T.Text
  | Test T.Text
  | Bench T.Text
  deriving (Show, Eq, Generic, Data)

-- | True for the main library, sub-libraries, and foreign libraries.
isLibrary :: ComponentType -> Bool
isLibrary Lib = True
isLibrary (SubLib _) = True
isLibrary (FLib _) = True
isLibrary _ = False

-- File names / directory used for serialized package information.
packageInfoBinaryFileName :: FilePath
packageInfoBinaryFileName = "packageInfo"

packageInfoJsonFileName :: FilePath
packageInfoJsonFileName = "packageInfo.json"

defaultOutputDirectoryName :: FilePath
defaultOutputDirectoryName = ".haskell-code-explorer"
--------------------------------------------------------------------------------
-- A simple Trie implementation
--------------------------------------------------------------------------------
-- | Prefix tree: values stored at this node plus one child trie per key
-- element.
data Trie k v = Trie
  { values :: S.Set v
  , children :: HM.HashMap k (Trie k v)
  } deriving (Show, Eq, Generic, Data)

-- | Trie with no values and no children.
emptyTrie :: Trie k v
emptyTrie = Trie S.empty HM.empty
-- | Insert a value at the end of the given key path. @f@ decides how the
-- new value combines with the values already stored at that node.
insertToTrie ::
     (Hashable k, Eq k, Ord v)
  => (v -> S.Set v -> S.Set v)
  -> [k]
  -> v
  -> Trie k v
  -> Trie k v
insertToTrie f [] v (Trie vals children) = Trie (f v vals) children
insertToTrie f word@(first:rest) val (Trie vals children) =
  case HM.lookup first children of
    Just trie ->
      Trie vals (HM.insert first (insertToTrie f rest val trie) children)
    Nothing ->
      -- No child for this key element yet: add an empty child and retry the
      -- same word, which then takes the 'Just' branch above.
      insertToTrie f word val (Trie vals (HM.insert first emptyTrie children))
-- | Prefix search: all values stored at or below the node reached by the
-- key. An unmatched key element yields the empty set.
match :: (Hashable k, Eq k, Ord v) => [k] -> Trie k v -> S.Set v
match (first:rest) (Trie _ children) =
  maybe S.empty (match rest) (HM.lookup first children)
match [] (Trie val children) =
  -- Key exhausted: collect this node's values plus the whole subtree.
  S.union val $
  S.unions
    [S.union v $ match [] trie | (_, trie@(Trie v _)) <- HM.toList children]
--------------------------------------------------------------------------------
-- Module info
--------------------------------------------------------------------------------
-- | Full analysis result for a single module.
data ModuleInfo = ModuleInfo
  { id :: HaskellModulePath
  , name :: HaskellModuleName
  , source :: V.Vector T.Text
  -- ^ Source code of the module
  , transformation :: SourceCodeTransformation
  , exprInfoMap :: ExpressionInfoMap
  -- ^ Type of each expression in the module
  , idOccMap :: IdentifierOccurrenceMap
  -- ^ All occurrences of each identifier in the module
  , idInfoMap :: IdentifierInfoMap
  -- ^ Information about each identifier in the module
  , declarations :: [Declaration]
  , definitionSiteMap :: DefinitionSiteMap
  -- ^ Definition site of each top-level value, type, and type class instance
  , externalIds :: [ExternalIdentifierInfo]
  } deriving (Show, Eq, Generic, Data)

-- | Expression info keyed by ((line, col), (line, col)) intervals.
type ExpressionInfoMap = IVM.IntervalMap (Int, Int) ExpressionInfo
-- | Line number -> occurrences on that line with (startCol, endCol) spans.
type IdentifierOccurrenceMap = IM.IntMap [((Int, Int), IdentifierOccurrence)]
type IdentifierInfoMap = HM.HashMap InternalId IdentifierInfo

-- | Where each top-level value, type, and instance of a module is defined.
data DefinitionSiteMap = DefinitionSiteMap
  { values :: HM.HashMap OccName DefinitionSite
  , types :: HM.HashMap OccName DefinitionSite
  , instances :: HM.HashMap T.Text DefinitionSite
  } deriving (Show, Eq, Generic, Data)

data DefinitionSite = DefinitionSite
  { location :: LocationInfo
  , documentation :: Maybe HTML
  } deriving (Show, Eq, Generic, Data)

-- | A rendered HTML fragment.
type HTML = T.Text

newtype OccName = OccName
  { getOccName :: T.Text
  } deriving (Show, Eq, Ord, Generic, A.ToJSONKey, Data, Hashable)

-- | 'CompactModuleInfo' contains a subset of fields of 'ModuleInfo'.
data CompactModuleInfo = CompactModuleInfo
  { id :: HaskellModulePath
  , name :: HaskellModuleName
  , exprInfoMap :: ExpressionInfoMap
  , definitionSiteMap :: DefinitionSiteMap
  , source :: V.Vector T.Text
  } deriving (Show, Eq, Generic, Data)

-- | File extensions produced/consumed by Haskell preprocessors.
haskellPreprocessorExtensions :: [FilePath]
haskellPreprocessorExtensions =
  [".hsc", ".chs", ".cpphs", ".gc", ".x", ".y", ".ly"]
-- | Shrink every module of a package via 'toCompactModuleInfo'.
-- RecordWildCards brings the argument's fields into scope, so e.g.
-- @id = id@ copies the field (Prelude's 'id' is hidden by the import list).
toCompactPackageInfo :: PackageInfo ModuleInfo -> PackageInfo CompactModuleInfo
toCompactPackageInfo PackageInfo {..} =
  PackageInfo
    { id = id
    , moduleMap = HM.map toCompactModuleInfo moduleMap
    , moduleNameMap = moduleNameMap
    , directoryTree = directoryTree
    , externalIdOccMap = externalIdOccMap
    , externalIdInfoMap = externalIdInfoMap
    }

-- | Keep only the compact subset of module fields; drops the occurrence,
-- identifier, declaration, and external-id data.
toCompactModuleInfo :: ModuleInfo -> CompactModuleInfo
toCompactModuleInfo ModuleInfo {..} =
  CompactModuleInfo
    { id = id
    , name = name
    , exprInfoMap = exprInfoMap
    , definitionSiteMap = definitionSiteMap
    , source = source
    }
-- Newtype wrappers distinguishing module names, module paths, and plain
-- file paths.
newtype HaskellModuleName = HaskellModuleName
  { getHaskellModuleName :: T.Text
  } deriving (Show, Eq, Ord, Generic, A.ToJSONKey, Data)

newtype HaskellModulePath = HaskellModulePath
  { getHaskellModulePath :: T.Text
  } deriving (Show, Eq, Ord, Generic, A.ToJSONKey, Data)

newtype HaskellFilePath = HaskellFilePath
  { getHaskellFilePath :: T.Text
  } deriving (Show, Eq, Ord, Generic, A.ToJSONKey, Data)

-- | Haskell identifier (value or type)
data IdentifierInfo = IdentifierInfo
  { sort :: NameSort
  , occName :: OccName
  , demangledOccName :: T.Text
  , nameSpace :: NameSpace
  , locationInfo :: LocationInfo
  , idType :: Type
  , details :: Maybe IdDetails
  , doc :: Maybe HTML
  , internalId :: InternalId
  , externalId :: Maybe ExternalId
  , isExported :: Bool
  } deriving (Show, Eq, Ord, Generic, Data)

-- | Whether a name is visible outside its module.
data NameSort
  = External
  | Internal
  deriving (Show, Eq, Ord, Generic, Data)

data NameSpace
  = VarName
  | DataName
  | TvName
  | TcClsName
  deriving (Show, Eq, Ord, Generic, Data)

-- | What kind of Id a value identifier is (mirrors GHC's IdDetails).
data IdDetails
  = VanillaId
  | RecSelId
  | RecSelIdNaughty
  | DataConWorkId
  | DataConWrapId
  | ClassOpId
  | PrimOpId
  | FCallId
  | TickBoxOpId
  | DFunId
  | CoVarId
  | JoinId
  deriving (Show, Eq, Ord, Generic, Data)

-- | Each Haskell identifier has an 'InternalId' that is unique within a single module
newtype InternalId = InternalId
  { getInternalId :: T.Text
  } deriving (Show, Eq, Ord, Generic, Data, Hashable, A.ToJSONKey)

newtype ExternalId = ExternalId
  { getExternalId :: T.Text
  } deriving (Show, Eq, Ord, Generic, Data, Hashable, A.ToJSONKey)

newtype ExternalIdentifierInfo = ExternalIdentifierInfo
  { getIdentifierInfo :: IdentifierInfo
  } deriving (Eq, Show, Generic, Data)

-- Order externally visible identifiers by name length, then name, then
-- internal id (useful for shortest-prefix search results).
instance Ord ExternalIdentifierInfo where
  compare (ExternalIdentifierInfo i1) (ExternalIdentifierInfo i2) =
    case compare
           (T.length . demangledOccName $ i1)
           (T.length . demangledOccName $ i2) of
      GT -> GT
      LT -> LT
      EQ ->
        case compare (demangledOccName i1) (demangledOccName i2) of
          GT -> GT
          LT -> LT
          EQ ->
            compare
              (internalId (i1 :: IdentifierInfo))
              (internalId (i2 :: IdentifierInfo))
-- | Description and (optional) type of an expression.
data ExpressionInfo = ExpressionInfo
  { description :: T.Text
  , exprType :: Maybe Type
  } deriving (Show, Eq, Generic, Data)

-- | Occurrence of an identifier in a source code
data IdentifierOccurrence = IdentifierOccurrence
  { internalId :: Maybe InternalId
  , internalIdFromRenamedSource :: Maybe InternalId
  , isBinder :: Bool
  , instanceResolution :: Maybe InstanceResolution
  , idOccType :: Maybe Type
  -- ^ Instantiated type of an identifier
  , typeArguments :: Maybe [Type]
  , description :: T.Text
  , sort :: IdentifierOccurrenceSort
  } deriving (Show, Eq, Ord, Generic, Data)

data IdentifierOccurrenceSort
  = ValueId
  | TypeId
  | ModuleId LocationInfo
  deriving (Show, Eq, Ord, Generic, Data)

-- | A type rendered as a list of components.
data Type = Type
  { components :: [TypeComponent]
  , componentsExpanded :: Maybe [TypeComponent]
  -- ^ Components of a type with all type synonyms expanded
  } deriving (Show, Eq, Ord, Generic, Data)

-- | Either plain text or a reference to a type constructor.
data TypeComponent
  = Text T.Text
  | TyCon { internalId :: InternalId
          , name :: T.Text }
  deriving (Show, Eq, Ord, Generic, Data)

-- | Tree of instances
data InstanceResolution =
  Instance
    { name :: T.Text
    -- ^ Type of an instance, e.g., "instance Show a => ClassName a"
    , instanceType :: Type
    , types :: [Type]
    -- ^ Types at which type variables of a class are instantiated
    , location :: LocationInfo
    , instances :: [InstanceResolution]
    }
  | Stop
  deriving (Show,Eq,Ord,Generic,Data)

-- | How a preprocessor transformed the module source.
data SourceCodeTransformation = SourceCodeTransformation
  { totalLines :: Int
  , filePath :: HaskellModulePath
  , linePragmas :: S.Set LinePragma
  , fileIndex :: HM.HashMap HaskellFilePath (S.Set FileLocation)
  -- ^ Map from an original filename to its locations in a preprocessed source code
  } deriving (Show, Eq, Generic, Data)

-- | Location of a file included by a preprocessor
data FileLocation = FileLocation
  { lineStart :: Int
  , lineEnd :: Int
  , offset :: Int
  -- ^ (line number in a preprocessed file) - (line number in an original file) + 1
  } deriving (Show, Eq, Generic, Data)

-- | Line pragma inserted by a preprocessor
data LinePragma = LinePragma
  { filePath :: HaskellFilePath
  , lineNumberPreprocessed :: Int
  , lineNumberOriginal :: Int
  } deriving (Show, Eq, Generic, Data)
-- | Translate a line number in an original source file to the corresponding
-- line in the preprocessed module source. The first clause short-circuits
-- when no line pragmas were recorded (the file was not preprocessed).
fromOriginalLineNumber ::
     SourceCodeTransformation -> (HaskellFilePath, Int) -> Either T.Text Int
fromOriginalLineNumber SourceCodeTransformation {linePragmas = pragmas} (_originalFileName, originalLineNumber)
  | S.null pragmas = Right originalLineNumber
fromOriginalLineNumber SourceCodeTransformation {fileIndex = index} (originalFileName, originalLineNumber) =
  case HM.lookup originalFileName index of
    Just set ->
      -- lookupGE finds smallest element greater or equal to the given one
      -- ('Ord FileLocation' compares by lineEnd, so the probe's lineEnd is
      -- the original line number).
      case S.lookupGE (FileLocation 1 originalLineNumber 1) set of
        Just FileLocation {..} -> Right $ originalLineNumber + offset
        Nothing ->
          Left $
          T.concat
            [ "Cannot find "
            , T.pack . show $ (originalFileName, originalLineNumber)
            , " in "
            , T.pack $ show index
            ]
    Nothing ->
      Left $
      T.concat
        [ "Cannot find file "
        , T.pack . show $ originalFileName
        , " in "
        , T.pack $ show index
        ]
-- | A top-level declaration of a module.
data Declaration = Declaration
  { sort :: DeclarationSort
  , name :: T.Text
  , declType :: Maybe Type
  , isExported :: Bool
  , lineNumber :: Int
  } deriving (Show, Eq, Ord, Generic, Data)

-- | Kind of declaration (mirrors GHC's HsDecl constructors: type/class,
-- instance, value, foreign).
data DeclarationSort
  = TyClD
  | InstD
  | ValD
  | ForD
  deriving (Show, Eq, Ord, Generic, Data)

-- | Where something is defined: an exact source span, an approximate
-- (package/module/entity) location, or unknown.
data LocationInfo
  = ExactLocation { packageId :: PackageId
                  , modulePath :: HaskellModulePath
                  , moduleName :: HaskellModuleName
                  , startLine :: Int
                  , endLine :: Int
                  , startColumn :: Int
                  , endColumn :: Int }
  | ApproximateLocation { packageId :: PackageId
                        , moduleName :: HaskellModuleName
                        , entity :: LocatableEntity
                        , name :: T.Text
                        , haddockAnchorId :: Maybe T.Text
                        , componentId :: ComponentId }
  | UnknownLocation T.Text
  deriving (Show, Eq, Ord, Generic, Data)

data LocatableEntity
  = Typ
  | Val
  | Inst
  | Mod
  deriving (Show, Eq, Ord, Generic, Data)
--------------------------------------------------------------------------------
-- Instances
--------------------------------------------------------------------------------
-- Hand-written 'Data' machinery for interval maps so types containing
-- 'IVM.IntervalMap' can derive 'Data'. The map is modeled as if it had a
-- single "fromList" constructor.
deriving instance (Data k) => Data (IVM.Interval k)
instance (Data k, Data v, Eq k, Ord k, Data (IVM.Interval k)) =>
         Data (IVM.IntervalMap k v) where
  gfoldl f z m = z IVM.fromList `f` IVM.toList m
  toConstr _ = fromListConstr
  gunfold k z c =
    case constrIndex c of
      1 -> k (z IVM.fromList)
      _ -> error "gunfold"
  dataTypeOf _ = intervalMapDataType
  dataCast2 = gcast2

-- | The single pseudo-constructor exposed by the 'Data' instance above.
fromListConstr :: Constr
fromListConstr = mkConstr intervalMapDataType "fromList" [] Prefix

intervalMapDataType :: DataType
intervalMapDataType = mkDataType "Data.IntervalMap" [fromListConstr]

deriving instance Generic (IVM.Interval k)
-- Hashable / Serialize instances. Most Serialize instances use the Generic
-- default; the hand-written ones below define encodings for container and
-- text types.
instance Hashable HaskellModuleName
instance Serialize HaskellModuleName
instance Hashable HaskellModulePath
instance Serialize HaskellModulePath
instance Hashable HaskellFilePath
instance Serialize HaskellFilePath

-- Interval maps are serialized as ascending association lists.
instance (Serialize k, Serialize v, Ord k) =>
         Serialize (IVM.IntervalMap k v) where
  put = put . IVM.toAscList
  get = IVM.fromAscList <$> Data.Serialize.get

-- Line pragmas are ordered by their position in the preprocessed file.
instance Ord LinePragma where
  compare p1 p2 =
    compare
      (lineNumberPreprocessed (p1 :: LinePragma))
      (lineNumberPreprocessed (p2 :: LinePragma))

-- File locations are ordered by end line (relied upon by
-- 'fromOriginalLineNumber').
instance Ord FileLocation where
  compare l1 l2 = compare (lineEnd l1) (lineEnd l2)

instance Serialize LinePragma
instance Serialize FileLocation
instance Serialize SourceCodeTransformation
instance Serialize IdentifierInfo
instance Serialize InternalId
instance Serialize ExternalId

-- The newtype wrapper is transparent in the serialized form.
instance Serialize ExternalIdentifierInfo where
  put (ExternalIdentifierInfo info) = put info
  get = ExternalIdentifierInfo <$>(get :: Get IdentifierInfo)

instance Serialize InstanceResolution
instance Serialize OccName
instance Serialize IdDetails
instance Serialize NameSpace
instance Serialize DefinitionSiteMap
instance Serialize DefinitionSite
instance Serialize Declaration
instance Serialize NameSort
instance Serialize DeclarationSort
instance Serialize PackageId
instance Serialize Data.Version.Version
instance Serialize (PackageInfo ModuleInfo)
instance Serialize (PackageInfo CompactModuleInfo)
instance Serialize IdentifierSrcSpan
instance Serialize DirTree
instance Serialize ComponentId
instance Serialize ComponentType

-- Text is serialized via UTF-8.
instance Serialize T.Text where
  put = put . encodeUtf8
  get = decodeUtf8 <$> Data.Serialize.get

-- Hash maps are serialized as association lists.
instance (Serialize k, Serialize v, Eq k,Hashable k) => Serialize (HM.HashMap k v) where
  put = put . HM.toList
  get = HM.fromList <$> get

instance Serialize ModuleInfo
instance Serialize CompactModuleInfo
instance (Serialize k) => Serialize (IVM.Interval k)
instance Serialize LocationInfo
instance Serialize IdentifierOccurrence
instance Serialize IdentifierOccurrenceSort
instance Serialize TypeComponent

-- Vectors are serialized as lists and rebuilt with a known length.
instance (Serialize a) => Serialize (V.Vector a) where
  put = put . V.toList
  get = (\l -> V.fromListN (L.length l) l) <$> get

instance Serialize Type
instance Serialize ExpressionInfo
instance Serialize LocatableEntity
instance (Serialize k,Ord k,Serialize v,Ord v,Hashable k) => Serialize (Trie k v)
-- NFData (deep evaluation) instances, all via the Generic default.
instance NFData HaskellModuleName
instance NFData HaskellModulePath
instance NFData HaskellFilePath
instance NFData LinePragma
instance NFData FileLocation
instance NFData SourceCodeTransformation
instance NFData IdentifierInfo
instance NFData InternalId
instance NFData ExternalId
instance NFData ExternalIdentifierInfo
instance NFData InstanceResolution
instance NFData IdDetails
instance NFData NameSpace
instance NFData OccName
instance NFData DefinitionSiteMap
instance NFData DefinitionSite
instance NFData Declaration
instance NFData NameSort
instance NFData DeclarationSort
instance NFData PackageId
instance NFData (PackageInfo ModuleInfo)
instance NFData (PackageInfo CompactModuleInfo)
instance NFData IdentifierSrcSpan
instance NFData DirTree
instance NFData ComponentId
instance NFData ComponentType
instance NFData ModuleInfo
instance NFData CompactModuleInfo
instance NFData LocationInfo
instance NFData IdentifierOccurrence
instance NFData IdentifierOccurrenceSort
instance NFData TypeComponent
instance NFData Type
instance NFData ExpressionInfo
instance NFData LocatableEntity
instance (NFData k, Ord k, NFData v, Ord v, Hashable k) =>
         NFData (Trie k v)

-- | Aeson options that drop Nothing fields from generated JSON.
omitNothingOptions :: Options
omitNothingOptions = defaultOptions {omitNothingFields = True}
-- JSON for a package: id, directory tree, and the set of module paths
-- (module contents are serialized separately, hence the unit values).
instance A.ToJSON (PackageInfo a) where
  toJSON PackageInfo {..} =
    A.object
      [ ("id", A.toJSON $ packageIdToText id)
      , ("directoryTree", A.toJSON directoryTree)
      , ("modules", A.toJSON . HM.map (const ()) $ moduleMap)
      ]

-- JSON for a module: the source is tokenized line by line ('tokenize') and
-- rendered to an HTML table ('lineToHtml'), accompanied by identifier,
-- occurrence, and declaration maps.
instance A.ToJSON ModuleInfo where
  toJSON ModuleInfo {..} =
    let sourceCodeLines = zip [1 ..] $ V.toList source
        tokenizedLines =
          L.map
            (\(lineNumber, lineText) ->
               case IM.lookup lineNumber idOccMap of
                 Just identifiers -> (lineNumber, tokenize lineText identifiers)
                 Nothing ->
                   -- No identifiers on this line: a single plain chunk.
                   ( lineNumber
                   , [(lineText, (1, T.length lineText + 1), Nothing)]))
            sourceCodeLines
        html =
          Html.table Html.! Attr.class_ "source-code" $
          Html.tbody $ mapM_ (uncurry lineToHtml) tokenizedLines
    in A.object
         [ ("id", A.toJSON id)
         , ("name", A.toJSON name)
         , ("sourceCodeHtml", A.toJSON . renderHtml $ html)
         , ("identifiers", A.toJSON idInfoMap)
         , ("occurrences", A.toJSON $ idOccurrencesHashMap idOccMap)
         , ("declarations", A.toJSON declarations)
         ]
-- | Flatten the per-line occurrence map into a map keyed by
-- "line-startCol-endCol" strings (see 'occurrenceLocationToText').
idOccurrencesHashMap ::
     IM.IntMap [((Int, Int), IdentifierOccurrence)]
  -> HM.HashMap T.Text IdentifierOccurrence
idOccurrencesHashMap =
  HM.fromList .
  concatMap
    (\(lineNum, occs) ->
       L.map
         (\((startCol, endCol), occ) ->
            (occurrenceLocationToText lineNum startCol endCol, occ))
         occs) .
  IM.toList
-- | Identical to 'idOccurrencesHashMap' (this was a byte-for-byte
-- copy-paste duplicate); kept with its original name and type for backward
-- compatibility and now simply delegates. Note the name is historical — it
-- returns a hash map, not a list.
idOccurrenceList ::
     IM.IntMap [((Int, Int), IdentifierOccurrence)]
  -> HM.HashMap T.Text IdentifierOccurrence
idOccurrenceList = idOccurrencesHashMap
-- | Encode an occurrence position as @"line-startCol-endCol"@.
occurrenceLocationToText :: Int -> Int -> Int -> T.Text
occurrenceLocationToText ln sc ec =
  T.intercalate "-" (map (T.pack . show) [ln, sc, ec])
-- | Render one source line as a table row: a line-number cell plus a
-- content cell whose identifier tokens become <span> elements carrying the
-- data attributes ("start", "end", "occurrence", "identifier") consumed by
-- the frontend.
lineToHtml :: Int
           -> [(T.Text, (Int, Int), Maybe IdentifierOccurrence)]
           -> Html.Html
lineToHtml lineNumber tokens =
  Html.tr $ do
    Html.td Html.! Attr.class_ "line-number" Html.!
      Attr.id (Html.textValue . T.append "LN" . T.pack $ show lineNumber) $
      Html.toHtml (T.pack $ show lineNumber)
    Html.td Html.! Attr.class_ "line-content" Html.!
      Html.dataAttribute "line" (Html.textValue $ T.pack . show $ lineNumber) Html.!
      Attr.id (Html.textValue . T.append "LC" . T.pack $ show lineNumber) $
      mapM_
        (\(content, (start, end), mbIdOcc) ->
           let addPositionAttrs :: Html.Html -> Html.Html
               addPositionAttrs htmlElement =
                 htmlElement Html.!
                 Html.dataAttribute
                   "start"
                   (Html.textValue $ T.pack . show $ start) Html.!
                 Html.dataAttribute "end" (Html.textValue $ T.pack . show $ end)
           in case mbIdOcc of
                -- Identifier chunk: span with occurrence/identifier metadata.
                Just idOcc ->
                  addPositionAttrs $
                  Html.span Html.! Attr.class_ "identifier" Html.!
                  Attr.id
                    (Html.textValue .
                     maybe "" getInternalId . internalIdFromRenamedSource $
                     idOcc) Html.!
                  Html.dataAttribute
                    "occurrence"
                    (Html.textValue $
                     occurrenceLocationToText lineNumber start end) Html.!
                  Html.dataAttribute
                    "identifier"
                    (Html.textValue $
                     maybe "" getInternalId $
                     internalId (idOcc :: IdentifierOccurrence)) $
                  Html.toHtml content
                -- Plain chunk: span with position attributes only.
                Nothing -> addPositionAttrs . Html.span . Html.toHtml $ content)
        tokens
-- | Split a line of source code into chunks, attaching the payload @a@ to
-- chunks covered by an identifier location and 'Nothing' to the text in
-- between; a trailing plain chunk covers any remainder of the line.
-- NOTE(review): appears to assume the location list is sorted by start
-- column and non-overlapping — confirm with callers.
tokenize
  :: forall a.
     T.Text -- ^ Source code
  -> [((Int, Int), a)] -- ^ Identifier locations
  -- The end position is defined to be the column /after/ the end of the
  -- span. That is, a span of (1,1)-(1,2) is one character long, and a
  -- span of (1,1)-(1,1) is zero characters long.
  -> [(T.Text, (Int, Int), Maybe a)]
tokenize line =
  L.reverse .
  -- Append the remainder of the line (if any) as a final plain chunk, then
  -- reverse since chunks were accumulated in reverse order.
  (\(remainingLine, currentIndex, c) ->
     if T.null remainingLine
       then c
       else (remainingLine, (currentIndex, T.length line + 1), Nothing) : c) .
  L.foldl' split (line, 1, [])
  where
    -- Consume the text up to and including one identifier span, producing
    -- at most two chunks (plain gap + identifier).
    split ::
         (T.Text, Int, [(T.Text, (Int, Int), Maybe a)])
      -> ((Int, Int), a)
      -> (T.Text, Int, [(T.Text, (Int, Int), Maybe a)])
    split (remainingLine, currentIndex, chunks) ((start, end), a)
      | start == currentIndex =
        let (chunk, remainingLine') = T.splitAt (end - start) remainingLine
            chunks' = (chunk, (start, end), Just a) : chunks
        in (remainingLine', end, chunks')
      | otherwise =
        let (chunkNoId, remainingLine') =
              T.splitAt (start - currentIndex) remainingLine
            (chunk, remainingLine'') = T.splitAt (end - start) remainingLine'
        in ( remainingLine''
           , end
           , (chunk, (start, end), Just a) :
             (chunkNoId, (currentIndex, start), Nothing) : chunks)
-- | Render a Haddock documentation AST to an HTML fragment. The two
-- callbacks render module references and identifiers respectively.
docToHtml ::
     forall mod id.
     (mod -> Html.Html)
  -> (id -> Html.Html)
  -> DocH mod id
  -> HTML
docToHtml modToHtml idToHtml = toStrict . renderHtml . toH
  where
    toH :: DocH mod id -> Html.Html
    toH (DocAppend doc1 doc2) = toH doc1 >> toH doc2
    toH (DocParagraph doc) = Html.p $ toH doc
    toH (DocIdentifier identifier) = Html.span $ idToHtml identifier
    toH (DocWarning doc) = Html.div Html.! Attr.class_ "warning" $ toH doc
    toH (DocEmphasis doc) = Html.em $ toH doc
    toH DocEmpty = mempty
    toH (DocBold doc) = Html.b $ toH doc
    toH (DocMonospaced doc) =
      Html.span Html.! Attr.class_ "source-code-font" $ toH doc
    toH (DocUnorderedList docs) = Html.ul $ mapM_ (Html.li . toH) docs
    toH (DocOrderedList docs) = Html.ol $ mapM_ (Html.li . toH) docs
    toH (DocDefList docs) =
      Html.dl $
      mapM_ (\(doc1, doc2) -> Html.dt (toH doc1) >> Html.dd (toH doc2)) docs
    toH (DocCodeBlock doc) = Html.div Html.! Attr.class_ "source-code" $ toH doc
    toH (DocIdentifierUnchecked modName) = modToHtml modName
    toH (DocModule str) = Html.span . Html.toHtml . T.pack $ str
    toH (DocHyperlink (Hyperlink url mbTitle)) =
      Html.a Html.! (Attr.href . Html.textValue . T.pack $ url) $
      Html.toHtml $ fromMaybe url mbTitle
    toH (DocPic (Picture uri mbTitle)) =
      Html.img Html.! (Attr.src . Html.textValue . T.pack $ uri) Html.!
      (Attr.title . Html.textValue . T.pack $ fromMaybe "" mbTitle)
    -- MathJax-style delimiters for inline/display math.
    toH (DocMathInline str) =
      Html.span . Html.toHtml $ T.pack ("\\(" ++ str ++ "\\)")
    toH (DocMathDisplay str) =
      Html.div . Html.toHtml $ T.pack ("\\[" ++ str ++ "\\]")
    toH (DocAName str) =
      Html.a Html.! (Attr.id . Html.textValue . T.pack $ str) $ mempty
    toH (DocProperty str) =
      Html.div Html.! Attr.class_ "source-code" $ Html.toHtml $ T.pack str
    toH (DocExamples examples) =
      Html.div Html.! Attr.class_ "source-code" $
      mapM_
        (\(Example expr results) ->
           let htmlPrompt = Html.span $ Html.toHtml (">>> " :: String)
               htmlExpression = Html.span $ Html.toHtml (expr ++ "\n")
           -- NOTE(review): 'unlines results' is a String, so this mapM_
           -- iterates over individual characters, wrapping each Char in its
           -- own span — possibly intended to map over 'results' instead;
           -- confirm against the rendered output.
           in htmlPrompt >> htmlExpression >>
              mapM_ (Html.span . Html.toHtml) (unlines results))
        examples
    toH (DocString str) = Html.span . Html.toHtml $ T.pack str
    toH (DocHeader (Header level doc)) = toHeader level $ toH doc
      where
        -- Clamp header levels beyond 5 to <h6>.
        toHeader 1 = Html.h1
        toHeader 2 = Html.h2
        toHeader 3 = Html.h3
        toHeader 4 = Html.h4
        toHeader 5 = Html.h5
        toHeader _ = Html.h6
#if MIN_VERSION_GLASGOW_HASKELL(8,4,3,0)
    toH (DocTable (Table hs bs)) =
      let tableRowToH tdOrTh (TableRow cells) =
            Html.tr $ mapM_ (tableCellToH tdOrTh) cells
          tableCellToH tdOrTh (TableCell colspan rowspan doc) =
            (tdOrTh $ toH doc) Html.!?
            (colspan /= 1, (Attr.colspan (Html.stringValue $ show colspan))) Html.!?
            (rowspan /= 1, (Attr.rowspan (Html.stringValue $ show rowspan)))
      in Html.table $
         Html.thead (mapM_ (tableRowToH Html.th) hs) >>
         Html.tbody (mapM_ (tableRowToH Html.td) bs)
#endif
-- JSON encoding for the indexer's domain types. Newtype wrappers encode
-- directly as their payload; generically-derived records use
-- 'omitNothingOptions' so 'Nothing' fields are omitted from the output.
instance A.ToJSON HaskellModuleName where
  toJSON (HaskellModuleName name) = A.String name
instance A.ToJSON HaskellModulePath where
  toJSON (HaskellModulePath path) = A.String path
instance A.ToJSON HaskellFilePath where
  toJSON (HaskellFilePath path) = A.String path
instance A.ToJSON LinePragma where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON FileLocation where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON IdentifierInfo where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON InternalId where
  toJSON (InternalId text) = A.toJSON text
instance A.ToJSON ExternalId where
  toJSON (ExternalId text) = A.toJSON text
instance A.ToJSON ExternalIdentifierInfo where
  toJSON (ExternalIdentifierInfo info) = A.toJSON info
-- An instance-resolution tree; 'Stop' leaves encode as JSON null.
instance A.ToJSON InstanceResolution where
  toJSON (Instance name typ types location instances) =
    A.object
      [ "name" A..= A.toJSON name
      , "types" A..= A.toJSON types
      , "location" A..= A.toJSON location
      , "instanceType" A..= A.toJSON typ
      , "instances" A..=
        (A.Array . V.fromList . Prelude.map A.toJSON $ instances)
      ]
  toJSON Stop = A.Null
instance A.ToJSON IdDetails where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON NameSpace where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON Declaration
instance A.ToJSON NameSort
instance A.ToJSON OccName where
  toJSON (OccName name) = A.String name
instance A.ToJSON DeclarationSort
instance A.ToJSON PackageId
instance A.ToJSON ComponentId where
  toJSON (ComponentId id) = A.toJSON id
instance A.ToJSON ComponentType
instance A.ToJSON LocationInfo where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON LocatableEntity
-- Hand-rolled to keep the payload small: optional/false fields are only
-- emitted when they carry information.
instance A.ToJSON IdentifierOccurrence where
  toJSON IdentifierOccurrence {..} =
    A.object $
    [("sort", A.toJSON sort)] ++
    [("description", A.toJSON description)] ++
    [("internalId", A.toJSON internalId) | isJust internalId] ++
    [("isBinder", A.toJSON isBinder) | isBinder] ++
    [("instanceResolution", A.toJSON instanceResolution) | isJust instanceResolution] ++
    [("idOccType", A.toJSON idOccType) | isJust idOccType]
instance A.ToJSON IdentifierOccurrenceSort
instance A.ToJSON TypeComponent where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON Type where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON ExpressionInfo where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON DirTree
instance A.ToJSON DefinitionSite where
  toJSON = A.genericToJSON omitNothingOptions
instance A.ToJSON IdentifierSrcSpan
-- All four interval shapes serialize identically: only the two endpoints
-- are kept; open/closed-ness is discarded.
instance A.ToJSON (IVM.Interval (Int, Int)) where
  toJSON (IVM.IntervalCO a b) = intervalToValue a b
  toJSON (IVM.ClosedInterval a b) = intervalToValue a b
  toJSON (IVM.OpenInterval a b) = intervalToValue a b
  toJSON (IVM.IntervalOC a b) = intervalToValue a b
-- | Encode two (line, column) endpoints as {start: {...}, end: {...}}.
intervalToValue :: (Int, Int) -> (Int, Int) -> A.Value
intervalToValue (l1, c1) (l2, c2) =
  A.object
    [ ("start", A.object [("line", A.toJSON l1), ("column", A.toJSON c1)])
    , ("end", A.object [("line", A.toJSON l2), ("column", A.toJSON c2)])
    ]
-- | Whether source code is indexed before or after CPP preprocessing.
data SourceCodePreprocessing
  = AfterPreprocessing
  | BeforePreprocessing
  deriving (Show, Eq)

-- | Logging destination: standard output or a file path.
data Log
  = StdOut
  | ToFile FilePath
  deriving (Show, Eq)
|
#!/bin/sh
####################################################################################
# If not stated otherwise in this file or this component's Licenses.txt file the
# following copyright and licenses apply:
# Copyright 2018 RDK Management
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################

# Self-heal on atom reset: kill every CCSP component and clear each
# component's /tmp "*_initialized" flag so readiness checks start clean;
# the rest of this script restarts the components one by one.
UTOPIA_PATH="/etc/utopia/service.d"
TAD_PATH="/usr/ccsp/tad"
source $TAD_PATH/corrective_action.sh
source $UTOPIA_PATH/log_env_var.sh

# Append all stdout/stderr to the self-heal log; fds 3/4 keep the originals.
exec 3>&1 4>&2 >>$SELFHEALFILE 2>&1

# --- stop phase: kill components and remove their init flags ---
killall PsmSsp
killall self_heal_connectivity_test.sh
killall resource_monitor.sh
if [ -f "/tmp/logagent_initialized" ]
then
    rm -rf /tmp/logagent_initialized
fi
killall log_agent
if [ -f "/tmp/pam_initialized" ]
then
    rm -rf /tmp/pam_initialized
fi
killall CcspPandMSsp
if [ -f "/tmp/Notify_initialized" ]
then
    rm -rf /tmp/Notify_initialized
fi
killall notify_comp
killall CcspHomeSecurity
if [ -f "/tmp/cm_initialized" ]
then
    rm -rf /tmp/cm_initialized
fi
killall CcspCMAgentSsp
if [ -f "/tmp/moca_initialized" ]
then
    rm -rf /tmp/moca_initialized
fi
killall CcspMoCA
killall CcspTr069PaSsp
killall CcspTandDSsp
if [ -f "/tmp/webpa_initialized" ]
then
    rm -rf /tmp/webpa_initialized
fi
killall webpa
killall CcspSafeNAT
if [ -f "/tmp/lmlite_initialized" ]
then
    rm -rf /tmp/lmlite_initialized
fi
killall CcspLMLite
killall CcspXdnsSsp
if [ -f "/tmp/mta_initialized" ]
then
    rm -rf /tmp/mta_initialized
fi
killall CcspMtaAgentSsp
killall CcspHotspot
# --- restart phase: environment setup ---
export LD_LIBRARY_PATH=$PWD:.:$PWD/../../lib:$PWD/../../.:/lib:/usr/lib:$LD_LIBRARY_PATH
export DBUS_SYSTEM_BUS_ADDRESS=unix:path=/var/run/dbus/system_bus_socket
BINPATH="/usr/bin"
source /etc/utopia/service.d/log_env_var.sh
source /etc/device.properties
export LOG4C_RCPATH=/rdklogger
ulimit -c unlimited

# Select the CCSP subsystem prefix from marker files.
if [ -f /tmp/cp_subsys_ert ]; then
    Subsys="eRT."
elif [ -e ./cp_subsys_emg ]; then
    Subsys="eMG."
else
    Subsys=""
fi

# NOTE(review): keepalive_args is assigned here but never used in this
# script — confirm whether a sourced helper consumes it or it is dead.
keepalive_args="-n `sysevent get wan_ifname` -e 1"

# PSM must come up first; dependent components are started afterwards.
echo "RDKB_SELFHEAL : Resetting process PsmSsp on atom reset"
if [ "x"$Subsys = "x" ];then
    $BINPATH/PsmSsp
else
    echo "$BINPATH/PsmSsp -subsys $Subsys"
    $BINPATH/PsmSsp -subsys $Subsys
fi
sleep 20

#notify-comp
echo "RDKB_SELFHEAL : Resetting process notify-comp on atom reset"
cd /usr/ccsp/notify-comp
if [ "x"$Subsys = "x" ];then
    $BINPATH/notify_comp
else
    $BINPATH/notify_comp -subsys $Subsys
fi

# PandM restart
echo "RDKB_SELFHEAL : Resetting process CcspPandMSsp on atom reset"
cd /usr/ccsp/pam/
if [ "x"$Subsys = "x" ];then
    $BINPATH/CcspPandMSsp
else
    $BINPATH/CcspPandMSsp -subsys $Subsys
fi

# We need to check whether to enable captive portal flag
sleep 15
# Wait (at most MAX_COUNT * 10s) for PandM to publish its init flag.
count=0
MAX_COUNT=20
while :
do
    if [ -f "/tmp/pam_initialized" ]
    then
        echo "PandM initialization complete after atom reset, initializing other CCSP processes"
        break
    else
        echo "Waiting for PandM initialization to complete"
        count=$((count+1))
        sleep 10
        if [ "$count" -ge "$MAX_COUNT" ]
        then
            echo "PandM initialization reaches MAX time out, breaking the loop"
            break
        fi
    fi
done
# Provided by corrective_action.sh (sourced above).
checkCaptivePortal
sleep 3

#CcspCMAgentSsp
echo "RDKB_SELFHEAL : Resetting process CcspCMAgentSsp on atom reset"
cd /usr/ccsp/cm
if [ "x"$Subsys = "x" ];then
    $BINPATH/CcspCMAgentSsp
else
    $BINPATH/CcspCMAgentSsp -subsys $Subsys
fi
sleep 3

echo "RDKB_SELFHEAL : Resetting process CcspMoCA on atom reset"
cd /usr/ccsp/moca
if [ "x"$Subsys = "x" ];then
    $BINPATH/CcspMoCA
else
    $BINPATH/CcspMoCA -subsys $Subsys
fi
sleep 3

#mta
# MTA is skipped on BWG models.
if [ "$MODEL_NUM" = "DPC3939B" ] || [ "$MODEL_NUM" = "DPC3941B" ]; then
    echo_t "Disabling MTA for BWG "
else
    echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspMtaAgentSsp on atom reset"
    cd /usr/ccsp/mta
    if [ "x"$Subsys = "x" ];then
        $BINPATH/CcspMtaAgentSsp
    else
        $BINPATH/CcspMtaAgentSsp -subsys $Subsys
    fi
fi
sleep 3

echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process webpa on atom reset"
cd /usr/ccsp/webpa
if [ "x"$Subsys = "x" ];then
    $BINPATH/webpa
else
    $BINPATH/webpa -subsys $Subsys
fi
sleep 3

#td
echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspTandDSsp on atom reset"
cd /usr/ccsp/tad
if [ "x"$Subsys = "x" ];then
    $BINPATH/CcspTandDSsp
else
    $BINPATH/CcspTandDSsp -subsys $Subsys
fi
sleep 3

#safenet
echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspSafeNAT on atom reset"
cd /usr/ccsp/ccsp-safenat-broadband
if [ "x"$Subsys = "x" ];then
    $BINPATH/CcspSafeNAT
else
    $BINPATH/CcspSafeNAT -subsys $Subsys
fi
sleep 3

#CcspLMLite
echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspLMLite on atom reset"
cd /usr/ccsp/lm
$BINPATH/CcspLMLite -subsys $Subsys

#CcspXdnsSsp
echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspXdnsSsp on atom reset"
cd /usr/ccsp/xdns
$BINPATH/CcspXdnsSsp -subsys $Subsys

# Hotspot is only started when enabled in PSM.
xfinityenable=`psmcli get dmsb.hotspot.enable`
if [ $xfinityenable -eq 1 ];then
    #CcspHotspot
    echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspHotspot on atom reset"
    cd /usr/ccsp/hotspot
    $BINPATH/CcspHotspot -subsys $Subsys > /dev/null &
    cd -
fi
sleep 2

#CcspHomeSecurity
# Home Security is skipped on BWG models.
if [ "$MODEL_NUM" = "DPC3939B" ] || [ "$MODEL_NUM" = "DPC3941B" ]; then
    echo_t "Disabling CcpsHomeSecurity for BWG "
else
    echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspHomeSecurity on atom reset"
    CcspHomeSecurity 8081&
    sleep 5
fi

#CcspTr069PaSsp
# TR-069 is skipped on BWG models and honours the EnableTR69Binary syscfg
# knob (an empty value defaults to enabled).
if [ "$MODEL_NUM" = "DPC3939B" ] || [ "$MODEL_NUM" = "DPC3941B" ]; then
    echo_t "Disabling TR069Pa for BWG "
else
    enable_TR69_Binary=`syscfg get EnableTR69Binary`
    if [ "" = "$enable_TR69_Binary" ] || [ "true" = "$enable_TR69_Binary" ]; then
        echo "[`getDateTime`] RDKB_SELFHEAL : Resetting process CcspTr069PaSsp on atom reset"
        cd /usr/ccsp/tr069pa
        if [ "x"$Subsys = "x" ]; then
            $BINPATH/CcspTr069PaSsp
        else
            $BINPATH/CcspTr069PaSsp -subsys $Subsys
        fi
    fi
fi
sleep 2

echo "RDKB_SELFHEAL : Resetting process log_agent on atom reset"
cd /usr/ccsp/logagent
if [ "x"$Subsys = "x" ];then
    $BINPATH/log_agent
else
    echo "$BINPATH/log_agent -subsys $Subsys"
    $BINPATH/log_agent -subsys $Subsys
fi

echo "Enable RFC feature from ccsp restart"
# Enable RFC feature
if [ -f /lib/rdk/rfc.service ]; then
    /bin/sh /lib/rdk/rfc.service &
fi
|
<?php
namespace Aternos\Codex\Test\Tests\Log\File;
use Aternos\Codex\Log\File\StringLogFile;
use PHPUnit\Framework\TestCase;
class StringLogFileTest extends TestCase
{
    /**
     * The log file must hand back exactly the string it was constructed with.
     */
    public function testGetContent(): void
    {
        $expected = uniqid();
        $logFile = new StringLogFile($expected);
        $this->assertEquals($expected, $logFile->getContent());
    }
}
|
#!/bin/bash
# Download and build VTK 9.1.0 (rendering disabled) with a pinned CMake,
# then stage headers, libraries and binaries into the project tree.
#
# Fixes:
#  - the final `cd %INITIAL_DIR%` used Windows %VAR% syntax, which bash
#    treats as a literal directory name, so the original directory was
#    never restored; now uses "$INITIAL_DIR".
#  - `set -e` aborts on the first failed download/extract/build step
#    instead of ploughing on with missing artifacts.
set -e

INITIAL_DIR=$PWD
cd "${0%/*}"   # work relative to this script's own directory
mkdir Temporary
cd Temporary

# Pinned CMake binary distribution.
wget -O download.tar.gz "https://github.com/Kitware/CMake/releases/download/v3.22.1/cmake-3.22.1-linux-x86_64.tar.gz"
tar -xf download.tar.gz
rm download.tar.gz
mv cmake-3.22.1-linux-x86_64 CMake

# VTK sources.
wget -O download.tar.gz "https://github.com/Kitware/VTK/archive/refs/tags/v9.1.0.tar.gz"
tar -xf download.tar.gz
rm download.tar.gz
mv VTK-9.1.0 VTK

cd VTK
mkdir Build
mkdir Install
cd Build
../../CMake/bin/cmake .. -DVTK_GROUP_ENABLE_Rendering=NO
../../CMake/bin/cmake --build . --config Release --parallel 32
../../CMake/bin/cmake --install . --prefix ../Install
cd ..

# Stage installed artifacts; "$_" is the last argument of the previous
# command, i.e. the directory mkdir just created.
mkdir -p ../../Source/ThirdParty/VTK/Public && cp -a ./Install/include/vtk-9.1/. "$_"
mkdir -p ../../Intermediate/ThirdParty/VTK/Linux && cp -a ./Install/lib/. "$_"
mkdir -p ../../Binaries/ThirdParty/VTK/Linux && cp -a ./Install/bin/. "$_"

cd ../..
rm -rf Temporary
cd "$INITIAL_DIR"
|
use path::math::Point;
use path::{Path, PathSlice};
use path::builder::{Build, FlatPathBuilder};
use path::PathEvent;
use svg;
pub type Polygons = Vec<Vec<Point>>;
/// Convert a flattened path into a list of polygons, one per sub-path.
///
/// Fix: the original dropped a trailing sub-path that ended without a
/// `Close` event (and with no following `MoveTo`); it is now flushed
/// after the loop. Also uses `is_empty()` over `len() > 0`.
pub fn path_to_polygons(path: PathSlice) -> Polygons {
    let mut polygons = Vec::new();
    let mut poly = Vec::new();
    for evt in path {
        match evt {
            PathEvent::MoveTo(to) => {
                // A new sub-path begins: archive the previous one, if any.
                if !poly.is_empty() {
                    polygons.push(poly);
                }
                poly = vec![to];
            }
            PathEvent::Line(segment) => {
                poly.push(segment.to);
            }
            PathEvent::Close(..) => {
                if !poly.is_empty() {
                    polygons.push(poly);
                }
                poly = Vec::new();
            }
            _ => {
                // Curves should not appear in a flattened path.
                println!(" -- path_to_polygons: warning! Unsupported event type {:?}", evt);
            }
        }
    }
    // Flush a final sub-path that was never explicitly closed.
    if !poly.is_empty() {
        polygons.push(poly);
    }
    polygons
}
/// Build a flattened `Path` from a set of polygons, closing each one.
/// Panics if any polygon is empty (indexing its first vertex).
pub fn polygons_to_path(polygons: &Polygons) -> Path {
    let mut builder = Path::builder().flattened(0.05);
    for poly in polygons.iter() {
        builder.move_to(poly[0]);
        for vertex in poly.iter().skip(1) {
            builder.line_to(*vertex);
        }
        builder.close();
    }
    builder.build()
}
/// Shrink `path` to a minimal test case that still makes `cb` fail.
///
/// `cb` returns true (or panics) when the failure reproduces. The reducer
/// first drops whole sub-paths, then individual vertices, keeping each
/// removal only while the failure persists. The reduced case is printed
/// as a ready-to-paste `#[test]` plus SVG path syntax, and returned.
pub fn find_reduced_test_case<F: Fn(Path) -> bool + panic::UnwindSafe + panic::RefUnwindSafe>
(
    path: PathSlice,
    cb: &F,
) -> Path {
    let mut polygons = path_to_polygons(path);
    println!(" -- removing sub-paths...");
    polygons = find_reduced_test_case_sp(polygons, cb);
    println!(" -- removing vertices...");
    for p in 0..polygons.len() {
        let mut v = 0;
        loop {
            // Never shrink a polygon below a triangle.
            if v >= polygons[p].len() || polygons[p].len() <= 3 {
                break;
            }
            // Try without vertex v; a panic inside cb counts as "still fails".
            let mut cloned = polygons.clone();
            cloned[p].remove(v);
            let path = polygons_to_path(&cloned);
            let failed = panic::catch_unwind(|| cb(path)).unwrap_or(true);
            if failed {
                // Keep the removal and retry the same index (the next
                // vertex has shifted into position v).
                polygons = cloned;
                continue;
            }
            v += 1;
        }
    }
    // Pretty-print the reduced case as Rust code and as an SVG path string.
    let mut svg_path = svg::path_utils::PathSerializer::new();
    println!(" ----------- reduced test case: -----------\n\n");
    println!("#[test]");
    println!("fn reduced_test_case() {{");
    println!(" let mut builder = Path::builder();\n");
    for p in 0..polygons.len() {
        let pos = polygons[p][0];
        println!(" builder.move_to(point({}, {}));", pos.x, pos.y);
        svg_path.move_to(pos);
        for v in 1..polygons[p].len() {
            let pos = polygons[p][v];
            println!(" builder.line_to(point({}, {}));", pos.x, pos.y);
            svg_path.line_to(pos);
        }
        println!(" builder.close();\n");
        svg_path.close();
    }
    println!(" test_path(builder.build().as_slice());\n");
    println!(" // SVG path syntax:");
    println!(" // \"{}\"", svg_path.build());
    println!("}}\n\n");
    return polygons_to_path(&polygons);
}
use std::panic;
/// Sub-path reduction pass: repeatedly try deleting whole polygons,
/// keeping a deletion only while `cb` still fails (returns true or panics).
fn find_reduced_test_case_sp<F>(mut polygons: Polygons, cb: &F) -> Polygons
where
    F: Fn(Path) -> bool + panic::UnwindSafe + panic::RefUnwindSafe
{
    let mut idx = 0;
    while idx < polygons.len() {
        let mut candidate = polygons.clone();
        candidate.remove(idx);
        let reduced = polygons_to_path(&candidate);
        // A panic inside the callback counts as a reproduced failure.
        let still_fails = panic::catch_unwind(|| cb(reduced)).unwrap_or(true);
        if still_fails {
            // Keep the deletion; the next polygon has shifted into idx.
            polygons = candidate;
        } else {
            idx += 1;
        }
    }
    polygons
}
|
// Copyright 2021 Ivanov Arkadiy
#include <gtest/gtest.h>
#include <iostream>
#include "./vector_alternations.h"
#include <gtest-mpi-listener.hpp>
// Cross-check parallelCount against sequentialCount_V1 on various inputs.
// Only rank 0 allocates/fills the vector and verifies the result.
// Improvement: removed the unreachable `if (vec == nullptr)` blocks — a
// failing `new int[...]` throws std::bad_alloc and never returns nullptr,
// so those branches (and the `throw MIE`) were dead code.
TEST(seq_and_par_check_equivalence, two_elems_at_the_borders_vec_size_200) {
    int procRank = 0;
    const int vecLen = 200;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        // Sign alternations only at the vector's borders.
        std::fill_n(vec, vecLen, 1); vec[0] = vec[vecLen - 1] = -1;
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, one_elem_in_the_middle_vec_size_200) {
    int procRank = 0;
    const int vecLen = 200;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        // A single negative element in the middle.
        std::fill_n(vec, vecLen, 1); vec[100] = -1;
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, negative_vec_size_200) {
    int procRank = 0;
    const int vecLen = 200;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        std::fill_n(vec, vecLen, -1);  // all negative: no alternations
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, positive_vec_size_200) {
    int procRank = 0;
    const int vecLen = 200;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        std::fill_n(vec, vecLen, 1);  // all positive: no alternations
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, rng_vec_size_100000) {
    int procRank = 0;
    const int vecLen = 100000;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        fillVecWithRandValues(vec, vecLen);
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, rng_vec_size_110) {
    int procRank = 0;
    const int vecLen = 110;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        fillVecWithRandValues(vec, vecLen);
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, rng_vec_size_11) {
    int procRank = 0;
    const int vecLen = 11;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        fillVecWithRandValues(vec, vecLen);
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}

TEST(seq_and_par_check_equivalence, rng_vec_size_8) {
    int procRank = 0;
    const int vecLen = 8;
    int* vec = nullptr;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        vec = new int[vecLen];
        fillVecWithRandValues(vec, vecLen);
    }
    int globalAlternations = parallelCount(vec, vecLen);
    if (procRank == 0) {
        int referenceAlternations = sequentialCount_V1(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(referenceAlternations, globalAlternations);
    }
}
// Agreement checks between the two sequential implementations, plus sanity
// checks on the RNG fill helper. All work happens on rank 0 only.
// Improvement: removed the unreachable `if (vec == nullptr)` blocks — a
// failing `new int[...]` throws std::bad_alloc and never returns nullptr,
// so those branches (and the `throw MIE`) were dead code.
TEST(equivalense_of_sequential_count, two_elems_at_the_borders) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 1000;
        int* vec = new int[vecLen];
        bool ok = true;
        // Exactly two alternations: at the first and the last element.
        std::fill_n(vec, vecLen, 1);
        vec[0] = vec[vecLen - 1] = -1;
        int sc_v1 = sequentialCount_V1(vec, vecLen);
        int sc_v2 = sequentialCount_V2(vec, vecLen);
        if (sc_v1 != sc_v2 || sc_v1 != 2)
            ok = false;
        delete[] vec;
        ASSERT_TRUE(ok);
    }
}

TEST(equivalense_of_sequential_count, one_elem_in_the_middle) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 1000;
        int* vec = new int[vecLen];
        bool ok = true;
        // A lone negative element produces exactly two alternations.
        std::fill_n(vec, vecLen, 1);
        vec[400] = -1;
        int sc_v1 = sequentialCount_V1(vec, vecLen);
        int sc_v2 = sequentialCount_V2(vec, vecLen);
        if (sc_v1 != sc_v2 || sc_v1 != 2)
            ok = false;
        delete[] vec;
        ASSERT_TRUE(ok);
    }
}

TEST(equivalense_of_sequential_count, on_negative_vect) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 1000;
        int* vec = new int[vecLen];
        std::fill_n(vec, vecLen, -1);
        int sc_v1 = sequentialCount_V1(vec, vecLen);
        int sc_v2 = sequentialCount_V2(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(sc_v1, sc_v2);
    }
}

TEST(equivalense_of_sequential_count, on_positive_vect) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 1000;
        int* vec = new int[vecLen];
        std::fill_n(vec, vecLen, 1);
        int sc_v1 = sequentialCount_V1(vec, vecLen);
        int sc_v2 = sequentialCount_V2(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(sc_v1, sc_v2);
    }
}

TEST(equivalense_of_sequential_count, on_rand_vect) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 1000;
        int *vec = new int[vecLen];
        fillVecWithRandValues(vec, vecLen);
        int sc_v1 = sequentialCount_V1(vec, vecLen);
        int sc_v2 = sequentialCount_V2(vec, vecLen);
        delete[] vec;
        ASSERT_EQ(sc_v1, sc_v2);
    }
}

TEST(RNG_function_check, does_not_contain_0) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 1000;
        const int numOfIterations = 10;
        bool ok = true;
        int* vec = new int[vecLen];
        // Zero has no sign, so the generator must never emit it.
        for (int i = 0; i < numOfIterations; i++) {
            fillVecWithRandValues(vec, vecLen);
            for (int j = 0; j < vecLen; j++) {
                if (vec[j] == 0) {
                    ok = false;
                    break;
                }
            }
            if (!ok) break;
        }
        delete[] vec;
        ASSERT_TRUE(ok);
    }
}

TEST(RNG_function_check, generate_positive_and_negative_numbers) {
    int procRank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &procRank);
    if (procRank == 0) {
        const int vecLen = 30;
        const int numOfIterations = 30;
        bool hasPositive, hasNegative;
        bool ok = true;
        int* vec = new int[vecLen];
        // Every fill must contain at least one value of each sign.
        for (int i = 0; i < numOfIterations; i++) {
            hasPositive = hasNegative = false;
            fillVecWithRandValues(vec, vecLen);
            for (int j = 0; j < vecLen; j++) {
                if (vec[j] > 0) hasPositive = true;
                if (vec[j] < 0) hasNegative = true;
                if (hasPositive && hasNegative)
                    break;
            }
            if (!hasPositive || !hasNegative)
                ok = false;
            if (!ok)
                break;
        }
        delete[] vec;
        ASSERT_TRUE(ok);
    }
}
// Entry point: run the gtest suite under MPI with a rank-aware printer.
int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    MPI_Init(&argc, &argv); // check for MPI_SUCCESS?
    ::testing::AddGlobalTestEnvironment(new GTestMPIListener::MPIEnvironment);
    // Swap the default listeners for the MPI-aware minimalist printer so
    // that results are not duplicated once per rank.
    ::testing::TestEventListeners& listeners = ::testing::UnitTest::GetInstance()->listeners();
    listeners.Release(listeners.default_result_printer());
    listeners.Release(listeners.default_xml_generator());
    listeners.Append(new GTestMPIListener::MPIMinimalistPrinter);
    return RUN_ALL_TESTS();
}
|
# Sign a user in through the UI: open the home page, follow the login
# link and submit the credentials form (Capybara DSL).
# NOTE(review): the link label is Russian ("Войти") while the submit
# button is English ("Log in") — confirm this matches the app's locale.
def login(email, password)
  visit root_path
  click_link "Войти"
  fill_in :user_email, with: email
  fill_in :user_password, with: password
  click_button "Log in"
end
|
# laravel-formrequest-singleton
Use Laravel's excellent FormRequest as Singleton
## Installation
* run `composer require lextira/laravel-formrequest-singleton`
* open `config/app.php` in your project
* replace `Illuminate\Foundation\Providers\FoundationServiceProvider::class`
with `Lextira\FormRequestSingleton\FoundationServiceProvider::class`
* done!
## Usage
All classes that extend `Illuminate\Foundation\Http\FormRequest` are now instantiated as singletons.
This brings the following benefits:
* Changes done to the request by `prepareForValidation()` are applied only once,
even if the FormRequest is used multiple times.
* The request validation is run only once, therefore especially database queries run only once.
|
<?php
namespace Biskuit\View\Helper;
use Biskuit\View\View;
class DataHelper implements HelperInterface
{
    /**
     * Named values rendered as global JavaScript variables in the head.
     *
     * @var array
     */
    protected $data = [];

    /**
     * Encode <, >, ', &, and " for RFC4627-compliant JSON, which may also be embedded into HTML.
     * 15 === JSON_HEX_TAG | JSON_HEX_APOS | JSON_HEX_AMP | JSON_HEX_QUOT
     *
     * @var int
     */
    protected $encodingOptions = 15;

    /**
     * {@inheritdoc}
     *
     * Hooks into the view's 'head' event: fires 'data' so listeners can
     * contribute values, then emits the rendered script tags.
     */
    public function register(View $view)
    {
        $view->on('head', function ($event) use ($view) {
            $view->trigger('data', [$this]);
            $event->addResult($this->render());
        }, 10);
    }

    /**
     * Add shortcut.
     *
     * @see add()
     */
    public function __invoke($name, $value)
    {
        $this->add($name, $value);
    }

    /**
     * Gets the data values or a value by name.
     *
     * @param null|string $name
     * @return array
     */
    public function get($name = null)
    {
        if ($name === null) {
            return $this->data;
        }

        return isset($this->data[$name]) ? $this->data[$name] : null;
    }

    /**
     * Adds a data value, merging recursively into an existing array value.
     *
     * Fixes: now actually returns $this as the docblock promised, and the
     * recursive merge is only attempted when both sides are arrays —
     * previously passing a non-array $value over an existing array value
     * was a fatal TypeError in array_replace_recursive(); it now simply
     * overwrites.
     *
     * @param string $name
     * @param mixed $value
     * @return self
     */
    public function add($name, $value)
    {
        if (isset($this->data[$name]) && is_array($this->data[$name]) && is_array($value)) {
            $value = array_replace_recursive($this->data[$name], $value);
        }

        $this->data[$name] = $value;

        return $this;
    }

    /**
     * Renders the data tags, one <script> per value, JSON-encoded with
     * HTML-safe escaping (see $encodingOptions).
     *
     * @return string
     */
    public function render()
    {
        $output = '';

        foreach ($this->data as $name => $value) {
            $output .= sprintf(" <script>var %s = %s;</script>\n", $name, json_encode($value, $this->encodingOptions));
        }

        return $output;
    }

    /**
     * {@inheritdoc}
     */
    public function getName()
    {
        return 'data';
    }
}
|
# Mixin exposing FastDFS upload/delete helpers backed by one shared
# tracker connection (class variable, shared across all includers).
module FastdfsClient
  extend ActiveSupport::Concern

  included do
    # NOTE(review): this delegates #upload to the class, but the class
    # methods below are named fdfs_upload/fdfs_delete — confirm the
    # delegated method name actually exists on the class.
    delegate :upload, to: self.class
  end

  module ClassMethods
    # option = Rails.application.secrets.fastdfs
    # puts "option = #{Rails.application.secrets}"
    # NOTE(review): tracker host/port are hard-coded; the commented lines
    # above suggest they were meant to come from Rails secrets — confirm.
    @@tracker = Fastdfs::Client::Tracker.new(trackers: {host: "49.232.151.122", port: 22122})

    # Upload a file through the shared tracker.
    def fdfs_upload(file)
      @@tracker.upload(file)
    end

    # Delete a previously uploaded image by its storage path and group.
    def fdfs_delete(image)
      @@tracker.delete(image.path, image.group)
    end
  end
end
|
// Seven marker annotations (A0..A6) applicable to value parameters and
// types; presumably a compiler/tooling fixture exercising type-annotation
// positions — TODO confirm before refactoring.
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A0
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A1
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A2
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A3
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A4
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A5
@Target(AnnotationTarget.VALUE_PARAMETER, AnnotationTarget.TYPE)
annotation class A6

interface P<T, K>
class X
class Y

class klass {
    // NOTE(review): returns a String literal where the declared return
    // type is X — this cannot type-check as ordinary code, so it is
    // likely an intentional diagnostics fixture; confirm before "fixing".
    fun annotatedMethod(x: @A0 P<@A1 X, P<@A2 @A3 X, @A4 Y>>, y: Array<@A5 Y>): @A6 X {
        return ""
    }
}
|
package tifmo
import dcstree.WordBase
import mylib.res.en.EnStopWords
package main.en {

  // An English word token: lemma, POS tag, NER label and grammatical number.
  case class EnWord(lemma: String, mypos: String, ner: String, isSingleton: Boolean) extends WordBase {

    // Stable identifier: non-alphanumerics collapsed to '_', POS appended.
    override def toString = lemma.replaceAll("[^a-zA-Z0-9]", "_") + "_" + mypos

    // Stop-word test only applies to non-entity tokens (NER "O" or "NUMBER").
    def isStopWord = (ner == "O" || ner == "NUMBER") && EnStopWords.isStopWord(lemma)

    // Named entity = any NER label outside these non-entity classes.
    def isNamedEntity = !Set("O", "NUMBER", "DATE", "TIME").contains(ner)
  }
}
|
// Video player bootstrap: load the playlist into video.js, render the
// details panel for the current clip, and wire up view tracking.
var videoId = $(".video-js").attr('id');
var player = videojs(videoId);
var videoList = JSON.parse($("#videoList").val());

// Render the metadata panel for a clip and fire the click-tracking event.
// (Extracted helper: this block was duplicated verbatim for the initial
// load and for every 'playlistitem' change.)
function updateVideoDetails(video) {
    var details = $(".video-details");
    details.find("#video-title").text(video.name);
    details.find("#video-views").text(video.views);
    details.find("#video-since").text(video.sinceCreated);
    details.find("#video-description").text(video.description);
    //send the 'video load'
    details.find("#clicktrack_link").attr('data-video-id', video.id).trigger('click');
}

player.playlist(videoList); //load the playlist into the video player
var video = videoList[0]; // load the first video
updateVideoDetails(video);
$("#video_id").val( video.id );

// Initialize the playlist-ui plugin with no option (i.e. the defaults).
player.playlistUi();
player.playlist.autoadvance(0);

player.on('loadedmetadata', function() {
    var duration = player.duration();
    var index = player.playlist.currentItem();
    var current = videoList[index];
    trackVideo(duration, current.id); //start the timer to track the watch
});

player.on('playlistitem', function() {
    updateVideoDetails(videoList[player.playlist.currentItem()]);
});
|
; RUN: llc -march=r600 -mcpu=redwood -verify-machineinstrs < %s
declare float @llvm.AMDGPU.dp4(<4 x float>, <4 x float>) nounwind readnone
; Compute the dot product of two <4 x float> vectors through the AMDGPU
; dp4 intrinsic and store the scalar result.
define void @test_dp4(float addrspace(1)* %out, <4 x float> addrspace(1)* %a, <4 x float> addrspace(1)* %b) nounwind {
  %src0 = load <4 x float>, <4 x float> addrspace(1)* %a, align 16
  %src1 = load <4 x float>, <4 x float> addrspace(1)* %b, align 16
  %dp4 = call float @llvm.AMDGPU.dp4(<4 x float> %src0, <4 x float> %src1) nounwind readnone
  store float %dp4, float addrspace(1)* %out, align 4
  ret void
}
|
#!/usr/bin/env bash
#
# Helper functions for running processes
# jaagr <c@rlberg.se>
#
include utils/log/defer.sh
include utils/spinner.sh
# proc::wait <pid> [message]
# Block until the given PID exits, drawing a spinner while waiting.
# Relies on helpers from the included utils (spinner.sh, log/defer.sh) and
# on ansi::* functions — presumably provided by the surrounding framework;
# TODO confirm.
function proc::wait
{
  local pid="$1" ; shift
  local outbuf
  local -i num=0
  ansi::extend_buffer
  log::defer "${1:-waiting for pid} $(ansi::colorize "31" "$pid") $(ansi::save_position)"
  # Poll /proc/<pid>; redraw the spinner frame every 150 ms.
  while [ -d "/proc/${pid}" ]; do
    ansi::restore_position
    spinner::get outbuf num "spin_11"
    echo -e "]──${outbuf} "
    sleep 0.15
  done
  ansi::restore_position
  log::defer::success "Process finished"
}
# proc::run_and_wait <command>
# Run a shell command in the background and wait for it with a spinner.
function proc::run_and_wait
{
  local command="$1"
  "$SHELL" -c "$command" &
  proc::wait "$!"
}
|
/*
* Stream Cipher
* (C) 1999-2010 Jack Lloyd
*
* Distributed under the terms of the Botan license
*/
#include <botan/stream_cipher.h>
namespace Botan {

/*
* Default IV handling: a plain stream cipher has no IV, so any attempt to
* set a non-empty one is rejected.
* Fix: corrected the misspelled "resyncronization" in the error message.
*/
void StreamCipher::set_iv(const byte[], size_t iv_len)
   {
   if(iv_len)
      throw Invalid_Argument("The stream cipher " + name() +
                             " does not support resynchronization");
   }

/*
* By default only an absent (zero-length) IV is considered valid.
*/
bool StreamCipher::valid_iv_length(size_t iv_len) const
   {
   return (iv_len == 0);
   }

}
|
// Shape of a JSON-RPC style response envelope.
// NOTE(review): carrying both `data` and `result` (each typed `any`) is
// unusual for JSON-RPC 2.0 — confirm which fields the server populates.
export interface RPCResponse {
  data: any
  error: any
  result: any
  id: number
}
|
import numpy as np
import matplotlib.pyplot as plt
from load_binary_files import training_ims, training_labels
from init_functions import p_matrix, w_connectivity
from corrup import distort_binnary
# Select quantity of data to use
N_data_total = len(training_labels)
percentage = 0.05
N_to_use = int(percentage * N_data_total)

# Decide how much data to use
X = training_ims[0:N_to_use]
Y = training_labels[0:N_to_use]

# First we need to calculate the probabilities w and beta.
p_vector = np.mean(X, axis=0)
# Fix: the original rebound the imported function name `p_matrix` to its
# own result; use a distinct name for the joint-probability matrix.
P = p_matrix(X)

# Fix: beta must be a copy — the original aliased p_vector, so patching
# the zero entries below silently mutated p_vector as well.
beta = p_vector.copy()
beta[beta == 0] = 1.0 / N_to_use ** 2
w = w_connectivity(p_vector, P, N_to_use)

# Now we create a distorted pattern to recover.
label = 2
percentage = 0.1
dis_pattern = distort_binnary(label, percentage, X)

# Relaxation loop. Fix: the original read `o` before ever assigning it
# (NameError on the first iteration); start from the distorted pattern.
o = dis_pattern
log_beta = np.log(beta)
G = 1.1
iterations = 1000
for t in range(iterations):
    s = log_beta + np.log(np.dot(w, o))
    o = np.exp(G * s) / np.sum(np.exp(G * s))

# Show the original, the relaxed output and the final support values.
# Fix: `pattern` was undefined in the original; display the pattern whose
# index we distorted (assumes distort_binnary distorts X[label] — confirm).
plt.subplot(1, 3, 1)
plt.imshow(X[label].reshape((28, 28)))
plt.title('Original')
plt.subplot(1, 3, 2)
plt.imshow(o.reshape((28, 28)))
plt.title('O')
plt.subplot(1, 3, 3)
plt.imshow(s.reshape((28, 28)))
plt.title('S')
plt.show()
|
// eslint-disable-next-line import/no-extraneous-dependencies
import { ToastOptions } from '@ionic/core';
import Observable from 'zen-observable';
/*
export interface ToastOptions {
header?: string;
message?: string;
cssClass?: string | string[];
duration?: number;
buttons?: (ToastButton | string)[];
showCloseButton?: boolean;
closeButtonText?: string;
position?: 'top' | 'bottom' | 'middle';
translucent?: boolean;
animated?: boolean;
color?: Color;
mode?: Mode;
keyboardClose?: boolean;
id?: string;
enterAnimation?: AnimationBuilder;
leaveAnimation?: AnimationBuilder;
}
export interface ToastButton {
text?: string;
icon?: string;
side?: 'start' | 'end';
role?: 'cancel' | string;
cssClass?: string | string[];
handler?: () => boolean | void | Promise<boolean>;
}
*/
/**
 * Singleton service that pushes toast notifications through an observable
 * stream; UI code subscribes via onNextToast and renders each ToastOptions.
 */
class ToastService {
  private observable: Observable<ToastOptions>;

  private subscriber!: ZenObservable.SubscriptionObserver<ToastOptions>;

  constructor() {
    this.observable = new Observable<ToastOptions>(emitter => {
      this.subscriber = emitter;
    });
  }

  /** Default display duration (ms) for error toasts. */
  public ERROR_DEFAULT_TIMEOUT: number | undefined = 3000;

  /** Register a callback invoked for every toast pushed through the service. */
  public onNextToast = (onNext: (value: ToastOptions) => void) =>
    this.observable.subscribe(onNext);

  /** Show an informational (primary-colored) toast with a Close button. */
  public showInfo = (message: string, duration?: number) => {
    const options: ToastOptions = {
      message,
      duration,
      color: 'primary',
      buttons: [{ text: 'Close', role: 'cancel' }],
    };
    this.showToast(options);
  };

  /** Show an error (danger-colored) toast; falls back to ERROR_DEFAULT_TIMEOUT. */
  public showError = (error: string | Error, timeout?: number) => {
    const message = typeof error !== 'string' ? error.message : error;
    const duration = timeout ?? this.ERROR_DEFAULT_TIMEOUT;
    const options: ToastOptions = {
      message,
      duration,
      color: 'danger',
      buttons: [{ text: 'Close', role: 'cancel' }],
    };
    this.showToast(options);
  };

  /** Emit raw toast options to the current subscriber. */
  public showToast = (options: ToastOptions) => {
    this.subscriber.next(options);
  };
}

export default new ToastService();
|
package api
import (
"bytes"
"encoding/json"
"math/rand"
"net/http"
"time"
"github.com/Cocos-BCX/cocos-go/logging"
"github.com/juju/errors"
"github.com/pquerna/ffjson/ffjson"
)
// RPCClient allows you to access wallet functions over JSON-RPC via HTTP.
type RPCClient interface {
	// CallAPI invokes the named RPC method with the given positional
	// arguments and returns the raw JSON result.
	CallAPI(method string, args ...interface{}) (*json.RawMessage, error)
	// Close releases any resources held by the client.
	Close() error
	// Connect prepares the underlying HTTP client and JSON codecs; it must
	// be called before CallAPI.
	Connect() error
}

// rpcClient is the HTTP-based implementation of RPCClient.
type rpcClient struct {
	*http.Client
	*ffjson.Encoder
	*ffjson.Decoder
	encBuf      *bytes.Buffer // reused buffer holding the encoded request body
	endpointURL string        // JSON-RPC HTTP endpoint URL
	timeout     int           // NOTE(review): unused -- Connect hard-codes a 10s timeout; confirm intent
}
// Connect initializes the HTTP client (10 second timeout) and the ffjson
// encoder/decoder used to serialize requests and parse responses.
func (p *rpcClient) Connect() error {
	buf := new(bytes.Buffer)
	p.encBuf = buf
	p.Encoder = ffjson.NewEncoder(buf)
	p.Decoder = ffjson.NewDecoder()
	p.Client = &http.Client{Timeout: 10 * time.Second}
	return nil
}
// Close implements RPCClient. The HTTP transport requires no explicit
// shutdown, so this is a no-op that always succeeds.
func (p *rpcClient) Close() error {
	return nil
}
// CallAPI performs a JSON-RPC call over HTTP POST: it encodes the request
// into the shared buffer, sends it to the configured endpoint, decodes the
// response envelope, and returns the raw result (or the server-side error).
// Connect must have been called first. Not safe for concurrent use: the
// encode buffer is shared per client.
func (p *rpcClient) CallAPI(method string, args ...interface{}) (*json.RawMessage, error) {
	req := rpcRequest{
		Method: method,
		ID:     rand.Uint64(),
		Params: args,
	}

	// BUG FIX: reset the shared encode buffer before encoding. A previous
	// call that failed after encoding (or whose body was never drained)
	// would otherwise leave stale bytes that corrupt this request.
	p.encBuf.Reset()
	if err := p.Encode(req); err != nil {
		return nil, errors.Annotate(err, "Encode")
	}

	logging.DDumpJSON("rpc req >", req)

	r, err := http.NewRequest("POST", p.endpointURL, p.encBuf)
	if err != nil {
		return nil, errors.Annotate(err, "NewRequest")
	}

	// One-shot connection; both sides speak JSON.
	r.Close = true
	r.Header.Set("Content-Type", "application/json")
	r.Header.Set("Accept", "application/json")

	resp, err := p.Do(r)
	if err != nil {
		return nil, errors.Annotate(err, "do request")
	}
	defer resp.Body.Close()

	var ret rpcResponseString
	if err := p.DecodeReader(resp.Body, &ret); err != nil {
		return nil, errors.Annotate(err, "Decode")
	}

	// A populated Error field means the server rejected the call.
	if ret.Error != nil {
		return nil, ret.Error
	}

	logging.DDumpJSON("rpc resp <", ret.Result)
	return ret.Result, nil
}
// NewRPCClient creates a new RPC Client bound to the given endpoint URL.
// Call Connect on the returned client before issuing requests.
func NewRPCClient(rpcEndpointURL string) RPCClient {
	return &rpcClient{endpointURL: rpcEndpointURL}
}
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.areaCircunferencia = void 0;
const PI = 3.1416;
const areaCircunferencia = (raio) => {
return PI * Math.pow(raio, 2);
};
exports.areaCircunferencia = areaCircunferencia;
//# sourceMappingURL=Circunferencia.js.map
|
/**
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* @format
*/
import * as crypto from 'crypto';
/**
 * Hash content into the form expected in a manifest entry.
 *
 * Line endings are normalized to CRLF before hashing, so the digest does not
 * depend on whether the input used LF or CRLF newlines.
 */
export function hashContent(str: string) {
  // `/\r?\n/g` rewrites bare LF to CRLF and rewrites existing CRLF to
  // itself -- behaviorally identical to the lookbehind form `/(?<!\r)\n/g`.
  const normalized = str.replace(/\r?\n/g, '\r\n');
  return crypto.createHash('sha1').update(normalized).digest('hex');
}
|
# Auto-generated Sequel schema migration (schema-dump style).
Sequel.migration do
  change do
    # Comments attached to posts. post_id is a plain bigint column; no
    # foreign-key constraint is declared here.
    create_table(:comments) do
      primary_key :id, :type=>:Bignum
      column :post_id, "bigint", :null=>false
      column :body, "character varying", :null=>false
      column :author_name, "character varying", :null=>false
      column :created_at, "timestamp(6) without time zone", :null=>false
      column :updated_at, "timestamp(6) without time zone", :null=>false
    end

    # Bookkeeping table used by Sequel's timestamped migrator.
    create_table(:schema_migrations) do
      column :filename, "text", :null=>false
      primary_key [:filename]
    end
  end
end
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Xunit.Internal;
using Xunit.Sdk;
namespace Xunit.v3
{
/// <summary>
/// A reusable implementation of <see cref="_ITestFrameworkExecutor"/> which contains the basic behavior
/// for running tests.
/// </summary>
/// <typeparam name="TTestCase">The type of the test case used by the test framework. Must
/// derive from <see cref="_ITestCase"/>.</typeparam>
public abstract class TestFrameworkExecutor<TTestCase> : _ITestFrameworkExecutor, IAsyncDisposable
	where TTestCase : _ITestCase
{
	// Backing field for AssemblyInfo; null-guarded in the ctor and setter.
	_IReflectionAssemblyInfo assemblyInfo;

	// Set once DisposeAsync has run; a second disposal throws.
	bool disposed;

	/// <summary>
	/// Initializes a new instance of the <see cref="TestFrameworkExecutor{TTestCase}"/> class.
	/// </summary>
	/// <param name="assemblyInfo">The test assembly.</param>
	protected TestFrameworkExecutor(_IReflectionAssemblyInfo assemblyInfo)
	{
		this.assemblyInfo = Guard.ArgumentNotNull(assemblyInfo);
	}

	/// <summary>
	/// Gets the assembly information of the assembly under test.
	/// </summary>
	protected _IReflectionAssemblyInfo AssemblyInfo
	{
		get => assemblyInfo;
		set => assemblyInfo = Guard.ArgumentNotNull(value, nameof(AssemblyInfo));
	}

	/// <summary>
	/// Gets the disposal tracker for the test framework discoverer.
	/// </summary>
	protected DisposalTracker DisposalTracker { get; } = new();

	/// <summary>
	/// Override to create a test framework discoverer that can be used to discover
	/// tests when the user asks to run all test.
	/// </summary>
	/// <returns>The test framework discoverer</returns>
	protected abstract _ITestFrameworkDiscoverer CreateDiscoverer();

	/// <inheritdoc/>
	public virtual ValueTask DisposeAsync()
	{
		if (disposed)
			throw new ObjectDisposedException(GetType().FullName);

		disposed = true;

		return DisposalTracker.DisposeAsync();
	}

	/// <inheritdoc/>
	public ValueTask RunAll(
		_IMessageSink executionMessageSink,
		_ITestFrameworkDiscoveryOptions discoveryOptions,
		_ITestFrameworkExecutionOptions executionOptions)
	{
		Guard.ArgumentNotNull(executionMessageSink);
		Guard.ArgumentNotNull(discoveryOptions);
		Guard.ArgumentNotNull(executionOptions);

		// Discovery + execution happen on a thread-pool thread; the TCS
		// bridges completion (or the first exception) back to the caller.
		var tcs = new TaskCompletionSource<object?>();

		ThreadPool.QueueUserWorkItem(async _ =>
		{
			try
			{
				// This tracker is local to the run: the discoverer created
				// below is disposed when the async-using scope exits.
				await using var tracker = new DisposalTracker();

				var discoverer = CreateDiscoverer();
				tracker.Add(discoverer);

				// Collect all discovered test cases before execution begins.
				var testCases = new List<TTestCase>();
				await discoverer.Find(
					testCase => { testCases.Add((TTestCase)testCase); return new(true); },
					discoveryOptions
				);

				// Restore working directory and culture around the run.
				using (new PreserveWorkingFolder(AssemblyInfo))
				using (new CultureOverride(executionOptions.Culture()))
					await RunTestCases(testCases, executionMessageSink, executionOptions);

				tcs.SetResult(null);
			}
			catch (Exception ex)
			{
				tcs.SetException(ex);
			}
		});

		return new(tcs.Task);
	}

	/// <summary>
	/// Runs an already-discovered collection of strongly-typed test cases.
	/// </summary>
	public abstract ValueTask RunTestCases(
		IReadOnlyCollection<TTestCase> testCases,
		_IMessageSink executionMessageSink,
		_ITestFrameworkExecutionOptions executionOptions
	);

	// Explicit interface implementation: accepts untyped test cases, casts
	// them to TTestCase, and executes on a thread-pool thread (mirrors the
	// execution half of RunAll, without discovery).
	ValueTask _ITestFrameworkExecutor.RunTestCases(
		IReadOnlyCollection<_ITestCase> testCases,
		_IMessageSink executionMessageSink,
		_ITestFrameworkExecutionOptions executionOptions)
	{
		Guard.ArgumentNotNull(testCases);
		Guard.ArgumentNotNull(executionMessageSink);
		Guard.ArgumentNotNull(executionOptions);

		var tcs = new TaskCompletionSource<object?>();

		ThreadPool.QueueUserWorkItem(async _ =>
		{
			try
			{
				// Restore working directory and culture around the run.
				using (new PreserveWorkingFolder(AssemblyInfo))
				using (new CultureOverride(executionOptions.Culture()))
					await RunTestCases(testCases.Cast<TTestCase>().CastOrToReadOnlyCollection(), executionMessageSink, executionOptions);

				tcs.SetResult(null);
			}
			catch (Exception ex)
			{
				tcs.SetException(ex);
			}
		});

		return new(tcs.Task);
	}
}
}
|
# Registers the Ansible provisioner with Vagrant.
# NOTE(review): `Vagrant.provisioners.register` is the old (pre-plugin-V2)
# registration API -- confirm the targeted Vagrant version supports it.
require 'vagrant'
require 'vagrant-ansible/provisioner'

# Makes `config.vm.provision :ansible` available in Vagrantfiles.
Vagrant.provisioners.register(:ansible, Vagrant::Provisioners::Ansible)
|
#pragma once

#include <vector>

#include "base/Mesh.hpp"

class IRenderingDevice;
class Material;
class IProgram;

// Singleton that creates and tracks renderer resources (materials, meshes).
class ResourceManager {
public:
    // Global accessor for the single ResourceManager instance.
    static ResourceManager& instance();

    // Stores the rendering device used for resource creation.
    // Presumably returns false on failure -- definition not visible here.
    bool init(IRenderingDevice& renderingDevice);

    // Creates a material from the named shader program.
    Material* createMaterial(const char* programName);

    // Creates a built-in primitive mesh (see Mesh::Primitive).
    Mesh* createMesh(Mesh::Primitive primitive);
    // Loads a mesh from the given file.
    Mesh* createMesh(const char* fileName);

private:
    IRenderingDevice* _renderingDevice; // not owned by this class
    // Raw pointers are retained here; ownership/cleanup policy is not
    // visible in this header -- TODO confirm in the implementation.
    std::vector<Material*> _materials;
    std::vector<Mesh*> _meshes;
};
|
using System.Collections.Generic;
using System.Threading.Tasks;
using LMSEntities.DataTransferObjects;
using LMSEntities.Helpers;
using LMSEntities.Models;
namespace LMSContracts.Interfaces
{
    /// <summary>
    /// Application-service operations for managing book authors.
    /// </summary>
    public interface IAuthorService
    {
        /// <summary>Adds a new author; returns the stored representation.</summary>
        Task<AuthorDto> AddAuthor(AuthorDto authorDto);

        /// <summary>Deletes the author with the given id; outcome is reported via the response handler.</summary>
        Task<LmsResponseHandler<AuthorDto>> DeleteAuthor(int authorId);

        /// <summary>Applies the edits carried by the DTO to an existing author.</summary>
        Task<LmsResponseHandler<AuthorDto>> EditAuthor(AuthorDto authorDto);

        /// <summary>Fetches a single author for controller consumption.</summary>
        Task<LmsResponseHandler<AuthorDto>> GetAuthorForController(int authorId);

        /// <summary>Returns one page of authors according to the pagination parameters.</summary>
        Task<PagedList<AuthorDto>> GetPaginatedAuthors(PaginationParams paginationParams);
    }
}
|
package com.plop.bankingkotlin.buildingBlocks
import kotlin.reflect.KClass
/**
 * Dispatches domain events to every [EventHandler] registered for the
 * event's concrete runtime class.
 */
class EventDispatcher(eventHandlers: List<EventHandler<out DomainEvent>>) : EventBusMiddleware {

    // Fixed at construction: handlers grouped by the event class they handle.
    // (Was a `var` initialized to an empty map and reassigned in `init`; a
    // directly-initialized `val` expresses the immutability and drops the
    // throwaway map. Also renamed: these are event handlers, not command
    // handlers.)
    private val registeredEventHandlers: Map<KClass<out DomainEvent>, List<EventHandler<out DomainEvent>>> =
        eventHandlers.groupBy { it.isAssignedTo() }

    /** Invoke every handler registered for [event]'s exact class; no-op if none. */
    override fun <E : DomainEvent> dispatch(event: E) {
        registeredEventHandlers[event::class]
            ?.map { castHandler<E>(it) }
            ?.forEach { it.handle(event) }
    }

    // Safe in practice: handlers are looked up by the event's own class.
    @Suppress("UNCHECKED_CAST")
    private fun <E : DomainEvent> castHandler(it: EventHandler<out DomainEvent>) = it as EventHandler<E>
}
|
# frozen_string_literal: true
module Formatters
# Used to help format arrays of database responses for numeric variables.
class NumericFormatter < DomainFormatter
  # Resolve a database response to its numeric representation:
  # - the matching domain option's value, when one exists;
  # - the response unchanged, when it is blank;
  # - otherwise the response coerced with Float(), falling back to the raw
  #   response if anything in the method raises.
  def raw_response(response, shared_responses = domain_options)
    matched = shared_responses.find { |option| option.value == response }
    return matched.value if matched
    return response if response.blank?

    Float(response)
  rescue
    response
  end
end
end
|
package com.guru.composecookbook.tiktok.components.home
import android.content.Context
import android.net.Uri
import androidx.compose.runtime.Composable
import androidx.compose.runtime.DisposableEffect
import androidx.compose.runtime.remember
import androidx.compose.ui.viewinterop.AndroidView
import com.google.android.exoplayer2.C
import com.google.android.exoplayer2.Player
import com.google.android.exoplayer2.SimpleExoPlayer
import com.google.android.exoplayer2.source.ProgressiveMediaSource
import com.google.android.exoplayer2.ui.AspectRatioFrameLayout
import com.google.android.exoplayer2.ui.PlayerView
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory
@Composable
fun TikTokPlayer(context: Context, url: String, selected: Boolean) {
    // Player is created once per composition and cached across
    // recompositions; the media source streams the clip from app assets
    // ("asset:///<url>") and is prepared immediately.
    val tiktokPlayer = remember {
        SimpleExoPlayer.Builder(context)
            .build()
            .apply {
                val mediaSource = ProgressiveMediaSource.Factory(
                    DefaultDataSourceFactory(context, "composeCookBook")
                )
                    .createMediaSource(Uri.parse("asset:///${url}"))
                this.prepare(mediaSource)
            }
    }
    // Crop-to-fill scaling and endless looping. These setters run on every
    // recomposition (cheap property assignments).
    tiktokPlayer.videoScalingMode = C.VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING
    tiktokPlayer.repeatMode = Player.REPEAT_MODE_ONE
    // Host the platform PlayerView with controls hidden and zoomed resize.
    AndroidView({
        PlayerView(it).apply {
            useController = false
            player = tiktokPlayer
            resizeMode = AspectRatioFrameLayout.RESIZE_MODE_ZOOM
        }
    })
    // Play only while this item is the selected one (per the caller).
    tiktokPlayer.playWhenReady = selected
    // NOTE(review): the player is remembered without `url` as a key, while
    // the DisposableEffect IS keyed on `url`; if `url` ever changed for a
    // live composable, the old player would be released yet still
    // remembered. Confirm `url` is stable for the composable's lifetime.
    DisposableEffect(key1 = url) {
        onDispose {
            tiktokPlayer.release()
        }
    }
}
|
# BiomeEdit
An experimental Forge mod for setting biome values over coordinate areas.
## Command Usage
- `/blist` prints list of all available biome names.
- `/bget` displays biome name for the block at your current location.
- `/bget <x> <z>` displays biome name for the block at (x,z).
- `/bset <x1> <z1> <x2> <z2> <biomename>` sets the specified biome for every block in the rectangular area from (x1,z1) to (x2,z2).
|
package offchainreporting
import (
"context"
"database/sql"
sqlds "github.com/ipfs/go-ds-sql"
pgqueries "github.com/ipfs/go-ds-sql/postgres"
p2ppeerstore "github.com/libp2p/go-libp2p-core/peerstore"
p2ppeerstoreds "github.com/libp2p/go-libp2p-peerstore/pstoreds"
)
const tableName = "p2p_peerstore"
// NewPeerstore creates a new database-backed libp2p peerstore persisted in
// the "p2p_peerstore" Postgres table of the given database.
// NOTE: You can get sql.DB from store with store.DB.DB()
func NewPeerstore(ctx context.Context, db *sql.DB) (p2ppeerstore.Peerstore, error) {
	ds := sqlds.NewDatastore(db, pgqueries.NewQueries(tableName))
	return p2ppeerstoreds.NewPeerstore(ctx, ds, p2ppeerstoreds.DefaultOpts())
}
|
; ModuleID = '/home/david/src/c-semantics/tests/cil/test7.c'
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"
@y = common global i8 0, align 1
@x = common global i32 0, align 4
@.str = private unnamed_addr constant [23 x i8] c"hello world - x is %d\0A\00", align 1
; Compiler-generated IR (legacy pre-3.7 typed-pointer syntax; do not modernize).
; main: y = 7; x = (int)y; if (argc) x = -x; printf("hello world - x is %d\n", x);
define i32 @main(i32 %argc, i8** %argv) nounwind uwtable {
entry:
  ; Spill incoming arguments to stack slots (unoptimized -O0 pattern).
  %retval = alloca i32, align 4
  %argc.addr = alloca i32, align 4
  %argv.addr = alloca i8**, align 8
  store i32 0, i32* %retval
  store i32 %argc, i32* %argc.addr, align 4
  store i8** %argv, i8*** %argv.addr, align 8
  ; y = 7; x = (int)y  -- sign-extended char-to-int conversion
  store i8 7, i8* @y, align 1
  %0 = load i8* @y, align 1
  %conv = sext i8 %0 to i32
  store i32 %conv, i32* @x, align 4
  ; if (argc != 0) branch to the negation block
  %1 = load i32* %argc.addr, align 4
  %tobool = icmp ne i32 %1, 0
  br i1 %tobool, label %if.then, label %if.end

if.then:                                          ; preds = %entry
  ; x = 0 - x (nsw: signed overflow is undefined)
  %2 = load i32* @x, align 4
  %sub = sub nsw i32 0, %2
  store i32 %sub, i32* @x, align 4
  br label %if.end

if.end:                                           ; preds = %if.then, %entry
  ; printf(@.str, x); return 0
  %3 = load i32* @x, align 4
  %call = call i32 (i8*, ...)* @printf(i8* getelementptr inbounds ([23 x i8]* @.str, i32 0, i32 0), i32 %3)
  ret i32 0
}
declare i32 @printf(i8*, ...)
|
# frozen_string_literal: true
require "flow/base/atomic_boolean"
require "flow/errors"
module Flow
module Base
#
# Serializes signal handling: callers enqueue named signals (with optional
# arguments) via {#signal}, and a single main loop -- scheduled through the
# configured runner, usually a new Thread -- drains the queue, invoking one
# registered callback per signal on the target object.
#
class Signaller
  #
  # @param target [Object]
  #   Target object to receive asynchronous signal handler callbacks as the
  #   pending signals queue is processed. Generally also the submitter of
  #   pending signals. Required if *any* signals are provided by name,
  #   rather than as a map from symbol to proc callback.
  #
  # @param signals [Array<Symbol, Hash{Symbol => Proc}>]
  #   Names of signals to handle, assuming a convention that the target's
  #   signal handler functions are the prefix "do_" followed by the signal
  #   name. For exceptions to this rule, just provide a Hash mapping signal
  #   names to their callbacks.
  #
  # @param num_per_run [Integer]
  #   How many signals the main loop will process in one run before
  #   rescheduling itself to run again.
  #
  # @param runner [Runner]
  #   How we'll run the signaller's main loop (usually asynchronously).
  #
  def initialize(
    target:,
    signals:,
    runner: Thread.method(:new),
    num_per_run: 2,
    on_error: Logger.new($stderr).method(:error)
  )
    # Error handling is solely via `on_error`
    @on_error = on_error

    # Signals and their mapping to callbacks (usually methods on the target)
    @callbacks_by_signal = setup_callbacks(signals, target)

    # Configure how we'll run the signaller's main loop
    @runner = runner
    @num_per_run = num_per_run

    # Only one signaler main loop can run at a time
    @is_running = AtomicBoolean.new
    @is_cancelled = AtomicBoolean.new
    @pending_signals = Queue.new
    @main_loop = method(:main_loop).to_proc
  end

  #
  # Add a signal to the queue, to be processed asynchronously.
  #
  # @param name [Symbol]
  #   Name of the signal to process
  #
  # @param args [Array]
  #   Optional arguments to be passed to the signal's callback
  #
  def signal(name, *args)
    # Bare names are enqueued as Symbols; with args, as [name, args] pairs.
    @pending_signals << (args.empty? ? name : [name, args])
    try_to_run_main_loop
  end

  #
  # Cancel this signal processing queue.
  #
  # Usually called from inside a signal callback method on the `target`,
  # since usually the target wants to complete its own clean-up in
  # addition to stopping the signal-processing loop.
  #
  # After cancelling:
  # 1. Calls to {#signal} will raise an exception
  # 2. No callbacks will be made for pending signals, but the queue will
  #    be drained
  #
  # NOTE(review): as written, {#signal} does not obviously raise after
  # cancellation (the queue still accepts pushes); confirm item 1 against
  # the intended behavior.
  #
  def cancel
    @is_cancelled.value = true
  end

  # @return [Boolean] whether {#cancel} has been called.
  def cancelled?
    @is_cancelled.value
  end

  private

  # By default, signal names are prefixed to find callback methods on target
  CALLBACK_PREFIX = "do_"

  # An instance of an anonymous class, to compare equal only to itself
  NO_PENDING_SIGNALS = Class.new.new.freeze

  # The main signal processing loop
  def main_loop
    # Establishes a happens-before relationship with end of previous run
    return unless @is_running.value

    # Process at most @num_per_run signals, or stop early when the queue
    # is observed empty.
    num_left = @num_per_run
    loop do
      next_signal, args = next_pending_signal
      call_signal_callback(next_signal, args)
      break if (num_left -= 1).zero? || NO_PENDING_SIGNALS == next_signal
    end
  rescue StandardError => error
    terminate_due_to(error)
  ensure
    # Establishes a happens-before relationship with beginning of next run
    @is_running.value = false

    # If we still have signals to process, schedule ourselves to run again
    try_to_run_main_loop unless @pending_signals.empty?
  end

  # @return [NO_PENDING_SIGNALS, Symbol, Array(Symbol, Array)]
  #   Return the next pending signal (optionally with arguments) in the
  #   queue, or a special value indicating there are no pending signals.
  #
  def next_pending_signal
    @pending_signals.pop(true)
  rescue ThreadError # Indicates that the queue is empty
    NO_PENDING_SIGNALS
  end

  # Invoke the callback registered for +next_signal+. No-op when cancelled
  # or when the queue was empty; raises Flow::Error on unknown signals.
  def call_signal_callback(next_signal, args)
    return if @is_cancelled.value || NO_PENDING_SIGNALS == next_signal

    signal_callback = @callbacks_by_signal[next_signal] || begin
      msg = "#{self.class.name} received an unrecognized signal: "\
            "#{next_signal.inspect}"
      raise Flow::Error, msg
    end
    signal_callback.call(*args)
  end

  # Schedule the main loop via the runner, unless one is already running.
  def try_to_run_main_loop
    # Only schedule to run if not already running
    @runner.call(&@main_loop) if @is_running.make_true
  rescue StandardError => error
    # If we can't schedule to run, we need to fail gracefully
    terminate_due_to(error)
  end

  # Cancel the queue and report +error+ via the configured error handler.
  def terminate_due_to(error)
    cancel
    @on_error.call(error)
  end

  # @param signals [Array<Symbol, Hash{Symbol => Proc}>]
  # @param target [Object]
  # @return [Hash{Symbol => Proc}] frozen merged signal-to-callback map
  def setup_callbacks(signals, target)
    signals.inject({}) do |hsh, sym_or_hsh|
      hsh.merge(callback_mapping_for(sym_or_hsh, target))
    end.freeze
  end

  # @param sym_or_hsh [Symbol, Hash{Symbol => Proc}]
  # @param target [Object]
  # @return [Hash{Symbol => Proc}]
  def callback_mapping_for(sym_or_hsh, target)
    if sym_or_hsh.is_a? Symbol
      # Convention: a bare :foo maps to target.method(:do_foo)
      callback = target.method("#{CALLBACK_PREFIX}#{sym_or_hsh}".to_sym)
      { sym_or_hsh => callback }
    else
      sym_or_hsh
    end
  end
end
end
end
|
#!/bin/bash

# Warden per-instance network setup: iptables filter/NAT chain management.
# Fail fast on unset variables and errors; trace when DEBUG is set.
[ -n "$DEBUG" ] && set -o xtrace
set -o nounset
set -o errexit
shopt -s nullglob

cd $(dirname "${0}")

# Provides (at least) ${id}, ${network_host_iface}, ${network_container_ip},
# ${network_ifb_iface} -- TODO confirm full contract of etc/config.
source ./etc/config

filter_forward_chain="warden-forward"
filter_default_chain="warden-default"
filter_instance_prefix="warden-i-"
filter_instance_chain="${filter_instance_prefix}${id}"
filter_instance_log_chain="${filter_instance_prefix}${id}-log"
nat_prerouting_chain="warden-prerouting"
nat_instance_prefix="warden-i-"
# FIX: previously built from filter_instance_prefix (copy/paste slip). The
# values are currently identical, but the NAT chain name must track the NAT
# prefix so the two stay independent if one is ever changed.
nat_instance_chain="${nat_instance_prefix}${id}"

# Host IP used as the DNAT destination for inbound port mappings.
external_ip=$(ip route get 1.2.3.4 | sed 's/.*src\s\(.*\)\s/\1/;tx;d;:x')
function teardown_filter() {
  echo "Teardown filter"

  # Prune forward chain: list its rules (-S), keep those that go to (-g)
  # this instance's chain, rewrite each -A into -D, and execute one delete
  # per line.
  iptables -w -S ${filter_forward_chain} 2> /dev/null |
    grep "\-g ${filter_instance_chain}\b" |
    sed -e "s/-A/-D/" |
    xargs --no-run-if-empty --max-lines=1 iptables -w

  # Flush and delete instance chain and its logging chain; the chains may
  # not exist yet, so failures are tolerated.
  iptables -w -F ${filter_instance_chain} 2> /dev/null || true
  iptables -w -X ${filter_instance_chain} 2> /dev/null || true
  iptables -w -F ${filter_instance_log_chain} 2> /dev/null || true
  iptables -w -X ${filter_instance_log_chain} 2> /dev/null || true
}
function setup_filter() {
  # Idempotent: always start from a clean slate.
  teardown_filter

  # Create instance chain; unmatched traffic falls through to the shared
  # default chain.
  iptables -w -N ${filter_instance_chain}
  iptables -w -A ${filter_instance_chain} \
    --goto ${filter_default_chain}

  # Bind instance chain to the forward chain at position 2, for traffic
  # arriving on this instance's host-side interface.
  iptables -w -I ${filter_forward_chain} 2 \
    --in-interface ${network_host_iface} \
    --goto ${filter_instance_chain}

  # Create instance log chain: log new/untracked/invalid TCP packets with
  # the chain name as prefix, then return to the caller.
  iptables -w -N ${filter_instance_log_chain}
  iptables -w -A ${filter_instance_log_chain} \
    -p tcp -m conntrack --ctstate NEW,UNTRACKED,INVALID -j LOG --log-prefix "${filter_instance_chain} "
  iptables -w -A ${filter_instance_log_chain} \
    --jump RETURN
}
function teardown_nat() {
  echo "Teardown nat"

  # Prune prerouting chain: delete any rules that jump (-j) to this
  # instance's NAT chain.
  iptables -w -t nat -S ${nat_prerouting_chain} 2> /dev/null |
    grep "\-j ${nat_instance_chain}\b" |
    sed -e "s/-A/-D/" |
    xargs --no-run-if-empty --max-lines=1 iptables -w -t nat

  # Flush and delete instance chain; tolerate its absence.
  iptables -w -t nat -F ${nat_instance_chain} 2> /dev/null || true
  iptables -w -t nat -X ${nat_instance_chain} 2> /dev/null || true
}
function setup_nat() {
  # Idempotent: always start from a clean slate.
  teardown_nat

  # Create instance chain in the nat table.
  iptables -w -t nat -N ${nat_instance_chain}

  # Bind instance chain to the shared prerouting chain.
  iptables -w -t nat -A ${nat_prerouting_chain} \
    --jump ${nat_instance_chain}
}
# Dispatch on the requested operation (first positional argument).
case "${1}" in
  "setup")
    setup_filter
    setup_nat
    ;;
  "teardown")
    teardown_filter
    teardown_nat
    ;;
  "in")
    # DNAT a host port on the external IP to a port inside the container.
    if [ -z "${HOST_PORT:-}" ]; then
      echo "Please specify HOST_PORT..." 1>&2
      exit 1
    fi
    if [ -z "${CONTAINER_PORT:-}" ]; then
      echo "Please specify CONTAINER_PORT..." 1>&2
      exit 1
    fi
    iptables -w -t nat -A ${nat_instance_chain} \
      --protocol tcp \
      --destination "${external_ip}" \
      --destination-port "${HOST_PORT}" \
      --jump DNAT \
      --to-destination "${network_container_ip}:${CONTAINER_PORT}"
    ;;
  "out")
    # Allow (and optionally log) outbound traffic matching
    # PROTOCOL/NETWORK/PORTS; the rule is inserted at the top of the chain.
    if [ "${PROTOCOL:-}" != "icmp" ] && [ -z "${NETWORK:-}" ] && [ -z "${PORTS:-}" ]; then
      echo "Please specify NETWORK and/or PORTS..." 1>&2
      exit 1
    fi

    opts="--protocol ${PROTOCOL:-tcp}"

    if [ -n "${NETWORK:-}" ]; then
      case ${NETWORK} in
        *-*)
          # Dashed form denotes an address range.
          opts="${opts} -m iprange --dst-range ${NETWORK}"
          ;;
        *)
          opts="${opts} --destination ${NETWORK}"
          ;;
      esac
    fi

    if [ -n "${PORTS:-}" ]; then
      opts="${opts} --destination-port ${PORTS}"
    fi

    # BUG FIX: PROTOCOL, ICMP_TYPE, ICMP_CODE and LOG were referenced
    # unguarded; with `set -o nounset` the script aborted whenever any of
    # them was unset (e.g. a plain NETWORK-only rule).
    if [ "${PROTOCOL:-}" == "icmp" ]; then
      if [ -n "${ICMP_TYPE:-}" ]; then
        opts="${opts} --icmp-type ${ICMP_TYPE}"
        if [ -n "${ICMP_CODE:-}" ]; then
          opts="${opts}/${ICMP_CODE}"
        fi
      fi
    fi

    if [ "${LOG:-}" == "true" ]; then
      target="--goto ${filter_instance_log_chain}"
    else
      target="--jump RETURN"
    fi

    iptables -w -I ${filter_instance_chain} 1 ${opts} ${target}
    ;;
  "get_ingress_info")
    tc qdisc show dev ${network_ifb_iface}
    ;;
  "get_egress_info")
    tc qdisc show dev ${network_host_iface}
    ;;
  *)
    echo "Unknown command: ${1}" 1>&2
    exit 1
    ;;
esac
|
import ShortUniqueId from 'short-unique-id';
// @ts-ignore
import { version } from '../package.json';
// Semantic alias documenting that a number carries a millisecond value.
type milliseconds = number;

// Identifier assigned to each queued payload; used to look up its result.
export type PayloadId = string;

// An async function eligible for queueing: one options object in, promise out.
export type QueueableFunction<O = {[k: string]: any}, R = void> = (options: O) => Promise<R>;

// Synchronous counterpart of QueueableFunction.
export type QueueableSyncFunction<O = {[k: string]: any}, R = void> = (options: O) => R;

// Outcome of one queued invocation: timing, plus either `result` or `error`.
export interface FunctionQueueResult<R = void> {
  id: PayloadId;
  duration: milliseconds;
  startTimestamp: number;
  endTimestamp: number;
  result?: R;
  error?: any;
};

// Tunables for queue processing.
export interface FunctionQueueOptions {
  // Delay inserted before each function run; also the result-polling interval.
  waitTimeBetweenRuns: milliseconds;
  // How long getResult polls before resolving with a timeout error result.
  getResultTimeout: milliseconds;
  // Number of additional attempts allowed after a failed run.
  maxRetries: number;
  // Results older than this are dropped by cleanupResults.
  cleanupResultsOlderThan: milliseconds;
}

// Internal queue entry pairing a payload with its assigned id.
interface FunctionQueueEntry<O = {[k: string]: any}, R = void> {
  id: PayloadId;
  payload: O;
  result?: FunctionQueueResult<R>;
}

// Shared 8-character id generator.
const uid = new ShortUniqueId({length: 8});

// Defaults applied when the constructor receives no/partial options.
const defaultOptions: FunctionQueueOptions = {
  waitTimeBetweenRuns: 100,
  getResultTimeout: 60000,
  maxRetries: 1,
  cleanupResultsOlderThan: 60000,
};

// Promise-based delay helper.
const sleep = (ms: milliseconds) => new Promise(resolve => setTimeout(resolve, ms));
/**
 * Serializes calls to an async function: payloads are queued, executed one
 * at a time (with retries), and their results retained for later retrieval
 * by id via getResult.
 */
class FunctionQueue<O = {[k: string]: any}, R = void> {
  static version = version;

  // The function each queued payload is applied to.
  private _fn: QueueableFunction<O, R>;

  // Pending payloads, in FIFO order.
  private _queue: FunctionQueueEntry<O, R>[] = [];

  private _options: FunctionQueueOptions;

  // True while _processQueue is draining the queue (prevents overlap).
  private _processing: boolean = false;

  // Completed results awaiting pickup via getResult.
  public results: FunctionQueueResult<R>[] = [];

  // Resolves with the accumulated results when the current drain finishes.
  public processQueuePromise: Promise<FunctionQueueResult<R>[]> = Promise.resolve([]);

  constructor(
    fn: QueueableFunction<O, R>,
    options?: Partial<FunctionQueueOptions>,
  ) {
    this._fn = fn;
    this._options = {
      ...defaultOptions,
      ...(options || {}),
    };
  }

  /** Enqueue a payload; returns the id used to fetch its result later. */
  public queuePayload(payload: O): PayloadId {
    const id: PayloadId = uid();
    this._queue.push({payload, id});
    return id;
  }

  /**
   * Run the function for one payload, retrying on error until it succeeds
   * or maxRetries additional attempts are exhausted. Never throws: errors
   * are folded into the returned result object.
   */
  private _tryFn = async (id: string, payload: O, startTimestamp: number): Promise<FunctionQueueResult<R>> => {
    let retries = 0;
    let finalResult: FunctionQueueResult<R> | undefined;
    while ((!finalResult || (finalResult as any).error) && retries <= this._options.maxRetries) {
      retries++;
      try {
        await sleep(this._options.waitTimeBetweenRuns);
        const fnResult = await this._fn(payload);
        const endTimestamp = Date.now();
        finalResult = {
          id,
          duration: endTimestamp - startTimestamp,
          startTimestamp,
          endTimestamp,
          result: fnResult,
        };
      } catch (error) {
        const endTimestamp = Date.now();
        finalResult = {
          id,
          duration: endTimestamp - startTimestamp,
          startTimestamp,
          endTimestamp,
          error,
        };
      }
    }
    return finalResult as FunctionQueueResult<R>;
  }

  /** Drain the queue sequentially, recording one result per entry. */
  private async _processQueue(): Promise<void> {
    this._processing = true;
    let entry: FunctionQueueEntry<O, R>;
    while (entry = this._queue.shift() as FunctionQueueEntry<O, R>) {
      const { payload, id } = entry;
      // BUG FIX: take the start timestamp per entry. It was previously
      // captured once before the loop, so later entries reported durations
      // that included the time spent processing all earlier entries.
      const startTimestamp = Date.now();
      try {
        const result = await this._tryFn(id, payload, startTimestamp);
        this.results.push(
          {
            ...result,
          }
        );
      } catch (error) {
        // Defensive: _tryFn is designed not to throw, but never lose an entry.
        const endTimestamp = Date.now();
        this.results.push(
          {
            id,
            startTimestamp,
            duration: endTimestamp - startTimestamp,
            endTimestamp,
            error,
          }
        );
      }
    }
    this._processing = false;
  }

  /** Drop retained results older than cleanupResultsOlderThan. */
  public cleanupResults(): void {
    this.results = this.results.filter(
      (r) => {
        const age = (Date.now() - r.endTimestamp);
        return age < this._options.cleanupResultsOlderThan;
      }
    );
  }

  /** Start draining the queue unless a drain is already in progress. */
  public async processQueue(): Promise<void> {
    if (this._processing) {
      return;
    }
    this.cleanupResults();
    this.processQueuePromise = this._processQueue().then(() => this.results);
  }

  /**
   * Poll for the result with the given id, removing it from the retained
   * results when found. Resolves with a synthetic error result if
   * getResultTimeout elapses first.
   */
  public async getResult(id: string): Promise<FunctionQueueResult<R>> {
    this.cleanupResults();
    let result = this.results.find((r) => r.id === id);
    const startTimestamp = Date.now();
    while (!result && (Date.now() - startTimestamp) < this._options.getResultTimeout) {
      await sleep(this._options.waitTimeBetweenRuns);
      result = this.results.find((r) => r.id === id);
    }
    if (!result) {
      const endTimestamp = Date.now();
      return {
        id,
        startTimestamp,
        duration: endTimestamp - startTimestamp,
        endTimestamp,
        error: new Error(
          `Result for id ${id} not found (timeout of ${this._options.getResultTimeout}ms exceeded)`
        ),
      };
    }
    this.results = this.results.filter((r) => r.id !== id);
    return result;
  }
}

export default FunctionQueue;
|
1 28 162 515 704
2 57 417 483
3 293
4 297 543
5 325 365
6 72 507
7 155 183 274 538
8 670
9 326
10 5 122 148 554
11 82
12 113 562
13 68
14 63 73 257 413 521 687
15 330 685
16 14 170 194 398 645
17 324 349 599 607 694
18 111 389 419 499 703
19 201 240
20 27 315
21
22 444 604
23 80 431 549
24 108 474 575
25 44 176 187 486 628
26 90 179 630
27 210 280 440
28 261 688
29 41 69 561
30 35 89 352 448 476 523 558
31 24 575
32 187 262 302
33 88 95
34 549
35 77 484
36 365 518 657 707
37 215 256 347 619 626
38 26 259 493 611
39 85
40 4 297 339 450 587
41 69 123 302 492
42 494 603 621
43 86 367 548
44 176
45 149 383 410 506
46 485 526
47 62 105
48 227 428 645 667 689
49 156 248 285 377 381
50 70 151 328 390 497
51 7 406 691
52 15 44 214 457
53 108 359 491 525
54
55 430 432
56 166 205 279 568
57 270 417 572 671
58 94
59 180 345 395 396 447 536
60 42 319 364 621 632
61 451 497
62 294 592
63 257 457
64 453 625 654
65 30 395
66 93 184 266 272
67 311 439 633 678
68 184 310 422 649
69 646 702
70 39 335 488 633 658 674
71 363
72 12 110 152 253 464
73 170 194
74 150 333 366
75 40 693
76 15 25 44 52 628
77 651
78 107 277 278 422
79 54 106 299 446 699
80 200 247 313 549 597
81 127 167 664
82 101 203 653
83 342 384
84 299 659
85 65 352 395
86 51 274 401 548
87 14 16 662
88 139 425
89 96 316 361 448 523 677
90 24 31 474 581 681
91 64 688
92 284 355 589 702
93 157 317 402 563 635
94 137 140 293 382 516 570
95 425 535 582
96
97 34 122 236
98 215 266 498
99 333 397 438
100 510 546 675
101 445
102 217 332 357 393
103 468 637
104 178 219 245
105 592
106 54 423 446
107 211 322 510 555 617
108 557
109 322 409 418 454 551 613
110 152 585
111 419 420 434 703
112 81 127 424 485 633
113 72 129 252
114 21 415 503 584
115 171 304
116 230 442 506 695
117 29 247 302 372 561 597
118 268 430 473 512
119 573 691
120 14 63 687
121 189 373
122 190 213 236 528 554 622
123 204 302 371 486
124 372 615
125 197 427 666 678
126 220 280
127
128 301 594 684
129 286 414 683
130 466 604
131 240 524 575 636
132 92 355 530 571 589
133 42 244 375 446
134 27 126 280
135 137 240 462
136 413
137 140 384 516
138 348
139 54 79 84 320
140 83 384 652
141 329 331 444 475
142 54 175 423 495
143 298 542
144 154 164 254 456
145 119 279 562
146 71 388
147 25 44 52 176 198 676
148 325 545
149 305 689 696
150 162 290 515 540 686
151 70 328
152 113
153 168 317 349 429 563 709
154 254 392 550
155 6
156 238 248 418 454
157 66 184 239
158 4 40 535
159 56 661
160 172
161 45 202 237 380
162 28 540
163 593 618 710
164 100 291
165 73 174 382
166 21 191 205
167 206 325 356 658 664
168 239 242 317
169 41 69 492
170 165 194 665 700
171 62 218 514 552
172 118 426 644
173 6 8 155 464 670
174 3 58 73 382
175 33 320 337 495 598
176
177 83 351 652
178 55 219 245 262 430
179 38 42 60 493 632
180 396 412 536
181 119 145
182 532 600
183 145 538 562 691
184 239 272 697
185 3 120 214 457 652
186 289 345
187 262 302
188 37 295 393
189 298 399 519 605
190 97 160 172 213 426
191 21 636
192 114 387 573
193 18 222 389
194 14
195 314 330 652
196 107 211 613
197 46 427 500
198 52
199 92 705
200 23 124 334 372 431 638
201 441
202 9 207 237 326
203 71 363 472 588 693
204 371
205 159 706
206 50
207 242 246 326 380 391 697
208 182 523
209 39 67 311 439
210 126 288 421 440 585
211 277 579 613 708
212 53 226 259 359 368 525
213 160
214 685
215 452 498 626
216 263 660
217 264 341 393
218 115 304 514 532
219 34 313 430 512
220 152
221 83 177 201
222 455 601 661 683
223 449 536
224 258 655
225 241 260 489 528
226 64 259 453
227
228 227
229 23 431 545 549
230 383 442
231 117 404
232 246 422
233 151 650
234 436 672
235 105 680
236 10 148 545
237 9 679
238 283 454
239 317
240 441 462 560
241 267 528 622
242 239 326 697
243 95 338 465 582
244 226 603
245 262 302
246 171 467 596
247 104 245
248 377
249 100 182 208 523 558 675
250 5
251 11 82
252 12 279 537 562
253 110 228 421 480 599 656
254 138 449 669
255 99 103 438 468
256 248 265 619
257 198 281 501
258 164 291 558 655
259 244 493
260 241 267 292 518
261 394 460 569 625
262
263 75 163 339 618 640
264 188 393 416 498 656
265 37 248
266 272 541
267 103 292 468
268 55 74 430 473
269 266 402 498
270 132 471
271 64 91 226
272 68
273
274 43 51 155 173 346 367
275 223 348 385 504 602
276 146 535 587 631
277 107 579
278 115 232 617
279 286 387
280 20 220 332 443 490
281 198 204 371 705
282 39 70
283 606
284 2 483 702
285 216 348 377 504
286 459
287 26 60 632
288 253 421 656
289 475 534
290 195 314 327 515 608 704
291 144 224 456
292 103 518 539
293 58 140 174 185 652
294 47 105 235
295 215 452 498 626
296 226 359 368
297 130 400 411
298 513 519 682
299 139
300 15 195 330 685
301 594
302 29 247 262
303 285 411
304 182 278 617
305 9 237 679 696
306 68 232 422 467
307 111 420 445 559 577
308 556
309 53 359 481 608
310 266 272 708
311 39 70 488
312 141 289 475 500 666
313 34 104 247
314 330
315 27 210 332 341
316 96 208 361
317 157 563
318 18 251 350 389 455
319 350 364 455
320 33 54 88 142
321 201 433
322 196 238 283 510
323 129 499 703
324 227 228 349 694
325 10 356 545 623
326 168 429 696
327 195 481
328 590
329 312 444 612
330 55 74 268 366
331 22 444
332 20 420 443
333 150 268 473 540
334 373 564
335 30 282 352 566
336 46 308 612
337 33 276 535 631
338 329
339 75 297 640
340 405 472 494 621
341 102 210 264 288 332 656
342 201 221 407
343 22 180 331 412 604
344 270 383 470 595
345
346 227 367 614 667
347 102 188 619
348 49 223 449 479
349 709
350 11 251 364
351 321 327 433
352 39 65 282
353 87 136 442
354 270 595
355 199 571
356 5 81 250
357 347 660
358 31 575 690
359 28 608
360 138 238 254 479 669
361 208
362 588 653
363 146 587 693
364 362 544
365 250 424
366 290 314 686
367 548
368 226 359
369 109 211 613 708
370 151 233 328 650
371 486 676
372 615
373 189 519 638
374 75 618 627
375 42 446 494 495 598 647
376 102 332
377 216 357
378 234 403
379 163 216 263 610
380 202
381 156 238 348 479
382 58 570
383 410 506
384 135 342 407
385 130 504 604
386 143 483 646
387 56 145 181 568 573
388 71 472 598 647
389
390 206 658
391 171 246 380
392 254 536 550
393 347
394 64 103 255 625 639
395 345
396 186 345
397 255
398 170 401 645
399 408 682
400 303 339 640
401 170 548
402 635
403 96 316 463 513 529
404 511 638
405 375 494
406 7 183 691
407 201 441
408 463 529 682
409 369 551
410 344 595
411 130 400
412 536 602 604
413 87 353 521
414 113 502
415 192 301 503 516 573
416 692
417 92 284 671
418 248 265 454
419 251 318
420 376 443 577
421 110
422 13 277 278 579
423 54 446
424 36 127 485
425 33 243 308 634
426 34 97 219 512
427 46 112 633
428 116 149 695
429 48 168
430
431 206 334 545 623
432 178 262 522
433 177 201 221
434 82 101 251 307 419
435 136 199 257 413 705
436 378 403 513 531 672 701
437 82 203 374 618
438 160 468 567
439 125 520
440 126 134
441 19 462 533
442 136 662
443 323
444 338 586
445 163 434
446 244 495
447 395 484 553
448 335 566
449 138
450 75 693
451 50
452 498
453 244 446 578
454 322
455 193 350 389 583 601 663
456 224 643
457 120 214 501
458 264 269 402 416 498
459 56 159 279
460 255 397 540
461 408 448 487 650
462 384 533
463 96 461
464 6 8 614
465 308 329 338 425
466 4 297 477 543
467 232
468 241 622
469 38 53 474 611
470 230 270 383 442
471 57 235 354
472 71 647
473 99 172
474 26 38 53 108
475 180 186 331 343 396
476 35 65 77
477 22 130 586 604
478 1 28 359 704
479 138 238
480 72 228 464 614
481 351 491
482 17 153 349 402
483 641
484 550 553 651 655
485 36 427 526
486 147 187 302 676
487 399 408 590 650
488 67
489 5 528 554
490 129 220 323 414 443
491 108 309 321 351
492 92 123 204 702
493 244
494 621
495 423 598
496 275 385 412
497 206 334 431 451
498 188
499 193 222
500 46 329
501 52 63 198
502 113 152 220 490
503 137 516
504 303 348 576
505 177 195 327 351
506 149 230 428
507 72 155
508 128 165 301 684
509 19 108 240 557
510 283
511 231 561
512 172 430
513 143
514 62 234 294
515 162 704
516 301 508 570
517 50 61 121 328 451
518 273 659 707
519 511 638 646
520 39 85 209 668
521 257 435
522 15 55 76 330
523 361 558
524 191 358 575 636
525 469 611
526 36 308 336 556 707
527 275 496
528 554
529 463 513
530 136 270 442 470
531 143 513
532 234 304 378 514
533 384 407
534 125 186 345 616
535 33 582
536 254 449
537 113 129 279 286
538 12 72 155 507 562
539 394
540 28 99 261 397
541 98 269 498
542 235 471 531 641 672
543 158 477 582
544 11 350 362 653
545
546 249 510 617
547 3 73 174
548 398
549 313 580
550 144 536
551 37 98 215 265 266 418 708
552 45 62 161 380 391 592
553 59 536 550
554 5
555 510 546 617
556 273 629 707
557 19 201 321 491
558 35 484 675
559 376 445 593
560 21 114 135 137 503 636
561 69 231 646
562 279
563 402 482
564 121 373 517
565 84 139 273 518 629 634 659
566 70 151 233
567 99 160 473
568 21 166 192
569 255 394 460
570 165 508
571 136 199 435 530
572 2 471 483 542
573 181
574 79 446 453
575 108 240 509
576 130 303 385 411
577 376 559
578 64 299 574 659
579 13
580 34 97 229 236 545
581 706
582 158 338 586
583 601 663
584 21 192 568
585 126 152 220 421
586 22 338 543
587 146 158 450 535 693
588 340 472 621 653
589 417 671
590
591 61 334 497 517 564
592 47
593 445 610 620
594 51 684
595 45 105 270 552 592
596 115 171 232 278
597 200 247 372
598 337 631 647
599 694
600 208 316 378 403 532
601 205 642 706
602 223 496 527 536
603 133 179 493
604 496
605 121 328 399 487 517 590
606 238 360 510 669
607 402 458 482 599
608 327 478 481
609 102 357 660
610 102 376 559 609
611 212 259
612 46 308 465 500
613 322
614 8 227 228 274 670
615 117 200 231 404
616 125 439 520
617 78 182 249
618 75
619 248 357 377
620 163 379 610
621 362 364
622 160 213 438 528
623 167 206 545
624 92 199 204 492 705
625 91
626 188
627 75 203 437
628 187 262 432 522
629 273 308
630 90 287 581 583 663 706
631 146 388
632 26
633 125 488 664 678
634 88 139 308 629
635 66 266 269
636 21 240
637 292 394 539
638 334 511 615
639 64 539 654
640 216 285 303
641 143 386 572
642 159 205 222
643 144 224 484 550 655
644 160 473 567
645 87 367 428 548 662
646 143 298 483 511 702
647 340 405
648 153 429
649 207 246 306 467 697
650 328 448 566 590
651 35 65 395 447 476
652 83 214 300 505
653 11 203
654 578 659
655 558
656 416 599 692
657 225 260 365 518
658 206 633
659 292 299 539 639
660 377 379 610
661 286 459 642 683
662 116 353 428 695
663 60 287 319
664 112 658
665 51 86 401 594 684
666 197 289 500 534
667 227 367 645
668 85 345 395 534 616
669 100 144 164 510
670 274
671 132 270
672 235 294 514 531
673 5 225 365 489 657
674 50 390 658
675 164 258
676 198 281
677 96 448 461 463
678 439
679 45 149 161
680 105 354 471 595
681 31 358 581
682 189 513 529
683 286 323 499
684 165
685 52 652
686 74 290
687 3 73 185 547
688 226 261 271 296 359 625
689 428
690 166 191 524 581 681
691 145 301 415 573 594
692 458 599 607
693 627
694 228 253
695
696 9 48 429 689
697 68 239
698 81 127 250 356 365 424
699 299 574 578
700 165 665 684
701 513 531
702 169 483
703 420 443 499
704 608
705 204 257
706 166 583 690
707 273
708 13 68 266 409 579
709 48 227 324 429 648
710 82 101 437 445 618
|
import 'package:flutter/material.dart';
import 'package:flutterando_class/app/core/constants/colors.dart';
/// Section heading widget: renders the fixed prefix "Flutterando " in white
/// followed by [title] in the accent color, an optional leading [icon] that
/// overhangs the left padding, and a trailing "ver todos " action hint.
class CustomTitleWidget extends StatelessWidget {
  /// Highlighted part of the heading, shown after the "Flutterando " prefix.
  final String title;

  /// Optional image drawn slightly outside the left edge of the row.
  final ImageProvider? icon;

  const CustomTitleWidget({
    Key? key,
    required this.title,
    this.icon,
  }) : super(key: key);

  @override
  Widget build(BuildContext context) {
    // Both spans share the headline3 base style and differ only in color.
    final baseStyle = Theme.of(context).textTheme.headline3;
    final whiteStyle = baseStyle?.copyWith(color: Colors.white);
    final accentStyle = baseStyle?.copyWith(color: AppColors.accent);

    return Padding(
      padding: const EdgeInsets.only(left: 34, top: 50, right: 32),
      child: Stack(
        // clipBehavior none + negative offset lets the icon overlap the padding.
        clipBehavior: Clip.none,
        alignment: Alignment.centerLeft,
        children: [
          if (icon != null)
            Positioned(left: -10, child: Image(image: icon!, height: 26)),
          Row(
            children: [
              Expanded(
                child: Text.rich(
                  TextSpan(
                    children: [
                      TextSpan(text: "Flutterando ", style: whiteStyle),
                      TextSpan(text: title, style: accentStyle),
                    ],
                  ),
                ),
              ),
              Text("ver todos ", style: whiteStyle),
              Icon(Icons.keyboard_arrow_right_rounded,
                  color: AppColors.accent, size: 18),
            ],
          ),
        ],
      ),
    );
  }
}
|
// Code generated by counterfeiter. DO NOT EDIT.
package fakes
import (
"sync"
"github.com/mminges/replicator/replicator"
)
// TileReplicator is a counterfeiter-generated test double for the
// replicator package's replicate collaborator. It records every call to
// Replicate and lets tests program its return values.
// NOTE(review): this file is generated ("DO NOT EDIT"); comments here are
// review annotations only — regenerate with counterfeiter to change behavior.
type TileReplicator struct {
	// ReplicateStub, when non-nil, is invoked instead of any canned return.
	ReplicateStub func(replicator.ApplicationConfig) error
	// replicateMutex guards the replicate* bookkeeping fields below.
	replicateMutex sync.RWMutex
	// replicateArgsForCall accumulates the argument of every Replicate call.
	replicateArgsForCall []struct {
		arg1 replicator.ApplicationConfig
	}
	// replicateReturns is the default result used when no per-call override exists.
	replicateReturns struct {
		result1 error
	}
	// replicateReturnsOnCall overrides the result for a specific call index.
	replicateReturnsOnCall map[int]struct {
		result1 error
	}
	// invocations records all calls by method name, guarded by invocationsMutex.
	invocations      map[string][][]interface{}
	invocationsMutex sync.RWMutex
}

// Replicate records the call, then returns (in priority order) the stub's
// result, the per-call canned result for this call index, or the default
// canned result.
func (fake *TileReplicator) Replicate(arg1 replicator.ApplicationConfig) error {
	fake.replicateMutex.Lock()
	// Look up a per-call override keyed by the index of THIS call (current
	// length of the args slice, i.e. zero-based call count so far).
	ret, specificReturn := fake.replicateReturnsOnCall[len(fake.replicateArgsForCall)]
	fake.replicateArgsForCall = append(fake.replicateArgsForCall, struct {
		arg1 replicator.ApplicationConfig
	}{arg1})
	fake.recordInvocation("Replicate", []interface{}{arg1})
	// Unlock before invoking the stub so the stub may call back into the fake.
	fake.replicateMutex.Unlock()
	if fake.ReplicateStub != nil {
		return fake.ReplicateStub(arg1)
	}
	if specificReturn {
		return ret.result1
	}
	return fake.replicateReturns.result1
}

// ReplicateCallCount returns how many times Replicate has been called.
func (fake *TileReplicator) ReplicateCallCount() int {
	fake.replicateMutex.RLock()
	defer fake.replicateMutex.RUnlock()
	return len(fake.replicateArgsForCall)
}

// ReplicateArgsForCall returns the argument passed to the i-th Replicate call.
// Panics if i is out of range (generated code performs no bounds check).
func (fake *TileReplicator) ReplicateArgsForCall(i int) replicator.ApplicationConfig {
	fake.replicateMutex.RLock()
	defer fake.replicateMutex.RUnlock()
	return fake.replicateArgsForCall[i].arg1
}

// ReplicateReturns sets the default return value and clears any stub.
func (fake *TileReplicator) ReplicateReturns(result1 error) {
	fake.ReplicateStub = nil
	fake.replicateReturns = struct {
		result1 error
	}{result1}
}

// ReplicateReturnsOnCall sets the return value for the i-th call only,
// clearing any stub and lazily allocating the override map.
func (fake *TileReplicator) ReplicateReturnsOnCall(i int, result1 error) {
	fake.ReplicateStub = nil
	if fake.replicateReturnsOnCall == nil {
		fake.replicateReturnsOnCall = make(map[int]struct {
			result1 error
		})
	}
	fake.replicateReturnsOnCall[i] = struct {
		result1 error
	}{result1}
}

// Invocations returns a shallow copy of the per-method call log.
// The per-call argument slices are shared with the fake, not deep-copied.
func (fake *TileReplicator) Invocations() map[string][][]interface{} {
	fake.invocationsMutex.RLock()
	defer fake.invocationsMutex.RUnlock()
	fake.replicateMutex.RLock()
	defer fake.replicateMutex.RUnlock()
	copiedInvocations := map[string][][]interface{}{}
	for key, value := range fake.invocations {
		copiedInvocations[key] = value
	}
	return copiedInvocations
}

// recordInvocation appends args to the call log for key, lazily allocating
// the map and the per-key slice on first use.
func (fake *TileReplicator) recordInvocation(key string, args []interface{}) {
	fake.invocationsMutex.Lock()
	defer fake.invocationsMutex.Unlock()
	if fake.invocations == nil {
		fake.invocations = map[string][][]interface{}{}
	}
	if fake.invocations[key] == nil {
		fake.invocations[key] = [][]interface{}{}
	}
	fake.invocations[key] = append(fake.invocations[key], args)
}
|
require 'spec_helper'
# Specs for Showcase::Helpers: `present` wraps a single object in a presenter
# bound to a view context; `present_collection` maps a whole collection.
# Uses legacy RSpec `stub`/`should` syntax, consistent with the rest of the file.
describe Showcase::Helpers do
  let(:object) { Person.new('Steve Ballmer') }
  let(:context) { Context.new }

  describe '.present' do
    # Fixed typo in the example description: "instanciate" -> "instantiates".
    it 'instantiates a new presenter, inferring the class' do
      PersonPresenter.stub(:new).with(object, context).and_return 'Presenter'
      context.present(object, PersonPresenter).should == 'Presenter'
    end

    it 'the presenter class to use can be specified as the second parameter' do
      ProjectPresenter.stub(:new).with(object, context).and_return 'Presenter'
      context.present(object, ProjectPresenter).should == 'Presenter'
    end

    it 'the context to use can be specified as third parameter' do
      different_context = double
      context.present(object, ProjectPresenter, different_context).view_context.should == different_context
    end
  end

  describe '.present_collection' do
    it 'returns a presenter for each object in the collection' do
      collection = [ Person.new('Mark'), Person.new('Luke') ]
      # One stubbed presenter per element, so order preservation is observable.
      PersonPresenter.stub(:new).with(collection[0], context).and_return 'foo'
      PersonPresenter.stub(:new).with(collection[1], context).and_return 'bar'
      presented_collection = context.present_collection(collection)
      presented_collection.should == [ 'foo', 'bar' ]
    end
  end
end
|
# Copyright (c) 2012-2016 Seafile Ltd.
from django.conf import settings
# Cache timeout used by the notifications app, overridable via
# NOTIFICATION_CACHE_TIMEOUT in Django settings. Default is 0 —
# NOTE(review): presumably seconds and presumably disables caching;
# confirm against how the consuming code passes it to the cache backend.
NOTIFICATION_CACHE_TIMEOUT = getattr(settings, 'NOTIFICATION_CACHE_TIMEOUT', 0)
|
"""
Custom logger for the project.
"""
from colorama import init
from termcolor import colored
init(autoreset=True, convert=True)
def print_inf(msg: str):
    """Print an informational message in the terminal's default color.

    Args:
        msg (str): Message to be printed.
    """
    print(msg)
def print_title(msg: str):
    """Print a title message in magenta.

    Args:
        msg (str): Message to be printed.
    """
    rendered = colored(msg, 'magenta')
    print(rendered)
def print_ok(msg: str):
    """Print a success ("ok") message in green.

    Args:
        msg (str): Message to be printed.
    """
    rendered = colored(msg, 'green')
    print(rendered)
def print_error(msg: str):
    """Print an error message in red.

    Args:
        msg (str): Message to be printed.
    """
    rendered = colored(msg, 'red')
    print(rendered)
def print_debug(msg: str):
    """Print a debug message in cyan.

    Args:
        msg (str): Message to be printed.
    """
    rendered = colored(msg, 'cyan')
    print(rendered)
def print_warning(msg: str):
    """Print a warning message in yellow.

    Args:
        msg (str): Message to be printed.
    """
    rendered = colored(msg, 'yellow')
    print(rendered)
|
# Usage: ./release.sh 3.95
# Builds a Wekan Sandstorm package (.spk) from a fresh clone.
# Delete old stuff
#cd ~/repos/wekan
#./releases/release-cleanup.sh
# Build Source
#cd ~/repos/wekan
#./releases/rebuild-release.sh

# Abort on the first failing command. Without this, a failed `cd` or clone
# would let later destructive steps (rm -rf, builds) run from the wrong place.
set -e

# Require the version argument used to name the .spk file.
if [ -z "$1" ]; then
    echo "Usage: $0 <version>" >&2
    exit 1
fi

REPODIR=/home/wekan/repos
WEKANDIR=/home/wekan/repos/wekan

# Ensure sudo access up front so later sudo steps don't stall mid-build.
sudo echo .

# Fresh clone of Wekan
cd "$REPODIR"
rm -rf "$WEKANDIR"
git clone git@github.com:wekan/wekan.git
cd "$WEKANDIR"

# Pin the Node version and prepare the fibers build directory.
sudo n 12.19.0
sudo mkdir -p /usr/local/lib/node_modules/fibers/.node-gyp

# Build Wekan
./releases/rebuild-release.sh
cd .build/bundle/programs/server
npm install node-gyp node-pre-gyp fibers
cd "$WEKANDIR"

# Build Sandstorm package
meteor-spk pack "wekan-$1.spk"
#spk publish wekan-$1.spk
#scp wekan-$1.spk x2:/var/snap/wekan/common/releases.wekan.team/
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.