text
stringlengths 3
1.05M
|
|---|
/*
Copyright (c) 2003-2012, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'a11yhelp', 'ku',
{
accessibilityHelp :
{
title : 'ڕێنمای لەبەردەستدابوون',
contents : 'پێکهاتەی یارمەتی. کلیك ESC بۆ داخستنی ئەم دیالۆگه.',
legend :
[
{
name : 'گشتی',
items :
[
{
name : 'تووڵامرازی دهستكاریكهر',
legend:
'کلیك ${toolbarFocus} بۆ ڕابەری تووڵامراز. بۆ گواستنەوەی پێشوو داهاتووی گرووپی تووڵامرازی داگرتنی کلیلی TAB لهگهڵ SHIFT-TAB. بۆ گواستنەوەی پێشوو داهاتووی دووگمەی تووڵامرازی لەڕێی کلیلی تیری دەستی ڕاست یان کلیلی تیری دەستی چەپ. کلیکی کلیلی SPACE یان ENTER بۆ چالاککردنی دووگمەی تووڵامراز.'
},
{
name : 'دیالۆگی دهستكاریكهر',
legend :
'لەهەمانکاتدا کەتۆ لەدیالۆگی, کلیکی کلیلی TAB بۆ ڕابەری خانەی دیالۆگێکی تر, داگرتنی کلیلی SHIFT + TAB بۆ گواستنەوەی بۆ خانەی پێشووتر, کلیكی کلیلی ENTER بۆ ڕازیکردنی دیالۆگەکە, کلیكی کلیلی ESC بۆ هەڵوەشاندنەوەی دیالۆگەکە. بۆ دیالۆگی لەبازدەری (تابی) زیاتر, کلیكی کلیلی ALT + F10 بۆ ڕابهری لیستی بازدهرهکان. بۆ چوونه بازدهری تابی داهاتوو کلیكی کلیلی TAB یان کلیلی تیری دهستی ڕاست. بۆچوونه بازدهری تابی پێشوو داگرتنی کلیلی SHIFT + TAB یان کلیلی تیری دهستی چهپ. کلیی کلیلی SPACE یان ENTER بۆ ههڵبژاردنی بازدهر (تاب).'
},
{
name : 'پێڕستی سهرنووسهر',
legend :
'کلیك ${contextMenu} یان دوگمهی لیسته(Menu) بۆ کردنهوهی لیستهی دهق. بۆ چوونه ههڵبژاردهیهکی تر له لیسته کلیکی کلیلی TAB یان کلیلی تیری ڕوو لهخوارهوه بۆ چوون بۆ ههڵبژاردهی پێشوو کلیکی کلیلی SHIFT+TAB یان کلیلی تیری ڕوو له سهرهوه. داگرتنی کلیلی SPACE یان ENTER بۆ ههڵبژاردنی ههڵبژاردهی لیسته. بۆ کردنهوهی لقی ژێر لیسته لهههڵبژاردهی لیسته کلیکی کلیلی SPACE یان ENTER یان کلیلی تیری دهستی ڕاست. بۆ گهڕانهوه بۆ سهرهوهی لیسته کلیکی کلیلی ESC یان کلیلی تیری دهستی چهپ. بۆ داخستنی لیسته کلیكی کلیلی ESC بکه.'
},
{
name : 'لیستی سنووقی سهرنووسهر',
legend :
'لهناو سنوقی لیست, چۆن بۆ ههڵنبژاردهی لیستێکی تر کلیکی کلیلی TAB یان کلیلی تیری ڕوو لهخوار. چوون بۆ ههڵبژاردهی لیستی پێشوو کلیکی کلیلی SHIFT + TAB یان کلیلی تیری ڕوو لهسهرهوه. کلیکی کلیلی SPACE یان ENTER بۆ دیاریکردنی ههڵبژاردهی لیست. کلیکی کلیلی ESC بۆ داخستنی سنوقی لیست.'
},
{
name : 'تووڵامرازی توخم',
legend :
'کلیك ${elementsPathFocus} بۆ ڕابهری تووڵامرازی توخمهکان. چوون بۆ دوگمهی توخمێکی تر کلیکی کلیلی TAB یان کلیلی تیری دهستی ڕاست. چوون بۆ دوگمهی توخمی پێشوو کلیلی SHIFT+TAB یان کلیکی کلیلی تیری دهستی چهپ. داگرتنی کلیلی SPACE یان ENTER بۆ دیاریکردنی توخمهکه لهسهرنووسه.'
}
]
},
{
name : 'فهرمانهکان',
items :
[
{
name : 'فهرمانی پووچکردنهوه',
legend : 'کلیك ${undo}'
},
{
name : 'فهرمانی ههڵگهڕانهوه',
legend : 'کلیك ${redo}'
},
{
name : 'فهرمانی دهقی قهڵهو',
legend : 'کلیك ${bold}'
},
{
name : 'فهرمانی دهقی لار',
legend : 'کلیك ${italic}'
},
{
name : 'فهرمانی ژێرهێڵ',
legend : 'کلیك ${underline}'
},
{
name : 'فهرمانی بهستهر',
legend : 'کلیك ${link}'
},
{
name : 'شاردهنهوهی تووڵامراز',
legend : 'کلیك ${toolbarCollapse}'
},
{
name : 'دهستپێگهیشتنی یارمهتی',
legend : 'کلیك ${a11yHelp}'
}
]
}
]
}
});
|
from __future__ import unicode_literals
import time
import binascii
import io
from .fragment import FragmentFD
from ..compat import (
compat_Struct,
compat_urllib_error,
)
# Big-endian binary packers for ISO BMFF (MP4/PIFF) box serialization.
# Naming: u/s = unsigned/signed, digits = bit width.  A trailing "x" pad
# byte is used where the low-order part of a fixed-point field (8.8 or
# 16.16) is always zero, so only the integer part needs to be packed.
u8 = compat_Struct(">B")
u88 = compat_Struct(">Bx")    # unsigned 8.8 fixed point, fraction always 0
u16 = compat_Struct(">H")
u1616 = compat_Struct(">Hxx")  # unsigned 16.16 fixed point, fraction always 0
u32 = compat_Struct(">I")
u64 = compat_Struct(">Q")
s88 = compat_Struct(">bx")    # signed 8.8 fixed point, fraction always 0
s16 = compat_Struct(">h")
s1616 = compat_Struct(">hxx")  # signed 16.16 fixed point, fraction always 0
s32 = compat_Struct(">i")
# Identity transformation matrix for the mvhd/tkhd boxes: two rows of
# (0x10000, 0, 0, 0) in 16.16 fixed point, terminated by 0x40000000 (2.30).
unity_matrix = (s32.pack(0x10000) + s32.pack(0) * 3) * 2 + s32.pack(0x40000000)
# Track Header Box (tkhd) flag bits.
TRACK_ENABLED = 0x1
TRACK_IN_MOVIE = 0x2
TRACK_IN_PREVIEW = 0x4
# Data Entry URL Box flag: media data lives in the same file as the metadata.
SELF_CONTAINED = 0x1
def box(box_type, payload):
    """Serialize an ISO BMFF box: a 4-byte big-endian total size (header
    bytes included), the 4-byte box type, then the payload."""
    total_size = 8 + len(payload)
    return u32.pack(total_size) + box_type + payload
def full_box(box_type, version, flags, payload):
    """Serialize a "full" ISO BMFF box: like box(), but the payload is
    preceded by a 1-byte version and a 24-bit flags field."""
    # u32.pack(flags)[1:] keeps only the low 3 bytes of the flags word.
    version_and_flags = u8.pack(version) + u32.pack(flags)[1:]
    return box(box_type, version_and_flags + payload)
def write_piff_header(stream, params):
    """Write a PIFF (Protected Interoperable File Format / Smooth Streaming)
    initialization header -- an 'ftyp' box followed by a fully populated
    'moov' box -- to ``stream``.

    ``params`` must contain ``track_id``, ``fourcc``, ``duration`` and (for
    video) ``codec_private_data``; ``timescale``, ``language``, ``width``,
    ``height``, ``channels``, ``bits_per_sample``, ``sampling_rate`` and
    ``nal_unit_length_field`` are optional.  Box layouts follow ISO 14496-12.
    """
    track_id = params["track_id"]
    fourcc = params["fourcc"]
    duration = params["duration"]
    timescale = params.get("timescale", 10000000)
    language = params.get("language", "und")
    height = params.get("height", 0)
    width = params.get("width", 0)
    # A track without dimensions is treated as audio-only.
    is_audio = width == 0 and height == 0
    creation_time = modification_time = int(time.time())

    ftyp_payload = b"isml"  # major brand
    ftyp_payload += u32.pack(1)  # minor version
    ftyp_payload += b"piff" + b"iso2"  # compatible brands
    stream.write(box(b"ftyp", ftyp_payload))  # File Type Box

    mvhd_payload = u64.pack(creation_time)
    mvhd_payload += u64.pack(modification_time)
    mvhd_payload += u32.pack(timescale)
    mvhd_payload += u64.pack(duration)
    mvhd_payload += s1616.pack(1)  # rate
    mvhd_payload += s88.pack(1)  # volume
    mvhd_payload += u16.pack(0)  # reserved
    mvhd_payload += u32.pack(0) * 2  # reserved
    mvhd_payload += unity_matrix
    mvhd_payload += u32.pack(0) * 6  # pre defined
    mvhd_payload += u32.pack(0xFFFFFFFF)  # next track id
    moov_payload = full_box(b"mvhd", 1, 0, mvhd_payload)  # Movie Header Box

    tkhd_payload = u64.pack(creation_time)
    tkhd_payload += u64.pack(modification_time)
    tkhd_payload += u32.pack(track_id)  # track id
    tkhd_payload += u32.pack(0)  # reserved
    tkhd_payload += u64.pack(duration)
    tkhd_payload += u32.pack(0) * 2  # reserved
    tkhd_payload += s16.pack(0)  # layer
    tkhd_payload += s16.pack(0)  # alternate group
    tkhd_payload += s88.pack(1 if is_audio else 0)  # volume
    tkhd_payload += u16.pack(0)  # reserved
    tkhd_payload += unity_matrix
    tkhd_payload += u1616.pack(width)
    tkhd_payload += u1616.pack(height)
    trak_payload = full_box(
        b"tkhd", 1, TRACK_ENABLED | TRACK_IN_MOVIE | TRACK_IN_PREVIEW, tkhd_payload
    )  # Track Header Box

    mdhd_payload = u64.pack(creation_time)
    mdhd_payload += u64.pack(modification_time)
    mdhd_payload += u32.pack(timescale)
    mdhd_payload += u64.pack(duration)
    # Three-letter language code packed as three 5-bit values, each letter
    # offset by 0x60 (mdhd packed-language encoding).
    mdhd_payload += u16.pack(
        ((ord(language[0]) - 0x60) << 10)
        | ((ord(language[1]) - 0x60) << 5)
        | (ord(language[2]) - 0x60)
    )
    mdhd_payload += u16.pack(0)  # pre defined
    mdia_payload = full_box(b"mdhd", 1, 0, mdhd_payload)  # Media Header Box

    hdlr_payload = u32.pack(0)  # pre defined
    hdlr_payload += b"soun" if is_audio else b"vide"  # handler type
    hdlr_payload += u32.pack(0) * 3  # reserved
    hdlr_payload += (b"Sound" if is_audio else b"Video") + b"Handler\0"  # name
    mdia_payload += full_box(b"hdlr", 0, 0, hdlr_payload)  # Handler Reference Box

    if is_audio:
        smhd_payload = s88.pack(0)  # balance
        smhd_payload += u16.pack(0)  # reserved
        media_header_box = full_box(b"smhd", 0, 0, smhd_payload)  # Sound Media Header
    else:
        vmhd_payload = u16.pack(0)  # graphics mode
        vmhd_payload += u16.pack(0) * 3  # opcolor
        media_header_box = full_box(b"vmhd", 0, 1, vmhd_payload)  # Video Media Header
    minf_payload = media_header_box

    dref_payload = u32.pack(1)  # entry count
    dref_payload += full_box(b"url ", 0, SELF_CONTAINED, b"")  # Data Entry URL Box
    dinf_payload = full_box(b"dref", 0, 0, dref_payload)  # Data Reference Box
    minf_payload += box(b"dinf", dinf_payload)  # Data Information Box

    stsd_payload = u32.pack(1)  # entry count
    sample_entry_payload = u8.pack(0) * 6  # reserved
    sample_entry_payload += u16.pack(1)  # data reference index
    if is_audio:
        sample_entry_payload += u32.pack(0) * 2  # reserved
        sample_entry_payload += u16.pack(params.get("channels", 2))
        sample_entry_payload += u16.pack(params.get("bits_per_sample", 16))
        sample_entry_payload += u16.pack(0)  # pre defined
        sample_entry_payload += u16.pack(0)  # reserved
        sample_entry_payload += u1616.pack(params["sampling_rate"])
        # NOTE(review): only AACL audio assigns sample_entry_box; any other
        # audio fourcc would hit a NameError at the stsd concatenation below.
        # Confirm callers only pass AACL for audio tracks.
        if fourcc == "AACL":
            sample_entry_box = box(b"mp4a", sample_entry_payload)
    else:
        sample_entry_payload += u16.pack(0)  # pre defined
        sample_entry_payload += u16.pack(0)  # reserved
        sample_entry_payload += u32.pack(0) * 3  # pre defined
        sample_entry_payload += u16.pack(width)
        sample_entry_payload += u16.pack(height)
        sample_entry_payload += u1616.pack(0x48)  # horiz resolution 72 dpi
        sample_entry_payload += u1616.pack(0x48)  # vert resolution 72 dpi
        sample_entry_payload += u32.pack(0)  # reserved
        sample_entry_payload += u16.pack(1)  # frame count
        sample_entry_payload += u8.pack(0) * 32  # compressor name
        sample_entry_payload += u16.pack(0x18)  # depth
        sample_entry_payload += s16.pack(-1)  # pre defined
        # codec_private_data is a hex string of Annex-B-style SPS/PPS units
        # separated by 0x00000001 start codes.
        codec_private_data = binascii.unhexlify(
            params["codec_private_data"].encode("utf-8")
        )
        if fourcc in ("H264", "AVC1"):
            sps, pps = codec_private_data.split(u32.pack(1))[1:]
            avcc_payload = u8.pack(1)  # configuration version
            avcc_payload += sps[
                1:4
            ]  # avc profile indication + profile compatibility + avc level indication
            avcc_payload += u8.pack(
                0xFC | (params.get("nal_unit_length_field", 4) - 1)
            )  # complete representation (1) + reserved (11111) + length size minus one
            avcc_payload += u8.pack(1)  # reserved (0) + number of sps (0000001)
            avcc_payload += u16.pack(len(sps))
            avcc_payload += sps
            avcc_payload += u8.pack(1)  # number of pps
            avcc_payload += u16.pack(len(pps))
            avcc_payload += pps
            sample_entry_payload += box(
                b"avcC", avcc_payload
            )  # AVC Decoder Configuration Record
            sample_entry_box = box(b"avc1", sample_entry_payload)  # AVC Simple Entry
    stsd_payload += sample_entry_box

    # Empty stts/stsc/stco tables: all timing/offset data comes from the
    # movie fragments, not the initialization header.
    stbl_payload = full_box(b"stsd", 0, 0, stsd_payload)  # Sample Description Box
    stts_payload = u32.pack(0)  # entry count
    stbl_payload += full_box(b"stts", 0, 0, stts_payload)  # Decoding Time to Sample Box
    stsc_payload = u32.pack(0)  # entry count
    stbl_payload += full_box(b"stsc", 0, 0, stsc_payload)  # Sample To Chunk Box
    stco_payload = u32.pack(0)  # entry count
    stbl_payload += full_box(b"stco", 0, 0, stco_payload)  # Chunk Offset Box
    minf_payload += box(b"stbl", stbl_payload)  # Sample Table Box

    mdia_payload += box(b"minf", minf_payload)  # Media Information Box
    trak_payload += box(b"mdia", mdia_payload)  # Media Box
    moov_payload += box(b"trak", trak_payload)  # Track Box

    mehd_payload = u64.pack(duration)
    mvex_payload = full_box(b"mehd", 1, 0, mehd_payload)  # Movie Extends Header Box
    trex_payload = u32.pack(track_id)  # track id
    trex_payload += u32.pack(1)  # default sample description index
    trex_payload += u32.pack(0)  # default sample duration
    trex_payload += u32.pack(0)  # default sample size
    trex_payload += u32.pack(0)  # default sample flags
    mvex_payload += full_box(b"trex", 0, 0, trex_payload)  # Track Extends Box
    moov_payload += box(b"mvex", mvex_payload)  # Movie Extends Box
    stream.write(box(b"moov", moov_payload))  # Movie Box
def extract_box_data(data, box_sequence):
    """Return the payload of the nested box addressed by ``box_sequence``.

    ``box_sequence`` is a list of 4-byte box types describing a nesting
    path, e.g. ``[b"moof", b"traf", b"tfhd"]``; the payload of the
    innermost box is returned.

    Raises ValueError if the requested box is not present in ``data``
    (previously this fell off the end of the buffer and raised an opaque
    struct.error from unpacking an empty read).
    """
    data_reader = io.BytesIO(data)
    while True:
        size_header = data_reader.read(4)
        if len(size_header) < 4:
            # End of buffer reached without finding the box.
            raise ValueError("box %r not found" % (box_sequence[0],))
        box_size = u32.unpack(size_header)[0]
        box_type = data_reader.read(4)
        if box_type == box_sequence[0]:
            box_data = data_reader.read(box_size - 8)
            if len(box_sequence) == 1:
                return box_data
            # Recurse into the matched box for the remaining path.
            return extract_box_data(box_data, box_sequence[1:])
        # Not the box we want: skip its payload and continue with the sibling.
        data_reader.seek(box_size - 8, 1)
class IsmFD(FragmentFD):
    """
    Download segments in a ISM manifest
    """

    FD_NAME = "ism"

    def real_download(self, filename, info_dict):
        # In test mode only the first fragment is downloaded.
        segments = (
            info_dict["fragments"][:1]
            if self.params.get("test", False)
            else info_dict["fragments"]
        )
        ctx = {
            "filename": filename,
            "total_frags": len(segments),
        }
        self._prepare_and_start_frag_download(ctx)
        fragment_retries = self.params.get("fragment_retries", 0)
        skip_unavailable_fragments = self.params.get("skip_unavailable_fragments", True)
        # The PIFF initialization header must be written exactly once, before
        # the first fragment, using the track id parsed from that fragment's
        # moof/traf/tfhd box.
        track_written = False
        frag_index = 0
        for i, segment in enumerate(segments):
            frag_index += 1
            # Skip fragments already written in a previous (resumed) run.
            if frag_index <= ctx["fragment_index"]:
                continue
            count = 0
            # Retry loop: only HTTP errors are retried, up to fragment_retries.
            while count <= fragment_retries:
                try:
                    success, frag_content = self._download_fragment(
                        ctx, segment["url"], info_dict
                    )
                    if not success:
                        return False
                    if not track_written:
                        tfhd_data = extract_box_data(
                            frag_content, [b"moof", b"traf", b"tfhd"]
                        )
                        # The track id is the first u32 after the tfhd
                        # version/flags word.
                        info_dict["_download_params"]["track_id"] = u32.unpack(
                            tfhd_data[4:8]
                        )[0]
                        write_piff_header(
                            ctx["dest_stream"], info_dict["_download_params"]
                        )
                        track_written = True
                    self._append_fragment(ctx, frag_content)
                    break
                except compat_urllib_error.HTTPError as err:
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(
                            err, frag_index, count, fragment_retries
                        )
            if count > fragment_retries:
                # All retries exhausted: either skip this fragment or abort.
                if skip_unavailable_fragments:
                    self.report_skip_fragment(frag_index)
                    continue
                self.report_error(
                    "giving up after %s fragment retries" % fragment_retries
                )
                return False
        self._finish_frag_download(ctx)
        return True
|
const { cliopts } = require('estrella');

// Command-line flags understood by the build scripts.
const [opts] = cliopts.parse(
  ['serve', 'Serve build site'],
  ['analyze', 'Analyze Bundle'],
  ['livereload', 'Init livereload'],
);

// Reject flag combinations that do not make sense together.
if (cliopts.watch && opts.analyze) {
  throw new Error('watch and analyze not allowed together');
}
if (opts.livereload && opts.serve) {
  throw new Error('-livereload -serve not allowed since most servers come with their own livereload');
}

module.exports = opts;
|
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
# This file is based on an earlier version of pretix which was released under the Apache License 2.0. The full text of
# the Apache License 2.0 can be obtained at <http://www.apache.org/licenses/LICENSE-2.0>.
#
# This file may have since been changed and any changes are released under the terms of AGPLv3 as described above. A
# full history of changes and contributors is available at <https://github.com/pretix/pretix>.
#
# This file contains Apache-licensed contributions copyrighted by: Christian Franke, Daniel, Heok Hong Low, Jakob
# Schnell, Maico Timmerman, Sohalt, Tobias Kunze, Ture Gjørup, jasonwaiting@live.hk
#
# Unless required by applicable law or agreed to in writing, software distributed under the Apache License 2.0 is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under the License.
import json
import operator
import re
from collections import OrderedDict
from decimal import Decimal
from itertools import groupby
from urllib.parse import urlsplit
from django.conf import settings
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from django.core.files import File
from django.db import transaction
from django.db.models import ProtectedError
from django.forms import inlineformset_factory
from django.http import (
Http404, HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed,
JsonResponse,
)
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import gettext, gettext_lazy as _
from django.views.generic import DeleteView, FormView, ListView
from django.views.generic.base import TemplateView, View
from django.views.generic.detail import SingleObjectMixin
from i18nfield.strings import LazyI18nString
from i18nfield.utils import I18nJSONEncoder
from pytz import timezone
from pretix.base.channels import get_all_sales_channels
from pretix.base.email import get_available_placeholders
from pretix.base.models import Event, LogEntry, Order, TaxRule, Voucher
from pretix.base.models.event import EventMetaValue
from pretix.base.services import tickets
from pretix.base.services.invoices import build_preview_invoice_pdf
from pretix.base.signals import register_ticket_outputs
from pretix.base.templatetags.rich_text import markdown_compile_email
from pretix.control.forms.event import (
CancelSettingsForm, CommentForm, ConfirmTextFormset, EventDeleteForm,
EventMetaValueForm, EventSettingsForm, EventUpdateForm,
InvoiceSettingsForm, ItemMetaPropertyForm, MailSettingsForm,
PaymentSettingsForm, ProviderForm, QuickSetupForm,
QuickSetupProductFormSet, TaxRuleForm, TaxRuleLineFormSet,
TicketSettingsForm, WidgetCodeForm,
)
from pretix.control.permissions import EventPermissionRequiredMixin
from pretix.control.views.user import RecentAuthenticationRequiredMixin
from pretix.helpers.database import rolledback_transaction
from pretix.multidomain.urlreverse import get_event_domain
from pretix.plugins.stripe.payment import StripeSettingsHolder
from pretix.presale.style import regenerate_css
from ...base.i18n import language
from ...base.models.items import (
Item, ItemCategory, ItemMetaProperty, Question, Quota,
)
from ...base.settings import SETTINGS_AFFECTING_CSS, LazyI18nStringList
from ..logdisplay import OVERVIEW_BANLIST
from . import CreateView, PaginationMixin, UpdateView
class EventSettingsViewMixin:
    """Flags the rendered page as an event-settings page for the templates."""

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['is_event_settings'] = True
        return context
class MetaDataEditorMixin:
    """Provides one bound form per organizer-defined meta property, so views
    can edit an event's meta values alongside their main form."""
    meta_form = EventMetaValueForm
    meta_model = EventMetaValue

    @cached_property
    def meta_forms(self):
        # Existing meta values of the edited event, keyed by property id.
        existing = {}
        if hasattr(self, 'object') and self.object:
            existing = {v.property_id: v for v in self.object.meta_values.all()}
        return [
            self._make_meta_form(prop, existing)
            for prop in self.request.organizer.meta_properties.all()
        ]

    def _make_meta_form(self, p, val_instances):
        # Protected properties are read-only unless the user may change
        # organizer settings.  The permission check is short-circuited for
        # unprotected properties.
        is_disabled = p.protected and not self.request.user.has_organizer_permission(
            self.request.organizer, 'can_change_organizer_settings', request=self.request
        )
        fallback_instance = self.meta_model(property=p, event=self.object)
        return self.meta_form(
            prefix='prop-{}'.format(p.pk),
            property=p,
            disabled=is_disabled,
            instance=val_instances.get(p.pk, fallback_instance),
            data=(self.request.POST if self.request.method == "POST" else None)
        )

    def save_meta(self):
        # A non-empty value is saved; an emptied value deletes the stored row.
        for form in self.meta_forms:
            if form.cleaned_data.get('value'):
                form.save()
            elif form.instance and form.instance.pk:
                form.instance.delete()
class DecoupleMixin:
    """Copies organizer-level settings down to the event level ("decoupling")
    for the setting names the user requested via the ``decouple`` POST
    parameter."""

    def _save_decoupled(self, form):
        # Each "decouple" POST value may itself be a comma-separated list of
        # setting names; flatten them into a single set.
        requested = {
            name
            for raw in self.request.POST.getlist("decouple")
            for name in raw.split(",")
        }
        settings = self.request.event.settings
        for name in requested:
            # Only decouple settings this form actually manages, and only if
            # the event does not already carry its own value.
            if name in form.fields and name not in settings._cache():
                settings.set(name, settings.get(name))
class EventUpdate(DecoupleMixin, EventSettingsViewMixin, EventPermissionRequiredMixin, MetaDataEditorMixin, UpdateView):
    """General event settings page.

    Combines the core event model form (``EventUpdateForm``), the settings
    form (``sform``), the per-property meta forms and two inline formsets
    (item meta properties and confirmation texts); all of them are validated
    and saved together in one transaction.
    """
    model = Event
    form_class = EventUpdateForm
    template_name = 'pretixcontrol/event/settings.html'
    permission = 'can_change_event_settings'

    @cached_property
    def object(self) -> Event:
        # The edited object is always the event from the URL, never a pk lookup.
        return self.request.event

    def get_object(self, queryset=None) -> Event:
        return self.object

    @cached_property
    def sform(self):
        # Settings form, handled separately from UpdateView's model form.
        return EventSettingsForm(
            obj=self.object,
            prefix='settings',
            data=self.request.POST if self.request.method == 'POST' else None,
            files=self.request.FILES if self.request.method == 'POST' else None,
        )

    def get_context_data(self, *args, **kwargs) -> dict:
        context = super().get_context_data(*args, **kwargs)
        context['sform'] = self.sform
        context['meta_forms'] = self.meta_forms
        context['item_meta_property_formset'] = self.item_meta_property_formset
        context['confirm_texts_formset'] = self.confirm_texts_formset
        return context

    @transaction.atomic
    def form_valid(self, form):
        # Save all sub-forms first, then write log entries and trigger
        # cache/CSS regeneration only for what actually changed.
        self._save_decoupled(self.sform)
        self.sform.save()
        self.save_meta()
        self.save_item_meta_property_formset(self.object)
        self.save_confirm_texts_formset(self.object)
        change_css = False
        if self.sform.has_changed() or self.confirm_texts_formset.has_changed():
            data = {k: self.request.event.settings.get(k) for k in self.sform.changed_data}
            if self.confirm_texts_formset.has_changed():
                data.update(confirm_texts=self.confirm_texts_formset.cleaned_data)
            self.request.event.log_action('pretix.event.settings', user=self.request.user, data=data)
        # Regenerate the presale CSS only if a style-affecting setting changed.
        if any(p in self.sform.changed_data for p in SETTINGS_AFFECTING_CSS):
            change_css = True
        if form.has_changed():
            # Uploaded files are logged by name only, not by content.
            self.request.event.log_action('pretix.event.changed', user=self.request.user, data={
                k: (form.cleaned_data.get(k).name
                    if isinstance(form.cleaned_data.get(k), File)
                    else form.cleaned_data.get(k))
                for k in form.changed_data
            })
        tickets.invalidate_cache.apply_async(kwargs={'event': self.request.event.pk})
        if change_css:
            regenerate_css.apply_async(args=(self.request.event.pk,))
            messages.success(self.request, _('Your changes have been saved. Please note that it can '
                                             'take a short period of time until your changes become '
                                             'active.'))
        else:
            messages.success(self.request, _('Your changes have been saved.'))
        return super().form_valid(form)

    def get_success_url(self) -> str:
        return reverse('control:event.settings', kwargs={
            'organizer': self.object.organizer.slug,
            'event': self.object.slug,
        })

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        # Only users in an active staff session may change the slug or domain.
        if self.request.user.has_active_staff_session(self.request.session.session_key):
            kwargs['change_slug'] = True
            kwargs['domain'] = True
        return kwargs

    def post(self, request, *args, **kwargs):
        form = self.get_form()
        # All forms and formsets must validate before anything is saved.
        if form.is_valid() and self.sform.is_valid() and all([f.is_valid() for f in self.meta_forms]) and \
                self.item_meta_property_formset.is_valid() and self.confirm_texts_formset.is_valid():
            # reset timezone: reinterpret the entered wall-clock datetimes in
            # the (possibly changed) event timezone before saving.
            zone = timezone(self.sform.cleaned_data['timezone'])
            event = form.instance
            event.date_from = self.reset_timezone(zone, event.date_from)
            event.date_to = self.reset_timezone(zone, event.date_to)
            event.presale_start = self.reset_timezone(zone, event.presale_start)
            event.presale_end = self.reset_timezone(zone, event.presale_end)
            return self.form_valid(form)
        else:
            messages.error(self.request, _('We could not save your changes. See below for details.'))
            return self.form_invalid(form)

    @staticmethod
    def reset_timezone(tz, dt):
        # Strip the old tzinfo and localize the naive value in the new zone.
        return tz.localize(dt.replace(tzinfo=None)) if dt is not None else None

    @cached_property
    def item_meta_property_formset(self):
        formsetclass = inlineformset_factory(
            Event, ItemMetaProperty,
            form=ItemMetaPropertyForm, can_order=False, can_delete=True, extra=0
        )
        return formsetclass(self.request.POST if self.request.method == "POST" else None, prefix="item-meta-property",
                            instance=self.object, queryset=self.object.item_meta_properties.all())

    def save_item_meta_property_formset(self, obj):
        # Existing rows: delete if marked, save if changed.
        for form in self.item_meta_property_formset.initial_forms:
            if form in self.item_meta_property_formset.deleted_forms:
                if not form.instance.pk:
                    continue
                form.instance.delete()
                form.instance.pk = None
            elif form.has_changed():
                form.save()
        # New rows: save unless empty or marked for deletion.
        for form in self.item_meta_property_formset.extra_forms:
            if not form.has_changed():
                continue
            if self.item_meta_property_formset._should_delete_form(form):
                continue
            form.instance.event = obj
            form.save()

    @cached_property
    def confirm_texts_formset(self):
        # Confirmation texts are stored as an ordered list in event settings,
        # not as model rows; the ORDER field preserves their sequence.
        initial = [{"text": text, "ORDER": order} for order, text in
                   enumerate(self.object.settings.get("confirm_texts", as_type=LazyI18nStringList))]
        return ConfirmTextFormset(self.request.POST if self.request.method == "POST" else None, event=self.object,
                                  prefix="confirm-texts", initial=initial)

    def save_confirm_texts_formset(self, obj):
        # Rewrite the whole list: deleted entries dropped, rest sorted by ORDER.
        obj.settings.confirm_texts = LazyI18nStringList(
            form_data['text'].data
            for form_data in sorted(self.confirm_texts_formset.cleaned_data, key=operator.itemgetter("ORDER"))
            if not form_data.get("DELETE", False)
        )
class EventPlugins(EventSettingsViewMixin, EventPermissionRequiredMixin, TemplateView, SingleObjectMixin):
    """Enable or disable plugins for an event, grouped by plugin category."""
    model = Event
    context_object_name = 'event'
    permission = 'can_change_event_settings'
    template_name = 'pretixcontrol/event/plugins.html'

    def get_object(self, queryset=None) -> Event:
        return self.request.event

    def get_context_data(self, *args, **kwargs) -> dict:
        from pretix.base.plugins import get_all_plugins

        context = super().get_context_data(*args, **kwargs)
        # Hidden plugins (name starting with ".") and explicitly invisible
        # plugins are never shown in the UI.
        plugins = [p for p in get_all_plugins(self.object) if not p.name.startswith('.')
                   and getattr(p, 'visible', True)]
        # Fixed display order of the known plugin categories.
        order = [
            'FEATURE',
            'PAYMENT',
            'INTEGRATION',
            'CUSTOMIZATION',
            'FORMAT',
            'API',
        ]
        labels = {
            'FEATURE': _('Features'),
            'PAYMENT': _('Payment providers'),
            'INTEGRATION': _('Integrations'),
            'CUSTOMIZATION': _('Customizations'),
            'FORMAT': _('Output and export formats'),
            'API': _('API features'),
        }
        # Group plugins by category; known categories come first in the fixed
        # order, unknown categories are sorted alphabetically at the end (999).
        context['plugins'] = sorted([
            (c, labels.get(c, c), list(plist))
            for c, plist
            in groupby(
                sorted(plugins, key=lambda p: str(getattr(p, 'category', _('Other')))),
                lambda p: str(getattr(p, 'category', _('Other')))
            )
        ], key=lambda c: (order.index(c[0]), c[1]) if c[0] in order else (999, str(c[1])))
        context['plugins_active'] = self.object.get_plugins()
        return context

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        context = self.get_context_data(object=self.object)
        return self.render_to_response(context)

    def post(self, request, *args, **kwargs):
        from pretix.base.plugins import get_all_plugins

        self.object = self.get_object()
        plugins_available = {
            p.module: p for p in get_all_plugins(self.object)
            if not p.name.startswith('.') and getattr(p, 'visible', True)
        }

        with transaction.atomic():
            # Restricted plugins may only be toggled from an active staff session.
            allow_restricted = request.user.has_active_staff_session(request.session.session_key)
            for key, value in request.POST.items():
                # Plugin toggles are submitted as "plugin:<module>" = enable/disable.
                if key.startswith("plugin:"):
                    module = key.split(":")[1]
                    if value == "enable" and module in plugins_available:
                        if getattr(plugins_available[module], 'restricted', False):
                            if not allow_restricted:
                                continue

                        self.request.event.log_action('pretix.event.plugins.enabled', user=self.request.user,
                                                      data={'plugin': module})
                        self.object.enable_plugin(module, allow_restricted=allow_restricted)
                    else:
                        self.request.event.log_action('pretix.event.plugins.disabled', user=self.request.user,
                                                      data={'plugin': module})
                        self.object.disable_plugin(module)
            self.object.save()
        messages.success(self.request, _('Your changes have been saved.'))
        return redirect(self.get_success_url())

    def get_success_url(self) -> str:
        return reverse('control:event.settings.plugins', kwargs={
            'organizer': self.get_object().organizer.slug,
            'event': self.get_object().slug,
        })
class PaymentProviderSettings(EventSettingsViewMixin, EventPermissionRequiredMixin, TemplateView, SingleObjectMixin):
    """Settings page for a single payment provider, selected via the
    ``provider`` URL keyword argument."""
    model = Event
    context_object_name = 'event'
    permission = 'can_change_event_settings'
    template_name = 'pretixcontrol/event/payment_provider.html'

    def get_success_url(self) -> str:
        return reverse('control:event.settings.payment', kwargs={
            'organizer': self.get_object().organizer.slug,
            'event': self.get_object().slug,
        })

    @cached_property
    def object(self):
        return self.request.event

    def get_object(self, queryset=None):
        return self.object

    @cached_property
    def provider(self):
        # May be None for an unknown identifier or a disabled plugin;
        # dispatch() handles that case with an error message and a redirect.
        provider = self.request.event.get_payment_providers().get(self.kwargs['provider'])
        return provider

    @cached_property
    def form(self):
        form = ProviderForm(
            obj=self.request.event,
            settingspref=self.provider.settings.get_prefix(),
            data=(self.request.POST if self.request.method == 'POST' else None),
            files=(self.request.FILES if self.request.method == 'POST' else None),
            provider=self.provider
        )
        # The provider supplies its own settings fields; prefix each field
        # name so values land in the provider's settings namespace.
        form.fields = OrderedDict(
            [
                ('%s%s' % (self.provider.settings.get_prefix(), k), v)
                for k, v in self.provider.settings_form_fields.items()
            ]
        )
        form.prepare_fields()
        return form

    def dispatch(self, request, *args, **kwargs):
        if not self.provider:
            messages.error(self.request, _('This payment provider does not exist or the respective plugin is '
                                           'disabled.'))
            return redirect(self.get_success_url())
        return super().dispatch(request, *args, **kwargs)

    @cached_property
    def settings_content(self):
        # Provider-specific extra HTML rendered below the form.
        return self.provider.settings_content_render(self.request)

    def get_context_data(self, *args, **kwargs) -> dict:
        context = super().get_context_data(*args, **kwargs)
        context['form'] = self.form
        context['provider'] = self.provider
        context['settings_content'] = self.settings_content
        return context

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        if self.form.is_valid():
            if self.form.has_changed():
                # Log only the changed settings values.
                self.request.event.log_action(
                    'pretix.event.payment.provider.' + self.provider.identifier, user=self.request.user, data={
                        k: self.form.cleaned_data.get(k) for k in self.form.changed_data
                    }
                )
                self.form.save()
            messages.success(self.request, _('Your changes have been saved.'))
            return redirect(self.get_success_url())
        else:
            messages.error(self.request, _('We could not save your changes. See below for details.'))
            return self.get(request)
class EventSettingsFormView(EventPermissionRequiredMixin, DecoupleMixin, FormView):
    """Base class for event settings pages backed by a single settings form;
    subclasses provide ``form_class``, ``template_name`` and the success URL."""
    model = Event
    permission = 'can_change_event_settings'

    def get_context_data(self, *args, **kwargs) -> dict:
        context = super().get_context_data(*args, **kwargs)
        return context

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        # Settings forms are bound to the event object, not a model instance.
        kwargs['obj'] = self.request.event
        return kwargs

    def form_success(self):
        # Hook for subclasses, called after a successful save and before the
        # redirect; the default does nothing.
        pass

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        form = self.get_form()
        if form.is_valid():
            form.save()
            self._save_decoupled(form)
            if form.has_changed():
                # Log changed values; uploaded files are logged by name only.
                self.request.event.log_action(
                    'pretix.event.settings', user=self.request.user, data={
                        k: (form.cleaned_data.get(k).name
                            if isinstance(form.cleaned_data.get(k), File)
                            else form.cleaned_data.get(k))
                        for k in form.changed_data
                    }
                )
            self.form_success()
            messages.success(self.request, _('Your changes have been saved.'))
            return redirect(self.get_success_url())
        else:
            messages.error(self.request, _('We could not save your changes. See below for details.'))
            return self.render_to_response(self.get_context_data(form=form))
class PaymentSettings(EventSettingsViewMixin, EventSettingsFormView):
    """Global payment settings plus the list of configurable payment providers."""
    template_name = 'pretixcontrol/event/payment.html'
    form_class = PaymentSettingsForm
    permission = 'can_change_event_settings'

    def get_success_url(self) -> str:
        return reverse('control:event.settings.payment', kwargs={
            'organizer': self.request.organizer.slug,
            'event': self.request.event.slug,
        })

    def get_context_data(self, *args, **kwargs) -> dict:
        context = super().get_context_data(*args, **kwargs)
        # List only configurable providers: implicit providers are hidden
        # (is_implicit may be a property or a callable taking the request),
        # as are providers without any settings form or rendered content.
        context['providers'] = sorted(
            [p for p in self.request.event.get_payment_providers().values()
             if not (p.is_implicit(self.request) if callable(p.is_implicit) else p.is_implicit) and
             (p.settings_form_fields or p.settings_content_render(self.request))],
            key=lambda s: s.verbose_name
        )
        sales_channels = get_all_sales_channels()
        for p in context['providers']:
            p.show_enabled = p.is_enabled
            # Sales channels this provider is restricted to; defaults to 'web'.
            p.sales_channels = [sales_channels[channel] for channel in p.settings.get('_restrict_to_sales_channels', as_type=list, default=['web'])]
            if p.is_meta:
                # Meta providers track their enabled state in settings only.
                p.show_enabled = p.settings._enabled in (True, 'True')
        return context
class InvoiceSettings(EventSettingsViewMixin, EventSettingsFormView):
    """Invoice configuration page for an event."""
    model = Event
    form_class = InvoiceSettingsForm
    template_name = 'pretixcontrol/event/invoicing.html'
    permission = 'can_change_event_settings'

    def get_success_url(self) -> str:
        url_kwargs = {
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        }
        # The "preview" submit button leads to the invoice PDF preview
        # instead of back to the settings page.
        if 'preview' in self.request.POST:
            return reverse('control:event.settings.invoice.preview', kwargs=url_kwargs)
        return reverse('control:event.settings.invoice', kwargs=url_kwargs)
class CancelSettings(EventSettingsViewMixin, EventSettingsFormView):
    """Settings controlling customer-initiated cancellations and refunds."""
    model = Event
    form_class = CancelSettingsForm
    template_name = 'pretixcontrol/event/cancel.html'
    permission = 'can_change_event_settings'

    def get_success_url(self) -> str:
        return reverse('control:event.settings.cancel', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()
        # Whether the current user would receive "refund requested"
        # notifications for this event: notifications must be enabled
        # globally for the user AND there is either an explicit per-event
        # opt-in, or a global opt-in that is not overridden by a per-event
        # opt-out.
        ctx['gets_notification'] = self.request.user.notifications_send and (
            (
                self.request.user.notification_settings.filter(
                    event=self.request.event,
                    action_type='pretix.event.order.refund.requested',
                    enabled=True
                ).exists()
            ) or (
                self.request.user.notification_settings.filter(
                    event__isnull=True,
                    action_type='pretix.event.order.refund.requested',
                    enabled=True
                ).exists() and not
                self.request.user.notification_settings.filter(
                    event=self.request.event,
                    action_type='pretix.event.order.refund.requested',
                    enabled=False
                ).exists()
            )
        )
        return ctx
class InvoicePreview(EventPermissionRequiredMixin, View):
    # Serves a rendered sample invoice PDF as a file download.
    permission = 'can_change_event_settings'

    def get(self, request, *args, **kwargs):
        filename, content_type, content = build_preview_invoice_pdf(request.event)
        response = HttpResponse(content, content_type=content_type)
        response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
        return response
class DangerZone(EventPermissionRequiredMixin, TemplateView):
    # Static "danger zone" page listing destructive event operations.
    permission = 'can_change_event_settings'
    template_name = 'pretixcontrol/event/dangerzone.html'
class DisplaySettings(View):
    # Legacy URL: redirects to the display tab of the general settings page.
    def get(self, request, *args, **kwargs):
        # Fixed: the parameter was misspelled '*wargs'; use the conventional
        # '*args' so positional URL arguments are captured as expected.
        # The '#tab-0-3-open' fragment opens the display settings tab.
        return redirect(reverse('control:event.settings', kwargs={
            'organizer': request.event.organizer.slug,
            'event': request.event.slug
        }) + '#tab-0-3-open')
class MailSettings(EventSettingsViewMixin, EventSettingsFormView):
    # E-mail settings page; saving may also trigger a test connection to a
    # custom SMTP server when the "test" flag is posted.
    model = Event
    form_class = MailSettingsForm
    template_name = 'pretixcontrol/event/mail.html'
    permission = 'can_change_event_settings'

    def get_success_url(self) -> str:
        return reverse('control:event.settings.mail', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        # Available HTML renderers for the preview selector in the template.
        ctx['renderers'] = self.request.event.get_html_mail_renderers()
        return ctx

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        form = self.get_form()
        if form.is_valid():
            form.save()
            if form.has_changed():
                # Audit-log only the fields that actually changed.
                self.request.event.log_action(
                    'pretix.event.settings', user=self.request.user, data={
                        k: form.cleaned_data.get(k) for k in form.changed_data
                    }
                )
            if request.POST.get('test', '0').strip() == '1':
                # "Save and test": try to reach the configured SMTP server
                # even if the custom server is not enabled yet.
                backend = self.request.event.get_mail_backend(force_custom=True, timeout=10)
                try:
                    backend.test(self.request.event.settings.mail_from)
                except Exception as e:
                    messages.warning(self.request, _('An error occurred while contacting the SMTP server: %s') % str(e))
                else:
                    if form.cleaned_data.get('smtp_use_custom'):
                        messages.success(self.request, _('Your changes have been saved and the connection attempt to '
                                                         'your SMTP server was successful.'))
                    else:
                        messages.success(self.request, _('We\'ve been able to contact the SMTP server you configured. '
                                                         'Remember to check the "use custom SMTP server" checkbox, '
                                                         'otherwise your SMTP server will not be used.'))
            else:
                messages.success(self.request, _('Your changes have been saved.'))
            return redirect(self.get_success_url())
        else:
            messages.error(self.request, _('We could not save your changes. See below for details.'))
            return self.get(request)
class MailSettingsPreview(EventPermissionRequiredMixin, View):
    # Renders live Markdown previews of e-mail texts for every enabled locale.
    permission = 'can_change_event_settings'

    # return the origin text if key is missing in dict
    class SafeDict(dict):
        def __missing__(self, key):
            return '{' + key + '}'

    # create index-language mapping
    @cached_property
    def supported_locale(self):
        locales = {}
        for idx, val in enumerate(settings.LANGUAGES):
            if val[0] in self.request.event.settings.locales:
                locales[str(idx)] = val[0]
        return locales

    # get all supported placeholders with dummy values
    def placeholders(self, item):
        ctx = {}
        for p in get_available_placeholders(self.request.event, MailSettingsForm.base_context[item]).values():
            s = str(p.render_sample(self.request.event))
            if s.strip().startswith('*'):
                ctx[p.identifier] = s
            else:
                # Highlight dynamic values so they stand out in the preview.
                ctx[p.identifier] = '<span class="placeholder" title="{}">{}</span>'.format(
                    _('This value will be replaced based on dynamic parameters.'),
                    s
                )
        return self.SafeDict(ctx)

    def post(self, request, *args, **kwargs):
        preview_item = request.POST.get('item', '')
        if preview_item not in MailSettingsForm.base_context:
            return HttpResponseBadRequest(_('invalid item'))
        # Bug fix: the previous pattern used the character class [\d+], which
        # matches exactly ONE digit (or a literal '+') — multi-digit locale
        # indices such as '12' were silently ignored. \d+ matches them all.
        regex = r"^" + re.escape(preview_item) + r"_(?P<idx>\d+)$"
        msgs = {}
        for k, v in request.POST.items():
            # only accept allowed fields
            matched = re.search(regex, k)
            if matched is not None:
                idx = matched.group('idx')
                if idx in self.supported_locale:
                    with language(self.supported_locale[idx], self.request.event.settings.region):
                        msgs[self.supported_locale[idx]] = markdown_compile_email(
                            v.format_map(self.placeholders(preview_item))
                        )
        return JsonResponse({
            'item': preview_item,
            'msgs': msgs
        })
class MailSettingsRendererPreview(MailSettingsPreview):
    # Renders a full HTML preview of the order-placed e-mail using a chosen
    # HTML renderer and a throw-away sample order.
    permission = 'can_change_event_settings'

    def post(self, request, *args, **kwargs):
        # This endpoint is GET-only; disable the parent's POST preview.
        return HttpResponse(status=405)

    # get all supported placeholders with dummy values
    def placeholders(self, item):
        # Unlike the parent class, plain values are returned without HTML
        # highlighting, since the output is rendered as a real e-mail body.
        ctx = {}
        for p in get_available_placeholders(self.request.event, MailSettingsForm.base_context[item]).values():
            ctx[p.identifier] = str(p.render_sample(self.request.event))
        return ctx

    def get(self, request, *args, **kwargs):
        v = str(request.event.settings.mail_text_order_placed)
        v = v.format_map(self.placeholders('mail_text_order_placed'))
        renderers = request.event.get_html_mail_renderers()
        if request.GET.get('renderer') in renderers:
            # Build a sample order inside a transaction that is always rolled
            # back, so no preview data is ever persisted.
            with rolledback_transaction():
                order = request.event.orders.create(status=Order.STATUS_PENDING, datetime=now(),
                                                    expires=now(), code="PREVIEW", total=119)
                item = request.event.items.create(name=gettext("Sample product"), default_price=42.23,
                                                  description=gettext("Sample product description"))
                order.positions.create(item=item, attendee_name_parts={'_legacy': gettext("John Doe")},
                                       price=item.default_price, subevent=request.event.subevents.last())
                v = renderers[request.GET.get('renderer')].render(
                    v,
                    str(request.event.settings.mail_text_signature),
                    gettext('Your order: %(code)s') % {'code': order.code},
                    order,
                    position=None
                )
                r = HttpResponse(v, content_type='text/html')
                # Allow inline e-mail styles to render by skipping CSP.
                r._csp_ignore = True
                return r
        else:
            raise Http404(_('Unknown e-mail renderer.'))
class TicketSettingsPreview(EventPermissionRequiredMixin, View):
    # Streams a preview file generated by one specific ticket output provider.
    permission = 'can_change_event_settings'

    @cached_property
    def output(self):
        # Ask every registered ticket output plugin for an instance and pick
        # the one matching the identifier from the URL; None if no match.
        for receiver, response in register_ticket_outputs.send(self.request.event):
            provider = response(self.request.event)
            if provider.identifier == self.kwargs.get('output'):
                return provider

    def get(self, request, *args, **kwargs):
        if not self.output:
            messages.error(request, _('You requested an invalid ticket output type.'))
            return redirect(self.get_error_url())
        fname, mimetype, data = tickets.preview(self.request.event.pk, self.output.identifier)
        response = HttpResponse(data, content_type=mimetype)
        extension = fname.split(".")[-1]
        response['Content-Disposition'] = 'attachment; filename="ticket-preview.{}"'.format(extension)
        return response

    def get_error_url(self) -> str:
        return reverse('control:event.settings.tickets', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })
class TicketSettings(EventSettingsViewMixin, EventPermissionRequiredMixin, FormView):
    # Ticket download settings: one global form plus one sub-form per
    # registered ticket output provider plugin, saved atomically together.
    model = Event
    form_class = TicketSettingsForm
    template_name = 'pretixcontrol/event/tickets.html'
    permission = 'can_change_event_settings'

    def get_context_data(self, *args, **kwargs) -> dict:
        context = super().get_context_data(*args, **kwargs)
        context['providers'] = self.provider_forms
        context['any_enabled'] = False
        # Flag whether at least one output provider is active, so the template
        # can warn when downloads are on but no provider would produce files.
        responses = register_ticket_outputs.send(self.request.event)
        for receiver, response in responses:
            provider = response(self.request.event)
            if provider.is_enabled:
                context['any_enabled'] = True
                break
        return context

    def get_success_url(self) -> str:
        return reverse('control:event.settings.tickets', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['obj'] = self.request.event
        return kwargs

    def get_form(self, form_class=None):
        form = super().get_form(form_class)
        form.prepare_fields()
        return form

    def form_invalid(self, form):
        messages.error(self.request, _('We could not save your changes. See below for details.'))
        return super().form_invalid(form)

    @transaction.atomic
    def post(self, request, *args, **kwargs):
        # Validate and save all provider forms first; only if all of them plus
        # the main form validate does the transaction commit.
        success = True
        for provider in self.provider_forms:
            if provider.form.is_valid():
                provider.form.save()
                if provider.form.has_changed():
                    self.request.event.log_action(
                        'pretix.event.tickets.provider.' + provider.identifier, user=self.request.user, data={
                            k: (provider.form.cleaned_data.get(k).name
                                if isinstance(provider.form.cleaned_data.get(k), File)
                                else provider.form.cleaned_data.get(k))
                            for k in provider.form.changed_data
                        }
                    )
                    # Settings changed, so previously rendered tickets are stale.
                    tickets.invalidate_cache.apply_async(kwargs={'event': self.request.event.pk, 'provider': provider.identifier})
            else:
                success = False
        form = self.get_form(self.get_form_class())
        if success and form.is_valid():
            form.save()
            if form.has_changed():
                self.request.event.log_action(
                    'pretix.event.tickets.settings', user=self.request.user, data={
                        k: form.cleaned_data.get(k) for k in form.changed_data
                    }
                )
            messages.success(self.request, _('Your changes have been saved.'))
            return redirect(self.get_success_url())
        else:
            return self.form_invalid(form)

    @cached_property
    def provider_forms(self) -> list:
        # Build one settings form per ticket output provider, prefixing field
        # names with the provider identifier so they can share one POST body.
        providers = []
        responses = register_ticket_outputs.send(self.request.event)
        for receiver, response in responses:
            provider = response(self.request.event)
            provider.form = ProviderForm(
                obj=self.request.event,
                settingspref='ticketoutput_%s_' % provider.identifier,
                data=(self.request.POST if self.request.method == 'POST' else None),
                files=(self.request.FILES if self.request.method == 'POST' else None)
            )
            provider.form.fields = OrderedDict(
                [
                    ('ticketoutput_%s_%s' % (provider.identifier, k), v)
                    for k, v in provider.settings_form_fields.items()
                ]
            )
            provider.settings_content = provider.settings_content_render(self.request)
            provider.form.prepare_fields()
            # Previews are allowed only when the provider permits them and all
            # of its required settings already have a value.
            provider.evaluated_preview_allowed = True
            if not provider.preview_allowed:
                provider.evaluated_preview_allowed = False
            else:
                for k, v in provider.settings_form_fields.items():
                    if v.required and not self.request.event.settings.get('ticketoutput_%s_%s' % (provider.identifier, k)):
                        provider.evaluated_preview_allowed = False
                        break
            providers.append(provider)
        return providers
class EventPermissions(EventSettingsViewMixin, EventPermissionRequiredMixin, TemplateView):
    # Read-only overview of team permissions for this event.
    template_name = 'pretixcontrol/event/permissions.html'
class EventLive(EventPermissionRequiredMixin, TemplateView):
    # Take the shop live / down and toggle test mode for the event.
    permission = 'can_change_event_settings'
    template_name = 'pretixcontrol/event/live.html'

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        # Issues that block going live, and whether real (non-test) orders
        # already exist.
        ctx['issues'] = self.request.event.live_issues
        ctx['actual_orders'] = self.request.event.orders.filter(testmode=False).exists()
        return ctx

    def post(self, request, *args, **kwargs):
        # Exactly one of four transitions per request, chosen from POST flags.
        if request.POST.get("live") == "true" and not self.request.event.live_issues:
            with transaction.atomic():
                request.event.live = True
                request.event.save()
                self.request.event.log_action(
                    'pretix.event.live.activated', user=self.request.user, data={}
                )
            messages.success(self.request, _('Your shop is live now!'))
        elif request.POST.get("live") == "false":
            with transaction.atomic():
                request.event.live = False
                request.event.save()
                self.request.event.log_action(
                    'pretix.event.live.deactivated', user=self.request.user, data={}
                )
            messages.success(self.request, _('We\'ve taken your shop down. You can re-enable it whenever you want!'))
        elif request.POST.get("testmode") == "true":
            with transaction.atomic():
                request.event.testmode = True
                request.event.save()
                self.request.event.log_action(
                    'pretix.event.testmode.activated', user=self.request.user, data={}
                )
            messages.success(self.request, _('Your shop is now in test mode!'))
        elif request.POST.get("testmode") == "false":
            with transaction.atomic():
                request.event.testmode = False
                request.event.save()
                self.request.event.log_action(
                    'pretix.event.testmode.deactivated', user=self.request.user, data={
                        'delete': (request.POST.get("delete") == "yes")
                    }
                )
            request.event.cache.delete('complain_testmode_orders')
            if request.POST.get("delete") == "yes":
                # Best-effort cleanup of test orders; a single protected order
                # aborts the whole deletion transaction.
                try:
                    with transaction.atomic():
                        for order in request.event.orders.filter(testmode=True):
                            order.gracefully_delete(user=self.request.user)
                except ProtectedError:
                    messages.error(self.request, _('An order could not be deleted as some constraints (e.g. data '
                                                   'created by plug-ins) do not allow it.'))
                else:
                    request.event.cache.set('complain_testmode_orders', False, 30)
            # Carts from test mode are meaningless once test mode is off;
            # add-ons are removed first.
            request.event.cartposition_set.filter(addon_to__isnull=False).delete()
            request.event.cartposition_set.all().delete()
            messages.success(self.request, _('We\'ve disabled test mode for you. Let\'s sell some real tickets!'))
        return redirect(self.get_success_url())

    def get_success_url(self) -> str:
        return reverse('control:event.live', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })
class EventDelete(RecentAuthenticationRequiredMixin, EventPermissionRequiredMixin, FormView):
    # Irreversible event deletion; requires a recent login.
    permission = 'can_change_event_settings'
    template_name = 'pretixcontrol/event/delete.html'
    form_class = EventDeleteForm

    def post(self, request, *args, **kwargs):
        if not self.request.event.allow_delete():
            messages.error(self.request, _('This event can not be deleted.'))
            return self.get(self.request, *self.args, **self.kwargs)
        return super().post(request, *args, **kwargs)

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['event'] = self.request.event
        return kwargs

    def form_valid(self, form):
        try:
            with transaction.atomic():
                # Log on the organizer, since the event's own log entries are
                # deleted together with the event.
                self.request.organizer.log_action(
                    'pretix.event.deleted', user=self.request.user,
                    data={
                        'event_id': self.request.event.pk,
                        'name': str(self.request.event.name),
                        'slug': self.request.event.slug,
                        'logentries': list(self.request.event.logentry_set.values_list('pk', flat=True))
                    }
                )
                self.request.event.delete_sub_objects()
                self.request.event.delete()
            messages.success(self.request, _('The event has been deleted.'))
            return redirect(self.get_success_url())
        except ProtectedError:
            messages.error(self.request, _('The event could not be deleted as some constraints (e.g. data created by '
                                           'plug-ins) do not allow it.'))
            return self.get(self.request, *self.args, **self.kwargs)

    def get_success_url(self) -> str:
        return reverse('control:index')
class EventLog(EventPermissionRequiredMixin, PaginationMixin, ListView):
    # Audit log browser for an event, filtered by the user's permissions.
    template_name = 'pretixcontrol/event/logs.html'
    model = LogEntry
    context_object_name = 'logs'

    def get_queryset(self):
        qs = self.request.event.logentry_set.all().select_related(
            'user', 'content_type', 'api_token', 'oauth_application', 'device'
        ).order_by('-datetime')
        qs = qs.exclude(action_type__in=OVERVIEW_BANLIST)
        # Hide order/voucher entries from users lacking the matching view
        # permission.
        if not self.request.user.has_event_permission(self.request.organizer, self.request.event, 'can_view_orders',
                                                      request=self.request):
            qs = qs.exclude(content_type=ContentType.objects.get_for_model(Order))
        if not self.request.user.has_event_permission(self.request.organizer, self.request.event, 'can_view_vouchers',
                                                      request=self.request):
            qs = qs.exclude(content_type=ContentType.objects.get_for_model(Voucher))
        if not self.request.user.has_event_permission(self.request.organizer, self.request.event,
                                                      'can_change_event_settings', request=self.request):
            # Without settings permission, restrict to a whitelist of content
            # types (extended if the user may change items).
            allowed_types = [
                ContentType.objects.get_for_model(Voucher),
                ContentType.objects.get_for_model(Order)
            ]
            if self.request.user.has_event_permission(self.request.organizer, self.request.event,
                                                      'can_change_items', request=self.request):
                allowed_types += [
                    ContentType.objects.get_for_model(Item),
                    ContentType.objects.get_for_model(ItemCategory),
                    ContentType.objects.get_for_model(Quota),
                    ContentType.objects.get_for_model(Question),
                ]
            qs = qs.filter(content_type__in=allowed_types)
        # Optional filters from the query string: by user or device ('d-<id>'
        # selects a device), by content type, and by object id.
        if self.request.GET.get('user') == 'yes':
            qs = qs.filter(user__isnull=False)
        elif self.request.GET.get('user') == 'no':
            qs = qs.filter(user__isnull=True)
        elif self.request.GET.get('user', '').startswith('d-'):
            qs = qs.filter(device_id=self.request.GET.get('user')[2:])
        elif self.request.GET.get('user'):
            qs = qs.filter(user_id=self.request.GET.get('user'))
        if self.request.GET.get('content_type'):
            qs = qs.filter(content_type=get_object_or_404(ContentType, pk=self.request.GET.get('content_type')))
        if self.request.GET.get('object'):
            qs = qs.filter(object_id=self.request.GET.get('object'))
        return qs

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()
        # Distinct users and devices that appear in the log, for filter UIs.
        ctx['userlist'] = self.request.event.logentry_set.order_by().distinct().values('user__id', 'user__email')
        ctx['devicelist'] = self.request.event.logentry_set.order_by('device__name').distinct().values('device__id', 'device__name')
        return ctx
class EventComment(EventPermissionRequiredMixin, View):
    # POST-only endpoint that updates the internal comment of an event.
    permission = 'can_change_event_settings'

    def post(self, *args, **kwargs):
        form = CommentForm(self.request.POST)
        if not form.is_valid():
            messages.error(self.request, _('Could not update the comment.'))
            return redirect(self.get_success_url())
        comment = form.cleaned_data.get('comment')
        self.request.event.comment = comment
        self.request.event.save()
        # Keep an audit trail of comment changes.
        self.request.event.log_action('pretix.event.comment', user=self.request.user, data={
            'new_comment': comment
        })
        messages.success(self.request, _('The comment has been updated.'))
        return redirect(self.get_success_url())

    def get(self, *args, **kwargs):
        return HttpResponseNotAllowed(['POST'])

    def get_success_url(self) -> str:
        return reverse('control:event.index', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug
        })
class TaxList(EventSettingsViewMixin, EventPermissionRequiredMixin, PaginationMixin, ListView):
    # Paginated listing of all tax rules configured for this event.
    model = TaxRule
    context_object_name = 'taxrules'
    template_name = 'pretixcontrol/event/tax_index.html'
    permission = 'can_change_event_settings'

    def get_queryset(self):
        event = self.request.event
        return event.tax_rules.all()
class TaxCreate(EventSettingsViewMixin, EventPermissionRequiredMixin, CreateView):
    # Creation form for a new tax rule, including the custom-rules formset.
    model = TaxRule
    form_class = TaxRuleForm
    template_name = 'pretixcontrol/event/tax_edit.html'
    permission = 'can_change_event_settings'
    context_object_name = 'taxrule'

    def get_success_url(self) -> str:
        return reverse('control:event.settings.tax', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
        })

    def get_initial(self):
        return {
            'name': LazyI18nString.from_gettext(gettext('VAT'))
        }

    def post(self, request, *args, **kwargs):
        self.object = None
        form = self.get_form()
        # Both the main form and the custom-rules formset must validate.
        if form.is_valid() and self.formset.is_valid():
            return self.form_valid(form)
        else:
            return self.form_invalid(form)

    @cached_property
    def formset(self):
        return TaxRuleLineFormSet(
            data=self.request.POST if self.request.method == "POST" else None,
            event=self.request.event,
        )

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['formset'] = self.formset
        return ctx

    @transaction.atomic
    def form_valid(self, form):
        form.instance.event = self.request.event
        # Serialize the per-line custom rules into the model's JSON field.
        form.instance.custom_rules = json.dumps([
            f.cleaned_data for f in self.formset.ordered_forms if f not in self.formset.deleted_forms
        ], cls=I18nJSONEncoder)
        messages.success(self.request, _('The new tax rule has been created.'))
        ret = super().form_valid(form)
        form.instance.log_action('pretix.event.taxrule.added', user=self.request.user, data=dict(form.cleaned_data))
        return ret

    def form_invalid(self, form):
        messages.error(self.request, _('We could not save your changes. See below for details.'))
        return super().form_invalid(form)
class TaxUpdate(EventSettingsViewMixin, EventPermissionRequiredMixin, UpdateView):
    # Edit form for an existing tax rule, including the custom-rules formset.
    model = TaxRule
    form_class = TaxRuleForm
    template_name = 'pretixcontrol/event/tax_edit.html'
    permission = 'can_change_event_settings'
    context_object_name = 'rule'

    def get_object(self, queryset=None) -> TaxRule:
        try:
            return self.request.event.tax_rules.get(
                id=self.kwargs['rule']
            )
        except TaxRule.DoesNotExist:
            raise Http404(_("The requested tax rule does not exist."))

    def post(self, request, *args, **kwargs):
        self.object = self.get_object(self.get_queryset())
        form = self.get_form()
        # Both the main form and the custom-rules formset must validate.
        if form.is_valid() and self.formset.is_valid():
            return self.form_valid(form)
        else:
            return self.form_invalid(form)

    @cached_property
    def formset(self):
        # Prefill the formset from the stored custom rules JSON, if any.
        return TaxRuleLineFormSet(
            data=self.request.POST if self.request.method == "POST" else None,
            event=self.request.event,
            initial=json.loads(self.object.custom_rules) if self.object.custom_rules else []
        )

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['formset'] = self.formset
        return ctx

    @transaction.atomic
    def form_valid(self, form):
        messages.success(self.request, _('Your changes have been saved.'))
        form.instance.custom_rules = json.dumps([
            f.cleaned_data for f in self.formset.ordered_forms if f not in self.formset.deleted_forms
        ], cls=I18nJSONEncoder)
        if form.has_changed():
            # Audit-log only the fields that actually changed.
            self.object.log_action(
                'pretix.event.taxrule.changed', user=self.request.user, data={
                    k: form.cleaned_data.get(k) for k in form.changed_data
                }
            )
        return super().form_valid(form)

    def get_success_url(self) -> str:
        return reverse('control:event.settings.tax', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
        })

    def form_invalid(self, form):
        messages.error(self.request, _('We could not save your changes. See below for details.'))
        return super().form_invalid(form)
class TaxDelete(EventSettingsViewMixin, EventPermissionRequiredMixin, DeleteView):
    # Confirmation page + deletion of a tax rule; blocked while data
    # references the rule.
    model = TaxRule
    template_name = 'pretixcontrol/event/tax_delete.html'
    permission = 'can_change_event_settings'
    context_object_name = 'taxrule'

    def get_object(self, queryset=None) -> TaxRule:
        try:
            return self.request.event.tax_rules.get(
                id=self.kwargs['rule']
            )
        except TaxRule.DoesNotExist:
            raise Http404(_("The requested tax rule does not exist."))

    @transaction.atomic
    def delete(self, request, *args, **kwargs):
        self.object = self.get_object()
        success_url = self.get_success_url()
        if self.object.allow_delete():
            self.object.log_action(action='pretix.event.taxrule.deleted', user=request.user)
            self.object.delete()
            messages.success(self.request, _('The selected tax rule has been deleted.'))
        else:
            messages.error(self.request, _('The selected tax rule can not be deleted.'))
        return redirect(success_url)

    def get_success_url(self) -> str:
        return reverse('control:event.settings.tax', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
        })

    def get_context_data(self, *args, **kwargs) -> dict:
        context = super().get_context_data(*args, **kwargs)
        # Tell the template whether deletion is actually possible.
        context['possible'] = self.object.allow_delete()
        return context
class WidgetSettings(EventSettingsViewMixin, EventPermissionRequiredMixin, FormView):
    # Generates embeddable widget code for the event shop.
    template_name = 'pretixcontrol/event/widget.html'
    permission = 'can_change_event_settings'
    form_class = WidgetCodeForm

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['event'] = self.request.event
        return kwargs

    def form_valid(self, form):
        # Nothing is persisted; a valid form simply re-renders the page with
        # the generated embed code.
        context = self.get_context_data()
        context['form'] = form
        context['valid'] = True
        return self.render_to_response(context)

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['urlprefix'] = settings.SITE_URL
        # If the event has its own domain, build the URL prefix from it and
        # carry over any non-standard port of the main site URL.
        domain = get_event_domain(self.request.event, fallback=True)
        if domain:
            parts = urlsplit(settings.SITE_URL)
            port = parts.port
            if port and port not in (80, 443):
                domain = '%s:%d' % (domain, port)
            ctx['urlprefix'] = '%s://%s' % (parts.scheme, domain)
        return ctx
class QuickSetupView(FormView):
    # One-page wizard that configures an empty event: products, quotas,
    # payment plugins and ticket output.
    # NOTE(review): `permission` is set although no permission mixin appears
    # in the bases -- confirm enforcement happens elsewhere (e.g. middleware).
    template_name = 'pretixcontrol/event/quick_setup.html'
    permission = 'can_change_event_settings'
    form_class = QuickSetupForm

    def dispatch(self, request, *args, **kwargs):
        # The wizard would duplicate data on a non-empty event, so bail out.
        if request.event.items.exists() or request.event.quotas.exists():
            messages.info(request, _('Your event is not empty, you need to set it up manually.'))
            return redirect(reverse('control:event.index', kwargs={
                'organizer': request.event.organizer.slug,
                'event': request.event.slug
            }))
        return super().dispatch(request, *args, **kwargs)

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['event'] = self.request.event
        return kwargs

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data()
        ctx['formset'] = self.formset
        return ctx

    def get_initial(self):
        return {
            'waiting_list_enabled': True,
            'ticket_download': True,
            'contact_mail': self.request.event.settings.contact_mail,
            'imprint_url': self.request.event.settings.imprint_url,
        }

    def post(self, request, *args, **kwargs):
        form = self.get_form()
        # Both the main form and the product formset must validate.
        if form.is_valid() and self.formset.is_valid():
            return self.form_valid(form)
        else:
            messages.error(self.request, _('We could not save your changes. See below for details.'))
            return self.form_invalid(form)

    @transaction.atomic
    def form_valid(self, form):
        plugins_active = self.request.event.get_plugins()
        if form.cleaned_data['ticket_download']:
            # Enable PDF tickets (and passbook, if the plugin is installed).
            if 'pretix.plugins.ticketoutputpdf' not in plugins_active:
                self.request.event.log_action('pretix.event.plugins.enabled', user=self.request.user,
                                              data={'plugin': 'pretix.plugins.ticketoutputpdf'})
                plugins_active.append('pretix.plugins.ticketoutputpdf')
            self.request.event.settings.ticket_download = True
            self.request.event.settings.ticketoutput_pdf__enabled = True
            try:
                import pretix_passbook  # noqa
            except ImportError:
                pass
            else:
                if 'pretix_passbook' not in plugins_active:
                    self.request.event.log_action('pretix.event.plugins.enabled', user=self.request.user,
                                                  data={'plugin': 'pretix_passbook'})
                    plugins_active.append('pretix_passbook')
                self.request.event.settings.ticketoutput_passbook__enabled = True
        if form.cleaned_data['payment_banktransfer__enabled']:
            if 'pretix.plugins.banktransfer' not in plugins_active:
                self.request.event.log_action('pretix.event.plugins.enabled', user=self.request.user,
                                              data={'plugin': 'pretix.plugins.banktransfer'})
                plugins_active.append('pretix.plugins.banktransfer')
            self.request.event.settings.payment_banktransfer__enabled = True
            # Copy all bank-detail fields from the form into event settings.
            for f in ('bank_details', 'bank_details_type', 'bank_details_sepa_name', 'bank_details_sepa_iban',
                      'bank_details_sepa_bic', 'bank_details_sepa_bank'):
                self.request.event.settings.set(
                    'payment_banktransfer_%s' % f,
                    form.cleaned_data['payment_banktransfer_%s' % f]
                )
        if form.cleaned_data.get('payment_stripe__enabled', None):
            if 'pretix.plugins.stripe' not in plugins_active:
                self.request.event.log_action('pretix.event.plugins.enabled', user=self.request.user,
                                              data={'plugin': 'pretix.plugins.stripe'})
                plugins_active.append('pretix.plugins.stripe')
        self.request.event.settings.show_quota_left = form.cleaned_data['show_quota_left']
        self.request.event.settings.waiting_list_enabled = form.cleaned_data['waiting_list_enabled']
        self.request.event.settings.attendee_names_required = form.cleaned_data['attendee_names_required']
        self.request.event.settings.contact_mail = form.cleaned_data['contact_mail']
        self.request.event.settings.imprint_url = form.cleaned_data['imprint_url']
        self.request.event.log_action('pretix.event.settings', user=self.request.user, data={
            k: self.request.event.settings.get(k) for k in form.changed_data
        })
        items = []
        category = None
        tax_rule = self.request.event.tax_rules.first()
        if any(f not in self.formset.deleted_forms for f in self.formset):
            # At least one product row was submitted: group them in a category.
            category = self.request.event.categories.create(
                name=LazyI18nString.from_gettext(gettext('Tickets'))
            )
            category.log_action('pretix.event.category.added', data={'name': gettext('Tickets')},
                                user=self.request.user)
        subevent = self.request.event.subevents.first()
        for i, f in enumerate(self.formset):
            if f in self.formset.deleted_forms or not f.has_changed():
                continue
            item = self.request.event.items.create(
                name=f.cleaned_data['name'],
                category=category,
                active=True,
                default_price=f.cleaned_data['default_price'] or 0,
                tax_rule=tax_rule,
                admission=True,
                position=i,
                sales_channels=list(get_all_sales_channels().keys())
            )
            item.log_action('pretix.event.item.added', user=self.request.user, data=dict(f.cleaned_data))
            if f.cleaned_data['quota'] or not form.cleaned_data['total_quota']:
                # Per-product quota; skipped when only a total quota is wanted.
                quota = self.request.event.quotas.create(
                    name=str(f.cleaned_data['name']),
                    subevent=subevent,
                    size=f.cleaned_data['quota'],
                )
                quota.log_action('pretix.event.quota.added', user=self.request.user, data=dict(f.cleaned_data))
                quota.items.add(item)
            items.append(item)
        if form.cleaned_data['total_quota']:
            # Shared quota spanning all created products.
            quota = self.request.event.quotas.create(
                name=gettext('Tickets'),
                size=form.cleaned_data['total_quota'],
                subevent=subevent,
            )
            quota.log_action('pretix.event.quota.added', user=self.request.user, data={
                'name': gettext('Tickets'),
                'size': quota.size
            })
            quota.items.add(*items)
        self.request.event.set_active_plugins(plugins_active, allow_restricted=True)
        self.request.event.save()
        messages.success(self.request, _('Your changes have been saved. You can now go on with looking at the details '
                                         'or take your event live to start selling!'))
        if form.cleaned_data.get('payment_stripe__enabled', False):
            # Hand off to the Stripe plugin's OAuth connect flow.
            self.request.session['payment_stripe_oauth_enable'] = True
            return redirect(StripeSettingsHolder(self.request.event).get_connect_url(self.request))
        return redirect(reverse('control:event.index', kwargs={
            'organizer': self.request.event.organizer.slug,
            'event': self.request.event.slug,
        }))

    @cached_property
    def formset(self):
        # Prefill two example products on GET; bind to POST data otherwise.
        return QuickSetupProductFormSet(
            data=self.request.POST if self.request.method == "POST" else None,
            event=self.request.event,
            initial=[
                {
                    'name': LazyI18nString.from_gettext(gettext('Regular ticket')),
                    'default_price': Decimal('35.00'),
                    'quota': 100,
                },
                {
                    'name': LazyI18nString.from_gettext(gettext('Reduced ticket')),
                    'default_price': Decimal('29.00'),
                    'quota': 50,
                },
            ] if self.request.method != "POST" else []
        )
|
// Initialize datetime pickers: disallow dates before today, localize to Spanish.
var today = new Date();
// Build today's date as yyyy-mm-dd. Fixed: the day of month is now
// zero-padded like the month; previously e.g. '2024-03-5' was produced,
// which does not match the picker's 'yyyy-mm-dd' format.
var actual = String(
  today.getFullYear() + '-' +
  ('0' + (today.getMonth() + 1)).slice(-2) + '-' +
  ('0' + today.getDate()).slice(-2)
);
$(".form_datetime").datetimepicker({
  format: 'yyyy-mm-dd hh:ii',
  autoclose: true,
  startDate: actual,
  language: 'es'
});
|
import argparse
import asyncio
import functools
import os
import signal
import sys
import yaml
import logging
from monitoring.metrics import Metrics
# Root logging: verbose console output for all modules.
logging.basicConfig(level=logging.DEBUG, format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')
# logger for this file
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Additionally persist ERROR-and-above records of this module to a log file.
handler = logging.FileHandler('/tmp/robogen.log')
handler.setLevel(logging.ERROR)
formatter = logging.Formatter('%(levelname)-8s-[%(filename)s:%(lineno)d]-%(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
# Flag toggled by the SIGHUP handler to request a configuration reload.
is_sighup_received = False
# Singleton Metrics instance owned by app(); None while not running.
metric_monitor = None
def _graceful_shutdown():
    """Release the global Metrics instance, if any.

    Fixed: the previous implementation used ``del metric_monitor``, which
    removes the global name entirely — any later access (for example a
    second shutdown call during interpreter teardown) raised NameError.
    Rebinding to None drops the last reference just the same while keeping
    the sentinel usable.
    """
    global metric_monitor
    if metric_monitor is not None:
        metric_monitor = None
def parse_arguments():
    """Build and evaluate the command-line interface of the script."""
    cli = argparse.ArgumentParser(description='Robotic Arm Motion Generator')
    cli.add_argument(
        '--config', '-c',
        required=True,
        help='YAML Configuration File for RobotMotionGen with path',
    )
    return cli.parse_args()
def sighup_handler(name):
    """SIGHUP handler: flag that the metric configuration must be reloaded."""
    logger.info('Updating the Metric Configuration')
    global is_sighup_received
    is_sighup_received = True
def read_config(yaml_config_file, root_key):
    """Parse the given YAML configuration file and return one top-level section.

    :param yaml_config_file: path to the YAML file
    :param root_key: top-level key whose sub-tree is returned
    :raises FileNotFoundError: if the file does not exist (also logged);
        the path is now included in the exception for useful error output
    :raises KeyError: if ``root_key`` is missing from the document
    """
    if not os.path.exists(yaml_config_file):
        logger.error('YAML Configuration File not Found.')
        raise FileNotFoundError(yaml_config_file)
    with open(yaml_config_file, 'r') as config_file:
        # safe_load avoids arbitrary object construction from untrusted
        # files and behaves identically for plain configuration data.
        yaml_as_dict = yaml.safe_load(config_file)
    return yaml_as_dict[root_key]
async def app(eventloop, config):
    """Main application for Robot Generator"""
    global metric_monitor
    global is_sighup_received
    while True:
        # Read configuration
        try:
            metric_monitor_config = read_config(config, "monitoring")
        except Exception as e:
            # An unreadable/invalid configuration ends the application loop.
            logger.error('Error while reading configuration:')
            logger.error(e)
            break
        metric_monitor = Metrics(config=metric_monitor_config)
        # continuously monitor signal handle and update robot motion
        # (prints one awaited measurement per iteration until SIGHUP arrives)
        while not is_sighup_received:
            print(f'{await metric_monitor.measure()}')
        # If SIGHUP Occurs, Delete the instances
        _graceful_shutdown()
        # reset sighup handler flag
        is_sighup_received = False
def app_main():
    """Entry point: parse args, install the SIGHUP handler and run app().

    Exits with status -1 when the configuration file is not a readable
    regular file.
    """
    args = parse_arguments()
    if not os.path.isfile(args.config):
        # Use the module logger (not the root logger) so this error also
        # reaches the file handler, consistent with the rest of the file.
        logger.error("configuration file not readable. Check path to configuration file")
        sys.exit(-1)
    event_loop = asyncio.get_event_loop()
    # Re-read the configuration whenever the process receives SIGHUP.
    event_loop.add_signal_handler(signal.SIGHUP, functools.partial(sighup_handler, name='SIGHUP'))
    try:
        event_loop.run_until_complete(app(event_loop, args.config))
    except KeyboardInterrupt:
        logger.error('CTRL+C Pressed')
        _graceful_shutdown()
|
import {
PRODUCT_ROUTES,
PRODUCT_ROUTES_SUCCESS,
PRODUCT_ROUTES_ERROR,
} from './constants'
// Thunk action creator: requests optimal shopping routes for the items on
// the user's wishlist, based on the user's location and routing preferences.
// NOTE(review): the fetched response `res` is currently discarded and a
// hard-coded `mockdata` payload is dispatched instead — presumably a
// development stub; confirm before shipping.
export const getProductBestRoutes = () => {
  return (dispatch, getState) => {
    let { wishlist, user } = getState();
    // Wishlist entries carry their product name in `text`.
    let items = (wishlist.data || []).map(item => item.text);
    let data = {
      items,
      distancelimit: user.prefs.distancelimit,
      // Server expects a 0..1 ratio; prefs store a percentage.
      timeSavingRatio: user.prefs.timeSavingRatio/100,
      lat: user.location.lat,
      long: user.location.long,
      // "lat": 43.657648,
      // "long": -79.381728,
    }
    // "lat": 43.657648,
    // "long": -79.381728,
    // "items": ["Apple"],
    // "distancelimit": 10,
    // "timeSavingRatio": 1
    // Signal "request in flight" to reducers before the network call.
    dispatch({
      type: PRODUCT_ROUTES
    });
    fetch('https://cumulus-207900.appspot.com/external/routes', {
      method: 'POST',
      headers: {
        'Accept': '*/*',
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(data)
    }).then(res => res.json())
      .then(res => {
        console.info('found results', res);
        // Hard-coded sample response (encoded-polyline routes + store items).
        let mockdata = {
          "data": [
            {
              "route": {
                "points": "s{miGheocNe@Nd@jDJ~@Jv@d@`EN`A@DpFcBfAYjD|WFl@@d@DhBz@xGB`@Cf@?XTbBnCpTfCnSPbAdAnIdBrNd@fDR~A?h@G\\Sn@qAvDOf@C\\bAvI`Fva@vHlo@D`ACrCG|FObQ[~^CdBUHw@TuBj@oGfBoHrB}DfAYHLjAGZCNZ~Bb@xE@VCVGFOL]JqCv@pCw@\\KNMFGBWKaBs@eG?MFY@MMkAlCu@~C{@bBe@y@gHMsA?m@Hs@@c@AUkCwSoBmOmEu]GeAEmB@qBNuB\\iEHmAAcAuG_j@}ByRmBsPoAaKCe@?oA_AiIgBwOsAkKeAiIgAiIyBmPm@eFOwAA_@qAgKW{BrDkAtBq@f@StBu@tGwB"
              },
              "distance": 10424,
              "duration": 2506,
              "total": 10.55,
              "items": [
                {
                  "product_id": 1000000000,
                  "product_type": "Apple",
                  "price": 10.55,
                  "store_id": 1000,
                  "city": "Toronto",
                  "state": "Ontario",
                  "lat": 43.655389,
                  "long": -79.435621,
                  "distance_in_km": 4.337028730345155,
                  "option": 1
                }
              ]
            },
            {
              "route": {
                "points": "s{miGheocN``@}LhSqGrKgD~@WpBm@JBZGlFgBbBi@\\K~@vBzG~OFPPf@Dn@Nx@Xx@fBxEzAxD~@jCtAlFP\\Jn@b@xCh@tDz@vH^zCb@pEz@xI`AvK|AhPXpEDjB?xBKlFMhGGpDBxBJtDhArWJtE@jE?jGDvBLrCRpBVrBfFdYtEfW|@~EzC|PrExVXlBPjBLhBFpCA~BK|BKrASlB[jB_AvDuAlEuAfEoP~g@aAlDiA`F_AhFg@`Dg@tDc@`EYhD_@lFY|GSjJAjFHfGLvDRxCl@|Gf@lEh@tDfA|FvAbG`BzFzB|GtBnG@PHf@vAnExA`EHp@?^Gf@Sp@}@pB[x@aBpDe@|@q@z@{@|@Y\\iBtCe@d@}@j@WLc@NsC|@eBl@eAn@y@r@}@hAeM`PuAdB{@dAw@l@q@^u@Vo@RUNmBnAkBpAaCvBaBpAuDnCs@h@OTs@fAc@|@W^KO_CkCGGYk@Sm@eXjI}^`Lwe@xN{CbAyIbC]{C{AqKqCoSaAaH^?FAJEh@e@FCVGWFGBQLWVKDg@@jGdd@p@jFNlAnF}AhBe@r@W~GwBxg@uO`^sK|KkDhDeAFTZt@PT~BjCJNV_@b@}@NUr@gAxAeAzDuCjBcBpBaBbCaBdAq@tAc@j@Wn@c@x@w@rKgNhEoFt@{@|@q@pAk@rC}@~Ai@n@]p@g@XY\\g@z@wAZc@`BgBh@}@fA_Cn@sAj@cAn@w@jBiBTyABsACi@Om@Wg@yAcByAeBWa@KGECu@sB}@iCeBmFoAcE_AeDkAcFeAsFm@yDi@oEm@uGWwEMuDG_E?eFPkJb@sJX_E`@mEj@cFVmB^_Cz@}ExAwG~AqFbU_s@p@uCb@mCRoBLmBFqCAkBGoCOmBu@cFIk@Km@{Fa[gEmU]qBsGa^y@aFg@_DMuASaDK{E?iHOqIM}DaAeSG}EDeFRiN?iBCqAGaBQaCe@}EyAwO{AmPq@gHC{@[}DMqA_@iESeC[_Cg@wCYaACK_AyDa@kDWuC}EyRSSS}@]yAWaA[q@a@i@MKi@Ye@Ma@K]JcCv@eH|BqAh@UNqBl@_AVmWfIeSnGkT~G"
              },
              "distance": 29550,
              "duration": 3060,
              "total": 8.55,
              "items": [
                {
                  "product_id": 1000000000,
                  "product_type": "Apple",
                  "price": 8.55,
                  "store_id": 1001,
                  "city": "Toronto",
                  "state": "Ontario",
                  "lat": 43.668054,
                  "long": -79.484581,
                  "distance_in_km": 8.3427593344916,
                  "option": 2
                }
              ]
            }
          ]
        }
        // NOTE(review): `mockdata` is always truthy, so the `|| {data:[]}`
        // fallback below is dead code and `res` is never used.
        dispatch({
          type: PRODUCT_ROUTES_SUCCESS,
          payload: mockdata || {data:[]}
        });
      })
      .catch(function (error) {
        dispatch({
          type: PRODUCT_ROUTES_ERROR,
          payload: { error }
        });
      });
  };
};
|
# coding: utf-8
# TODO - split the asserts between algebraic and weak formulations ones
# - add assert for grad in vector case
# TODO: - __call__ examples are not working anymore
import pytest
from sympy import Symbol
from sympy.core.containers import Tuple
from sympy import symbols
from sympy import IndexedBase
from sympy import Matrix
from sympy import Function
from sympy import pi, cos, sin
from sympy import srepr
from sympy.physics.quantum import TensorProduct
from sympde.core import Constant
from sympde.calculus import grad, dot, inner, cross, rot, curl, div
from sympde.calculus import laplace, hessian
from sympde.topology import (dx, dy, dz)
from sympde.topology import FunctionSpace, VectorFunctionSpace
from sympde.topology import Field, VectorField
from sympde.topology import ProductSpace
from sympde.topology import TestFunction
from sympde.topology import VectorTestFunction
from sympde.topology import Unknown
from sympde.topology import Domain, Boundary, NormalVector, TangentVector
from sympde.topology import Trace, trace_0, trace_1
from sympde.expr import BilinearForm, LinearForm, Integral
from sympde.expr import atomize
from sympde.expr import evaluate
from sympde.expr import tensorize
from sympde.expr import Mass, Stiffness, Advection, AdvectionT
from sympde.expr import Projection
from sympde.expr import Norm
from sympde.expr import FormCall
from sympde.expr.errors import UnconsistentError
from sympde.expr.errors import UnconsistentLhsError
from sympde.expr.errors import UnconsistentRhsError
from sympde.expr.errors import UnconsistentBCError
# Spatial dimension used by every test in this module.
DIM = 3
# Shared 3D computational domain on which all function spaces are built.
domain = Domain('Omega', dim=DIM)
#==============================================================================
def test_tensorize_3d():
    """Print the tensorized form of several 3D bilinear forms."""
    V = FunctionSpace('V', domain)
    U = FunctionSpace('U', domain)
    W1 = VectorFunctionSpace('W1', domain)
    T1 = VectorFunctionSpace('T1', domain)

    v = TestFunction(V, name='v')
    u = TestFunction(U, name='u')
    w1 = VectorTestFunction(W1, name='w1')
    t1 = VectorTestFunction(T1, name='t1')

    x, y, z = domain.coordinates
    alpha = Constant('alpha')

    # (arguments, expression) pairs, tensorized in order below.
    cases = [
        ((v, u), dot(grad(v), grad(u))),
        ((v, u), x*dx(v)*dx(u) + y*dy(v)*dy(u)),
        ((v, u), sin(x)*dx(v)*dx(u)),
        ((w1, t1), dot(curl(w1), curl(t1)) + div(w1)*div(t1)),
    ]
    for arguments, expr in cases:
        a = BilinearForm(arguments, expr, name='a')
        print(a)
        print(tensorize(a))
        print('')
# ...
#==============================================================================
# CLEAN UP SYMPY NAMESPACE
#==============================================================================
def teardown_module():
    """Clear sympy's global cache once all tests in this module finish."""
    from sympy import cache as sympy_cache
    sympy_cache.clear_cache()
def teardown_function():
    """Clear sympy's global cache after each individual test."""
    from sympy import cache as sympy_cache
    sympy_cache.clear_cache()
|
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import PropTypes from 'prop-types';
import { withStyles } from 'material-ui/styles';
import AppBar from 'material-ui/AppBar';
import Toolbar from 'material-ui/Toolbar';
import Button from 'material-ui/Button';
import List from 'material-ui/List';
import Typography from 'material-ui/Typography';
import TextField from 'material-ui/TextField';
import Paper from 'material-ui/Paper';
import SimpleSelect from '../common/SimpleSelect'
import AddIcon from 'material-ui-icons/Add';
import PerformanceForm from '../form/PerformanceForm'
import Grid from 'material-ui/Grid';
import ButtonBar from '../common/ButtonBar'
// Theme-aware style factory for PerformanceInfo (material-ui withStyles).
const styles = (theme) => {
  return {
    container: {
      display: 'flex',
      flexWrap: 'wrap',
      width: '100%',
    },
    flex: {
      flex: 1,
    },
    list: {
      paddingLeft: theme.spacing.unit * 2,
      minWidth: 300,
      maxWidth: 500,
    },
    textField: {
      margin: theme.spacing.unit,
      paddingBottom: 2,
      flexWrap: 'wrap',
      width: '100%',
    },
  };
};
// Displays and edits a list of monthly performance records.
// State: `newRecord`/`editRecord` toggle the two form views, `index` selects
// the record shown, `record` holds the working copy being edited or created.
class PerformanceInfo extends Component {
  constructor(props) {
    super(props)
    this.state = {
      newRecord : false,
      editRecord: false,
      index : 0,
      record: this.props.performance[0]
    }
  }
  // Merge a single changed form field (keyed by the input's id) into the
  // working record copy.
  handleChange = target => {
    var performance = {...this.state.record, [target.id]:target.value}
    this.setState({record : performance})
  };
  // Select the record whose date matches the chosen dropdown entry.
  // NOTE(review): if no date matches, index stays -1 and performance[-1] is
  // undefined — confirm the select can only emit existing dates.
  recordSelect(selected){
    const {performance} = this.props
    var index = -1
    for (var i=0; i < performance.length; i++) {
      if (performance[i].date === selected.value) {
        index = i;
      }
    }
    this.setState({
      index,
      record: performance[index]
    });
  }
  // Switch into the edit-existing-record view.
  editPerformance = () => {this.setState({editRecord: true})}
  // Prepare a blank record for the month after the most recent one, rolling
  // the year over past December and carrying the previous end balance
  // forward as the new begin balance. Dates are "MM/YY"-style strings.
  newPerformance(){
    // TODO: Add day here
    const lastIndex = this.props.performance.length - 1;
    const lastPerform = this.props.performance[lastIndex];
    var regex = /\s*\/\s*/;
    var values = lastPerform.date.split(regex);
    var year = values[1];
    var month = values[0];
    month = parseInt(month) + 1;
    if (month < 10) {
      month = "0"+ month.toString();
    } else if (month > 12) {
      month = "01";
      year = (parseInt(year) + 1).toString();
    }else {
      month = month.toString();
    }
    var nextDate = month + "/" + year
    var nextPerform = {...lastPerform, date: nextDate}
    nextPerform.beginBal = nextPerform.endBal;
    nextPerform.netReturn = ' ';
    nextPerform.endBal = ' ';
    this.setState({
      record: nextPerform,
      newRecord: true})
  }
  save(){
    // Update account details or add new performance record
    // NOTE(review): this mutates `this.props.performance` in place
    // (push / index assignment) before notifying the parent — mutating
    // props is a React anti-pattern; consider copying the array first.
    const {record, index, newRecord} = this.state;
    var {performance} = this.props;
    if (newRecord) {
      performance.push(record);
    }else {
      performance[index] = record;
    }
    this.props.handleChange(performance);
    this.cancel();
  }
  // Leave either form view without saving.
  cancel(){
    this.setState({
      newRecord: false,
      editRecord: false
    })
  }
  // TODO: Update to reset scroll index to most recent
  // NOTE(review): componentWillReceiveProps is deprecated in newer React
  // versions; migrate to getDerivedStateFromProps when upgrading.
  componentWillReceiveProps(nextProps){ this.setState({index: 0});}
  // Render the form's button bar; the save (right) button appears only when
  // every field of the working record is non-blank (' ' marks "unset").
  renderBtns(){
    const { index, record } = this.state;
    const { classes } = this.props;
    const checkFields = (obj) => Object.values(obj).every(x => x!== ' ');
    if (checkFields(record)) {
      return <ButtonBar
        leftOnClick={this.cancel.bind(this)}
        rightOnClick={this.save.bind(this)}
        rightDisable={false}/>;
    } else {
      return <ButtonBar leftOnClick={this.cancel.bind(this)}/>;
    }
  }
  // Three views: new-record form, edit-record form, or read-only history.
  render(){
    const { newRecord, editRecord, index, record } = this.state;
    const {classes, performance } = this.props;
    const select = this.recordSelect.bind(this);
    const edit = this.editPerformance.bind(this);
    const dates = performance.map((record) => {return record.date});
    const handleChange = this.handleChange.bind(this);
    if (newRecord) {
      return (
        <div>
          <PerformanceForm
            handleChange={handleChange}
            newRecord={true}
            pastPerformance={record}/>
          {this.renderBtns()}
        </div>
      )
    }
    else if (editRecord) {
      return(
        <div>
          <PerformanceForm
            handleChange={handleChange}
            newRecord={false}
            pastPerformance={performance[this.state.index]}/>
          {this.renderBtns()}
        </div>)
    }
    else {
      return(
        <div className={classes.root}>
          <Paper className={classes.container} elevation={6}>
            <AppBar className={classes.container} position="static" color="primary" >
              <Toolbar>
                <Typography variant="title" color="inherit" className={classes.flex}>
                  Performance History
                </Typography>
                <Button color="inherit"
                  onClick={edit}>
                  Edit
                </Button>
                <Button color="default" variant="fab"
                  onClick={this.newPerformance.bind(this)}>
                  <AddIcon size={"small"}/>
                </Button>
              </Toolbar>
            </AppBar>
            <List component="nav" className={classes.list}>
              <SimpleSelect label="Date"
                id="date"
                value={record.date}
                menu={dates}
                handleChange={select}/>
              <TextField label="Tax" value={performance[index].tax} id={'tax'} disabled className={classes.textField}/>
              <TextField label="Horizon" value={performance[index].horizon} id={'horizon'}disabled className={classes.textField}/>
              <TextField label="Bias" value={performance[index].bias} id={'bias'} disabled className={classes.textField}/>
              <TextField label="Begin Balance" value={"$ "+performance[index].beginBal} id={'beginBal'} startadornment={"$"} disabled className={classes.textField}/>
              <TextField label="End Balance" value={"$ "+performance[index].endBal} id={'endBal'} startadornment={"$"} disabled className={classes.textField}/>
              <TextField label="Net Return" value={performance[index].netReturn+" %"} id={'netReturn'} startadornment={"%"} disabled className={classes.textField}/>
            </List>
          </Paper>
        </div>)
    }
  }
}
// Runtime prop validation: `performance` is a list of monthly records whose
// fields are all strings (including the balances and return figures).
PerformanceInfo.propTypes = {
  classes: PropTypes.object.isRequired,
  handleChange: PropTypes.func.isRequired,
  performance: PropTypes.arrayOf(PropTypes.shape({
    date: PropTypes.string.isRequired,
    tax: PropTypes.string.isRequired,
    horizon: PropTypes.string.isRequired,
    bias: PropTypes.string.isRequired,
    beginBal: PropTypes.string.isRequired,
    endBal: PropTypes.string.isRequired,
    netReturn: PropTypes.string.isRequired
  })).isRequired
}
// Fallbacks for standalone rendering: a single placeholder record and a
// handleChange that just logs. ' ' marks an "unset" field (see renderBtns).
PerformanceInfo.defaultProps = {
  handleChange: (event) => {console.log(event)},
  performance : [{
    date: '01/14',
    tax: ' ',
    horizon: ' ',
    bias: ' ',
    beginBal: '0',
    endBal: '0',
    netReturn: '0'
  }]
}
export default withStyles(styles)(PerformanceInfo)
|
// @flow
import React, { useState, useCallback } from "react";
import { compose } from "redux";
import { connect, useDispatch } from "react-redux";
import { Trans, withTranslation } from "react-i18next";
import { createStructuredSelector } from "reselect";
import Track from "~/renderer/analytics/Track";
import { UserRefusedOnDevice } from "@ledgerhq/errors";
import { getAccountBridge } from "@ledgerhq/live-common/lib/bridge";
import useBridgeTransaction from "@ledgerhq/live-common/lib/bridge/useBridgeTransaction";
import type { StepId, StepProps, St } from "./types";
import type { Account, Operation } from "@ledgerhq/live-common/lib/types";
import type { TFunction } from "react-i18next";
import type { Device } from "@ledgerhq/live-common/lib/hw/actions/types";
import { addPendingOperation } from "@ledgerhq/live-common/lib/account";
import { updateAccountWithUpdater } from "~/renderer/actions/accounts";
import { getCurrentDevice } from "~/renderer/reducers/devices";
import { closeModal, openModal } from "~/renderer/actions/modals";
import Stepper from "~/renderer/components/Stepper";
import StepAmount, { StepAmountFooter } from "./steps/StepAmount";
import GenericStepConnectDevice from "~/renderer/modals/Send/steps/GenericStepConnectDevice";
import StepConfirmation, { StepConfirmationFooter } from "./steps/StepConfirmation";
import logger from "~/logger/logger";
type OwnProps = {|
stepId: StepId,
onClose: () => void,
onChangeStepId: StepId => void,
params: {
account: Account,
parentAccount: ?Account,
reward: number,
},
name: string,
|};
type StateProps = {|
t: TFunction,
device: ?Device,
accounts: Account[],
device: ?Device,
closeModal: string => void,
openModal: string => void,
|};
type Props = OwnProps & StateProps;
// Ordered wizard steps for the Polkadot bond flow:
// amount -> connect device -> confirmation.
const steps: Array<St> = [
  {
    id: "amount",
    label: <Trans i18nKey="polkadot.bond.steps.amount.title" />,
    component: StepAmount,
    noScroll: true,
    footer: StepAmountFooter,
  },
  {
    id: "connectDevice",
    label: <Trans i18nKey="polkadot.bond.steps.connectDevice.title" />,
    component: GenericStepConnectDevice,
    // Allow navigating back from the device step to the amount step.
    onBack: ({ transitionTo }: StepProps) => transitionTo("amount"),
  },
  {
    id: "confirmation",
    label: <Trans i18nKey="polkadot.bond.steps.confirmation.title" />,
    component: StepConfirmation,
    footer: StepConfirmationFooter,
  },
];
// Inject the currently connected hardware device from the store.
const mapStateToProps = createStructuredSelector({
  device: getCurrentDevice,
});
// Bound action creators injected as props.
const mapDispatchToProps = {
  closeModal,
  openModal,
};
// Modal body for the Polkadot bond flow: owns the transaction draft, the
// optimistic operation and error state, and feeds everything to <Stepper>.
const Body = ({
  t,
  stepId,
  device,
  closeModal,
  openModal,
  onChangeStepId,
  params,
  name,
}: Props) => {
  const [optimisticOperation, setOptimisticOperation] = useState(null);
  const [transactionError, setTransactionError] = useState(null);
  const [signed, setSigned] = useState(false);
  const dispatch = useDispatch();
  // Build the initial "bond" transaction draft for the selected account.
  const {
    transaction,
    setTransaction,
    account,
    parentAccount,
    status,
    bridgeError,
    bridgePending,
  } = useBridgeTransaction(() => {
    const { account, parentAccount } = params;
    const bridge = getAccountBridge(account, parentAccount);
    // Note: this local `t` (a transaction) shadows the outer `t` (TFunction).
    const t = bridge.createTransaction(account);
    const transaction = bridge.updateTransaction(t, {
      mode: "bond",
      recipient: account.freshAddress,
      rewardDestination: "Stash",
    });
    return { account, parentAccount, transaction };
  });
  const handleCloseModal = useCallback(() => {
    closeModal(name);
  }, [closeModal, name]);
  const handleStepChange = useCallback(e => onChangeStepId(e.id), [onChangeStepId]);
  // Retrying restarts the flow from the amount step.
  const handleRetry = useCallback(() => {
    onChangeStepId("amount");
  }, [onChangeStepId]);
  // A user refusing on the device is expected; anything else is critical.
  const handleTransactionError = useCallback((error: Error) => {
    if (!(error instanceof UserRefusedOnDevice)) {
      logger.critical(error);
    }
    setTransactionError(error);
  }, []);
  // Record the broadcasted operation optimistically on the account.
  const handleOperationBroadcasted = useCallback(
    (optimisticOperation: Operation) => {
      if (!account) return;
      dispatch(
        updateAccountWithUpdater(account.id, account =>
          addPendingOperation(account, optimisticOperation),
        ),
      );
      setOptimisticOperation(optimisticOperation);
      setTransactionError(null);
    },
    [account, dispatch],
  );
  const error = transactionError || bridgeError;
  const stepperProps = {
    title: t("polkadot.bond.title"),
    device,
    account,
    parentAccount,
    transaction,
    signed,
    stepId,
    steps,
    errorSteps: [],
    disabledSteps: [],
    // Hide the breadcrumb while an error is displayed.
    hideBreadcrumb: !!error,
    onRetry: handleRetry,
    onStepChange: handleStepChange,
    onClose: handleCloseModal,
    reward: params.reward,
    error,
    status,
    optimisticOperation,
    openModal,
    setSigned,
    onChangeTransaction: setTransaction,
    onOperationBroadcasted: handleOperationBroadcasted,
    onTransactionError: handleTransactionError,
    t,
    bridgePending,
  };
  return (
    <Stepper {...stepperProps}>
      <Track onUnmount event="CloseModalBond" />
    </Stepper>
  );
};
// Connect the modal body to the store and i18n before exporting.
const C: React$ComponentType<OwnProps> = compose(
  connect(mapStateToProps, mapDispatchToProps),
  withTranslation(),
)(Body);

export default C;
|
// Copyright (c) 2009-2017 SAP SE, All Rights Reserved
/**
* @fileOverview QUnit tests for sap/ushell/User.js
*/
(function () {
"use strict";
/* global deepEqual, module, ok, test, strictEqual, sinon, throws */
jQuery.sap.require("sap.ushell.User");
jQuery.sap.require("sap.ui.thirdparty.URI");
var URI = sap.ui.require("sap/ui/thirdparty/URI");
// Build "<protocol>://<host>" for the current document location.
function getDocumentLocationOrigin () {
    var oLocationUri = new URI(document.location);
    return [oLocationUri.protocol(), "://", oLocationUri.host()].join("");
}
// QUnit module setup/teardown for the sap.ushell.User tests.
module("sap.ushell.User", {
    setup: function () { },
    // This method is called after each test. Add every restoration code here.
    teardown: function () {
        // Undo the applyTheme stub if a test left it in place.
        if (sap.ui.core.Core.prototype && sap.ui.core.Core.prototype.applyTheme.restore) {
            sap.ui.core.Core.prototype.applyTheme.restore();
        }
    }
});
// Smoke test: the constructor must not throw on an empty adapter config.
test("constructor is robust enough to deal with an empty container adapter config", function () {
    var oContainerAdapterConfig;
    // Arrange
    oContainerAdapterConfig = {};
    // Act
    new sap.ushell.User(oContainerAdapterConfig);
    // Assert
    ok(true, "Success: Constructor can deal with empty container adapter config");
    // not sure if all methods run through
});
// setAccessibilityMode must throw when the permission check denies it.
test("setAccessibilityMode throws if it is not allowed to set the accessibility mode", function () {
    var oUser = new sap.ushell.User({});
    sinon.stub(oUser, "isSetAccessibilityPermitted").returns(false);
    throws(
        oUser.setAccessibilityMode.bind(oUser, "some accessibility mode"),
        /setAccessibilityMode not permitted/,
        "exception was thrown"
    );
});
// Besides throwing, the denied call must also log an error message.
test("setAccessibilityMode logs an error if it is not allowed to set the accessibility mode", function () {
    var oUser = new sap.ushell.User({});
    sinon.stub(oUser, "isSetAccessibilityPermitted").returns(false);
    sinon.spy(jQuery.sap.log, "error");
    try {
        oUser.setAccessibilityMode("some accessibility mode");
    } catch (e) {
        // do nothing
    }
    strictEqual(jQuery.sap.log.error.getCall(0).args[0], "setAccessibilityMode not permitted",
        "expected error message was logged");
    jQuery.sap.log.error.restore();
});
/* ****************************************************************
* User Image tests
* **************************************************************** */
// setImage/getImage round-trip.
test("User image can be retrieved after setting", function () {
    var sDummyUserURI = "http://dummyUsrURI",
        oContainerAdapterConfig,
        oUser,
        sRetrievedUserURI;
    // Arrange
    oContainerAdapterConfig = {};
    // Act
    oUser = new sap.ushell.User(oContainerAdapterConfig);
    oUser.setImage(sDummyUserURI);
    sRetrievedUserURI = oUser.getImage();
    // Assert
    ok(sRetrievedUserURI === sDummyUserURI,
        "User image was unsuccessfully set");
});
// Listeners registered via attachOnSetImage must fire once on setImage and
// receive the image URI in the event's mParameters.
test("Attached callbacks are being called upon setting user image", function () {
    var sDummyUserURI = "http://dummyUsrURI",
        sRetrievedUserURI,
        fnMockCallback = sinon.spy(function (param) {
            sRetrievedUserURI = param.mParameters;
        }),
        oContainerAdapterConfig,
        oUser;
    // Arrange
    oContainerAdapterConfig = {};
    // Act
    oUser = new sap.ushell.User(oContainerAdapterConfig);
    oUser.attachOnSetImage(fnMockCallback);
    oUser.setImage(sDummyUserURI);
    // Assert
    ok(fnMockCallback.calledOnce,
        "fnMockCallback is expected to be called one");
    ok(sRetrievedUserURI === sDummyUserURI,
        "Failed retrieving image URI from event object");
});
/* ****************************************************************
* Theming tests
* **************************************************************** */
// Table-driven tests for User.prototype._amendTheme: each fixture supplies a
// raw theme input plus system theme defaults, and the fully normalized theme
// object expected back (theme name, supplied root, resolved path/origin).
[
    {
        testDescription: "oThemeInput and oSystemTheme are undefined",
        oThemeInput: undefined,
        oSystemTheme: undefined,
        expected: {
            originalTheme: {
                theme: "",
                root: ""
            },
            theme: "",
            suppliedRoot: "",
            path: "",
            locationPath: "",
            locationOrigin: ""
        }
    }, {
        testDescription: "oThemeInput and oSystemTheme are empty objects",
        oThemeInput: {},
        oSystemTheme: {},
        expected: {
            originalTheme: {
                theme: "",
                root: ""
            },
            theme: "",
            suppliedRoot: "",
            path: "",
            locationPath: "",
            locationOrigin: ""
        }
    }, {
        testDescription: "oThemeInput is an empty object",
        oThemeInput: {},
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "",
                root: ""
            },
            theme: "",
            suppliedRoot: "",
            path: "",
            locationPath: "",
            locationOrigin: ""
        }
    }, {
        testDescription: "oThemeInput.theme is undefined",
        oThemeInput: {
            theme: undefined,
            root: undefined
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "",
                root: ""
            },
            theme: "",
            suppliedRoot: "",
            path: "",
            locationPath: "",
            locationOrigin: ""
        }
    }, {
        // sap_ themes default to the UI5 theme path of the system.
        testDescription: "oThemeInput is an sap_ theme",
        oThemeInput: {
            theme: "sap_hcb",
            root: ""
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "sap_hcb",
                root: ""
            },
            theme: "sap_hcb",
            suppliedRoot: "",
            path: "",
            locationPath: "/UI5/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        }
    }, {
        testDescription: "oThemeInput is an sap_ theme with path as theme root",
        oThemeInput: {
            theme: "sap_hcb",
            root: "/some/supplied/theme/path"
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "sap_hcb",
                root: "/some/supplied/theme/path"
            },
            theme: "sap_hcb",
            suppliedRoot: "/some/supplied/theme/path",
            path: "/some/supplied/theme/path",
            locationPath: "/some/supplied/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        }
    }, {
        // A full URL as root overrides the system origin as well.
        testDescription: "oThemeInput is an sap_ theme with full URL as theme root",
        oThemeInput: {
            theme: "sap_hcb",
            root: "https://someotherfrontendserver.sap.com:3270/some/supplied/theme/path"
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "sap_hcb",
                root: "https://someotherfrontendserver.sap.com:3270/some/supplied/theme/path"
            },
            theme: "sap_hcb",
            suppliedRoot: "https://someotherfrontendserver.sap.com:3270/some/supplied/theme/path",
            path: "/some/supplied/theme/path",
            locationPath: "/some/supplied/theme/path",
            locationOrigin: "https://someotherfrontendserver.sap.com:3270"
        }
    }, {
        testDescription: "oThemeInput is a custom theme",
        oThemeInput: {
            theme: "customTheme",
            root: "/supplied/custom/theme/path"
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "customTheme",
                root: "/supplied/custom/theme/path"
            },
            theme: "customTheme",
            suppliedRoot: "/supplied/custom/theme/path",
            path: "/supplied/custom/theme/path",
            locationPath: "/supplied/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        }
    }, {
        // Custom themes without a root fall back to the custom theme path.
        testDescription: "oThemeInput is a custom theme with no theme root",
        oThemeInput: {
            theme: "customTheme",
            root: ""
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "customTheme",
                root: ""
            },
            theme: "customTheme",
            suppliedRoot: "",
            path: "/custom/theme/path",
            locationPath: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        }
    }, {
        testDescription: "oThemeInput is a custom theme with path as theme root",
        oThemeInput: {
            theme: "customTheme",
            root: "/some/supplied/theme/path"
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "customTheme",
                root: "/some/supplied/theme/path"
            },
            theme: "customTheme",
            suppliedRoot: "/some/supplied/theme/path",
            path: "/some/supplied/theme/path",
            locationPath: "/some/supplied/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        }
    }, {
        testDescription: "oThemeInput is a custom theme with full URL as theme root",
        oThemeInput: {
            theme: "customTheme",
            root: "https://someotherfrontendserver.sap.com:3270/some/supplied/theme/path"
        },
        oSystemTheme: {
            locationPathUi5: "/UI5/theme/path",
            locationPathCustom: "/custom/theme/path",
            locationOrigin: "https://frontendserver.sap.com:4711"
        },
        expected: {
            originalTheme: {
                theme: "customTheme",
                root: "https://someotherfrontendserver.sap.com:3270/some/supplied/theme/path"
            },
            theme: "customTheme",
            suppliedRoot: "https://someotherfrontendserver.sap.com:3270/some/supplied/theme/path",
            path: "/some/supplied/theme/path",
            locationPath: "/some/supplied/theme/path",
            locationOrigin: "https://someotherfrontendserver.sap.com:3270"
        }
    }
].forEach(function (oFixture) {
    test("_AmendTheme returns the correct result when " + oFixture.testDescription, 1, function () {
        var oCompleteTheme;
        // Act
        oCompleteTheme = sap.ushell.User.prototype._amendTheme(oFixture.oThemeInput, oFixture.oSystemTheme);
        // Assert
        deepEqual(oCompleteTheme, oFixture.expected,
            "Theme is completed correctly");
    });
});
/* **************************************************************** */
// Table-driven tests for theme initialization in the User constructor: each
// fixture supplies a boot theme and theme root, and the values getTheme()
// must return for every supported theme format (plain name, original,
// name@URL, and NWBC format).
[
    {
        testDescription: "boot theme is undefined and sThemeRoot is undefined",
        oBootTheme: undefined,
        sThemeRoot: undefined,
        expected: {
            themeName: "",
            originalTheme: "",
            themePlusUrl: "",
            NWBC: ""
        }
    }, {
        testDescription: "boot theme is undefined and sThemeRoot is initialized",
        oBootTheme: undefined,
        sThemeRoot: "/theme/root",
        expected: {
            themeName: "",
            originalTheme: "",
            themePlusUrl: "",
            NWBC: ""
        }
    }, {
        testDescription: "boot theme is a sap_ theme with undefined root",
        oBootTheme: {
            theme: "sap_hcb",
            root: undefined
        },
        sThemeRoot: "/theme/root",
        expected: {
            themeName: "sap_hcb",
            originalTheme: "sap_hcb",
            themePlusUrl: "sap_hcb@" + getDocumentLocationOrigin() + (new URI(jQuery.sap.getModulePath(""))).absoluteTo(document.location).pathname(),
            // absolute path to the frontend server
            NWBC: "sap_hcb"
        }
    }, {
        testDescription: "boot theme is a sap_ theme with '' root",
        oBootTheme: {
            theme: "sap_hcb",
            root: ""
        },
        sThemeRoot: "/theme/root",
        expected: {
            themeName: "sap_hcb",
            originalTheme: "sap_hcb",
            themePlusUrl: "sap_hcb@" + getDocumentLocationOrigin() + (new URI(jQuery.sap.getModulePath(""))).absoluteTo(document.location).pathname(),
            // absolute path to the frontend server
            NWBC: "sap_hcb"
        }
    }, {
        testDescription: "boot theme is a sap_ theme with a root",
        oBootTheme: {
            theme: "sap_hcb",
            root: "/theme/specific/root"
        },
        sThemeRoot: "/theme/root",
        expected: {
            themeName: "sap_hcb",
            originalTheme: "sap_hcb",
            themePlusUrl: "sap_hcb@" + getDocumentLocationOrigin() + "/theme/specific/root",
            NWBC: "sap_hcb"
        }
    }, {
        testDescription: "boot theme is a sap_ theme with a URL as root",
        oBootTheme: {
            theme: "sap_hcb",
            root: "https://frontendserver.sap.com/theme/specific/root"
        },
        sThemeRoot: "/theme/root",
        expected: {
            themeName: "sap_hcb",
            originalTheme: "sap_hcb",
            themePlusUrl: "sap_hcb@https://frontendserver.sap.com/theme/specific/root",
            NWBC: "sap_hcb"
        }
    }, {
        // For custom themes the NWBC format carries the theme URL as well.
        testDescription: "boot theme is a custom theme with '' root",
        oBootTheme: {
            theme: "red_crystal",
            root: ""
        },
        sThemeRoot: "/system/theme/root",
        expected: {
            themeName: "red_crystal",
            originalTheme: "red_crystal",
            themePlusUrl: "red_crystal@" + getDocumentLocationOrigin() + "/system/theme/root",
            NWBC: "red_crystal@" + getDocumentLocationOrigin() + "/system/theme/root"
        }
    }, {
        testDescription: "boot theme is a custom theme with a root",
        oBootTheme: {
            theme: "red_crystal",
            root: "/theme/specific/root"
        },
        sThemeRoot: "/system/theme/root",
        expected: {
            themeName: "red_crystal",
            originalTheme: "red_crystal",
            themePlusUrl: "red_crystal@" + getDocumentLocationOrigin() + "/theme/specific/root",
            NWBC: "red_crystal@" + getDocumentLocationOrigin() + "/theme/specific/root"
        }
    }, {
        // "name@root" supplied as a single string must be split correctly.
        testDescription: "boot theme is a custom theme @ root",
        oBootTheme: {
            theme: "red_crystal@/theme/specific/root",
            root: ""
        },
        sThemeRoot: "/system/theme/root",
        expected: {
            themeName: "red_crystal",
            originalTheme: "red_crystal@/theme/specific/root",
            themePlusUrl: "red_crystal@" + getDocumentLocationOrigin() + "/theme/specific/root",
            NWBC: "red_crystal@" + getDocumentLocationOrigin() + "/theme/specific/root"
        }
    }, {
        testDescription: "boot theme is a sap_ theme with a URL as root",
        oBootTheme: {
            theme: "red_crystal",
            root: "https://frontendserver.sap.com/theme/specific/root"
        },
        sThemeRoot: "/system/theme/root",
        expected: {
            originalTheme: "red_crystal",
            themeName: "red_crystal",
            themePlusUrl: "red_crystal@https://frontendserver.sap.com/theme/specific/root",
            NWBC: "red_crystal@https://frontendserver.sap.com/theme/specific/root"
        }
    }, {
        testDescription: "boot theme is a sap_ theme @ URL",
        oBootTheme: {
            theme: "red_crystal@https://frontendserver.sap.com/theme/specific/root",
            root: ""
        },
        sThemeRoot: "/system/theme/root",
        expected: {
            originalTheme: "red_crystal@https://frontendserver.sap.com/theme/specific/root",
            themeName: "red_crystal",
            themePlusUrl: "red_crystal@https://frontendserver.sap.com/theme/specific/root",
            NWBC: "red_crystal@https://frontendserver.sap.com/theme/specific/root"
        }
    }
].forEach(function (oFixture) {
    test("The User object is correctly initialized rgd. theme when " + oFixture.testDescription, 4, function () {
        var oUser,
            oContainerAdapterConfig;
        // Arrange
        oContainerAdapterConfig = {
            bootTheme: oFixture.oBootTheme,
            themeRoot: oFixture.sThemeRoot
        };
        // Act
        oUser = new sap.ushell.User(oContainerAdapterConfig);
        // Assert
        strictEqual(oUser.getTheme(), oFixture.expected.themeName,
            "Theme name is set correctly (no parameter supplied to getTheme)");
        strictEqual(oUser.getTheme(sap.ushell.User.prototype.constants.themeFormat.ORIGINAL_THEME), oFixture.expected.originalTheme,
            "Original theme is set correctly (parameter supplied to getTheme)");
        strictEqual(oUser.getTheme(sap.ushell.User.prototype.constants.themeFormat.THEME_NAME_PLUS_URL), oFixture.expected.themePlusUrl,
            "Theme name and location URL is set correctly");
        strictEqual(oUser.getTheme(sap.ushell.User.prototype.constants.themeFormat.NWBC), oFixture.expected.NWBC,
            "Theme is set correctly");
    });
});
/* **************************************************************** */
// Table-driven tests for User#setTheme: each fixture supplies the boot theme
// plus the new theme string ("name" or "name@root"), and the argument array
// expected to be passed to sap.ui.core.Core#applyTheme
// (theme name, and for custom/rooted themes the ".../UI5/" resource path).
[
    {
        testDescription: "a sap theme is set where the boot theme was a custom boot theme",
        oBootTheme: {
            theme: "redcrystal",
            root: "/sap/public/bc/themes/~client120"
        },
        sThemeRoot: "/sap/public/bc/themes/~client120",
        sNewTheme: "sap_bluecrystal",
        expected: ["sap_bluecrystal"]
    }, {
        testDescription: "a custom theme is set where the boot theme was an sap theme",
        oBootTheme: {
            theme: "sap_bluecrystal",
            root: ""
        },
        sThemeRoot: "/sap/public/bc/themes/~client120",
        sNewTheme: "redcrystal@/sap/public/bc/themes/~client120",
        expected: ["redcrystal", "/sap/public/bc/themes/~client120/UI5/"]
    }, {
        testDescription: "a custom theme is set where the boot theme was a custom theme",
        oBootTheme: {
            theme: "green_bluecrystal",
            root: "/sap/public/bc/themes/~client120"
        },
        sThemeRoot: "/sap/public/bc/themes/~client120",
        sNewTheme: "redcrystal",
        expected: ["redcrystal", "/sap/public/bc/themes/~client120/UI5/"]
    }, {
        testDescription: "a custom theme with a path is set",
        oBootTheme: {
            theme: "sap_bluecrystal",
            root: ""
        },
        sThemeRoot: "/sap/public/bc/themes/~client120",
        sNewTheme: "redcrystal@/custom/theme/path",
        expected: ["redcrystal", "/custom/theme/path/UI5/"]
    }, {
        testDescription: "an sap_ theme with a theme URL is set",
        oBootTheme: {
            theme: "redcrystal",
            root: "/sap/public/bc/themes/~client120"
        },
        sThemeRoot: "/sap/public/bc/themes/~client120",
        sNewTheme: "sap_hcb@https://frontendserver.customer.com/his/theme/path",
        expected: ["sap_hcb", "https://frontendserver.customer.com/his/theme/path/UI5/"]
    }, {
        testDescription: "a custom theme with a theme URL is set",
        oBootTheme: {
            theme: "redcrystal",
            root: "/sap/public/bc/themes/~client120"
        },
        sThemeRoot: "/sap/public/bc/themes/~client120",
        sNewTheme: "greencrystal@https://frontendserver.customer.com/his/theme/path",
        expected: ["greencrystal", "https://frontendserver.customer.com/his/theme/path/UI5/"]
    }
].forEach(function (oFixture) {
    test("setTheme applies the new theme correctly when " + oFixture.testDescription, 3, function () {
        var oContainerAdapterConfig,
            oUser,
            fnApplyTheme;
        // Arrange
        oContainerAdapterConfig = {
            bootTheme: oFixture.oBootTheme,
            themeRoot: oFixture.sThemeRoot
        };
        // Arrange: stub applyTheme so no real theme switch happens
        // (restored by the module teardown).
        fnApplyTheme = sinon.stub(sap.ui.core.Core.prototype, "applyTheme");
        oUser = new sap.ushell.User(oContainerAdapterConfig);
        // Act
        oUser.setTheme(oFixture.sNewTheme);
        // Assert
        ok(fnApplyTheme.calledOnce,
            "Success: applyTheme was called once");
        deepEqual(fnApplyTheme.args[0], oFixture.expected,
            "correct arguments");
        strictEqual(oUser.getTheme(), oFixture.expected[0], // theme name
            "getTheme returns the set theme");
    });
});
/* **************************************************************** */
/* **************************************************************** */
// Verifies the get/set/reset round-trip of the user's changed-properties
// list, and that getChangedProperties returns a *copy*, not the live array
// (a previously returned array must not be mutated by later set/reset calls).
test("sap.ushell.User: changed property handling", function () {
// 1 - get
// 2 - set get
// 3 - reset get
var oContainerAdapterConfig,
oUser,
aReturnedChangedProperties,
aExpectedChangedProperties;
// Arrange
aExpectedChangedProperties = [{
"propertyName": "Property1",
"name": "Property1",
"newValue": "newValue",
"oldValue": "oldValue"
}];
oContainerAdapterConfig = {};
oUser = new sap.ushell.User(oContainerAdapterConfig);
// Act - Step 1
aReturnedChangedProperties = oUser.getChangedProperties();
// Assert
deepEqual(aReturnedChangedProperties, [],
"Success: Step 1 - empty changed properties");
// Act - Step 2
oUser.setChangedProperties(
{
"propertyName": aExpectedChangedProperties[0].propertyName,
"name": aExpectedChangedProperties[0].name
},
aExpectedChangedProperties[0].oldValue,
aExpectedChangedProperties[0].newValue
);
// Assert
deepEqual(aReturnedChangedProperties, [],
"Success: Step 2 - set did not affect the value returned by the first get");
// to check that the live object is not returned but only a copied object
aReturnedChangedProperties = oUser.getChangedProperties();
deepEqual(aReturnedChangedProperties, aExpectedChangedProperties,
"Success: Step 2 - changed properties were set correctly");
// Act - Step 3
oUser.resetChangedProperties();
// Assert
deepEqual(aReturnedChangedProperties, aExpectedChangedProperties,
"Success: Step 3 - reset did not affect the value returned by the second get");
aReturnedChangedProperties = oUser.getChangedProperties();
deepEqual(aReturnedChangedProperties, [],
"Success: Step 3 - changed properties were reset correctly");
});
// NOTE(review): the test name says the configuration "does not specify a
// content density", yet the userProfile below *does* contain CONTENT_DENSITY
// — the intent appears to be that a valid CONTENT_DENSITY entry must not
// trigger a warning; confirm against the User implementation.
test("sap.ushell.User: does not warn if constructed with an adapter configuration that does not specify a content density", function () {
// Arrange
sinon.spy(jQuery.sap.log, "warning");
// Act
new sap.ushell.User({ // the container adapter configuration
userProfile: [{
id: "CONTENT_DENSITY",
value: "compact"
}],
bootTheme: {
theme: "redcrystal",
root: "/sap/public/bc/themes/~client120"
},
themeRoot: "/sap/public/bc/themes/~client120"
});
// Assert
strictEqual(jQuery.sap.log.warning.callCount, 0,
"jQuery.sap.log.warning was not called");
jQuery.sap.log.warning.restore();
});
// getContentDensity: when the user profile has no CONTENT_DENSITY entry,
// the getter is expected to return undefined (no default is applied).
[
{
testDescription: "user profile doesn't contain CONTENT_DENSITY",
containerAdapterConfig: {
userProfile: [{
id: "SOMETHING_ELSE",
value: "something"
}]
},
expectedValue: undefined
}
].forEach(function (oFixture) {
test("sap.ushell.User: getContentDensity returns the correct value when " + oFixture.testDescription, function () {
// Act
var oUser = new sap.ushell.User(oFixture.containerAdapterConfig);
// Assert
strictEqual(oUser.getContentDensity(), oFixture.expectedValue,
"expected value was returned");
});
});
// getThemeRoot: the theme root is looked up in the adapter configuration's
// ranges.theme metadata; any missing level (themeRoot, theme entry, "theme",
// or "ranges") must degrade gracefully to "" for backward compatibility.
[
{
testDescription: "containerAdapConfig has a ranges.theme which contains the given theme name",
input: {
containerAdapterConfig: {
"ranges": {
"theme": {
"custom_cool_theme": {
"displayName": "Custom Theme",
"themeRoot": "myThemeRoot"
}
}
}
},
givenTheme: "custom_cool_theme"
},
expectedThemeRoot: "myThemeRoot"
}, {
testDescription: "Meta Data contains of ranges and theme root undefined",
input: {
containerAdapterConfig: { "ranges": { "theme": { "custom_cool_theme": { "displayName": "Custom Theme" } } } },
givenTheme: "custom_cool_theme"
},
expectedThemeRoot: ""
}, {
testDescription: "Meta Data contains of ranges and no themes",
input: {
containerAdapterConfig: { "ranges": {} },
givenTheme: "custom_cool_theme"
},
expectedThemeRoot: ""
}, {
testDescription: "Meta Data does not contain ranges, to stay compatible ",
input: {
containerAdapterConfig: {},
givenTheme: "custom_cool_theme"
},
expectedThemeRoot: ""
}
].forEach(function (oFixture) {
test("sap.ushell.User: getThemeRoot returns correct value when " + oFixture.testDescription, function () {
// Arrange
var oUser = new sap.ushell.User(oFixture.input.containerAdapterConfig);
// Act && Assert
strictEqual(oUser.getThemeRoot(oFixture.input.givenTheme), oFixture.expectedThemeRoot,
"expected theme root was returned");
});
});
// setContentDensity: when isSetContentDensityPermitted() is false, any value
// (valid or not) must (a) throw and (b) log a "not permitted" error.
[
{
testDescription: "cozy contentDensity is set and isSetContentDensityPermitted is false",
contentDensity: "cozy"
}, {
testDescription: "'any value' contentDensity is set and isSetContentDensityPermitted is false",
contentDensity: "any value"
}
].forEach(function (oFixture) {
test("sap.ushell.User: setContentDensity throws when " + oFixture.testDescription, function () {
var oUser = new sap.ushell.User({}); // configuration doesn't matter
sinon.stub(oUser, "isSetContentDensityPermitted").returns(false);
throws(
oUser.setContentDensity.bind(oUser, oFixture.contentDensity),
/setContentDensity not permitted/,
"exception was thrown"
);
});
test("sap.ushell.User: setContentDensity logs an error when " + oFixture.testDescription, function () {
var oUser = new sap.ushell.User({}); // configuration doesn't matter
sinon.stub(oUser, "isSetContentDensityPermitted").returns(false);
sinon.spy(jQuery.sap.log, "error");
try {
oUser.setContentDensity(oFixture.contentDensity);
} catch (e) {
// do nothing - the throw itself is covered by the previous test
}
strictEqual(jQuery.sap.log.error.getCall(0).args[0], "setContentDensity not permitted",
"expected error message was logged");
jQuery.sap.log.error.restore();
});
});
// test that setChangedProperties is called if it is allowed to change contentDensity
// Language tests
// getLanguage / getLanguageBcp47 simply expose the values passed in via the
// container adapter configuration.
test("sap.ushell.User: getLanguage and getLanguageBcp47 return the expected values", function () {
// Arrange
var sLanguage = "en_us";
var sBcpLanguage = "en";
var oContainerAdapterConfig = {
language: sLanguage,
languageBcp47: sBcpLanguage
};
// Act
var oUser = new sap.ushell.User(oContainerAdapterConfig);
// Assert
strictEqual(oUser.getLanguage(), sLanguage,
"Correct (technical) language returned");
strictEqual(oUser.getLanguageBcp47(), sBcpLanguage,
"Correct (bcp47) language returned");
});
// getLanguageText resolves the display text from the UI5 core locale.
test("sap.ushell.User: getLanguageText returns the correct language text", function () {
// Arrange
var sLanguage = "en-US";
var oContainerAdapterConfig = {};
var oLocale = new sap.ui.core.Locale(sLanguage);
var oGetLocaleStub = sinon.stub(sap.ui.getCore().getConfiguration(), "getLocale").returns(oLocale);
var oGetLanguageTagStub = sinon.stub(sap.ui.getCore().getConfiguration(), "getLanguageTag").returns(sLanguage);
// Act
var oUser = new sap.ushell.User(oContainerAdapterConfig);
// Assert
strictEqual(oUser.getLanguageText(), "American English",
"Local returned the correct language text");
oGetLocaleStub.restore();
oGetLanguageTagStub.restore();
});
// Fallback path: when no display text exists for the language tag, the
// upper-cased technical language name is returned instead.
test("sap.ushell.User: getLanguageText returns the technical name if no language text found", function () {
// Arrange
var sLanguage = "en-us";
var sLanguageShown = sLanguage.toUpperCase();
var sBcpLanguage = "en";
var sWrongLanguage = "enus"; // deliberately invalid tag to force the fallback
var oContainerAdapterConfig = {
language: sLanguage,
languageBcp47: sBcpLanguage
};
var oLocale = new sap.ui.core.Locale(sLanguage);
var oGetLocaleStub = sinon.stub(sap.ui.getCore().getConfiguration(), "getLocale").returns(oLocale);
var oGetLanguageTagStub = sinon.stub(sap.ui.getCore().getConfiguration(), "getLanguageTag").returns(sWrongLanguage);
// Act
var oUser = new sap.ushell.User(oContainerAdapterConfig);
// Assert
strictEqual(oUser.getLanguageText(), sLanguageShown,
"Local returned the language's technical name");
oGetLocaleStub.restore();
oGetLanguageTagStub.restore();
});
}());
|
import os
from flask import Flask, request, abort, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
from auth import AuthError, requires_auth
from models import setup_db, Actor, Movie
from config import ITEMS_PER_PAGE
def create_app(test_config=None):
    """Application factory for the casting-agency API.

    Creates the Flask app, initialises the database and CORS, and registers
    the actor/movie routes plus the error handlers.

    :param test_config: optional configuration override (unused, kept for
        compatibility with the conventional factory signature)
    :return: the configured :class:`flask.Flask` application
    """
    # create and configure the app
    app = Flask(__name__)
    setup_db(app)
    CORS(app)

    # CORS headers
    @app.after_request
    def after_request(response):
        response.headers.add('Access-Control-Allow-Headers',
                             'Content-Type,Authorization,true')
        response.headers.add('Access-Control-Allow-Methods',
                             'GET,PATCH,POST,DELETE,OPTIONS')
        return response

    @app.route('/actors', methods=['GET'])
    @requires_auth('read:actors')
    def get_actors(payload):
        '''Return one page of actors (?page=N); 404 if the page is empty.'''
        page = request.args.get('page', 1, type=int)
        selection = Actor.query.order_by(Actor.id).paginate(
            page,
            ITEMS_PER_PAGE,
            False
        )
        selection_array = [actor.format() for actor in selection.items]
        if len(selection_array) == 0:
            abort(404, {'message': 'actors not found'})
        return jsonify({
            'success': True,
            'actors': selection_array,
            'total': selection.total
        })

    @app.route('/actors', methods=['POST'])
    @requires_auth('create:actors')
    def create_actor(payload):
        '''Create a new actor; name and age are mandatory, gender optional.'''
        body = request.get_json()
        if not body:
            abort(422, {'message': 'invalid body JSON'})
        name = body.get('name', None)
        age = body.get('age', None)
        gender = body.get('gender', 'Other')
        if not name:
            abort(422, {'message': 'name cannot be blank'})
        if not age:
            abort(422, {'message': 'age cannot be blank'})
        try:
            actor = Actor(
                name=name,
                age=age,
                gender=gender
            )
            actor.insert()
        except Exception:
            # database-level failure (e.g. constraint violation)
            abort(422)
        return jsonify({
            'success': True,
            'created': actor.id,
        })

    @app.route('/actors/<int:actor_id>', methods=['PATCH'])
    @requires_auth('edit:actors')
    def edit_actor(payload, actor_id):
        '''Partially update an actor; absent fields keep their value.'''
        body = request.get_json()
        if not body:
            abort(422, {'message': 'invalid body JSON'})
        actor = Actor.query.filter(Actor.id == actor_id).one_or_none()
        if not actor:
            abort(404, {'message': 'actor not found'})
        try:
            actor.name = body.get('name', actor.name)
            actor.age = body.get('age', actor.age)
            actor.gender = body.get('gender', actor.gender)
            actor.update()
        except Exception:
            abort(422)
        return jsonify({
            'success': True,
            'updated': actor.id,
        })

    @app.route('/actors/<int:actor_id>', methods=['DELETE'])
    @requires_auth('delete:actors')
    def delete_actor(payload, actor_id):
        '''Delete the actor with the given id; 404 if it does not exist.'''
        actor = Actor.query.filter(Actor.id == actor_id).one_or_none()
        if not actor:
            abort(404, {'message': 'actor not found'})
        try:
            actor.delete()
        except Exception:
            abort(422)
        return jsonify({
            'success': True,
            'deleted': actor_id,
        })

    @app.route('/movies', methods=['GET'])
    @requires_auth('read:movies')
    def get_movies(payload):
        '''Return one page of movies (?page=N); 404 if the page is empty.'''
        page = request.args.get('page', 1, type=int)
        selection = Movie.query.order_by(Movie.id).paginate(
            page,
            ITEMS_PER_PAGE,
            False
        )
        if len(selection.items) == 0:
            abort(404, {'message': 'movies not found'})
        selection_array = [movie.format() for movie in selection.items]
        return jsonify({
            'success': True,
            'movies': selection_array,
            'total': selection.total
        })

    @app.route('/movies', methods=['POST'])
    @requires_auth('create:movies')
    def create_movies(payload):
        '''Create a new movie; title and release_year are mandatory.'''
        body = request.get_json()
        if not body:
            abort(422, {'message': 'invalid body JSON'})
        title = body.get('title', None)
        release_year = body.get('release_year', None)
        if not title:
            abort(422, {'message': 'title cannot be blank'})
        if not release_year:
            abort(422, {'message': 'release_year cannot be blank'})
        try:
            movie = Movie(
                title=title,
                release_year=release_year
            )
            movie.insert()
        except Exception:
            abort(422)
        return jsonify({
            'success': True,
            'created': movie.id,
        })

    @app.route('/movies/<int:movie_id>', methods=['PATCH'])
    @requires_auth('edit:movies')
    def edit_movie(payload, movie_id):
        '''Partially update a movie; absent fields keep their value.'''
        body = request.get_json()
        if not body:
            abort(422, {'message': 'invalid body JSON'})
        movie = Movie.query.filter(Movie.id == movie_id).one_or_none()
        if not movie:
            abort(404, {'message': 'movie not found'})
        try:
            movie.title = body.get('title', movie.title)
            movie.release_year = body.get('release_year', movie.release_year)
            # BUGFIX: persist the changes; the update() call was missing, so
            # PATCHed values were silently lost (cf. edit_actor above).
            movie.update()
        except Exception:
            abort(422)
        return jsonify({
            'success': True,
            'updated': movie.id,
        })

    @app.route('/movies/<int:movie_id>', methods=['DELETE'])
    @requires_auth('delete:movies')
    def delete_movie(payload, movie_id):
        '''Delete the movie with the given id; 404 if it does not exist.'''
        movie = Movie.query.filter(Movie.id == movie_id).one_or_none()
        if not movie:
            abort(404, {'message': 'movie not found'})
        try:
            movie.delete()
        except Exception:
            abort(422)
        return jsonify({
            'success': True,
            'deleted': movie_id
        })

    @app.errorhandler(400)
    def bad_request(error):
        '''Error handler for bad request'''
        return jsonify({
            'success': False,
            'error': 400,
            'message': 'bad request'
        }), 400

    @app.errorhandler(AuthError)
    def auth_error(e):
        '''Error handler for AuthError'''
        # BUGFIX: use the status code carried by the error (401 or 403)
        # instead of always answering 401.
        return jsonify({
            'success': False,
            'error': e.status_code,
            'message': e.error['description']
        }), e.status_code

    @app.errorhandler(404)
    def not_found(error):
        '''Error handler for 404'''
        return jsonify({
            'success': False,
            'error': 404,
            'message': get_error_message(error, 'resource not found')
        }), 404

    @app.errorhandler(422)
    def unprocessable(error):
        '''Error handling for unprocessable entity'''
        return jsonify({
            'success': False,
            'error': 422,
            'message': get_error_message(error, 'unprocessable')
        }), 422

    def get_error_message(error, default_message):
        '''
        Returns if there is any error message provided in
        error.description.message else default_message
        This can be passed by calling
        abort(404, description={'message': 'your message'})
        Parameters:
        error (werkzeug.exceptions.NotFound): error object
        default_message (str): default message if custom message not available
        Returns:
        str: Custom error message or default error message
        '''
        try:
            return error.description['message']
        except (TypeError, KeyError):
            # description is a plain string (TypeError) or a dict without a
            # 'message' key (KeyError): fall back to the default.
            return default_message

    return app
# Module-level app instance so WSGI servers (e.g. gunicorn) can import `APP`.
APP = create_app()

if __name__ == '__main__':
    # Local development server only; debug=True must not be used in production.
    APP.run(host='0.0.0.0', port=8080, debug=True)
|
// Webpack async chunk 271 (minified build artifact — do not edit by hand).
// Exports `icon`: a 16x16 React SVG component rendering a pencil glyph, with
// optional `title`/`titleId` props wired to aria-labelledby for accessibility.
(window.webpackJsonp=window.webpackJsonp||[]).push([[271],{4160:function(t,e,r){"use strict";r.r(e),r.d(e,"icon",(function(){return o}));r(11),r(2),r(4),r(8),r(3),r(9);var n=r(0),l=r.n(n);function a(){return(a=Object.assign||function(t){for(var e=1;e<arguments.length;e++){var r=arguments[e];for(var n in r)Object.prototype.hasOwnProperty.call(r,n)&&(t[n]=r[n])}return t}).apply(this,arguments)}function i(t,e){if(null==t)return{};var r,n,l=function(t,e){if(null==t)return{};var r,n,l={},a=Object.keys(t);for(n=0;n<a.length;n++)r=a[n],e.indexOf(r)>=0||(l[r]=t[r]);return l}(t,e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);for(n=0;n<a.length;n++)r=a[n],e.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(t,r)&&(l[r]=t[r])}return l}var o=function(t){var e=t.title,r=t.titleId,n=i(t,["title","titleId"]);return l.a.createElement("svg",a({width:16,height:16,viewBox:"0 0 16 16",xmlns:"http://www.w3.org/2000/svg","aria-labelledby":r},n),e?l.a.createElement("title",{id:r},e):null,l.a.createElement("path",{d:"M12.148 3.148L11 2l-9 9v3h3l9-9-1.144-1.144-8.002 7.998a.502.502 0 01-.708 0 .502.502 0 010-.708l8.002-7.998zM11 1c.256 0 .512.098.707.293l3 3a.999.999 0 010 1.414l-9 9A.997.997 0 015 15H2a1 1 0 01-1-1v-3c0-.265.105-.52.293-.707l9-9A.997.997 0 0111 1zM5 14H2v-3l3 3z"}))}}}]);
//# sourceMappingURL=icon.pencil-js.min.js.map
|
/**
* 打包的入口文件,把需要的组件或是第三方在这里导入进来
*/
//导入第三方包
/** Vue是变量名 vue是包名 */
import Vue from 'vue'
import Mint from 'mint-ui'
import VueResource from 'vue-resource'
import moment from 'moment'
import VuePreview from 'vue-preview'
// import axios from 'axios'
//集成中间件
Vue.use(Mint)
Vue.use(VueResource)//Vue.propertype.$http
Vue.use(VuePreview)
//原型上不要随便加东西,加一些每个实例都需要用到的
// Vue.prototype.$axios = axios
//导入样式
//todo 生产阶段要是用style.min.css
//在这里可以不用写node_modules的路径,它自己回去找
// import 'mint-ui/lib/style.css'
import 'mint-ui/lib/style.min.css'
// import './statics/mui/css/mui.css'
import './statics/mui/css/mui.min.css'
import './statics/css/site.css'
//全局的过滤器
Vue.filter('dateFmt',(input,dateFmtString)=>{
const lastFmtString = dateFmtString || 'YYYY-MM-DD HH:mm:ss'
/**
* 参数1:要格式化的原始时间
* 参数2:格式化的字符串
*/
return moment(input).format(lastFmtString)
})
//导入App.vue
import App from './App.vue'//var App = require('./App.vue')
//导入路由模块
import router from './router/router.js'
import store from './store/index.js'
//创建根实例
new Vue({
el:'#app',
// render:function(createElement){
// return createElement(App)
// }
router,
store,
render:h=>h(App)
})
|
import component from './domain-dns-anycast.component';

// AngularJS module that registers the anycast-DNS component and exports the
// module name for composition by the parent module.
const moduleName = 'domainAnycast';

angular.module(moduleName, [])
.component('domainAnycast', component);

export default moduleName;
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.PyQt.QtWidgets import QDialog
from .ui.ui_DlgDbError import Ui_DbManagerDlgDbError as Ui_Dialog
from .db_plugins.plugin import DbError
class DlgDbError(QDialog, Ui_Dialog):
    """Dialog that displays a database error, optionally with the SQL query
    that caused it (for :class:`DbError` instances)."""

    def __init__(self, e, parent=None):
        QDialog.__init__(self, parent)
        self.setupUi(self)

        def sanitize(txt):
            # BUGFIX: escape '<' as '&lt;' so the message cannot inject HTML
            # into the QTextEdit (the previous replace('<', '<') was a no-op,
            # apparently mangled from '&lt;'). Wrap in <pre> to keep layout.
            return "" if txt is None else "<pre>" + txt.replace('<', '&lt;') + "</pre>"

        if isinstance(e, DbError):
            self.setQueryMessage(sanitize(e.msg), sanitize(e.query))
        else:
            # NOTE(review): assumes non-DbError exceptions also carry a .msg
            # attribute — confirm callers; otherwise str(e) would be safer.
            self.setMessage(sanitize(e.msg))

    def setMessage(self, msg):
        """Show a plain error message (page 0 of the stacked widget)."""
        self.txtErrorMsg.setHtml(msg)
        self.stackedWidget.setCurrentIndex(0)

    def setQueryMessage(self, msg, query):
        """Show an error message together with the offending query (page 1)."""
        self.txtQueryErrorMsg.setHtml(msg)
        self.txtQuery.setHtml(query)
        self.stackedWidget.setCurrentIndex(1)

    @staticmethod
    def showError(e, parent=None):
        """Convenience helper: build the dialog for `e` and run it modally."""
        dlg = DlgDbError(e, parent)
        dlg.exec_()
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class ContainerSummary(Model):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, id: str=None, name: str=None, semester: str=None, observation_blocks: List[str]=None, comment: str=None):  # noqa: E501
        """ContainerSummary - a model defined in Swagger
        :param id: The id of this ContainerSummary.  # noqa: E501
        :type id: str
        :param name: The name of this ContainerSummary.  # noqa: E501
        :type name: str
        :param semester: The semester of this ContainerSummary.  # noqa: E501
        :type semester: str
        :param observation_blocks: The observation_blocks of this ContainerSummary.  # noqa: E501
        :type observation_blocks: List[str]
        :param comment: The comment of this ContainerSummary.  # noqa: E501
        :type comment: str
        """
        # Maps attribute name -> declared Swagger type (used by the
        # (de)serialisation machinery in the Model base class).
        self.swagger_types = {
            'id': str,
            'name': str,
            'semester': str,
            'observation_blocks': List[str],
            'comment': str
        }
        # Maps attribute name -> JSON key used on the wire.
        self.attribute_map = {
            'id': 'id',
            'name': 'name',
            'semester': 'semester',
            'observation_blocks': 'observation_blocks',
            'comment': 'comment'
        }
        self._id = id
        self._name = name
        self._semester = semester
        self._observation_blocks = observation_blocks
        self._comment = comment
    @classmethod
    def from_dict(cls, dikt) -> 'ContainerSummary':
        """Returns the dict as a model
        :param dikt: A dict.
        :type: dict
        :return: The ContainerSummary of this ContainerSummary.  # noqa: E501
        :rtype: ContainerSummary
        """
        return util.deserialize_model(dikt, cls)
    @property
    def id(self) -> str:
        """Gets the id of this ContainerSummary.
        :return: The id of this ContainerSummary.
        :rtype: str
        """
        return self._id
    @id.setter
    def id(self, id: str):
        """Sets the id of this ContainerSummary.
        :param id: The id of this ContainerSummary.
        :type id: str
        """
        self._id = id
    @property
    def name(self) -> str:
        """Gets the name of this ContainerSummary.
        :return: The name of this ContainerSummary.
        :rtype: str
        """
        return self._name
    @name.setter
    def name(self, name: str):
        """Sets the name of this ContainerSummary.
        :param name: The name of this ContainerSummary.
        :type name: str
        """
        self._name = name
    @property
    def semester(self) -> str:
        """Gets the semester of this ContainerSummary.
        :return: The semester of this ContainerSummary.
        :rtype: str
        """
        return self._semester
    @semester.setter
    def semester(self, semester: str):
        """Sets the semester of this ContainerSummary.
        :param semester: The semester of this ContainerSummary.
        :type semester: str
        """
        self._semester = semester
    @property
    def observation_blocks(self) -> List[str]:
        """Gets the observation_blocks of this ContainerSummary.
        :return: The observation_blocks of this ContainerSummary.
        :rtype: List[str]
        """
        return self._observation_blocks
    @observation_blocks.setter
    def observation_blocks(self, observation_blocks: List[str]):
        """Sets the observation_blocks of this ContainerSummary.
        :param observation_blocks: The observation_blocks of this ContainerSummary.
        :type observation_blocks: List[str]
        """
        self._observation_blocks = observation_blocks
    @property
    def comment(self) -> str:
        """Gets the comment of this ContainerSummary.
        :return: The comment of this ContainerSummary.
        :rtype: str
        """
        return self._comment
    @comment.setter
    def comment(self, comment: str):
        """Sets the comment of this ContainerSummary.
        :param comment: The comment of this ContainerSummary.
        :type comment: str
        """
        self._comment = comment
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel-transpiled class helpers (build output — do not edit by hand).
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
/**
 * This component creates loading animation element
 * <div class="spinner">
 *   <svg/>
 * </div>
 */
var SpinnerComponent = function () {
// utils: helper object providing createDiv(classNames) -> HTMLElement
function SpinnerComponent(utils) {
_classCallCheck(this, SpinnerComponent);
this.utils = utils;
}
_createClass(SpinnerComponent, [{
key: 'render',
// Builds and returns the spinner DOM node (a div containing the SVG circle).
value: function render() {
var spinner = this.utils.createDiv(['spinner']);
spinner.innerHTML = '<svg viewBox="0 0 64 64"><circle transform="translate(32,32)" r="26"></circle></svg>';
return spinner;
}
}]);
return SpinnerComponent;
}();
exports.SpinnerComponent = SpinnerComponent;
//# sourceMappingURL=spinner.component.js.map
|
import styled from '@emotion/styled'
// Styled wrapper for the DayPicker navigation bar.
// Cleanup: removed the empty `@media` blocks and the `button { ... }` rule
// that contained only an empty media query — they emitted no CSS at all.
// NOTE: .Calendar__Btn-Prev and .Calendar__Btn-Next are intentionally kept
// as two identical rule sets to preserve the emitted stylesheet.
export const CalendarNavbarEl = styled.div`
  .DayPicker-NavBar {
    display: flex;
    justify-content: space-between;
    position: relative;
    top: 41px;
    align-items: center;
    @media (min-width: ${(props) => props.theme.breakpoints.l}) {
      top: 56px;
    }
  }
  button:focus {
    outline: none;
  }
  .Calendar__Btn-Prev {
    font-size: ${(props) => props.theme.fonts.smallText};
    background: transparent;
    border: none;
    display: flex;
    font-family: ${(props) => props.theme.fontFamily.secondary};
    opacity: 0.6;
    text-transform: capitalize;
    @media (min-width: ${(props) => props.theme.breakpoints.l}) {
      font-size: 20px;
    }
  }
  .Calendar__Btn-Next {
    font-size: ${(props) => props.theme.fonts.smallText};
    background: transparent;
    border: none;
    display: flex;
    font-family: ${(props) => props.theme.fontFamily.secondary};
    opacity: 0.6;
    text-transform: capitalize;
    @media (min-width: ${(props) => props.theme.breakpoints.l}) {
      font-size: 20px;
    }
  }
`
// Container for the "next month" arrow icon, pinned to the right edge;
// offset and icon size grow at the large breakpoint.
export const CalendarNavIconNext = styled.div`
position: absolute;
right: 33px;
@media (min-width: ${(props) => props.theme.breakpoints.l}) {
right: 50px;
svg {
width: 50px;
}
}
`
// Container for the "previous month" arrow icon, pinned to the left edge;
// mirror image of CalendarNavIconNext.
export const CalendarNavIconPrev = styled.div`
position: absolute;
left: 33px;
@media (min-width: ${(props) => props.theme.breakpoints.l}) {
left: 50px;
svg {
width: 50px;
}
}
`
|
# Base code from PyTorch examples: https://github.com/pytorch/examples/blob/master/mnist/main.py
from __future__ import print_function
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
import sys, os
class Net(nn.Module):
    """Small CNN for 28x28 single-channel (MNIST) images.

    Two 3x3 convolutions, one 2x2 max-pool, two dropout layers and two
    fully connected layers; returns per-class log-probabilities.
    """

    def __init__(self):
        super(Net, self).__init__()
        # Attribute names are part of the state-dict contract — keep stable.
        self.conv1 = nn.Conv2d(1, 32, 3, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1)
        self.dropout1 = nn.Dropout2d(0.25)
        self.dropout2 = nn.Dropout2d(0.5)
        self.fc1 = nn.Linear(9216, 128)
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x):
        # Feature extractor: conv1 -> ReLU -> conv2 -> 2x2 max-pool -> dropout.
        features = self.dropout1(
            F.max_pool2d(self.conv2(F.relu(self.conv1(x))), 2))
        # Classifier head on the flattened map (64 * 12 * 12 = 9216 features).
        flat = torch.flatten(features, 1)
        hidden = self.dropout2(F.relu(self.fc1(flat)))
        return F.log_softmax(self.fc2(hidden), dim=1)
def train(args, model, device, train_loader, optimizer, epoch):
    """Run one training epoch over `train_loader`.

    Logs the running NLL loss every `args.log_interval` batches.
    """
    model.train()
    dataset_size = len(train_loader.dataset)
    for batch_idx, (inputs, labels) in enumerate(train_loader):
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()
        loss = F.nll_loss(model(inputs), labels)
        loss.backward()
        optimizer.step()
        if batch_idx % args.log_interval == 0:
            progress = 100. * batch_idx / len(train_loader)
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(inputs), dataset_size,
                progress, loss.item()))
def test(args, model, device, test_loader):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
def main():
    """CLI entry point: parse arguments, build MNIST loaders, train and
    evaluate the CNN, optionally saving the trained weights."""
    # Training settings
    parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
    parser.add_argument('--batch-size', type=int, default=64, metavar='N',
                        help='input batch size for training (default: 64)')
    parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
                        help='input batch size for testing (default: 1000)')
    parser.add_argument('--epochs', type=int, default=14, metavar='N',
                        help='number of epochs to train (default: 14)')
    parser.add_argument('--lr', type=float, default=1.0, metavar='LR',
                        help='learning rate (default: 1.0)')
    parser.add_argument('--gamma', type=float, default=0.7, metavar='M',
                        help='Learning rate step gamma (default: 0.7)')
    parser.add_argument('--no-cuda', action='store_true', default=False,
                        help='disables CUDA training')
    parser.add_argument('--seed', type=int, default=1, metavar='S',
                        help='random seed (default: 1)')
    parser.add_argument('--log-interval', type=int, default=10, metavar='N',
                        help='how many batches to wait before logging training status')
    parser.add_argument('--save-model', action='store_true', default=False,
                        help='For Saving the current Model')
    args = parser.parse_args()
    use_cuda = not args.no_cuda and torch.cuda.is_available()
    # Seed before any model/loader construction for reproducibility.
    torch.manual_seed(args.seed)
    device = torch.device("cuda" if use_cuda else "cpu")
    kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
    # MNIST is downloaded to ../data on first use; inputs normalised to [-1, 1].
    train_loader = torch.utils.data.DataLoader(
        datasets.MNIST('../data', train=True, download=True,
                       transform=transforms.Compose([
                           transforms.Resize(28),
                           transforms.ToTensor(),
                           transforms.Normalize(mean=(0.5, ), std=(0.5, ))
                       ])),
        batch_size=args.batch_size, shuffle=True, **kwargs)
    test_loader = torch.utils.data.DataLoader(
        datasets.MNIST('../data', train=False, transform=transforms.Compose([
            transforms.Resize(28),
            transforms.ToTensor(),
            transforms.Normalize(mean=(0.5, ), std=(0.5, ))
        ])),
        batch_size=args.test_batch_size, shuffle=True, **kwargs)
    model = Net().to(device)
    optimizer = optim.Adadelta(model.parameters(), lr=args.lr)
    # Decay the learning rate by `gamma` after every epoch.
    scheduler = StepLR(optimizer, step_size=1, gamma=args.gamma)
    for epoch in range(1, args.epochs + 1):
        train(args, model, device, train_loader, optimizer, epoch)
        test(args, model, device, test_loader)
        scheduler.step()
    if args.save_model:
        # Save next to this script so the output location is cwd-independent.
        path_to_script = sys.argv[0]
        path_to_script_dir = os.path.dirname(os.path.abspath(path_to_script))
        full_path = os.path.join(path_to_script_dir, "mnist_cnn.pt")
        torch.save(model.state_dict(), full_path)
# Run the CLI only when executed directly (not on import).
if __name__ == '__main__':
    main()
|
# from gearbox.main import main, Gearbox
from gearbox.main import main

# Public API of this package: only the CLI entry point is re-exported.
__all__ = ['main']
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from test_dist_base import TestDistRunnerBase, runtime_main
import unittest
import paddle
import os
import paddle.distributed.fleet as fleet
import paddle.distributed.fleet.base.role_maker as role_maker
import numpy as np
from functools import reduce
import paddle.fluid as fluid
# This test runs in static-graph mode.
paddle.enable_static()

DTYPE = "float32"
# Pre-download the MNIST data so workers do not race on it later.
paddle.dataset.mnist.fetch()

# Fix seed for test
fluid.default_startup_program().random_seed = 1
fluid.default_main_program().random_seed = 1
def cnn_model(data):
    """Two conv-pool blocks followed by a softmax FC classifier.

    :param data: input image variable (expected [N, 1, 28, 28] per the
        caller's feed definition)
    :return: prediction variable of shape [N, 10] (softmax probabilities)
    """
    conv_pool_1 = fluid.nets.simple_img_conv_pool(
        input=data,
        filter_size=5,
        num_filters=20,
        pool_size=2,
        pool_stride=2,
        act="relu",
        param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(
            value=0.01)))
    conv_pool_2 = fluid.nets.simple_img_conv_pool(
        input=conv_pool_1,
        filter_size=5,
        num_filters=50,
        pool_size=2,
        pool_stride=2,
        act="relu",
        param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(
            value=0.01)))
    # Number of output classes.
    SIZE = 10
    # NOTE: a Xavier-style `scale` (and the `input_shape`/`param_shape` it
    # needed) used to be computed here, but was never used — the FC layer is
    # constant-initialised below. The dead code has been removed.
    predict = fluid.layers.fc(
        input=conv_pool_2,
        size=SIZE,
        act="softmax",
        param_attr=fluid.param_attr.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.01)))
    return predict
class TestFleetMetaOptimizerPrecision(TestDistRunnerBase):
    """Distributed-training precision test: builds the MNIST CNN either for a
    single device or under fleet with graph optimization disabled."""

    def get_model(self, batch_size=2, single_device=False):
        # Input data
        images = fluid.layers.data(name='pixel', shape=[1, 28, 28], dtype=DTYPE)
        label = fluid.layers.data(name='label', shape=[1], dtype='int64')
        # Train program
        predict = cnn_model(images)
        cost = fluid.layers.cross_entropy(input=predict, label=label)
        avg_cost = fluid.layers.mean(x=cost)
        # Evaluator
        batch_size_tensor = fluid.layers.create_tensor(dtype='int64')
        batch_acc = fluid.layers.accuracy(
            input=predict, label=label, total=batch_size_tensor)
        # Clone before adding optimizer ops so the test program is pure forward.
        test_program = fluid.default_main_program().clone(for_test=True)
        # Reader
        # NOTE(review): both readers use mnist.test(); presumably intentional
        # for a fast precision check, but confirm train should not use
        # mnist.train().
        train_reader = paddle.batch(
            paddle.dataset.mnist.test(), batch_size=batch_size)
        test_reader = paddle.batch(
            paddle.dataset.mnist.test(), batch_size=batch_size)
        optimizer = paddle.fluid.optimizer.Adam(0.01)
        if single_device:
            optimizer.minimize(avg_cost)
        else:
            role = role_maker.PaddleCloudRoleMaker(is_collective=True)
            fleet.init(role)
            strategy = paddle.distributed.fleet.DistributedStrategy()
            # Exercise the raw-program (no graph optimization) execution path.
            strategy.without_graph_optimization = True
            optimizer = fleet.distributed_optimizer(
                optimizer, strategy=strategy)
            optimizer.minimize(avg_cost)
        return test_program, avg_cost, train_reader, test_reader, batch_acc, predict
# Launched by the distributed test harness; runtime_main handles the
# trainer/endpoint wiring.
if __name__ == "__main__":
    runtime_main(TestFleetMetaOptimizerPrecision)
|
from setuptools import setup

# Packaging metadata for straight.el's internal filesystem watcher.
# https://python-packaging.readthedocs.io/en/latest/minimal.html
# psutil is pinned (==5.6.6) — presumably a known-good version; confirm
# before bumping.
setup(
    author="Radon Rosborough",
    author_email="radon.neon@gmail.com",
    description="Internal utilities for straight.el.",
    license="MIT",
    install_requires=["psutil==5.6.6"],
    name="straight-watcher",
    url="https://github.com/raxod502/straight.el",
    version="1.0-dev",
)
|
# Generated by Django 2.1.1 on 2019-03-13 12:41
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the examples app: demo models exercising every
    field type, plus relation fields added after all tables exist.

    NOTE: this migration has presumably been applied already — do not edit
    the operations themselves; create a follow-up migration instead.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='AdvancedFields',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('regex_field', models.CharField(max_length=256)),
                ('choice_field', models.CharField(max_length=8, null=True)),
                ('multiplechoice_field', models.CharField(max_length=8, null=True)),
                ('filepath_field', models.FilePathField(null=True)),
                ('file_field', models.FileField(blank=True, null=True, upload_to='examples/')),
                ('image_field', models.ImageField(blank=True, null=True, upload_to='examples/')),
                ('hidden_field', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='BasicFields',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('boolean_field', models.BooleanField(default=False)),
                ('nullboolean_field', models.NullBooleanField()),
                ('char_field', models.CharField(max_length=32, null=True)),
                ('email_field', models.EmailField(max_length=254, null=True)),
                ('slug_field', models.SlugField(null=True)),
                ('url_field', models.URLField(null=True)),
                ('uuid_field', models.UUIDField(null=True)),
                ('ipaddress_field', models.GenericIPAddressField(null=True)),
                ('integer_field', models.IntegerField(null=True)),
                # NOTE(review): named float_field but declared IntegerField —
                # looks like a bug (should presumably be models.FloatField);
                # if so, fix via a new migration, not by editing this one.
                ('float_field', models.IntegerField(null=True)),
                ('decimal_field', models.DecimalField(decimal_places=2, max_digits=5, null=True)),
                ('datetime_field', models.DateTimeField(null=True)),
                ('date_field', models.DateField(null=True)),
                ('time_field', models.TimeField(null=True)),
                ('duration_field', models.DurationField(null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Filter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('char_field', models.CharField(help_text='Char field', max_length=20, verbose_name='Char field')),
                ('datetime_field', models.DateTimeField(help_text='Datetime field', verbose_name='Datetime field')),
                ('int_field', models.IntegerField(help_text='Integer field', verbose_name='Integer field')),
                ('int_choice_field', models.IntegerField(choices=[(0, 'Choice 1'), (1, 'Choice 2'), (2, 'Choice 3'), (3, 'Choice 4')], help_text='Integer field with choices', verbose_name='Integer field with choices')),
                ('bool_field', models.BooleanField(help_text='Boolean field', verbose_name='Boolean field')),
            ],
        ),
        migrations.CreateModel(
            name='HiddenFields',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('note', models.CharField(help_text='Enter abc to hide unit field', max_length=20)),
                ('unit', models.CharField(blank=True, choices=[(None, 'No additional data'), ('pcs', 'Pieces'), ('wt', 'Weight'), ('cst', 'Custom')], max_length=10, null=True)),
                ('int_fld', models.IntegerField(blank=True, null=True, verbose_name='Quantity')),
                ('qty_fld', models.FloatField(blank=True, help_text='Fell free to use a decimal point / comma', null=True, verbose_name='Weight')),
                ('cst_fld', models.CharField(blank=True, help_text='Enter additional info here', max_length=80, null=True, verbose_name='Comment')),
                ('additional_text', models.CharField(blank=True, help_text='Now that you have shown me, please enter something', max_length=80, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='PageLoad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(help_text='Item description', max_length=20)),
                ('choice', models.IntegerField(choices=[(1, 'Choice 1'), (2, 'Choice 2'), (3, 'Choice 3')], default=1)),
            ],
        ),
        migrations.CreateModel(
            name='RefreshType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(help_text='Item description', max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Relation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=16)),
            ],
        ),
        migrations.CreateModel(
            name='Validated',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=10, validators=[django.core.validators.RegexValidator('\\w\\w\\d+', 'Please enter a string starting with two characters, followed by up to 8 numbers')])),
                ('enabled', models.BooleanField()),
                ('amount', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(5), django.core.validators.MaxValueValidator(10)])),
                ('item_type', models.IntegerField(choices=[(0, 'Choice 1'), (1, 'Choice 2'), (2, 'Choice 3'), (3, 'Choice 4')])),
                ('item_flags', models.CharField(blank=True, choices=[('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D')], max_length=4, validators=[django.core.validators.RegexValidator('^[ABC]*$', 'Only options A-C may be chosen', 'regex')])),
                ('comment', models.TextField(blank=True, null=True)),
            ],
        ),
        # Relation fields are added after every table exists, so the
        # foreign-key targets are guaranteed to be present.
        migrations.AddField(
            model_name='advancedfields',
            name='hyperlinked_identity_field',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='hyper_identity', to='examples.Relation'),
        ),
        migrations.AddField(
            model_name='advancedfields',
            name='hyperlinked_related_field',
            field=models.ManyToManyField(related_name='hyper_related', to='examples.Relation'),
        ),
        migrations.AddField(
            model_name='advancedfields',
            name='primary_key_related_field',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='primary', to='examples.Relation'),
        ),
        migrations.AddField(
            model_name='advancedfields',
            name='slug_related_field',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='slug', to='examples.Relation'),
        ),
    ]
|
import { Universe, Cell } from "wasm-game-of-life";
// Import the WebAssembly memory at the top of the file.
import { memory } from "wasm-game-of-life/wasm_game_of_life_bg";
// Rendering parameters (CSS pixels / colors).
const CELL_SIZE = 2; // px
const GRID_COLOR = "#EEEEEE";
const DEAD_COLOR = "#FFFFFF";
const ALIVE_COLOR = "#e34572";

// Construct the universe, and get its width and height.
const universe = Universe.new();
const width = universe.width();
const height = universe.height();

// Handle of the pending requestAnimationFrame, or null while paused.
let animationId = null;

// Give the canvas room for all of our cells and a 1px border
// around each of them.
const canvas = document.getElementById("game-of-life-canvas");
canvas.height = (CELL_SIZE + 1) * height + 1;
canvas.width = (CELL_SIZE + 1) * width + 1;

const ctx = canvas.getContext("2d");
// The loop is paused exactly when no animation frame is scheduled.
const isPaused = () => animationId === null;

const playPauseButton = document.getElementById("play-pause");

// Resume the simulation and switch the button to the "pause" glyph.
const play = () => {
  playPauseButton.textContent = "⏸";
  renderLoop();
};

// Halt the simulation: cancel the scheduled frame and clear its handle.
const pause = () => {
  playPauseButton.textContent = "▶";
  cancelAnimationFrame(animationId);
  animationId = null;
};

// The button toggles between the running and paused states.
playPauseButton.addEventListener("click", () => {
  if (!isPaused()) {
    pause();
  } else {
    play();
  }
});
// One animation step: advance the universe a tick, repaint grid and
// cells, then schedule the next frame (storing the handle so pause()
// can cancel it).
const renderLoop = () => {
  universe.tick();
  drawGrid();
  drawCells();
  animationId = requestAnimationFrame(renderLoop);
};
/**
 * Paint the 1px lattice separating cells. Lines sit at multiples of
 * (CELL_SIZE + 1) offset by 1, so each cell is drawn between them.
 */
const drawGrid = () => {
  ctx.beginPath();
  ctx.strokeStyle = GRID_COLOR;

  const maxX = (CELL_SIZE + 1) * width + 1;
  const maxY = (CELL_SIZE + 1) * height + 1;

  // Vertical lines.
  for (let x = 0; x <= width; x++) {
    const px = x * (CELL_SIZE + 1) + 1;
    ctx.moveTo(px, 0);
    ctx.lineTo(px, maxY);
  }

  // Horizontal lines.
  for (let y = 0; y <= height; y++) {
    const py = y * (CELL_SIZE + 1) + 1;
    ctx.moveTo(0, py);
    ctx.lineTo(maxX, py);
  }

  ctx.stroke();
};
// Map a (row, column) pair to its index in the flat row-major cells array.
const getIndex = (row, column) => column + row * width;
/**
 * Repaint every cell by reading the universe's cell buffer directly out
 * of WebAssembly linear memory (zero-copy Uint8Array view).
 *
 * Cells are drawn in two passes — all alive, then all dead — so the
 * canvas fillStyle state change happens twice per frame instead of once
 * per cell. (The superseded single-pass version that was left here as
 * commented-out dead code has been removed.)
 */
const drawCells = () => {
  const cellsPtr = universe.cells();
  // NOTE(review): a memory.buffer view is invalidated if wasm memory
  // grows — recreated each frame here, so that is safe.
  const cells = new Uint8Array(memory.buffer, cellsPtr, width * height);

  ctx.beginPath();

  // Alive cells.
  ctx.fillStyle = ALIVE_COLOR;
  for (let row = 0; row < height; row++) {
    for (let col = 0; col < width; col++) {
      const idx = getIndex(row, col);
      if (cells[idx] !== Cell.Alive) {
        continue;
      }
      ctx.fillRect(
        col * (CELL_SIZE + 1) + 1,
        row * (CELL_SIZE + 1) + 1,
        CELL_SIZE,
        CELL_SIZE
      );
    }
  }

  // Dead cells.
  ctx.fillStyle = DEAD_COLOR;
  for (let row = 0; row < height; row++) {
    for (let col = 0; col < width; col++) {
      const idx = getIndex(row, col);
      if (cells[idx] !== Cell.Dead) {
        continue;
      }
      ctx.fillRect(
        col * (CELL_SIZE + 1) + 1,
        row * (CELL_SIZE + 1) + 1,
        CELL_SIZE,
        CELL_SIZE
      );
    }
  }

  ctx.stroke();
};
// Initial paint, then start the loop immediately.
drawGrid();
drawCells();
// This used to be `requestAnimationFrame(renderLoop)`.
play();

// Click-to-toggle: translate the click position from CSS pixels into
// canvas pixels (the canvas may be scaled by layout), then into a
// cell coordinate, clamped to the grid bounds.
canvas.addEventListener("click", event => {
  const boundingRect = canvas.getBoundingClientRect();

  const scaleX = canvas.width / boundingRect.width;
  const scaleY = canvas.height / boundingRect.height;

  const canvasLeft = (event.clientX - boundingRect.left) * scaleX;
  const canvasTop = (event.clientY - boundingRect.top) * scaleY;

  const row = Math.min(Math.floor(canvasTop / (CELL_SIZE + 1)), height - 1);
  const col = Math.min(Math.floor(canvasLeft / (CELL_SIZE + 1)), width - 1);

  universe.toggle_cell(row, col);

  drawGrid();
  drawCells();
});
|
// Docusaurus sidebar definition for the Alaya documentation site.
// Bare strings are document IDs; nested `category` objects become
// collapsible sections.
// NOTE(review): 'AlayScan' looks like it may be a typo for 'AlayaScan' —
// verify against the actual doc ID before changing.
module.exports = {
  docs: [
    'Overview',
    'Glossary',
    {
      type: 'category',
      label: 'Basics',
      items: [
        'Intro_to_Alaya',
        'Intro_to_ATP',
        'staking_and_delegation',
        'Networks',
        'Alaya_Account'
      ]
    },
    {
      type: 'category',
      label: 'Advanced',
      items: ['Architecture', 'Economic_model', 'Consensus_mechanism', 'Governance_mechanism']
    },
    {
      type: 'category',
      label: 'Alaya Node',
      items: [
        'Intro_to_validator',
        'Run_a_fullnode',
        'Become_Validator',
        'Data_snapshot',
        {
          type: 'category',
          label: 'Tools for nodes',
          items: ['Online_MTool', 'Offline_MTool', 'Command_Line_Tools']
        }
      ]
    },
    {
      type: 'category',
      label: 'Developers',
      items: [
        'Development_guide',
        {
          type: 'category',
          label: 'Set up local environment',
          items: ['Join_the_dev_network', 'Run_a_dev_node', 'Private_network']
        },
        {
          type: 'category',
          label: 'Dev Tools',
          items: ['Alaya-Truffle', 'IDE']
        },
        {
          type: 'category',
          label: 'Smart contract',
          items: [{
            type: 'category',
            label: 'Solidity contract',
            items: [
              'Solidity_Getting_started',
              'Solidity_Migration_tutorial',
              'Solidity_Development_costs',
              'Solidity_Best_practices',
              'Solidity_Contract_security'
            ]
          },
          {
            type: 'category',
            label: 'Wasm contract',
            items: ['Wasm_Getting_started', 'Wasm_Development_costs', 'Wasm_Best_practices', 'Wasm_API']
          },
          ]
        },
        {
          type: 'category',
          label: 'References',
          items: ['JS_SDK', 'Java_SDK', 'Python_SDK', 'Go_SDK', 'Json_Rpc', 'Explorer_API', 'Samurai_API']
        },
        {
          type: 'category',
          label: 'ARC Token',
          items: ['ARC20', 'ARC721']
        },
      ]
    },
    {
      type: 'category',
      label: 'Tutorials',
      items: ['JS-Tutorials', 'DApp_migrate']
    },
    {
      type: 'category',
      label: 'Data and analytics',
      items: ['AlayScan', 'PlatEye']
    },
    {
      type: 'category',
      label: 'Wallet',
      items: ['Wallet_Guide', 'ATON_user_manual', 'Samurai_user_manual','Ledger-hardware-wallet']
    },
    {
      type: 'category',
      label: 'Community',
      items: ['Join_the_Community', 'Ecosystem_Programs', 'Ways_to_contribute', 'Contribution_Guidelines']
    }
  ]
}
|
import { Daemon } from "./daemon";
import { WalletRPC } from "./wallet-rpc";
import { SCEE } from "./SCEE-Node";
import { dialog } from "electron";
import semver from "semver";
import axios from "axios";
import { version } from "../../../package.json";
const bunyan = require("bunyan");
const WebSocket = require("ws");
const electron = require("electron");
const os = require("os");
const fs = require("fs-extra");
const path = require("upath");
const objectAssignDeep = require("object-assign-deep");
const { ipcMain: ipc } = electron;
const LOG_LEVELS = ["fatal", "error", "warn", "info", "debug", "trace"];
export class Backend {
  /**
   * Backend controller: owns the daemon/wallet child processes, the
   * encrypted websocket bridge to the renderer, config state, and logging.
   * All fields are placeholders until init()/startup() run.
   * @param {BrowserWindow} mainWindow - Electron window used for dialogs.
   */
  constructor(mainWindow) {
    this.mainWindow = mainWindow;
    this.daemon = null;
    this.walletd = null;
    this.wss = null;      // websocket server to the renderer
    this.token = null;    // shared secret for message encryption
    this.config_dir = null;
    this.wallet_dir = null;
    this.config_file = null;
    this.config_data = {};
    this.scee = new SCEE();
    this.log = null;
  }
  /**
   * Resolve config/wallet directories (preferring a pre-existing legacy
   * loki dir), build default daemon/app/wallet settings and the remote
   * node list, then open the encrypted websocket server.
   * @param {{token: string, port: number}} config
   */
  init(config) {
    let configDir;
    let legacyLokiConfigDir;
    if (os.platform() === "win32") {
      configDir = "C:\\ProgramData\\oxen";
      legacyLokiConfigDir = "C:\\ProgramData\\loki\\";
      this.wallet_dir = `${os.homedir()}\\Documents\\Oxen`;
    } else {
      configDir = path.join(os.homedir(), ".oxen");
      legacyLokiConfigDir = path.join(os.homedir(), ".loki/");
      this.wallet_dir = path.join(os.homedir(), "Oxen");
    }

    // if the user has used loki before, just keep the same stuff
    if (fs.existsSync(legacyLokiConfigDir)) {
      this.config_dir = legacyLokiConfigDir;
    } else {
      // create the new, Oxen location
      this.config_dir = configDir;
      if (!fs.existsSync(configDir)) {
        fs.mkdirpSync(configDir);
      }
    }

    if (!fs.existsSync(path.join(this.config_dir, "gui"))) {
      fs.mkdirpSync(path.join(this.config_dir, "gui"));
    }

    this.config_file = path.join(this.config_dir, "gui", "config.json");

    // Base daemon settings; per-network entries below override ports/type.
    const daemon = {
      type: "remote",
      p2p_bind_ip: "0.0.0.0",
      p2p_bind_port: 22022,
      rpc_bind_ip: "127.0.0.1",
      rpc_bind_port: 22023,
      zmq_rpc_bind_ip: "127.0.0.1",
      out_peers: -1,
      in_peers: -1,
      limit_rate_up: -1,
      limit_rate_down: -1,
      log_level: 0
    };

    const daemons = {
      mainnet: {
        ...daemon,
        remote_host: "imaginary.stream",
        remote_port: 22023
      },
      stagenet: {
        ...daemon,
        type: "local",
        p2p_bind_port: 38153,
        rpc_bind_port: 38154
      },
      testnet: {
        ...daemon,
        type: "local",
        p2p_bind_port: 38156,
        rpc_bind_port: 38157
      }
    };

    // Default values
    this.defaults = {
      daemons: objectAssignDeep({}, daemons),
      app: {
        data_dir: this.config_dir,
        wallet_data_dir: this.wallet_dir,
        ws_bind_port: 12313,
        net_type: "mainnet"
      },
      wallet: {
        rpc_bind_port: 22026,
        log_level: 0
      }
    };

    this.config_data = {
      // Copy all the properties of defaults
      ...objectAssignDeep({}, this.defaults),
      appearance: {
        theme: "dark"
      }
    };

    // Known public remote nodes offered to the user.
    this.remotes = [
      {
        host: "imaginary.stream",
        port: "22023"
      },
      {
        host: "nodes.hashvault.pro",
        port: "22023"
      },
      {
        host: "explorer.loki.aussie-pools.com",
        port: "18081"
      },
      {
        host: "public.loki.foundation",
        port: "22023"
      }
    ];

    this.token = config.token;

    this.wss = new WebSocket.Server({
      port: config.port,
      maxPayload: Number.POSITIVE_INFINITY
    });

    this.wss.on("connection", ws => {
      ws.on("message", data => this.receive(data));
    });
  }
send(event, data = {}) {
let message = {
event,
data
};
let encrypted_data = this.scee.encryptString(
JSON.stringify(message),
this.token
);
this.wss.clients.forEach(function each(client) {
if (client.readyState === WebSocket.OPEN) {
client.send(encrypted_data);
}
});
}
receive(data) {
let decrypted_data = JSON.parse(this.scee.decryptString(data, this.token));
// route incoming request to either the daemon, wallet, or here
switch (decrypted_data.module) {
case "core":
this.handle(decrypted_data);
break;
case "daemon":
if (this.daemon) {
this.daemon.handle(decrypted_data);
}
break;
case "wallet":
if (this.walletd) {
this.walletd.handle(decrypted_data);
}
break;
}
}
  /**
   * Handle a "core"-module request from the renderer. Supported methods:
   * set_language, quick_save_config, save_config(_init), init,
   * open_explorer, open_url, save_png.
   * @param {{method: string, data: object}} data
   */
  handle(data) {
    let params = data.data;
    // check if config has changed
    let config_changed = false;

    switch (data.method) {
      case "set_language":
        this.send("set_language", { lang: params.lang });
        break;

      case "quick_save_config":
        // save only partial config settings
        Object.keys(params).map(key => {
          this.config_data[key] = Object.assign(
            this.config_data[key],
            params[key]
          );
        });
        fs.writeFile(
          this.config_file,
          JSON.stringify(this.config_data, null, 4),
          "utf8",
          () => {
            this.send("set_app_data", {
              config: params,
              pending_config: params
            });
          }
        );
        break;

      case "save_config_init":
      case "save_config": {
        if (data.method === "save_config") {
          // Detect whether any non-appearance setting actually changed,
          // so the renderer can be told a reboot is needed.
          Object.keys(this.config_data).map(i => {
            if (i == "appearance") return;
            Object.keys(this.config_data[i]).map(j => {
              if (this.config_data[i][j] !== params[i][j]) {
                config_changed = true;
              }
            });
          });
        }

        Object.keys(params).map(key => {
          this.config_data[key] = Object.assign(
            this.config_data[key],
            params[key]
          );
        });

        // Replace invalid values with defaults for every section that
        // has a default entry (see validate_values).
        const validated = Object.keys(this.defaults)
          .filter(k => k in this.config_data)
          .map(k => [
            k,
            this.validate_values(this.config_data[k], this.defaults[k])
          ])
          .reduce((map, obj) => {
            map[obj[0]] = obj[1];
            return map;
          }, {});

        // Validate daemon data
        this.config_data = {
          ...this.config_data,
          ...validated
        };

        fs.writeFile(
          this.config_file,
          JSON.stringify(this.config_data, null, 4),
          "utf8",
          () => {
            if (data.method == "save_config_init") {
              this.startup();
            } else {
              this.send("set_app_data", {
                config: this.config_data,
                pending_config: this.config_data
              });
              if (config_changed) {
                this.send("settings_changed_reboot");
              }
            }
          }
        );
        break;
      }

      case "init":
        this.startup();
        break;

      case "open_explorer": {
        const { net_type } = this.config_data.app;

        // Local `path` here is a URL segment, not the `upath` module
        // (shadowed inside this block scope).
        let path = null;
        if (params.type === "tx") {
          path = "tx";
        } else if (params.type === "service_node") {
          path = "service_node";
        }

        if (path) {
          const baseUrl =
            net_type === "testnet"
              ? "https://testnet.oxen.observer"
              : "https://oxen.observer";
          const url = `${baseUrl}/${path}/`;
          require("electron").shell.openExternal(url + params.id);
        }
        break;
      }

      case "open_url":
        require("electron").shell.openExternal(params.url);
        break;

      case "save_png": {
        let filename = dialog.showSaveDialog(this.mainWindow, {
          title: "Save " + params.type,
          filters: [{ name: "PNG", extensions: ["png"] }],
          defaultPath: os.homedir()
        });

        if (filename) {
          // Strip the data-URL prefix and decode the base64 image bytes.
          let base64Data = params.img.replace(/^data:image\/png;base64,/, "");
          let binaryData = Buffer.from(base64Data, "base64").toString("binary");
          fs.writeFile(filename, binaryData, "binary", err => {
            if (err) {
              this.send("show_notification", {
                type: "negative",
                i18n: [
                  "notification.errors.errorSavingItem",
                  { item: params.type }
                ],
                timeout: 2000
              });
            } else {
              this.send("show_notification", {
                i18n: [
                  "notification.positive.itemSaved",
                  { item: params.type, filename }
                ],
                timeout: 2000
              });
            }
          });
        }
        break;
      }

      default:
        break;
    }
  }
// if the version is a whole minor version out of date (hardfork out of date)
// set update required to true
async checkVersion() {
try {
const { data } = await axios.get(
"https://api.github.com/repos/loki-project/loki-electron-gui-wallet/releases/latest"
);
// remove the 'v' from front of the version
const latestVersion = data.tag_name.substring(1);
// can return "major", "minor", "patch"
const vSizeDiff = semver.diff(version, latestVersion);
const updateAvailable = semver.ltr(version, latestVersion);
const majorOrMinor = vSizeDiff === "major" || vSizeDiff == "minor";
const updateRequired = updateAvailable && majorOrMinor;
this.send("set_update_required", updateRequired);
} catch (e) {
this.send("set_updated_required", false);
}
}
  /**
   * Create a rotating bunyan file logger, wire renderer-side `log-<level>`
   * IPC channels to it, and capture uncaught errors / unhandled rejections.
   * @param {string} logPath - directory that receives electron.log.
   */
  initLogger(logPath) {
    let log = bunyan.createLogger({
      name: "log",
      streams: [
        {
          type: "rotating-file",
          path: path.join(logPath, "electron.log"),
          period: "1d", // daily rotation
          count: 4 // keep 4 days of logs
        }
      ]
    });

    // Forward log-fatal/error/warn/info/debug/trace IPC events into bunyan.
    // The first argument is the IPC event object and is deliberately dropped.
    LOG_LEVELS.forEach(level => {
      ipc.on(`log-${level}`, (first, ...rest) => {
        log[level](...rest);
      });
    });

    this.log = log;

    process.on("uncaughtException", error => {
      log.error("Unhandled Error", error);
    });

    process.on("unhandledRejection", error => {
      log.error("Unhandled Promise Rejection", error);
    });
  }
  /**
   * Full application startup sequence: load + validate config from disk,
   * ensure data/wallet/log directories exist, start the logger, then
   * bring up the daemon and wallet RPC in order, reporting progress to
   * the renderer via `set_app_data` status codes (negative = back to the
   * config screen; 0 = ready). The nested promise chain is intentionally
   * order-dependent — do not reorder the steps.
   */
  startup() {
    this.send("set_app_data", {
      remotes: this.remotes,
      defaults: this.defaults
    });

    // Fire-and-forget: the result arrives via "set_update_required".
    this.checkVersion();

    fs.readFile(this.config_file, "utf8", (err, data) => {
      if (err) {
        this.send("set_app_data", {
          status: {
            code: -1 // Config not found
          },
          config: this.config_data,
          pending_config: this.config_data
        });
        return;
      }

      let disk_config_data = JSON.parse(data);

      // semi-shallow object merge
      Object.keys(disk_config_data).map(key => {
        if (!this.config_data.hasOwnProperty(key)) {
          this.config_data[key] = {};
        }
        this.config_data[key] = Object.assign(
          this.config_data[key],
          disk_config_data[key]
        );
      });

      // here we may want to check if config data is valid, if not also send code -1
      // i.e. check ports are integers and > 1024, check that data dir path exists, etc
      const validated = Object.keys(this.defaults)
        .filter(k => k in this.config_data)
        .map(k => [
          k,
          this.validate_values(this.config_data[k], this.defaults[k])
        ])
        .reduce((map, obj) => {
          map[obj[0]] = obj[1];
          return map;
        }, {});

      // Make sure the daemon data is valid
      this.config_data = {
        ...this.config_data,
        ...validated
      };

      // save config file back to file, so updated options are stored on disk
      fs.writeFile(
        this.config_file,
        JSON.stringify(this.config_data, null, 4),
        "utf8",
        () => {}
      );

      this.send("set_app_data", {
        config: this.config_data,
        pending_config: this.config_data
      });

      // Make the wallet dir
      const { wallet_data_dir, data_dir } = this.config_data.app;
      if (!fs.existsSync(wallet_data_dir)) {
        fs.mkdirpSync(wallet_data_dir);
      }

      // Check to see if data and wallet directories exist
      const dirs_to_check = [
        {
          path: data_dir,
          error: "notification.errors.dataPathNotFound"
        },
        {
          path: wallet_data_dir,
          error: "notification.errors.walletPathNotFound"
        }
      ];

      for (const dir of dirs_to_check) {
        // Check to see if dir exists
        if (!fs.existsSync(dir.path)) {
          this.send("show_notification", {
            type: "negative",
            i18n: dir.error,
            timeout: 2000
          });

          // Go back to config
          this.send("set_app_data", {
            status: {
              code: -1 // Return to config screen
            }
          });
          return;
        }
      }

      const { net_type } = this.config_data.app;

      // Per-network data directories (mainnet uses the root data dir).
      const dirs = {
        mainnet: this.config_data.app.data_dir,
        stagenet: path.join(this.config_data.app.data_dir, "stagenet"),
        testnet: path.join(this.config_data.app.data_dir, "testnet")
      };

      // Make sure we have the directories we need
      const net_dir = dirs[net_type];
      if (!fs.existsSync(net_dir)) {
        fs.mkdirpSync(net_dir);
      }

      const log_dir = path.join(net_dir, "logs");
      if (!fs.existsSync(log_dir)) {
        fs.mkdirpSync(log_dir);
      }
      this.initLogger(log_dir);

      this.daemon = new Daemon(this);
      this.walletd = new WalletRPC(this);

      this.send("set_app_data", {
        status: {
          code: 3 // Starting daemon
        }
      });

      // Make sure the remote node provided is accessible
      const config_daemon = this.config_data.daemons[net_type];
      this.daemon.checkRemote(config_daemon).then(data => {
        if (data.error) {
          // If we can default to local then we do so, otherwise we tell the user to re-set the node
          if (config_daemon.type === "local_remote") {
            this.config_data.daemons[net_type].type = "local";
            this.send("set_app_data", {
              config: this.config_data,
              pending_config: this.config_data
            });
            this.send("show_notification", {
              type: "warning",
              textColor: "black",
              i18n: "notification.warnings.usingLocalNode",
              timeout: 2000
            });
          } else {
            this.send("show_notification", {
              type: "negative",
              i18n: "notification.errors.cannotAccessRemoteNode",
              timeout: 2000
            });
            // Go back to config
            this.send("set_app_data", {
              status: {
                code: -1 // Return to config screen
              }
            });
            return;
          }
        }

        // If we got a net type back then check if ours match
        if (data.net_type && data.net_type !== net_type) {
          this.send("show_notification", {
            type: "negative",
            i18n: "notification.errors.differentNetType",
            timeout: 2000
          });
          // Go back to config
          this.send("set_app_data", {
            status: {
              code: -1 // Return to config screen
            }
          });
          return;
        }

        this.daemon
          .checkVersion()
          .then(version => {
            if (version) {
              this.send("set_app_data", {
                status: {
                  code: 4,
                  message: version
                }
              });
            } else {
              // daemon not found, probably removed by AV, set to remote node
              this.config_data.daemons[net_type].type = "remote";
              this.send("set_app_data", {
                status: {
                  code: 5
                },
                config: this.config_data,
                pending_config: this.config_data
              });
            }
            this.daemon
              .start(this.config_data)
              .then(() => {
                this.send("set_app_data", {
                  status: {
                    code: 6 // Starting wallet
                  }
                });
                this.walletd
                  .start(this.config_data)
                  .then(() => {
                    this.send("set_app_data", {
                      status: {
                        code: 7 // Reading wallet list
                      }
                    });
                    this.walletd.listWallets(true);
                    this.send("set_app_data", {
                      status: {
                        code: 0 // Ready
                      }
                    });
                    // eslint-disable-next-line
                  })
                  .catch(error => {
                    this.daemon.killProcess();
                    this.send("show_notification", {
                      type: "negative",
                      message: error.message,
                      timeout: 3000
                    });
                    this.send("set_app_data", {
                      status: {
                        code: -1 // Return to config screen
                      }
                    });
                  });
                // eslint-disable-next-line
              })
              .catch(error => {
                if (this.config_data.daemons[net_type].type == "remote") {
                  this.send("show_notification", {
                    type: "negative",
                    i18n: "notification.errors.remoteCannotBeReached",
                    timeout: 3000
                  });
                } else {
                  this.send("show_notification", {
                    type: "negative",
                    message: error.message,
                    timeout: 3000
                  });
                }
                this.send("set_app_data", {
                  status: {
                    code: -1 // Return to config screen
                  }
                });
              });
            // eslint-disable-next-line
          })
          .catch(() => {
            this.send("set_app_data", {
              status: {
                code: -1 // Return to config screen
              }
            });
          });
      });
    });
  }
quit() {
return new Promise(resolve => {
let process = [];
if (this.daemon) {
process.push(this.daemon.quit());
}
if (this.walletd) {
process.push(this.walletd.quit());
}
if (this.wss) {
this.wss.close();
}
Promise.all(process).then(() => {
resolve();
});
});
}
// Replace any invalid value with default values
validate_values(values, defaults) {
const isDictionary = v =>
typeof v === "object" &&
v !== null &&
!(v instanceof Array) &&
!(v instanceof Date);
const modified = { ...values };
// Make sure we have valid defaults
if (!isDictionary(defaults)) return modified;
for (const key in modified) {
// Only modify if we have a default
if (!(key in defaults)) continue;
const defaultValue = defaults[key];
const invalidDefault =
defaultValue === null ||
defaultValue === undefined ||
Number.isNaN(defaultValue);
if (invalidDefault) continue;
const value = modified[key];
// If we have a object then recurse through it
if (isDictionary(value)) {
modified[key] = this.validate_values(value, defaultValue);
} else {
// Check if we need to replace the value
const isValidValue = !(
value === undefined ||
value === null ||
value === "" ||
Number.isNaN(value)
);
if (isValidValue) continue;
// Otherwise set the default value
modified[key] = defaultValue;
}
}
return modified;
}
}
|
"""gsi_audit_event.py: These tests validate auditing of events for GSI
__author__ = "Hemant Rajput"
__maintainer = "Hemant Rajput"
__email__ = "Hemant.Rajput@couchbase.com"
__git_user__ = "hrajput89"
__created_on__ = "08/17/20 12:31 pm"
"""
from remote.remote_util import RemoteMachineShellConnection
from security.rbac_base import RbacBase
from security.audittest import auditTest
from .base_gsi import BaseSecondaryIndexingTests
class GSIAuditEvent(BaseSecondaryIndexingTests, auditTest):
    """Validates that GSI access-denied events (HTTP 401/403) appear in the
    cluster audit log with the expected payload."""

    def setUp(self):
        super(GSIAuditEvent, self).setUp()
        self.log.info("============== GSIAuditEvent setup has started ==============")
        # Start from a clean slate, then create the single test bucket.
        self.rest.delete_all_buckets()
        self.bucket_params = self._create_bucket_params(server=self.master, size=self.bucket_size,
                                                        replicas=self.num_replicas, bucket_type=self.bucket_type,
                                                        enable_replica_index=self.enable_replica_index,
                                                        eviction_policy=self.eviction_policy, lww=self.lww)
        self.cluster.create_standard_bucket(name=self.test_bucket, port=11222,
                                            bucket_params=self.bucket_params)
        self.buckets = self.rest.get_buckets()
        # Enable auditing via the cluster REST settings endpoint.
        self.audit_url = "http://%s:%s/settings/audit" % (self.master.ip, self.master.port)
        self.shell = RemoteMachineShellConnection(self.master)
        curl_output = self.shell.execute_command(f"curl -u Administrator:password -X POST "
                                                 f"-d 'auditdEnabled=true' {self.audit_url}")
        if "errors" in str(curl_output):
            self.log.error("Auditing settings were not set correctly")
        # Give auditd time to pick up the new setting before tests run.
        self.sleep(10)
        self.log.info("============== GSIAuditEvent setup has completed ==============")

    def tearDown(self):
        self.log.info("============== GSIAuditEvent tearDown has started ==============")
        super(GSIAuditEvent, self).tearDown()
        self.log.info("============== GSIAuditEvent tearDown has completed ==============")

    def suite_tearDown(self):
        pass

    def suite_setUp(self):
        pass

    def test_audit_of_forbidden_access_denied_event(self):
        # create a cluster admin user
        user = [{'id': 'test', 'password': 'password', 'name': 'test'}]
        RbacBase().create_user_source(user, 'builtin', self.master)
        user_role_list = [{'id': 'test', 'name': 'test', 'roles': 'query_manage_index[*]'}]
        RbacBase().add_user_role(user_role_list, self.rest, 'builtin')
        # Valid credentials but insufficient permission -> expect HTTP 403
        # to be recorded as audit event id 49153.
        shell = RemoteMachineShellConnection(self.master)
        curl_cmd = 'curl -u test:password http://localhost:9102/settings/storageMode'
        try:
            shell.execute_command(curl_cmd)
        except Exception as err:
            self.log.info(err)
        expected_results = {'description': 'The user does not have permission to access the requested resource',
                            'enabled': True, 'id': 49153, 'name': 'HTTP 403: Forbidden', 'sync': False,
                            'real_userid': {'domain': 'local', 'user': 'test'},
                            'method': 'request_handler::isAllowed',
                            'service': 'Index', 'url': '/settings/storageMode',
                            'message': 'Called by RequestHandler::handleIndexStorageModeRequest'}
        # NOTE(review): self.eventID is presumably supplied via test input
        # params / the auditTest base class — confirm.
        self.checkConfig(self.eventID, self.master, expected_results, n1ql_audit=False)

    def test_audit_of_unauthorised_access_denied_event(self):
        # create a cluster admin user
        user = [{'id': 'test', 'password': 'password', 'name': 'test'}]
        RbacBase().create_user_source(user, 'builtin', self.master)
        user_role_list = [{'id': 'test', 'name': 'test', 'roles': 'query_manage_index[*]'}]
        RbacBase().add_user_role(user_role_list, self.rest, 'builtin')
        # Wrong password -> expect HTTP 401 to be recorded as audit
        # event id 49152.
        shell = RemoteMachineShellConnection(self.master)
        curl_cmd = 'curl -u test:wrong_password http://localhost:9102/settings/storageMode'
        try:
            shell.execute_command(curl_cmd)
        except Exception as err:
            self.log.info(err)
        expected_results = {'description': 'Authentication is required to access the requested resource',
                            'enabled': True, 'id': 49152, 'name': 'HTTP 401: Unauthorized', 'sync': False,
                            'real_userid': {'domain': 'internal', 'user': 'unknown'},
                            'method': 'request_handler::doAuth',
                            'message': 'Called by RequestHandler::handleIndexStorageModeRequest',
                            'service': 'Index', 'url': '/settings/storageMode'}
        self.checkConfig(self.eventID, self.master, expected_results, n1ql_audit=False)
|
const request = require('supertest')
const { beforeAction, afterAction } = require('../../helpers/setup')
const { getAccessToken } = require('../../helpers/getAccessToken')
const { User } = require('../../../api/models')
const { Note } = require('../../../api/models')
let api
let token
beforeAll(async () => {
api = await beforeAction()
token = await getAccessToken()
})
afterAll(() => {
afterAction()
})
test('Note | create, update, delete', async () => {
const user = await User.create({
email: 'felix@test4.com',
})
const createMutation = `
mutation {
createNote(
userId: ${user.id},
note: "create note"
) {
id
userId
note
}
}
`
const res = await request(api)
.post('/graphql')
.set('Accept', /json/)
.set({
Authorization: `Bearer ${token}`,
})
.send({ query: createMutation })
.expect(200)
.expect('Content-Type', /json/)
expect(res.body.data.createNote.userId).toBe(user.id)
expect(res.body.data.createNote.note).toBe('create note')
})
test('Note | updateNote', async () => {
const user = await User.create({
email: 'felix@test5.com',
})
const note = await Note.create({
userId: user.id,
note: 'update note',
})
const updateMutation = `
mutation {
updateNote(
id: ${note.id}
userId: ${user.id}
note: "update note update"
) {
userId
note
}
}
`
const res = await request(api)
.post('/graphql')
.set('Accept', /json/)
.set({
Authorization: `Bearer ${token}`,
})
.send({ query: updateMutation })
.expect(200)
.expect('Content-Type', /json/)
expect(res.body.data.updateNote.userId).toBe(user.id)
expect(res.body.data.updateNote.note).toBe('update note update')
})
test('Note | updateNote | note does not exist', async () => {
const updateMutation = `
mutation {
updateNote(
id: 9999
userId: 1
note: "update"
) {
note
}
}
`
const res = await request(api)
.post('/graphql')
.set('Accept', /json/)
.set({
Authorization: `Bearer ${token}`,
})
.send({ query: updateMutation })
.expect(200)
.expect('Content-Type', /json/)
expect(res.body.data.updateNote).toBe(null)
expect(res.body.errors[0].message).toBe('Note with id: 9999 not found!')
})
// Creates a user and a note, deletes the note via GraphQL, and checks the
// deleted note's content is returned.
test('Note | deleteNote', async () => {
  const user = await User.create({
    email: 'felix@test6.com',
  })
  const note = await Note.create({
    userId: user.id,
    note: 'delete note',
  })
  const deleteMutation = `
    mutation {
      deleteNote(
        id: ${note.id}
      ) {
        note
      }
    }
  `
  const res = await request(api)
    .post('/graphql')
    // Fix: 'application/json' replaces the RegExp /json/, which is not a valid header value.
    .set('Accept', 'application/json')
    .set({
      Authorization: `Bearer ${token}`,
    })
    .send({ query: deleteMutation })
    .expect(200)
    .expect('Content-Type', /json/)
  expect(res.body.data.deleteNote.note).toBe('delete note')
})
// Deleting a non-existent note must return a null payload plus a GraphQL error.
test('Note | deleteNote | note does not exist', async () => {
  const deleteMutation = `
    mutation {
      deleteNote(
        id: 9999
      ) {
        note
      }
    }
  `
  const res = await request(api)
    .post('/graphql')
    // Fix: 'application/json' replaces the RegExp /json/, which is not a valid header value.
    .set('Accept', 'application/json')
    .set({
      Authorization: `Bearer ${token}`,
    })
    .send({ query: deleteMutation })
    .expect(200)
    .expect('Content-Type', /json/)
  expect(res.body.data.deleteNote).toBe(null)
  expect(res.body.errors[0].message).toBe('Note with id: 9999 not found!')
})
|
'use strict';
const Q = require('q');
const pm2 = require('pm2');
const dialog = require('dialog');
const yargs = require('yargs');
const Core = require('../core');
const db = require('../lib/db');
const path = require('path');
const configure = require('./configure');
// Absolute path to node_modules, exposed globally for modules resolved at runtime.
global.__nodeModules = path.join(__dirname, '../../node_modules/');
// Startup pipeline: open the DB, load config (CLI args or stored row),
// validate it, then boot the Core; any failure surfaces in an error dialog.
db.startDb()
.then(getConfig)
.then(validate)
.then(initCore)
.catch(showErrorMessage);
// Resolve the bot configuration: CLI flags win over the persisted config row.
// When --token or --name is given, the args are also saved via configure().
function getConfig() {
  const { token, name } = yargs.argv;
  if (token || name) {
    configure.configure(yargs.argv);
    return { token, name };
  }
  // No CLI overrides: read the stored configuration from SQLite.
  return db.getDb().get('SELECT * FROM config');
}
// Validate the loaded configuration.
// Resolves with the config when token and name are present; rejects with a
// human-readable message string otherwise (the string is shown verbatim by
// dialog.err, so it is intentionally NOT wrapped in an Error).
// Rewritten without the Q.defer() deferred anti-pattern.
function validate(config) {
  if (!config) {
    return Promise.reject('Please make config before start');
  }
  if (!config.token) {
    return Promise.reject('Please config token before start');
  }
  if (!config.name) {
    return Promise.reject('Please config name before start');
  }
  return Promise.resolve(config);
}
// Boot the Core with the validated token/name; run() return value is unused.
function initCore(config) {
new Core({ token: config.token, name: config.name }).run();
}
// Show a native error dialog; once the user dismisses it, stop the
// pm2-managed 'hubot' process and disconnect from the pm2 daemon.
function showErrorMessage(err) {
dialog.err(err, 'Error', () => {
pm2.stop('hubot', () => pm2.disconnect());
});
}
|
#!/usr/bin/python
# The contents of this file are in the public domain. See LICENSE_FOR_EXAMPLE_PROGRAMS.txt
#
#
# This is an example illustrating the use of a binary SVM classifier tool from
# the dlib C++ Library. In this example, we will create a simple test dataset
# and show how to learn a classifier from it.
#
#
# COMPILING/INSTALLING THE DLIB PYTHON INTERFACE
# You can install dlib using the command:
# pip install dlib
#
# Alternatively, if you want to compile dlib yourself then go into the dlib
# root folder and run:
# python setup.py install
# or
# python setup.py install --yes USE_AVX_INSTRUCTIONS
# if you have a CPU that supports AVX instructions, since this makes some
# things run faster.
#
# Compiling dlib should work on any operating system so long as you have
# CMake installed. On Ubuntu, this can be done easily by running the
# command:
# sudo apt-get install cmake
#
import dlib
# cPickle only exists on Python 2; fall back to the stdlib pickle on Python 3.
try:
    import cPickle as pickle
except ImportError:
    import pickle

# Containers for training samples (x) and their +1/-1 labels (y).
x = dlib.vectors()
y = dlib.array()
# Make a training dataset.  Here we have just two training examples.  Normally
# you would use a much larger training dataset, but for the purpose of example
# this is plenty.  For binary classification, the y labels should all be either +1 or -1.
x.append(dlib.vector([1, 2, 3, -1, -2, -3]))
y.append(+1)
x.append(dlib.vector([-1, -2, -3, 1, 2, 3]))
y.append(-1)
# Now make a training object.  This object is responsible for turning a
# training dataset into a prediction model.  This one here is a SVM trainer
# that uses a linear kernel.  If you wanted to use a RBF kernel or histogram
# intersection kernel you could change it to one of these lines:
#  svm = dlib.svm_c_trainer_histogram_intersection()
#  svm = dlib.svm_c_trainer_radial_basis()
svm = dlib.svm_c_trainer_linear()
svm.be_verbose()
# C is the usual SVM regularization parameter: larger C fits the training data harder.
svm.set_c(10)
# Now train the model.  The return value is the trained model capable of making predictions.
classifier = svm.train(x, y)
# Now run the model on our data and look at the results.
print("prediction for first sample: {}".format(classifier(x[0])))
print("prediction for second sample: {}".format(classifier(x[1])))
# classifier models can also be pickled in the same way as any other python object.
with open('saved_model.pickle', 'wb') as handle:
    # Protocol 2 keeps the file loadable from Python 2 as well.
    pickle.dump(classifier, handle, 2)
|
const gamesModel = require('../models/gamesModel')
// Persist the user's dropdown game selection, then redirect to the
// "my games" page on success or answer 400 on a save failure.
function postGamesDD (req, res) {
  const gameData = {
    userName: req.body.userName,
    titleGame1: req.body.dd_game1
  }
  const newDropDownGames = new gamesModel(gameData)
  newDropDownGames.save((err) => {
    if (err) {
      console.log('Could not save games')
      res.status(400).send('Games were not saved')
    } else {
      // Fixed log-message typo: "succesfully" -> "successfully".
      console.log('Dropdown games saved successfully')
      // NOTE(review): relative redirect target — confirm it shouldn't be '/pages/mygames'.
      res.redirect('pages/mygames')
    }
  })
}
module.exports = postGamesDD
|
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
// Mongoose schema for a comment attached to an article.
// Exported as a bare schema (no model registration) so consumers can
// register it under their own connection/model name.
var CommentSchema = new Schema({
articleId: String, // NOTE(review): plain String, not an ObjectId ref — confirm intended.
author: String,
text: String
});
module.exports = CommentSchema;
|
// Log a SysEx message (hex string) via the host's println, pretty-printed
// as upper-case byte pairs.
function printSysex(data)
{
println("Sysex: " + prettyHex(data));
}
/** Format one byte (0-255) as two lowercase hex digits followed by a space. */
function uint8ToHex(x)
{
    var hi = ((x >> 4) & 0xF).toString(16);
    var lo = (x & 0xF).toString(16);
    return hi + lo + " ";
}
/** Format a 7-bit MIDI data byte (0-127) as two lowercase hex digits plus a space.
 *  The high part is masked to 3 bits (0x7) since bit 7 is never set. */
function uint7ToHex(x)
{
    var hi = ((x >> 4) & 0x7).toString(16);
    var lo = (x & 0xF).toString(16);
    return hi + lo + " ";
}
/** Get the integer value of a byte in a hex-string (index is bytes, not hex characters) **/
String.prototype.hexByteAt = function (byteIndex)
{
    // Normalize first (strip spaces, lower-case), then read two hex digits.
    // slice() replaces the deprecated substr().
    var hex = this.cleanupHex();
    var start = byteIndex * 2;
    return parseInt(hex.slice(start, start + 2), 16);
}
/** Encode a single ASCII character as two lowercase hex digits plus a space. */
function asciiCharToHex(c)
{
    var code = c.charCodeAt(0);
    return ((code >> 4) & 0xF).toString(16) + (code & 0xF).toString(16) + " ";
}
/**
 * Return the contents of the string encoded as hex, intended for sending SysEx.
 * @param {int} len Length in bytes of the resulting hex-encoded string
 * @return {string}
 */
String.prototype.toHex = function(len)
{
    // forceLength() (defined elsewhere in this file) pads/truncates to len chars.
    var text = this.forceLength(len);
    var result = "";
    // Fix: the original used an undeclared loop variable, leaking a global `i`.
    for (let i = 0; i < len; i++)
    {
        result += asciiCharToHex(text.charAt(i));
    }
    return result;
}
/** Clean-up hex code to a lower-case variant with no whitespace. **/
String.prototype.cleanupHex = function()
{
    // Fix: the original called this.replace(" ", "", "g"); the third "flags"
    // argument is non-standard and ignored by all modern engines, so only the
    // FIRST space was removed. A global regex removes them all.
    return this.replace(/ /g, "").toLowerCase();
}
/** Check if the hex string matches a pattern
 * (which can contain either hex characters or the ? wildcard).
 * @return boolean
 */
String.prototype.matchesHexPattern = function(pattern)
{
    // Remove spaces and convert to lower case so comparison is format-insensitive.
    var hex = this.cleanupHex();
    pattern = pattern.cleanupHex();
    if (hex.length != pattern.length)
    {
        return false;
    }
    // Fix: `let i` — the original used an undeclared (implicitly global) loop variable.
    for (let i = 0; i < hex.length; i++)
    {
        if (pattern.charAt(i) != "?" && pattern.charAt(i) != hex.charAt(i))
        {
            return false;
        }
    }
    return true;
}
/**
 * Clean-up hex for printing (groups bytes as pairs, upper case).
 * @return {string}
 */
function prettyHex(hex)
{
    // Fix: strip ALL spaces — the original's replace(" ", "", "g") relied on a
    // non-standard third "flags" argument and only removed the first space.
    hex = hex.replace(/ /g, "");
    // Collect two-character byte pairs, then join with single spaces.
    var pairs = [];
    for (let i = 0; i < hex.length; i += 2)
    {
        pairs.push(hex.slice(i, i + 2));
    }
    return pairs.join(" ").toUpperCase();
}
|
import cx from 'classnames';
import React from 'react';
import PropTypes from 'prop-types';
import { translate } from 'react-i18next';
// Placeholder panel rendered when the user has no playlists.
// Props: t — i18n translate function (injected by translate()); className — optional extra CSS classes.
const EmptyPanel = ({ t, className }) => (
<div className={cx('PlaylistPanel', 'PlaylistPanel--empty', className)}>
{t('playlists.noPlaylists')}
</div>
);
EmptyPanel.propTypes = {
t: PropTypes.func.isRequired,
className: PropTypes.string
};
export default translate()(EmptyPanel);
|
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .core import SessionManagerActor, SessionActor
from .custom_log import CustomLogMetaActor
from .service import start
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from setuptools import setup, find_packages, Command
from setuptools.command.test import test as TestCommand
here = os.path.abspath(os.path.dirname(__file__))
def read(filename):
    """Return the full text of *filename*, resolved relative to this setup.py."""
    full_path = os.path.join(here, filename)
    with open(full_path, 'r') as fh:
        return fh.read()
def test_suite():
    """Define the tests that should be run

    Providing a custom test suite so that url.py is not imported,
    as importing it makes django-oidc try to connect to Keycloak

    Returns:
        TestSuite
    """
    # Import kept local so plain `python setup.py` runs don't pay for unittest.
    import unittest
    loader = unittest.TestLoader()
    # Discover every tests/test_*.py module at call time.
    suite = loader.discover('tests', pattern='test_*.py')
    return suite
class DjangoMixin(object):
"""Mixin that enables calling Django commands"""
def django_configure(self):
"""Provide Django a minimal configuration needed for running tests
This method doesn't set any bossoidc settings
"""
import django
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin.apps.SimpleAdminConfig',
'django.contrib.staticfiles',
'bossoidc',
'djangooidc',
),
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
),
ROOT_URLCONF = 'tests.urls',
ALLOWED_HOSTS = ['testserver'],
AUTHENTICATION_BACKENDS = [
'bossoidc.backend.OpenIdConnectBackend',
],
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
#'rest_framework.authentication.SessionAuthentication',
'oidc_auth.authentication.BearerTokenAuthentication',
),
},
)
django.setup()
def django_migrate(self):
"""Call the Django manage.py migrate command to populate the test database"""
from django.core.management import call_command
call_command('migrate', interactive=False)
def django_makemigrations(self):
"""Call the Django mange.py makemigrations bossoidc command to create
new migrations
"""
from django.core.management import call_command
# Called as interactive because making migrations may require the developer
# to make decisions (like a default value for a non-null field)
call_command('makemigrations', 'bossoidc')
# Inspired by the example at https://pytest.org/latest/goodpractises.html
class DjangoTestCommand(TestCommand, DjangoMixin):
def run_tests(self):
# Move into the current directory, so results are saved where we want
curdir = os.path.dirname(os.path.realpath(__file__))
os.chdir(curdir)
# Add current directory to path so imports work
sys.path.insert(0, curdir)
# Start coverage tracing
import coverage
cov = coverage.Coverage(source=["bossoidc"],
omit=["bossoidc/admin.py"])
# Since admin.py is used by the Django admin pages
# there are no tests written for it
cov.start()
# Configure Django to support the tests
# Called after starting coverage to track migration coverage
self.django_configure()
self.django_migrate()
# Run unit tests
super(DjangoTestCommand, self).run_tests()
# Stop coverage tracing
cov.stop()
#cov.save()
# Display the coverage report
cov.report()
class MakeMigrationsCommand(Command, DjangoMixin):
    """setup.py command wrapping Django's makemigrations for the bossoidc app."""
    description = 'Run Django makemigrations'
    user_options = [
        # The format is (long option, short option, description).
    ]
    def initialize_options(self):
        """Abstract method that is required to be overwritten"""
    def finalize_options(self):
        """Abstract method that is required to be overwritten"""
    def run(self):
        # Configure a minimal in-memory Django project, then emit migrations.
        self.django_configure()
        self.django_makemigrations()
if __name__ == '__main__':
setup(
name='boss-oidc',
version='1.2.2',
packages=find_packages(),
url='https://github.com/jhuapl-boss/boss-oidc',
license="Apache Software License",
author='Derek Pryor',
author_email='Derek.Pryor@jhuapl.edu',
description='Django Authentication OpenID Connect plugin for the Boss SSO',
long_description=read('README.md'),
install_requires = [
'django>2.0',
'djangorestframework',
'oic==0.13.0', # Pinned due to issues with the library
'pyjwkest>=1.0.0',
#'django-oidc@http://github.com/jhuapl-boss/django-oidc/archive/master.zip',
#'drf-oidc-auth@http://github.com/jhuapl-boss/drf-oidc-auth/archive/master.zip'
],
# TODO pin versions of django-oidc / drf-oidc-auth
# Depdency Links are deprecated but full support for PEP 508 isn't expected
# until version 10. Commented links in install_requires are the PEP 508 format
dependency_links = [
'git+http://github.com/WellCastro2/django-oidc.git#egg=django-oidc',
'git+http://github.com/WellCastro2/drf-oidc-auth.git#egg=drf-oidc-auth',
],
tests_require = [
'coverage',
'requests_mock',
'pyjwt',
],
test_suite = 'setup.test_suite',
classifiers=[
'Environment :: Web Environment',
'Development Status :: 5 - Production',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
cmdclass = {
'test': DjangoTestCommand,
'makemigrations': MakeMigrationsCommand,
},
)
|
import sys, csv, requests, multiprocessing
import sfr_ftth
#csv file format: x,y,imb_id,num_voie,cp_no_voie,type_voie,nom_voie,batiment,code_poste,nom_com,catg_loc_imb,imb_etat,pm_ref,pm_etat,code_l331,geom_mod,type_imb
def MakeAddressFromCsvRow(row):
    """Build a normalized postal address string from one IPE CSV row."""
    parts = (row['num_voie'], row['type_voie'], row['nom_voie'],
             row['code_poste'], row['nom_com'])
    address = '{} {} {}, {} {}'.format(*parts)
    return sfr_ftth.NormalizePostalAddress(address)
def GetAddressesFromCsvFile(csvFilePath):
    # Collect the deduplicated, sorted list of postal addresses from the CSV.
    addresses = set()
    with open(csvFilePath, encoding='utf-8') as csvFile:
        reader = csv.DictReader(csvFile)
        # NOTE(review): DictReader already consumes the header row, so this
        # next() discards the FIRST DATA ROW — confirm the input file really
        # has a second header/preamble line.
        next(reader)
        for row in reader:
            address = MakeAddressFromCsvRow(row)
            addresses.add(address)
    return sorted(addresses)
def GetNonDeployedAddressesFromCsvFile(csvFilePath):
    # Map each address to "was any of its rows deployed?", then return the
    # sorted addresses that were never deployed.
    addressToDeployed = {}
    with open(csvFilePath, encoding='utf-8') as csvFile:
        reader = csv.DictReader(csvFile)
        # NOTE(review): as in GetAddressesFromCsvFile, this next() drops the
        # first data row after the DictReader header — verify intent.
        next(reader)
        for row in reader:
            address = MakeAddressFromCsvRow(row)
            deployed = row['imb_etat']=='deploye'
            previousDeployed = addressToDeployed.get(address)
            #print('{} {} {}'.format(address, previousDeployed, deployed))
            if previousDeployed is None:
                addressToDeployed[address] = deployed
            else:
                # OR-fold: one deployed row marks the whole address deployed.
                addressToDeployed[address] = previousDeployed or deployed
    addresses = [a for a,d in addressToDeployed.items() if not d]
    addresses.sort()
    return addresses
def PrintSfrEligibility(csvFilePath, debug):
    # Query SFR FTTH eligibility for every address in the CSV (50 worker
    # processes) and print a per-address line plus summary percentages.
    addresses = GetAddressesFromCsvFile(csvFilePath)
    #addresses = GetNonDeployedAddressesFromCsvFile(csvFilePath)
    session = requests.Session()
    # NOTE(review): the Session is pickled into each of the 50 worker processes,
    # so its connection pool is not actually shared — confirm this is intended.
    with multiprocessing.Pool(processes=50) as pool:
        # r is assumed to be (code, eligible, workInProgress) — TODO confirm
        # against sfr_ftth.GetEligibilityByPostalAddress2.
        results = pool.imap(sfr_ftth.GetEligibilityByPostalAddress2, [(a, session, debug) for a in addresses])
        numEligible = 0
        num666 = 0
        numWip = 0
        for r, a in zip(results, addresses):
            print('{}, code {}, eligible {}, workInProgress {}'.format(a, r[0], r[1], r[2]))
            if r[1] == True: numEligible += 1
            if r[0] == 666: num666 += 1
            if r[2] == True: numWip += 1
    numAddresses = len(addresses)
    print("Number of eligible addresses {} / {} ({:.2f}%)".format(numEligible, numAddresses, numEligible/numAddresses*100))
    print("Number of workInProgress==True {} / {} ({:.2f}%)".format(numWip, numAddresses, numWip/numAddresses*100))
    print("Number of code 666 {} / {} ({:.2f}%)".format(num666, numAddresses, num666/numAddresses*100))
if __name__ == "__main__":
    # CLI entry point: <script> <ipe_csv_file>
    debug = False
    if len(sys.argv) >= 2:
        csvFilePath = sys.argv[1]
        PrintSfrEligibility(csvFilePath, debug)
        #CompareArcepSfrEligibility(csvFilePath, debug)
    else:
        print('Usage: {} <csv_file_path>'.format(sys.argv[0]))
        sys.exit(1)
|
// On load: show the current time in #showTime, highlight the first list item,
// and let the user rename themselves by double-clicking #nameOfUser.
$(document).ready(function(){
  var date = new Date();
  var hour = date.getHours().toString();
  // Fix: zero-pad minutes so 9:05 doesn't render as "9:5".
  var min = date.getMinutes().toString().padStart(2, "0");
  var str = hour + ":" + min;
  $("#showTime").append(str);
  // $('p span').css('color' ,'red');
  $('ul#list li:first').css('color' ,'red');
  $("#nameOfUser").dblclick(function(){
    var newName = prompt("Please enter your name");
    $("#nameOfUser").empty().append(newName);
  });
});
// Visually mark the pressed menu button by painting its background black.
function highLightButton(objectThatWasPressed) {
  const buttonStyle = objectThatWasPressed.style;
  buttonStyle.backgroundColor = "black";
}
// Restore the menu button's resting translucent background.
function NOhighLightButton(objectThatWasPressed) {
  const buttonStyle = objectThatWasPressed.style;
  buttonStyle.backgroundColor = "rgba(0,0,0,0.2)";
}
// Prompt for a tile name and target site, then add a link tile to #menu.
$(".addTile").click(function(){
  var nameOfTile = prompt("Enter name of tile:");
  var linkToSite = prompt("Enter site address:");
  var fullLinkPath = 'http://' + linkToSite;
  console.log(fullLinkPath);
  var aLink = $('<a />').attr('href', fullLinkPath)
    .attr('target', '_blank')
    .text(nameOfTile);
  // Fixes from review:
  // - .append(aLink) instead of .text(aLink): .text() stringifies the jQuery object.
  // - 'onmouseover' instead of the misspelled 'onemouseover' attribute.
  // - The styled span (previously dead code) is what gets appended to the menu.
  // NOTE(review): every tile gets id 'menuButton' — duplicate IDs; consider a class.
  var closeSpan = $('<span />').append(aLink)
    .attr('id', 'menuButton')
    .attr('onmouseover', 'highLightButton(this)')
    .attr('onmouseleave', 'NOhighLightButton(this)');
  console.log(closeSpan);
  $('#menu').append(closeSpan);
});
|
import {detectReferer} from '../src/refererDetection';
import {ajax} from '../src/ajax';
import {registerBidder} from '../src/adapters/bidderFactory';
// Prebid.js bid adapter for orbidder (Otto Group).
export const spec = {
code: 'orbidder',
// Cache of bid params keyed by bidId; read back in onBidWon for win reporting.
bidParams: {},
// Bid endpoint host; overridable for testing via the 'ov_orbidder_host'
// localStorage key. Access is wrapped in try/catch because localStorage
// can throw (e.g. blocked by privacy settings); the default then stands.
orbidderHost: (() => {
let ret = 'https://orbidder.otto.de';
try {
ret = localStorage.getItem('ov_orbidder_host') || ret;
} catch (e) {
}
return ret;
})(),
// A bid is valid only with sizes, bidId, string accountId/placementId and,
// when present, a numeric bidfloor and an object keyValues.
isBidRequestValid(bid) {
return !!(bid.sizes && bid.bidId && bid.params &&
(bid.params.accountId && (typeof bid.params.accountId === 'string')) &&
(bid.params.placementId && (typeof bid.params.placementId === 'string')) &&
((typeof bid.params.bidfloor === 'undefined') || (typeof bid.params.bidfloor === 'number')) &&
((typeof bid.params.keyValues === 'undefined') || (typeof bid.params.keyValues === 'object')));
},
// Build one POST /bid request per valid bid. Attaches the referring page URL
// and, when present, GDPR consent (consentRequired defaults to true when
// gdprApplies is not a boolean).
buildRequests(validBidRequests, bidderRequest) {
return validBidRequests.map((bidRequest) => {
let referer = '';
if (bidderRequest && bidderRequest.refererInfo) {
referer = bidderRequest.refererInfo.referer || '';
}
const ret = {
url: `${spec.orbidderHost}/bid`,
method: 'POST',
data: {
pageUrl: referer,
bidId: bidRequest.bidId,
auctionId: bidRequest.auctionId,
transactionId: bidRequest.transactionId,
adUnitCode: bidRequest.adUnitCode,
sizes: bidRequest.sizes,
params: bidRequest.params
}
};
// Remember the params so onBidWon can echo them to the win endpoint.
spec.bidParams[bidRequest.bidId] = bidRequest.params;
if (bidRequest && bidRequest.gdprConsent) {
ret.data.gdprConsent = {
consentString: bidRequest.gdprConsent.consentString,
consentRequired: (typeof bidRequest.gdprConsent.gdprApplies === 'boolean')
? bidRequest.gdprConsent.gdprApplies
: true
};
}
return ret;
});
},
// Map server bids to Prebid bid responses. A bid missing any required key is
// skipped entirely (the `return []` only exits the forEach callback for that
// bid; its return value is ignored).
interpretResponse(serverResponse) {
const bidResponses = [];
serverResponse = serverResponse.body;
if (serverResponse && (serverResponse.length > 0)) {
serverResponse.forEach((bid) => {
const bidResponse = {};
for (const requiredKey of ['requestId', 'cpm', 'width', 'height', 'ad', 'ttl', 'creativeId', 'netRevenue', 'currency']) {
if (!bid.hasOwnProperty(requiredKey)) {
return [];
}
bidResponse[requiredKey] = bid[requiredKey];
}
bidResponses.push(bidResponse);
});
}
return bidResponses;
},
// Report a won bid to the /win endpoint, enriched with the current page URL
// and the originally submitted bid params.
onBidWon(winObj) {
const getRefererInfo = detectReferer(window);
winObj.pageUrl = getRefererInfo().referer;
if (spec.bidParams[winObj.adId]) {
winObj.params = spec.bidParams[winObj.adId];
}
spec.ajaxCall(`${spec.orbidderHost}/win`, JSON.stringify(winObj));
},
// Thin wrapper around Prebid's ajax helper (fire-and-forget POST).
ajaxCall(endpoint, data) {
ajax(endpoint, null, data);
}
};
registerBidder(spec);
|
/* -*- mode: javascript; tab-width: 8; indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
const WNDH_OPTIONS_EXPANDED = 486;
const DLG_HEIGHT_ADJ_WINDOWS = 48;
const DLG_HEIGHT_ADJ_LOCALE_ES = 20;
const DLG_HEIGHT_ADJ_LOCALE_DE = 10;
let gOS;
let gClippingsDB = null;
let gClippings = null;
let gParentFolderID = 0;
let gSrcURL = "";
let gCreateInFldrMenu;
let gFolderPickerPopup;
let gNewFolderDlg, gPreviewDlg;
let gPrefs;
// Page initialization
$(async () => {
gClippings = messenger.extension.getBackgroundPage();
if (gClippings) {
gClippingsDB = gClippings.getClippingsDB();
}
else {
// gClippingsDB is null if Private Browsing mode is turned on.
console.error("Clippings/mx::new.js: Error initializing New Clipping dialog - unable to locate parent browser window.");
showInitError();
return;
}
try {
await gClippings.verifyDB();
}
catch (e) {
showInitError();
return;
};
let platform = await messenger.runtime.getPlatformInfo();
document.body.dataset.os = gOS = platform.os;
gPrefs = await aePrefs.getAllPrefs();
document.body.dataset.laf = gPrefs.enhancedLaF;
$("#btn-expand-options").click(async (aEvent) => {
let height = WNDH_OPTIONS_EXPANDED;
if (gOS == "win") {
height += DLG_HEIGHT_ADJ_WINDOWS;
}
let lang = messenger.i18n.getUILanguage();
if (lang == "es-ES") {
height += DLG_HEIGHT_ADJ_LOCALE_ES;
}
else if (lang == "de" && gOS == "mac") {
height += DLG_HEIGHT_ADJ_LOCALE_DE;
}
await messenger.windows.update(messenger.windows.WINDOW_ID_CURRENT, { height });
$("#clipping-options").show();
$("#new-clipping-fldr-tree-popup").addClass("new-clipping-fldr-tree-popup-fixpos");
});
$("#clipping-text").attr("placeholder", messenger.i18n.getMessage("clipMgrContentHint"));
messenger.runtime.sendMessage({
msgID: "init-new-clipping-dlg"
}).then(aResp => {
if (! aResp) {
console.warn("Clippings/mx::new.js: No response was received from the background script!");
return;
}
let clippingName = $("#clipping-name")[0];
clippingName.value = aResp.name;
clippingName.focus();
$("#clipping-text").val(aResp.content).attr("spellcheck", aResp.checkSpelling)
.focus(aEvent => {
aEvent.target.select();
});
gSrcURL = aResp.url || "";
});
$("#clipping-name").focus(aEvent => {
aEvent.target.select();
});
$("#clipping-name").blur(aEvent => {
let name = aEvent.target.value;
if (! name) {
$("#clipping-name").val(messenger.i18n.getMessage("untitledClipping"));
}
});
initDialogs();
initFolderPicker();
initLabelPicker();
initShortcutKeyMenu();
let newFolderBtn = $("#new-folder-btn");
newFolderBtn.attr("title", messenger.i18n.getMessage("btnNewFolder"));
newFolderBtn.click(aEvent => { gNewFolderDlg.showModal() });
$("#show-preview").click(aEvent => { gPreviewDlg.showModal() });
$("#btn-accept").click(aEvent => { accept(aEvent) });
$("#btn-cancel").click(aEvent => { cancel(aEvent) });
window.focus();
// Fix for Fx57 bug where bundled page loaded using
// browser.windows.create won't show contents unless resized.
// See <https://bugzilla.mozilla.org/show_bug.cgi?id=1402110>
let wnd = await messenger.windows.getCurrent();
messenger.windows.update(wnd.id, {
width: wnd.width + 1,
focused: true,
});
});
// Global keyboard handling: ENTER accepts (any open sub-dialog first, then the
// main dialog); ESC cancels likewise. ENTER inside a textarea is left alone so
// it inserts a newline instead of submitting.
$(window).keydown(aEvent => {
if (aEvent.key == "Enter") {
if (aEvent.target.tagName == "TEXTAREA") {
return;
}
if (aeDialog.isOpen()) {
aeDialog.acceptDlgs();
return;
}
accept(aEvent);
}
else if (aEvent.key == "Escape") {
if (aeDialog.isOpen()) {
aeDialog.cancelDlgs();
return;
}
cancel(aEvent);
}
});
// Suppress the native context menu everywhere except inside text fields,
// where the default cut/copy/paste menu stays useful.
$(window).on("contextmenu", aEvent => {
if (aEvent.target.tagName != "INPUT" && aEvent.target.tagName != "TEXTAREA") {
aEvent.preventDefault();
}
});
// Display a modal error box when dialog initialization fails; once the user
// acknowledges it, close the message box and the whole dialog window
// (closeDlg() is defined elsewhere in this file).
function showInitError()
{
let errorMsgBox = new aeDialog("#create-clipping-error-msgbox");
errorMsgBox.onInit = function ()
{
let errMsgElt = $("#create-clipping-error-msgbox > .dlg-content > .msgbox-error-msg");
errMsgElt.text(messenger.i18n.getMessage("initError"));
};
errorMsgBox.onAccept = function ()
{
this.close();
closeDlg();
};
errorMsgBox.showModal();
}
function initDialogs()
{
$(".msgbox-error-icon").attr("os", gOS);
gNewFolderDlg = new aeDialog("#new-folder-dlg");
gNewFolderDlg.setProps({
fldrTree: null,
selectedFldrNode: null,
});
gNewFolderDlg.resetTree = function ()
{
let fldrTree = this.fldrTree.getTree();
fldrTree.clear();
this.fldrTree = null;
this.selectedFldrNode = null;
// Remove and recreate the Fancytree <div> element.
$("#new-folder-dlg-fldr-tree").children().remove();
let parentElt = $("#new-folder-dlg-fldr-tree").parent();
parentElt.children("#new-folder-dlg-fldr-tree").remove();
$('<div id="new-folder-dlg-fldr-tree" class="folder-tree"></div>').appendTo("#new-folder-dlg-fldr-tree-popup");
};
gNewFolderDlg.onFirstInit = function ()
{
let fldrPickerMnuBtn = $("#new-folder-dlg-fldr-picker-mnubtn");
let fldrPickerPopup = $("#new-folder-dlg-fldr-tree-popup");
fldrPickerMnuBtn.click(aEvent => {
if (fldrPickerPopup.css("visibility") == "visible") {
fldrPickerPopup.css({ visibility: "hidden" });
$("#new-folder-dlg-fldr-tree-popup-bkgrd-ovl").hide();
}
else {
fldrPickerPopup.css({ visibility: "visible" });
$("#new-folder-dlg-fldr-tree-popup-bkgrd-ovl").show();
}
});
$("#new-fldr-name").on("blur", aEvent => {
if (! aEvent.target.value) {
aEvent.target.value = messenger.i18n.getMessage("newFolder");
}
});
};
gNewFolderDlg.onInit = function ()
{
let parentDlgFldrPickerMnuBtn = $("#new-clipping-fldr-picker-menubtn");
let fldrPickerMnuBtn = $("#new-folder-dlg-fldr-picker-mnubtn");
let fldrPickerPopup = $("#new-folder-dlg-fldr-tree-popup");
let selectedFldrID = parentDlgFldrPickerMnuBtn.val();
let selectedFldrName = parentDlgFldrPickerMnuBtn.text();
let rootFldrID = aeConst.ROOT_FOLDER_ID;
let rootFldrName = messenger.i18n.getMessage("rootFldrName");
let rootFldrCls = aeFolderPicker.ROOT_FOLDER_CLS;
if (gPrefs.syncClippings) {
if (gPrefs.newClippingSyncFldrsOnly) {
rootFldrID = gPrefs.syncFolderID;
rootFldrName = messenger.i18n.getMessage("syncFldrName");
rootFldrCls = aeFolderPicker.SYNCED_ROOT_FOLDER_CLS;
}
else if (gPrefs.cxtMenuSyncItemsOnly) {
$("#new-folder-dlg-fldr-tree").addClass("show-sync-items-only");
}
}
this.fldrTree = new aeFolderPicker(
"#new-folder-dlg-fldr-tree",
gClippingsDB,
rootFldrID,
rootFldrName,
rootFldrCls,
selectedFldrID
);
this.fldrTree.onSelectFolder = aFolderData => {
this.selectedFldrNode = aFolderData.node;
let fldrID = aFolderData.node.key;
fldrPickerMnuBtn.val(fldrID).text(aFolderData.node.title);
if (fldrID == gPrefs.syncFolderID) {
fldrPickerMnuBtn.attr("syncfldr", "true");
}
else {
fldrPickerMnuBtn.removeAttr("syncfldr");
}
fldrPickerPopup.css({ visibility: "hidden" });
$("#new-folder-dlg-fldr-tree-popup-bkgrd-ovl").hide();
};
fldrPickerMnuBtn.val(selectedFldrID).text(selectedFldrName);
if (selectedFldrID == gPrefs.syncFolderID) {
fldrPickerMnuBtn.attr("syncfldr", "true");
}
else {
fldrPickerMnuBtn.removeAttr("syncfldr");
}
$("#new-fldr-name").val(messenger.i18n.getMessage("newFolder"));
};
gNewFolderDlg.onShow = function ()
{
$("#new-fldr-name").select().focus();
};
gNewFolderDlg.onAccept = function (aEvent)
{
let newFldrDlgTree = this.fldrTree.getTree();
let parentFldrID = aeConst.ROOT_FOLDER_ID;
if (this.selectedFldrNode) {
parentFldrID = Number(this.selectedFldrNode.key);
}
else {
// User didn't choose a different parent folder.
parentFldrID = Number($("#new-folder-dlg-fldr-picker-mnubtn").val());
}
log("Clippings/mx::new.js: gNewFolderDlg.onAccept(): parentFldrID = " + parentFldrID);
let numItemsInParent = 0; // For calculating display order of new folder.
let newFolder = {
name: $("#new-fldr-name").val(),
parentFolderID: parentFldrID,
displayOrder: 0,
};
gClippingsDB.transaction("rw", gClippingsDB.clippings, gClippingsDB.folders, () => {
gClippingsDB.folders.where("parentFolderID").equals(parentFldrID).count().then(aNumFldrs => {
numItemsInParent += aNumFldrs;
return gClippingsDB.clippings.where("parentFolderID").equals(parentFldrID).count();
}).then(aNumClippings => {
numItemsInParent += aNumClippings;
newFolder.displayOrder = numItemsInParent;
return gClippingsDB.folders.add(newFolder);
}).then(aFldrID => {
let newFldrName = $("#new-fldr-name").val();
// Update the folder tree in the main dialog.
let newFldrNodeData = {
key: aFldrID,
title: newFldrName,
folder: true,
children: []
};
let mainFldrTree = gFolderPickerPopup.getTree();
let parentNode;
if (parentFldrID == aeConst.ROOT_FOLDER_ID) {
parentNode = mainFldrTree.rootNode.getFirstChild();
}
else {
parentNode = mainFldrTree.getNodeByKey(Number(parentFldrID).toString());
}
let newFldrNode = parentNode.addNode(newFldrNodeData);
newFldrNode.setActive();
$("#new-clipping-fldr-picker-menubtn").text(newFldrName).val(aFldrID);
gParentFolderID = aFldrID;
this.resetTree();
let clipgsLstrs = gClippings.getClippingsListeners();
clipgsLstrs.forEach(aListener => {
aListener.newFolderCreated(aFldrID, newFolder, aeConst.ORIGIN_HOSTAPP);
});
return unsetClippingsUnchangedFlag();
}).then(() => {
this.close();
});
}).catch(aErr => {
window.alert(aErr);
});
};
gPreviewDlg = new aeDialog("#preview-dlg");
gPreviewDlg.onShow = function ()
{
let content = $("#clipping-text").val();
if ($("#create-as-unquoted")[0].checked) {
content = formatUnquoted(content);
}
if ($("#remove-extra-linebreaks")[0].checked) {
content = formatRemoveLineBreaks(content);
}
$("#clipping-preview").val(content);
};
gPreviewDlg.onAccept = function (aEvent)
{
$("#clipping-preview").val("");
this.close();
};
}
function initFolderPicker()
{
function selectSyncedClippingsFldr()
{
$("#new-clipping-fldr-picker-menubtn").val(gPrefs.syncFolderID)
.text(messenger.i18n.getMessage("syncFldrName"))
.attr("syncfldr", "true");
}
// Initialize the transparent background that user can click on to dismiss an
// open folder picker popup.
$(".popup-bkgrd").click(aEvent => {
$(".folder-tree-popup").css({ visibility: "hidden" });
$(".popup-bkgrd").hide();
});
// Initialize the folder picker in the main New Clipping dialog.
$("#new-clipping-fldr-picker-menubtn").click(aEvent => {
let popup = $("#new-clipping-fldr-tree-popup");
if (popup.css("visibility") == "hidden") {
popup.css({ visibility: "visible" });
$(".popup-bkgrd").show();
}
else {
popup.css({ visibility: "hidden" });
$(".popup-bkgrd").hide();
}
});
// Set the width of the folder picker drop-down to match the width of the menu
// button that opens it.
let menuBtnStyle = window.getComputedStyle($("#new-clipping-fldr-picker-menubtn")[0]);
let menuBtnWidth = parseInt(menuBtnStyle.width);
// Need to add 1px to the popup width to compensate for having to add 1 pixel
// to the width of the New Clipping popup window.
$("#new-clipping-fldr-tree-popup").css({ width: `${menuBtnWidth + 1}px` });
let rootFldrID = aeConst.ROOT_FOLDER_ID;
let rootFldrName = messenger.i18n.getMessage("rootFldrName");
let rootFldrCls = aeFolderPicker.ROOT_FOLDER_CLS;
let selectedFldrID = aeConst.ROOT_FOLDER_ID;
if (gPrefs.syncClippings) {
if (gPrefs.newClippingSyncFldrsOnly) {
selectSyncedClippingsFldr();
rootFldrID = gPrefs.syncFolderID;
rootFldrName = messenger.i18n.getMessage("syncFldrName");
rootFldrCls = aeFolderPicker.SYNCED_ROOT_FOLDER_CLS;
}
else if (gPrefs.cxtMenuSyncItemsOnly) {
selectSyncedClippingsFldr();
$("#new-clipping-fldr-tree").addClass("show-sync-items-only");
selectedFldrID = gPrefs.syncFolderID;
}
}
gFolderPickerPopup = new aeFolderPicker(
"#new-clipping-fldr-tree",
gClippingsDB,
rootFldrID,
rootFldrName,
rootFldrCls,
selectedFldrID
);
gFolderPickerPopup.onSelectFolder = selectFolder;
}
// Folder-picker selection handler for the main dialog: records the chosen
// parent folder, updates the menu-button label/value, marks the button when
// the synced-clippings folder is chosen, and dismisses the popup.
function selectFolder(aFolderData)
{
gParentFolderID = Number(aFolderData.node.key);
let fldrPickerMenuBtn = $("#new-clipping-fldr-picker-menubtn");
fldrPickerMenuBtn.text(aFolderData.node.title).val(gParentFolderID);
if (gParentFolderID == gPrefs.syncFolderID) {
fldrPickerMenuBtn.attr("syncfldr", "true");
}
else {
fldrPickerMenuBtn.removeAttr("syncfldr");
}
$("#new-clipping-fldr-tree-popup").css({ visibility: "hidden" });
$(".popup-bkgrd").hide();
}
/**
 * Initializes the shortcut key drop-down menu: disables keys already
 * assigned to existing clippings and sets up the shortcut key hint tooltip.
 */
async function initShortcutKeyMenu()
{
  let shortcutKeyMenu = $("#clipping-key")[0];
  let assignedKeysLookup = {};

  // BUG FIX: this DB query promise was previously left floating, so the
  // function could resolve before the menu options were disabled and any
  // DB error went unhandled.  Await it so callers see a fully initialized
  // menu (and failures propagate).
  await gClippingsDB.clippings.where("shortcutKey").notEqual("").each((aItem, aCursor) => {
    assignedKeysLookup[aItem.shortcutKey] = 1;
  });

  for (let option of shortcutKeyMenu.options) {
    if (assignedKeysLookup[option.text]) {
      option.setAttribute("disabled", "true");
      option.setAttribute("title", messenger.i18n.getMessage("shortcutKeyAssigned", option.text));
    }
  }

  let keybPasteKeys = await messenger.runtime.sendMessage({msgID: "get-shct-key-prefix-ui-str"});
  let tooltip = messenger.i18n.getMessage("shctKeyHintTB", keybPasteKeys);
  $("#shct-key-tooltip-text").attr("title", tooltip);
}
/**
 * Initializes the clipping label picker so that the picker's text color
 * reflects the currently-selected label color.
 */
function initLabelPicker()
{
  $("#clipping-label-picker").on("change", aEvent => {
    const label = aEvent.target.value;

    // Default (no label) renders in black; yellow is darkened for
    // readability; every other label name is itself a valid CSS color.
    let color;
    if (! label) {
      color = "black";
    }
    else if (label == "yellow") {
      color = "rgb(200, 200, 0)";
    }
    else {
      color = label;
    }

    $(aEvent.target).css({ color });
  });
}
/**
 * Reports whether the user set any optional clipping properties.
 * @returns {boolean} true if a shortcut key or a label was chosen.
 */
function isClippingOptionsSet()
{
  const shortcutKeyMenu = $("#clipping-key")[0];
  const hasShortcutKey = shortcutKeyMenu.selectedIndex != 0;
  return hasShortcutKey || $("#clipping-label-picker").val() != "";
}
/**
 * Strips leading ">" email-quote markers (each optionally followed by a
 * space) from every line of the given text.
 * @param {string} aClippingText - Text possibly containing quoted lines.
 * @returns {string} The text with quote prefixes removed.
 */
function formatUnquoted(aClippingText)
{
  const quotePrefix = /^>>* ?(>>* ?)*/gm;
  return aClippingText.replace(quotePrefix, "");
}
/**
 * Joins lines that were hard-wrapped with single line breaks into one line,
 * replacing the break (and an optional trailing space) with a single space.
 * Blank lines (double breaks) are preserved as paragraph separators.
 * @param {string} aClippingText - Text possibly containing wrapped lines.
 * @returns {string} The text with single line breaks collapsed.
 */
function formatRemoveLineBreaks(aClippingText)
{
  const singleBreak = /([^\n])( )?\n([^\n])/gm;
  return aClippingText.replace(singleBreak, "$1 $3");
}
/**
 * Clears the "clippings unchanged" preference flag if it is currently set.
 * @returns {Promise} Resolves once the pref is saved (or immediately when
 *   the flag was already clear).
 */
function unsetClippingsUnchangedFlag()
{
  if (! gPrefs.clippingsUnchanged) {
    return Promise.resolve();
  }
  return aePrefs.setPrefs({ clippingsUnchanged: false });
}
/**
 * Handler for the dialog's OK button.  Applies the selected formatting
 * options, saves the new clipping to the database, notifies Clippings
 * listeners, pushes the sync data to the Sync Clippings helper app when
 * sync is turned on, and finally closes the dialog.
 * @param {Event} aEvent - The DOM event that triggered acceptance.
 */
function accept(aEvent)
{
  // BUG FIX: removed unused local `prefs` (`gClippings.getPrefs()` result
  // was never read; this function uses the global `gPrefs` directly).
  let name = $("#clipping-name").val();
  let content = $("#clipping-text").val();

  // Optional clean-up of quoted/hard-wrapped email text.
  if ($("#create-as-unquoted")[0].checked) {
    content = formatUnquoted(content);
  }
  if ($("#remove-extra-linebreaks")[0].checked) {
    content = formatRemoveLineBreaks(content);
  }

  let shortcutKeyMenu = $("#clipping-key")[0];
  let shortcutKey = "";
  if (shortcutKeyMenu.selectedIndex != 0) {
    shortcutKey = shortcutKeyMenu.options[shortcutKeyMenu.selectedIndex].text;
  }

  let labelPicker = $("#clipping-label-picker");
  let label = labelPicker.val() ? labelPicker.val() : "";

  let errorMsgBox = new aeDialog("#create-clipping-error-msgbox");
  let numItemsInParent = 0;  // For calculating display order of new clipping.

  let newClipping = {
    name, content, shortcutKey, label,
    parentFolderID: gParentFolderID,
    displayOrder: 0,
    sourceURL: "",
  };

  gClippingsDB.transaction("rw", gClippingsDB.clippings, gClippingsDB.folders, () => {
    // New clipping goes last in its parent: count existing child folders
    // and clippings to derive its display order.
    gClippingsDB.folders.where("parentFolderID").equals(gParentFolderID).count().then(aNumFldrs => {
      numItemsInParent += aNumFldrs;
      return gClippingsDB.clippings.where("parentFolderID").equals(gParentFolderID).count();
    }).then(aNumClippings => {
      numItemsInParent += aNumClippings;
      newClipping.displayOrder = numItemsInParent;
      return gClippingsDB.clippings.add(newClipping);
    }).then(aNewClippingID => {
      let clipgsLstrs = gClippings.getClippingsListeners();
      clipgsLstrs.forEach(aListener => {
        aListener.newClippingCreated(aNewClippingID, newClipping, aeConst.ORIGIN_HOSTAPP);
      });

      return unsetClippingsUnchangedFlag();
    }).then(() => {
      // When sync is on, serialize the synced folder subtree for the
      // native helper app; otherwise skip the next two steps.
      if (gPrefs.syncClippings) {
        aeImportExport.setDatabase(gClippingsDB);
        return aeImportExport.exportToJSON(true, true, gPrefs.syncFolderID, false, true);
      }
      return null;
    }).then(aSyncData => {
      if (aSyncData) {
        let msg = {
          msgID: "set-synced-clippings",
          syncData: aSyncData.userClippingsRoot,
        };
        log("Clippings/mx::new.js: accept(): Sending message 'set-synced-clippings' to the Sync Clippings helper app. Message data:");
        log(msg);

        return messenger.runtime.sendNativeMessage(aeConst.SYNC_CLIPPINGS_APP_NAME, msg);
      }
      return null;
    }).then(async (aMsgResult) => {
      if (aMsgResult) {
        log("Clippings/mx::new.js: accept(): Response from the Sync Clippings helper app:");
        log(aMsgResult);
      }

      // First time clipping options were used: auto-show the details pane
      // in Clippings Manager, once.
      if (gPrefs.clippingsMgrAutoShowDetailsPane && isClippingOptionsSet()) {
        aePrefs.setPrefs({
          clippingsMgrAutoShowDetailsPane: false,
          clippingsMgrDetailsPane: true,
        });
      }

      closeDlg();
    }).catch("OpenFailedError", aErr => {
      // OpenFailedError exception thrown if Firefox is set to "Never remember
      // history."
      errorMsgBox.onInit = () => {
        console.error(`Error creating clipping: ${aErr}`);
        let errMsgElt = $("#create-clipping-error-msgbox > .dlg-content > .msgbox-error-msg");
        errMsgElt.text(messenger.i18n.getMessage("saveClippingError"));
      };
      errorMsgBox.showModal();
    }).catch(aErr => {
      console.error("Clippings/mx::new.js: accept(): " + aErr);
      errorMsgBox.onInit = () => {
        let errMsgElt = $("#create-clipping-error-msgbox > .dlg-content > .msgbox-error-msg");
        let errText = `Error creating clipping: ${aErr}`;

        if (aErr == aeConst.SYNC_ERROR_CONXN_FAILED) {
          errText = messenger.i18n.getMessage("syncPushFailed");
          errorMsgBox.onAfterAccept = () => {
            // Despite the native app connection error, the new clipping was
            // successfully created, so just close the main dialog.
            closeDlg();
          };
        }
        errMsgElt.text(errText);
      };
      errorMsgBox.showModal();
    });
  });
}
/**
 * Handler for the dialog's Cancel button — dismisses the dialog without
 * saving the clipping.
 * @param {Event} aEvent - The DOM event that triggered cancellation.
 */
function cancel(aEvent)
{
  closeDlg();
}
/**
 * Notifies the background script that this dialog is closing, then removes
 * the dialog's window.
 */
async function closeDlg()
{
  await messenger.runtime.sendMessage({ msgID: "close-new-clipping-dlg" });
  // BUG FIX: await the window removal so the promise no longer floats and
  // callers awaiting closeDlg() observe completion (or failure).
  await messenger.windows.remove(messenger.windows.WINDOW_ID_CURRENT);
}
/**
 * Writes a message to the console, but only in debug builds.
 * @param {*} aMessage - Value to log.
 */
function log(aMessage)
{
  if (! aeConst.DEBUG) {
    return;
  }
  console.log(aMessage);
}
|
/*
* Copyright (c) 2020, Oracle and/or its affiliates.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License, version 2.0, as
* published by the Free Software Foundation.
*
* This program is also distributed with certain software (including
* but not limited to OpenSSL) that is licensed under separate terms,
* as designated in a particular file or component or in included license
* documentation. The authors of MySQL hereby grant you an
* additional permission to link the program and your derivative works
* with the separately licensed software that they have included with
* MySQL.
*
* Without limiting anything contained in the foregoing, this file,
* which is part of MySQL Connector/Node.js, is also subject to the
* Universal FOSS Exception, version 1.0, a copy of which can be found at
* http://oss.oracle.com/licenses/universal-foss-exception.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License, version 2.0, for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
'use strict';
const bytes = require('../../Wrappers/ScalarValues/bytes');
/**
* @private
* @alias module:adapters.Mysqlx.Tokenizable
* @param {proto.Mysqlx.Session.AuthenticateContinue|proto.Mysqlx.Session.AuthenticateOk} proto - protobuf stub
* @returns {module:adapters.Mysqlx.Tokenizable}
*/
/**
 * Factory wrapping a protobuf authentication message with JSON/object views.
 * @private
 * @alias module:adapters.Mysqlx.Tokenizable
 * @param {proto.Mysqlx.Session.AuthenticateContinue|proto.Mysqlx.Session.AuthenticateOk} proto - protobuf stub
 * @returns {module:adapters.Mysqlx.Tokenizable}
 */
function Tokenizable (proto) {
    /**
     * Serialize to JSON using a protobuf-like convention.
     * @function
     * @name module:adapters.Mysqlx.Tokenizable#toJSON
     * @returns {Object}
     */
    const toJSON = () => ({ auth_data: bytes(proto.getAuthData()).toJSON() });

    /**
     * Return a plain JavaScript object version of the underlying protocol message.
     * @function
     * @name module:adapters.Mysqlx.Tokenizable#toObject
     * @returns {Object}
     */
    const toObject = () => proto.toObject();

    return { toJSON, toObject };
}
module.exports = Tokenizable;
|
# Copyright 2011-2021 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Docstring
"""
# Futures
from __future__ import absolute_import
# Own modules
from microprobe.code.address import InstructionAddress
from microprobe.code.context import Context
from microprobe.target.env import GenericEnvironment
# Constants
# Functions
# Classes
class riscv64_linux_gcc(GenericEnvironment):
    """Code-generation environment for 64-bit RISC-V Linux with GCC.

    Defines the ABI pieces microprobe needs to emit runnable benchmarks:
    stack register, function call/return sequences, and the set of
    caller-saved (volatile) registers.
    """

    # No extra ELF boilerplate code is needed for this environment.
    _elf_code = ""\
                ""\
                ""

    def __init__(self, isa):
        # Human-readable description plus the ISA definition; RISC-V Linux
        # is little-endian.
        super(
            riscv64_linux_gcc,
            self).__init__(
            "riscv64_linux_gcc",
            "RISC-V architecture (64bit addressing mode), "
            "Linux operating system, GCC compiler",
            isa,
            little_endian=True
        )

        # Default benchmark wrapper used when none is specified.
        self._default_wrapper = "CWrapper"

    @property
    def stack_pointer(self):
        """Register used as the stack pointer (RISC-V ABI: x2/sp)."""
        return self.isa.registers["X2"]

    @property
    def stack_direction(self):
        """Direction in which the stack is considered to grow.

        NOTE(review): returns "increase"; confirm this matches microprobe's
        convention for RISC-V, whose hardware stack grows toward lower
        addresses.
        """
        return "increase"

    def elf_abi(self, stack_size, start_symbol, **kwargs):
        # RISC-V psABI requires 16-byte stack alignment.
        return super(riscv64_linux_gcc, self).elf_abi(stack_size,
                                                      start_symbol,
                                                      stack_alignment=16,
                                                      **kwargs)

    def function_call(self, target,
                      return_address_reg=None,
                      long_jump=False):
        """Return the instruction sequence for calling ``target``.

        :param target: symbol name (str), InstructionAddress, or — when
            ``long_jump`` is True — an absolute integer address.
        :param return_address_reg: link register; defaults to x1 (ra).
        :param long_jump: load the full address into the link register and
            JALR through it instead of a direct JAL.
        """
        if return_address_reg is None:
            return_address_reg = self.target.isa.registers["X1"]

        if isinstance(target, str):
            target = InstructionAddress(base_address=target)

        if long_jump:
            # Long jumps require a concrete numeric address to materialize.
            assert isinstance(target, int)

            instrs = self.target.set_register(
                return_address_reg, target, Context()
            )

            jalr_ins = self.target.new_instruction("JALR_V0")
            jalr_ins.set_operands([0, return_address_reg, return_address_reg])
            jalr_ins.add_comment("Long jump to address 0X%016X" % target)

            instrs.append(jalr_ins)
            return instrs
        else:
            jal_ins = self.target.new_instruction("JAL_V0")
            jal_ins.set_operands([target, return_address_reg])
            return [jal_ins]

    def function_return(self,
                        return_address_reg=None):
        """Return the instruction sequence for returning from a function.

        Emits ``JALR x0, 0(return_address_reg)`` (link result discarded).
        """
        if return_address_reg is None:
            return_address_reg = self.target.isa.registers["X1"]

        ret_ins = self.target.new_instruction("JALR_V0")
        ret_ins.set_operands([0,
                              return_address_reg,
                              self.target.isa.registers["X0"]])
        return [ret_ins]

    @property
    def volatile_registers(self):
        """Caller-saved integer (X) and floating-point (F) registers."""
        rlist = []

        for idx in [
                1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17,
                28, 29, 30, 31]:
            rlist += [self.target.registers['X%d' % idx]]

        for idx in [
                0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16, 17,
                28, 29, 30, 31]:
            rlist += [self.target.registers['F%d' % idx]]

        return rlist
|
# coding=utf8
"""
units.py - Unit conversion module for Sopel
Copyright © 2013, Elad Alfassa, <elad@fedoraproject.org>
Copyright © 2013, Dimitri Molenaars, <tyrope@tyrope.nl>
Licensed under the Eiffel Forum License 2.
"""
from __future__ import unicode_literals, division
from sopel.module import commands, example, NOLIMIT
import re
# Input parsers for the conversion commands below.
# BUG FIX: these patterns were plain strings containing "\.", which is an
# invalid escape sequence in a non-raw string literal (SyntaxWarning on
# modern Python); raw strings make the regex text explicit.
find_temp = re.compile(r'(-?[0-9]*\.?[0-9]*)[ °]*(K|C|F)', re.IGNORECASE)
find_length = re.compile(r'([0-9]*\.?[0-9]*)[ ]*(mile[s]?|mi|inch|in|foot|feet|ft|yard[s]?|yd|(?:milli|centi|kilo|)meter[s]?|[mkc]?m|ly|light-year[s]?|au|astronomical unit[s]?|parsec[s]?|pc)', re.IGNORECASE)
find_mass = re.compile(r'([0-9]*\.?[0-9]*)[ ]*(lb|lbm|pound[s]?|ounce|oz|(?:kilo|)gram(?:me|)[s]?|[k]?g)', re.IGNORECASE)
def f_to_c(temp):
    """Convert degrees Fahrenheit to degrees Celsius."""
    fahrenheit = float(temp)
    return (fahrenheit - 32) * 5 / 9
def c_to_k(temp):
    """Convert degrees Celsius to Kelvin."""
    kelvin = temp + 273.15
    return kelvin
def c_to_f(temp):
    """Convert degrees Celsius to degrees Fahrenheit."""
    fahrenheit = 9.0 / 5.0 * temp + 32
    return fahrenheit
def k_to_c(temp):
    """Convert Kelvin to degrees Celsius."""
    celsius = temp - 273.15
    return celsius
@commands('temp')
@example('.temp 100F', '37.78°C = 100.00°F = 310.93K')
@example('.temp 100C', '100.00°C = 212.00°F = 373.15K')
@example('.temp 100K', '-173.15°C = -279.67°F = 100.00K')
def temperature(bot, trigger):
    """Convert a temperature between Celsius, Fahrenheit and Kelvin."""
    try:
        source = find_temp.match(trigger.group(2)).groups()
    except (AttributeError, TypeError):
        bot.reply("That's not a valid temperature.")
        return NOLIMIT

    numeric = float(source[0])
    unit = source[1].upper()

    # Normalize the input to Celsius; the regex only admits C, F or K, and
    # anything else would fall back to 0 as before.
    to_celsius = {
        'C': lambda t: t,
        'F': f_to_c,
        'K': k_to_c,
    }
    celsius = to_celsius.get(unit, lambda t: 0)(numeric)

    bot.reply("{:.2f}°C = {:.2f}°F = {:.2f}K".format(
        celsius, c_to_f(celsius), c_to_k(celsius)))
@commands('length', 'distance')
@example('.distance 3m', '3.00m = 9 feet, 10.11 inches')
@example('.distance 3km', '3.00km = 1.86 miles')
@example('.distance 3 miles', '4.83km = 3.00 miles')
@example('.distance 3 inch', '7.62cm = 3.00 inches')
@example('.distance 3 feet', '91.44cm = 3 feet, 0.00 inches')
@example('.distance 3 yards', '2.74m = 9 feet, 0.00 inches')
@example('.distance 155cm', '1.55m = 5 feet, 1.02 inches')
@example('.length 3 ly', '28382191417742.40km = 17635876112814.77 miles')
@example('.length 3 au', '448793612.10km = 278867421.71 miles')
@example('.length 3 parsec', '92570329129020.20km = 57520535754731.61 miles')
def distance(bot, trigger):
    """Convert a distance and reply with both metric and imperial forms."""
    try:
        source = find_length.match(trigger.group(2)).groups()
    except (AttributeError, TypeError):
        bot.reply("That's not a valid length unit.")
        return NOLIMIT
    unit = source[1].lower()
    numeric = float(source[0])
    # Normalize the input to meters.
    # NOTE(review): miles here use 1/0.00062137 while the imperial output
    # below uses 0.000621371192 — slightly inconsistent constants; confirm
    # whether round-trip accuracy matters before unifying.
    meter = 0
    if unit in ("meters", "meter", "m"):
        meter = numeric
    elif unit in ("millimeters", "millimeter", "mm"):
        meter = numeric / 1000
    elif unit in ("kilometers", "kilometer", "km"):
        meter = numeric * 1000
    elif unit in ("miles", "mile", "mi"):
        meter = numeric / 0.00062137
    elif unit in ("inch", "in"):
        meter = numeric / 39.370
    elif unit in ("centimeters", "centimeter", "cm"):
        meter = numeric / 100
    elif unit in ("feet", "foot", "ft"):
        meter = numeric / 3.2808
    elif unit in ("yards", "yard", "yd"):
        meter = numeric / (3.2808 / 3)
    elif unit in ("light-year", "light-years", "ly"):
        meter = numeric * 9460730472580800
    elif unit in ("astronomical unit", "astronomical units", "au"):
        meter = numeric * 149597870700
    elif unit in ("parsec", "parsecs", "pc"):
        meter = numeric * 30856776376340068
    # Pick the most readable metric unit for display.
    if meter >= 1000:
        metric_part = '{:.2f}km'.format(meter / 1000)
    elif meter < 0.01:
        metric_part = '{:.2f}mm'.format(meter * 1000)
    elif meter < 1:
        metric_part = '{:.2f}cm'.format(meter * 100)
    else:
        metric_part = '{:.2f}m'.format(meter)
    # Build the imperial representation: miles above ~500 yards, otherwise
    # a yards/feet/inches breakdown.
    inch = meter * 39.37
    foot = int(inch) // 12
    inch = inch - (foot * 12)
    yard = foot // 3
    mile = meter * 0.000621371192
    if yard > 500:
        stupid_part = '{:.2f} miles'.format(mile)
    else:
        parts = []
        if yard >= 100:
            parts.append('{} yards'.format(yard))
            foot -= (yard * 3)
        if foot == 1:
            parts.append('1 foot')
        elif foot != 0:
            parts.append('{:.0f} feet'.format(foot))
        parts.append('{:.2f} inches'.format(inch))
        stupid_part = ', '.join(parts)
    bot.reply('{} = {}'.format(metric_part, stupid_part))
@commands('weight', 'mass')
def mass(bot, trigger):
    """Convert a mass and reply with both metric (g/kg) and imperial
    (oz/lb) forms."""
    try:
        source = find_mass.match(trigger.group(2)).groups()
    except (AttributeError, TypeError):
        bot.reply("That's not a valid mass unit.")
        return NOLIMIT
    unit = source[1].lower()
    numeric = float(source[0])
    # Normalize the input to grams.
    metric = 0
    if unit in ("gram", "grams", "gramme", "grammes", "g"):
        metric = numeric
    elif unit in ("kilogram", "kilograms", "kilogramme", "kilogrammes", "kg"):
        metric = numeric * 1000
    elif unit in ("lb", "lbm", "pound", "pounds"):
        metric = numeric * 453.59237
    elif unit in ("oz", "ounce"):
        metric = numeric * 28.35
    if metric >= 1000:
        metric_part = '{:.2f}kg'.format(metric / 1000)
    else:
        metric_part = '{:.2f}g'.format(metric)
    # Imperial breakdown: whole pounds plus leftover ounces.
    ounce = metric * .035274
    pound = int(ounce) // 16
    ounce = ounce - (pound * 16)
    # BUG FIX: the original condition was `pound > 1`, which silently
    # dropped the pound for masses between 1 lb and 2 lb (only the leftover
    # ounces were reported).
    if pound >= 1:
        unit_word = 'pound' if pound == 1 else 'pounds'
        stupid_part = '{} {}'.format(pound, unit_word)
        if ounce > 0.01:
            stupid_part += ' {:.2f} ounces'.format(ounce)
    else:
        stupid_part = '{:.2f} oz'.format(ounce)
    bot.reply('{} = {}'.format(metric_part, stupid_part))
# Running this module directly executes the @example-based self-tests above.
if __name__ == "__main__":
    from sopel.test_tools import run_example_tests
    run_example_tests(__file__)
|
import ActionsTable from './ActionsTable.react';
export default ActionsTable;
|
// Application runtime configuration (localhost defaults — presumably for
// development; confirm before deploying).
const Config = {
  socket: {
    // Base URL of the socket server.
    host: 'http://localhost:8086',
  },
  api: {
    // Base URL of the HTTP API server.
    host: 'http://localhost:8087',
  },
  caching: { // in seconds
    strategies: 300,
  },
};
export default Config;
|
const Database = require("./database/db")
const {
subjects,
weekdays,
getSubject,
convertHoursToMinute
} = require("./utils/format")
// Funcionalidades
// GET / — renders the static landing page.
function pageLanding(req, res) {
    return res.render("index.html")
}
// GET /study — lists proffys whose classes match the subject/weekday/time
// filters from the query string; renders the bare form when any filter is
// missing.
async function pageStudy(req, res) {
    const filters = req.query  // filter values supplied by the browser

    if(!filters.subject || !filters.weekday || !filters.time) {
        return res.render("study.html", { filters, subjects, weekdays})
    }

    // Schedules are stored in minutes, so convert the requested time.
    const timeToMinutes = convertHoursToMinute(filters.time)

    // SECURITY FIX: filter values are user-controlled; bind them as SQL
    // parameters instead of interpolating them into the statement, which
    // allowed SQL injection.
    const query = `
        SELECT classes.*, proffys.*
        FROM proffys
        JOIN classes ON (proffys.id = classes.proffy_id)
        WHERE EXISTS (
            SELECT class_schedule.*
            FROM class_schedule
            WHERE class_schedule.class_id = classes.id
            AND class_schedule.weekday = ?
            AND class_schedule.time_from <= ?
            AND class_schedule.time_to > ?
        )
        AND classes.subject = ?
    `

    // Guard against database errors during the query.
    try {
        const db = await Database
        const proffys = await db.all(query, [filters.weekday, timeToMinutes, timeToMinutes, filters.subject])

        // Replace each stored subject ID with its display name.
        proffys.forEach((proffy) => {
            proffy.subject = getSubject(proffy.subject)
        })

        return res.render("study.html", { proffys, subjects, filters, weekdays })
    } catch (error) {
        console.log(error)
    }
}
// GET /give-classes — renders the class-registration form with the
// selectable subjects and weekdays.
function pageGiveClasses(req, res) {
    return res.render("give-classes.html", {subjects, weekdays})
}
// POST handler that registers a proffy together with their class and
// weekly schedule, then redirects to /study pre-filtered to the new class.
async function saveClasses(req, res) {
    const createProffy = require("./database/createProffy")

    const proffyValue = {
        name: req.body.name,
        avatar: req.body.avatar,
        // BUG FIX: this previously copied req.body.avatar into whatsapp.
        whatsapp: req.body.whatsapp,
        bio: req.body.bio
    }

    const classValue = {
        subject: req.body.subject,
        cost: req.body.cost
    }

    // One schedule row per submitted weekday; times are stored in minutes.
    const classScheduleValues = req.body.weekday.map((weekday, index) => {
        return {
            weekday,
            time_from: convertHoursToMinute(req.body.time_from[index]),
            time_to: convertHoursToMinute(req.body.time_to[index])
        }
    })

    try {
        const db = await Database
        await createProffy(db, {proffyValue, classValue, classScheduleValues })

        let queryString = "?subject=" + req.body.subject
        queryString += "&weekday=" + req.body.weekday[0]
        queryString += "&time=" + req.body.time_from[0]

        return res.redirect("/study" + queryString)
    } catch (error) {
        console.log(error)
    }

    // On failure fall back to the unfiltered study page.
    return res.redirect("/study")
}
module.exports = {
pageLanding,
pageStudy,
pageGiveClasses,
saveClasses
}
|
import React from "react"
import loadable from '@loadable/component'
const RebornComponent = loadable(() => import('../components/RebornInput'))
// Renders the code-split RebornInput component; its chunk is fetched on
// first render via @loadable/component.
function LoadableReborn() {
  return (
    <div>
      <RebornComponent />
    </div>
  )
}
export default LoadableReborn
|
"""
This is the main script for predicting a segmentation of an input MRA image. Segmentations can be predicted for multiple
models eather on rough grid (the parameters are then read out from the Unet/models/tuned_params.cvs file) or on fine
grid.
"""
import os
from scipy.ndimage.filters import convolve
import numpy as np
import helper
import time
class Predictor():
    """Runs a trained segmentation model over patient MRA volumes.

    Produces binary probability maps (saved as NIfTI under ``prob_dir``)
    and TP/FP/FN error masks (saved under ``error_dir``).
    """

    def __init__(self, model, train_metadata, prob_dir, error_dir, patients, patients_dir, label_filename, threshold=0.5):
        self.model = model
        self.train_metadata = train_metadata
        self.PROB_DIR = prob_dir
        self.ERROR_DIR = error_dir
        self.patients = patients
        self.PATIENTS_DIR = patients_dir
        # Probability cutoff used to binarize model outputs.
        self.threshold = threshold
        self.label_filename = label_filename
        return

    # where to save probability map from validation as nifti
    def get_probs_filepath(self, patient):
        return os.path.join(self.PROB_DIR, 'probs_' + patient + '_.nii')

    # where to save error mask
    def get_errormasks_filepath(self, patient):
        return os.path.join(self.ERROR_DIR, 'error_mask_' + patient + '_.nii')

    def predict(self, patch_size, data_dir, patch_size_z=None):
        """Predict a binary segmentation for the patient volume in ``data_dir``.

        :param patch_size: list of in-plane patch sizes; index 0 is the
            output/stride size, further entries are larger context patches
            fed to the model as additional inputs.
        :param data_dir: directory containing ``001.nii`` (image) and,
            optionally, a precomputed ``mask.nii`` brain mask.
        :param patch_size_z: list of patch sizes along z (3D models only).
        :returns: thresholded segmentation as an array of the image's shape.
        """
        print('________________________________________________________________________________')
        print('patient dir:', data_dir)
        # -----------------------------------------------------------
        # LOADING MODEL, IMAGE AND MASK
        # -----------------------------------------------------------
        print('> Loading image...')
        img_mat = helper.load_nifti_mat_from_file(
            os.path.join(data_dir, '001.nii')).astype(np.float32)
        print('> Loading mask...')
        # If no brain mask is cached, derive a crude one by thresholding a
        # 16x16x16 local mean of the image, and save it for reuse.
        if not os.path.exists(os.path.join(data_dir, 'mask.nii')):
            avg_mat = convolve(img_mat.astype(dtype=float), np.ones((16,16,16), dtype=float)/4096, mode='constant', cval=0)
            mask_mat = np.where(avg_mat > 10.0, 1, 0)
            helper.create_and_save_nifti(mask_mat, os.path.join(data_dir, 'mask.nii'))
        else:
            mask_mat = helper.load_nifti_mat_from_file(
                os.path.join(data_dir, 'mask.nii'))

        # -----------------------------------------------------------
        # PREDICTION
        # -----------------------------------------------------------
        # the segmentation is going to be saved in this probability matrix
        prob_mat = np.zeros(img_mat.shape, dtype=np.float32)
        x_dim, y_dim, z_dim = prob_mat.shape

        # get the x, y and z coordinates where there is brain
        x, y, z = np.where(mask_mat > 0)
        print('x shape:', x.shape)
        print('y shape:', y.shape)
        print('z shape:', z.shape)

        # get the z slices with brain
        z_slices = np.unique(z)

        # start cutting out and predicting the patches
        starttime_total = time.time()
        if '3d' in self.train_metadata['params']['model']:
            # 3D model: tile the whole volume with non-overlapping output
            # patches; each output patch is predicted from one or more
            # (possibly larger) context patches centered on it.
            x_min = 0  # min(x)
            y_min = 0  # min(y)
            z_min = 0  # min(z)
            x_max = img_mat.shape[0]  # max(x)
            y_max = img_mat.shape[1]  # max(y)
            z_max = img_mat.shape[2]  # max(z)
            # BUG FIX (compat): np.int was removed in NumPy 1.24; it was an
            # alias for the builtin int, which is used here instead.
            num_x_patches = int(np.ceil((x_max - x_min) / patch_size[0]))
            num_y_patches = int(np.ceil((y_max - y_min) / patch_size[0]))
            num_z_patches = int(np.ceil((z_max - z_min) / patch_size_z[0]))

            # Pad the volume along z (repeating the tail) so that the last
            # z patch still has enough context.
            if num_z_patches*patch_size_z[0] + (np.max(patch_size_z)-np.min(patch_size_z))//2 > img_mat.shape[2]:
                new_z = (num_z_patches-1)*patch_size_z[0] + patch_size_z[0]//2 + np.max(patch_size_z)//2  # so that we can feed sufficient patches
                temp = np.zeros((img_mat.shape[0], img_mat.shape[1], new_z))
                temp[:, :, :img_mat.shape[2]] = img_mat
                temp[:, :, img_mat.shape[2]:] = img_mat[:,:,-(new_z - img_mat.shape[2]):]
                img_mat = temp

            for ix in range(num_x_patches):
                for iy in range(num_y_patches):
                    for iz in range(num_z_patches):
                        # find the starting and ending x and y coordinates of given patch
                        patch_start_x = patch_size[0] * ix
                        patch_end_x = patch_size[0] * (ix + 1)
                        patch_start_y = patch_size[0] * iy
                        patch_end_y = patch_size[0] * (iy + 1)
                        patch_start_z = patch_size_z[0] * iz
                        patch_end_z = patch_size_z[0] * (iz + 1)
                        if patch_end_x > x_max:
                            patch_end_x = x_max
                        if patch_end_y > y_max:
                            patch_end_y = y_max
                        if patch_end_z > z_max:
                            patch_end_z = z_max
                        # find center loc with ref. size
                        center_x = patch_start_x + int(patch_size[0]/2)
                        center_y = patch_start_y + int(patch_size[0]/2)
                        center_z = patch_start_z + int(patch_size_z[0]/2)

                        # Cut one context patch per configured size, zero-
                        # padding where the patch sticks out of the volume.
                        img_patches = []
                        for h, size in enumerate(patch_size):
                            img_patch = np.zeros((size, size, patch_size_z[h], 1))
                            offset_x = 0
                            offset_y = 0
                            offset_z = 0
                            # find the starting and ending x and y coordinates of given patch
                            img_patch_start_x = center_x - int(size/2)
                            img_patch_end_x = center_x + int(size/2)
                            img_patch_start_y = center_y - int(size/2)
                            img_patch_end_y = center_y + int(size/2)
                            img_patch_start_z = center_z - int(patch_size_z[h]/2)
                            img_patch_end_z = center_z + int(patch_size_z[h]/2)
                            if img_patch_end_x > x_max:
                                img_patch_end_x = x_max
                            if img_patch_end_y > y_max:
                                img_patch_end_y = y_max
                            if img_patch_start_x < x_min:
                                offset_x = x_min - img_patch_start_x
                                img_patch_start_x = x_min
                            if img_patch_start_y < y_min:
                                offset_y = y_min - img_patch_start_y
                                img_patch_start_y = y_min
                            if img_patch_start_z < z_min:
                                offset_z = z_min - img_patch_start_z
                                img_patch_start_z = z_min
                            # get the patch with the found coordinates from the image matrix
                            img_patch[offset_x : offset_x + (img_patch_end_x-img_patch_start_x),
                                      offset_y : offset_y + (img_patch_end_y-img_patch_start_y),
                                      offset_z : offset_z + (img_patch_end_z-img_patch_start_z), 0] \
                                = img_mat[img_patch_start_x: img_patch_end_x, img_patch_start_y: img_patch_end_y, img_patch_start_z:img_patch_end_z]
                            img_patches.append(np.expand_dims(img_patch.astype(np.float32),0))

                        # predict the patch with the model and save to probability matrix
                        # BUG FIX: was self.THRESHOLD, which is never
                        # defined (AttributeError at runtime); __init__
                        # stores the cutoff as self.threshold.
                        prob_mat[patch_start_x: patch_end_x, patch_start_y: patch_end_y, patch_start_z:patch_end_z] = \
                            (np.reshape(
                                np.squeeze(self.model.predict(img_patches)[-1]),
                                (patch_size[0], patch_size[0], patch_size_z[0])
                            ) > self.threshold).astype(np.uint8) \
                            [:patch_end_x-patch_start_x, :patch_end_y-patch_start_y, :patch_end_z-patch_start_z]
        else:
            # 2D model: proceed slice by slice over the z slices that
            # contain brain, tiling only the brain's bounding box.
            for i in z_slices:
                print('Slice:', i)
                starttime_slice = time.time()
                slice_vox_inds = np.where(z == i)
                # find all x and y coordinates with brain in given slice
                x_in_slice = x[slice_vox_inds]
                y_in_slice = y[slice_vox_inds]
                # find min and max x and y coordinates
                slice_x_min = min(x_in_slice)
                slice_x_max = max(x_in_slice)
                slice_y_min = min(y_in_slice)
                slice_y_max = max(y_in_slice)

                # calculate number of predicted patches in x and y direction in given slice
                # (int replaces the removed np.int alias — see above)
                if isinstance(patch_size, list):
                    num_of_x_patches = int(np.ceil((slice_x_max - slice_x_min) / patch_size[0]))
                    num_of_y_patches = int(np.ceil((slice_y_max - slice_y_min) / patch_size[0]))
                else:
                    num_of_x_patches = int(np.ceil((slice_x_max - slice_x_min) / patch_size))
                    num_of_y_patches = int(np.ceil((slice_y_max - slice_y_min) / patch_size))
                print('num x patches', num_of_x_patches)
                print('num y patches', num_of_y_patches)

                for j in range(num_of_x_patches):
                    for k in range(num_of_y_patches):
                        # find the starting and ending x and y coordinates of given patch
                        patch_start_x = slice_x_min + patch_size[0] * j
                        patch_end_x = slice_x_min + patch_size[0] * (j + 1)
                        patch_start_y = slice_y_min + patch_size[0] * k
                        patch_end_y = slice_y_min + patch_size[0] * (k + 1)
                        # if the dimensions of the probability matrix are exceeded shift back the last patch
                        if patch_end_x > slice_x_max:
                            patch_end_x = slice_x_max
                        if patch_end_y > slice_y_max:
                            patch_end_y = slice_y_max
                        # find center loc with ref. size
                        center_x = patch_start_x + int(patch_size[0]/2)
                        center_y = patch_start_y + int(patch_size[0]/2)

                        img_patches = []
                        for h, size in enumerate(patch_size):
                            img_patch = np.zeros((size, size, 1))
                            offset_x = 0
                            offset_y = 0
                            # find the starting and ending x and y coordinates of given patch
                            img_patch_start_x = center_x - int(size/2)
                            img_patch_end_x = center_x + int(size/2)
                            img_patch_start_y = center_y - int(size/2)
                            img_patch_end_y = center_y + int(size/2)
                            if img_patch_end_x > slice_x_max:
                                img_patch_end_x = slice_x_max
                            if img_patch_end_y > slice_y_max:
                                img_patch_end_y = slice_y_max
                            if img_patch_start_x < slice_x_min:
                                offset_x = slice_x_min - img_patch_start_x
                                img_patch_start_x = slice_x_min
                            if img_patch_start_y < slice_y_min:
                                offset_y = slice_y_min - img_patch_start_y
                                img_patch_start_y = slice_y_min
                            # get the patch with the found coordinates from the image matrix
                            img_patch[offset_x : offset_x + (img_patch_end_x-img_patch_start_x),
                                      offset_y : offset_y + (img_patch_end_y-img_patch_start_y), 0] \
                                = img_mat[img_patch_start_x: img_patch_end_x, img_patch_start_y: img_patch_end_y, i]
                            img_patches.append(np.expand_dims(img_patch,0))

                        # predict the patch with the model and save to probability matrix
                        # BUG FIX: self.THRESHOLD -> self.threshold (see above).
                        prob_mat[patch_start_x: patch_end_x, patch_start_y: patch_end_y, i] = (np.reshape(
                            np.squeeze(self.model.predict(img_patches)[-1]),
                            (patch_size[0], patch_size[0])) > self.threshold).astype(np.uint8)[:patch_end_x-patch_start_x, :patch_end_y-patch_start_y]

        # how long does the prediction take for a patient
        duration_total = time.time() - starttime_total
        print('prediction in total took:', (duration_total // 3600) % 60, 'hours',
              (duration_total // 60) % 60, 'minutes',
              duration_total % 60, 'seconds')

        return prob_mat

    def predict_and_save(self, patch_size, patch_size_z):
        """Predict every patient that does not yet have a saved probability
        map and write each result as NIfTI."""
        # Create results dir
        if not os.path.exists(self.PROB_DIR):
            os.makedirs(self.PROB_DIR)

        for patient in self.patients:
            if not os.path.exists(self.get_probs_filepath(patient)):
                # predict
                data_dir = os.path.join(self.PATIENTS_DIR, patient)
                prob_mat = self.predict(patch_size, data_dir, patch_size_z)
                # save
                helper.create_and_save_nifti(prob_mat, self.get_probs_filepath(patient))
        return

    def make_error_mask(self, prob_path, ground_truth_path):
        """Compare a saved segmentation against ground truth.

        :returns: array coded 1 = true positive, 2 = false positive,
            3 = false negative (0 = true negative).
        """
        seg_data = helper.load_nifti_mat_from_file(prob_path)
        ground_truth_data = helper.load_nifti_mat_from_file(ground_truth_path)
        error_array = np.zeros(ground_truth_data.shape)
        equal_mask = seg_data == ground_truth_data
        TP = equal_mask + ground_truth_data == 2
        FP = (seg_data > ground_truth_data)
        FN = (seg_data < ground_truth_data)
        # TP = 1-red, FP = 2-green, FN = 3-blue
        error_array = error_array + TP + FP*2 + FN*3
        return error_array

    def make_and_save_error_masks(self):
        """Build and save an error mask for every patient that has a
        probability map but no error mask yet."""
        # Create results dir
        if not os.path.exists(self.ERROR_DIR):
            os.makedirs(self.ERROR_DIR)

        for patient in self.patients:
            if not os.path.exists(self.get_errormasks_filepath(patient)):
                prob_filepath = self.get_probs_filepath(patient)
                if not os.path.exists(prob_filepath):
                    print("No probability mask found.")
                else:
                    label_path = os.path.join(self.PATIENTS_DIR, patient, self.label_filename)
                    output_path = self.get_errormasks_filepath(patient)
                    # get mask
                    error_mask = self.make_error_mask(prob_filepath, label_path)
                    # save
                    helper.create_and_save_nifti(error_mask, output_path)
        return
|
// this file create the Schema and define foreign keys
const { DataTypes } = require("sequelize");
const Organisation = require("../models/organisation");
const Team = require("../models/team");
const Person = require("../models/person");
const Report = require("../models/report");
const User = require("../models/user");
const Place = require("../models/place");
const RelPersonPlace = require("../models/relPersonPlace");
const RelUserTeam = require("../models/relUserTeam");
const Structure = require("../models/structure");
const Action = require("../models/action");
const Comment = require("../models/comment");
const Territory = require("../models/territory");
const TerritoryObservation = require("../models/territoryObservation");
const RelPersonTeam = require("../models/relPersonTeam");
const { ENCRYPTED_FIELDS_ONLY } = require("../config");
// Builds the Sequelize association options for a UUID foreign-key column
// named `key` (the attribute name and the DB column name are identical).
const generateForeignKey = (key) => ({ foreignKey: { type: DataTypes.UUID, name: key, field: key } });

// Shared foreign-key option objects reused by all the associations below.
const organisationForeignKey = generateForeignKey("organisation");
const teamForeignKey = generateForeignKey("team");
const userForeignKey = generateForeignKey("user");
const actionForeignKey = generateForeignKey("action");
const personForeignKey = generateForeignKey("person");
const placeForeignKey = generateForeignKey("place");
const structureForeignKey = generateForeignKey("structure");
const territoryForeignKey = generateForeignKey("territory");
// Associations. When ENCRYPTED_FIELDS_ONLY is set, the guarded plaintext
// relations below are not declared — presumably those links are carried
// inside the encrypted payload instead (TODO(review): confirm against the
// API clients); the organisation link is always declared.
// Team
Team.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Team, organisationForeignKey);
// User
User.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(User, organisationForeignKey);
User.belongsToMany(Team, { ...userForeignKey, through: RelUserTeam });
Team.belongsToMany(User, { ...teamForeignKey, through: RelUserTeam });
// Person
if (!ENCRYPTED_FIELDS_ONLY) {
  Person.belongsTo(User, userForeignKey);
  User.hasMany(Person, userForeignKey);
  Person.belongsToMany(Team, { ...personForeignKey, through: RelPersonTeam });
  Team.belongsToMany(Person, { ...teamForeignKey, through: RelPersonTeam });
}
// Territory
if (!ENCRYPTED_FIELDS_ONLY) {
  Territory.belongsTo(User, userForeignKey);
  User.hasMany(Territory, userForeignKey);
}
Territory.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Territory, organisationForeignKey);
// TerritoryObservation
if (!ENCRYPTED_FIELDS_ONLY) {
  TerritoryObservation.belongsTo(Territory, territoryForeignKey);
  Territory.hasMany(TerritoryObservation, territoryForeignKey);
  TerritoryObservation.belongsTo(User, userForeignKey);
  User.hasMany(TerritoryObservation, userForeignKey);
  TerritoryObservation.belongsTo(Team, teamForeignKey);
  Team.hasMany(TerritoryObservation, teamForeignKey);
}
TerritoryObservation.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(TerritoryObservation, organisationForeignKey);
// Place
if (!ENCRYPTED_FIELDS_ONLY) {
  Place.belongsTo(User, userForeignKey);
  User.hasMany(Place, userForeignKey);
  Place.belongsToMany(Person, { ...placeForeignKey, through: RelPersonPlace });
  Person.belongsToMany(Place, { ...personForeignKey, through: RelPersonPlace });
}
Place.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Place, organisationForeignKey);
RelPersonPlace.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(RelPersonPlace, organisationForeignKey);
// Structure
Structure.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Structure, organisationForeignKey);
// Report
if (!ENCRYPTED_FIELDS_ONLY) {
  Report.belongsTo(Team, teamForeignKey);
  Team.hasMany(Report, teamForeignKey);
}
Report.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Report, organisationForeignKey);
// Comment
if (!ENCRYPTED_FIELDS_ONLY) {
  Comment.belongsTo(User, userForeignKey);
  User.hasMany(Comment, userForeignKey);
  Comment.belongsTo(Action, actionForeignKey);
  Action.hasMany(Comment, actionForeignKey);
  Comment.belongsTo(Person, personForeignKey);
  Person.hasMany(Comment, personForeignKey);
  Comment.belongsTo(Team, teamForeignKey);
  Team.hasMany(Comment, teamForeignKey);
}
Comment.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Comment, organisationForeignKey);
// Action
if (!ENCRYPTED_FIELDS_ONLY) {
  Action.belongsTo(Person, personForeignKey);
  Person.hasMany(Action, personForeignKey);
  Action.belongsTo(Structure, structureForeignKey);
  Structure.hasMany(Action, structureForeignKey);
  Action.belongsTo(Team, teamForeignKey);
  Team.hasMany(Action, teamForeignKey);
  Action.belongsTo(User, userForeignKey);
  User.hasMany(Action, userForeignKey);
}
Action.belongsTo(Organisation, organisationForeignKey);
Organisation.hasMany(Action, organisationForeignKey);
|
import privacy
# Create two concurrent lists representing a split row
# - row is the front end of the row with sec_estimate faredecode_dict
# - row1 is the back end of the row
def simulate_row(epsilon, taxi_id, spd, cp, fr, pd, n_dict, n_decode, c_decode):
    """Decode one split taxi-trip record into its two halves.

    Returns a (front, back) pair of dicts: the front half carries the trip
    metadata (shift, company, pickup/dropoff areas, payment type and the
    sec_estimate looked up in ``pd``); the back half carries the monetary
    and distance columns.  Community areas of 0 and a payment code of 9 are
    normalised to -1, but the sec_estimate key is built from the raw values.
    """
    def decode(source, column):
        # All columns go through the same privacy decoder with shared tables.
        return privacy.col_decoder(n_dict, n_decode, c_decode, source, column)

    front = {}
    front['epsilon'] = epsilon
    front['taxi_id'] = taxi_id
    front['shift'] = decode(spd, 'shift_c')
    front['company_id'] = decode(cp, 'company_c')
    raw_pca = decode(spd, 'pca_c')
    front['pickup_community_area'] = -1 if raw_pca == 0 else raw_pca
    raw_dca = decode(spd, 'dca_c')
    front['dropoff_community_area'] = -1 if raw_dca == 0 else raw_dca
    raw_pay = decode(cp, 'payment_c')
    front['payment_type'] = -1 if raw_pay == 9 else raw_pay
    # sec_estimate is keyed by the raw (pre-normalisation) area pair, e.g. "0732".
    front['sec_estimate'] = pd[str(raw_pca).zfill(2) + str(raw_dca).zfill(2)]

    back = {}
    fare = decode(fr, 'fare_n')
    tips = decode(fr, 'tips_n')
    back['fare'] = fare
    back['tips'] = tips
    back['trip_total'] = fare + tips
    back['trip_seconds'] = decode(fr, 'seconds_n')
    back['trip_miles'] = decode(fr, 'miles_n')
    return front, back
|
// Vue instance backing the "new user" form mounted on #new-user.
// NOTE(review): data/methods/ready are empty placeholders — presumably to be
// filled in as the form grows; confirm before deleting this instance.
var user = new Vue({
  el : '#new-user',
  data : {
  },
  methods: {
  },
  ready: function(){
  }
});
|
/**
 * Resizes the gantt container to fill the viewport below a fixed 100px
 * header, and shrinks the content box to leave 300px of horizontal room.
 */
function modSampleHeight(){
  var headerHeight = 100;
  var schedule = document.getElementById("gantt_here");
  var contentBox = document.getElementById("contbox");
  schedule.style.height = (parseInt(document.body.offsetHeight) - headerHeight) + "px";
  contentBox.style.width = (parseInt(document.body.offsetWidth) - 300) + "px";
}
// --- Picker setup ---------------------------------------------------------
// Initialise the datepicker with its display format BEFORE calling the
// 'setDate' method; invoking a widget method first implicitly creates the
// widget with default options, so the format would have been ignored.
$(".datepicker").datepicker({
  format: 'yyyy-mm-dd'
});
$(".datepicker").datepicker('setDate', new Date());
// Fix: the option was misspelled "sshowMeridian", so the 24h mode was never
// applied; the correct bootstrap-timepicker option is "showMeridian".
$(".timepicker").timepicker({minuteStep: 30, showMeridian: false});
// --- Gantt configuration --------------------------------------------------
gantt.config.min_column_width = 60;
gantt.config.scale_height = 60;
gantt.config.subscales = [
  {unit: "day", step: 1, date: "%D"},
  {unit: "hour", step: 1, date: " %H "},
  {unit: "minute", step: 30, date: "%i"}
];
// Chart is display-only: no grid resizing, link dragging or progress edits.
gantt.config.grid_resize = false;
gantt.config.drag_links = false;
gantt.config.drag_progress = false;
// Colour each task by its dispatch status (event.dis).
gantt.templates.task_class = function (start, end, event) {
  var css = "";
  if (event.dis == 'accepted')
    css += "event_accepted ";
  if (event.dis == 'refused')
    css += "event_refused";
  if (event.dis == 'checkin')
    css += "event_checkin";
  if (event.dis == 'checkout')
    css += "event_checkout";
  return css; // default return
};
gantt.config.columns = [
];
// Fetch one driver's schedule for the selected date window and render it.
$('#gantt_form').submit(function (evnt) {
  evnt.preventDefault();
  evnt.stopImmediatePropagation();
  var driver_id = $('#driver_id').val();
  var from_date = $("#from_date").val();
  var to_date = $("#to_date").val();
  var data = {
    driver_id: driver_id,
    from_date: from_date,
    to_date: to_date
  };
  gantt.config.start_date = new Date(from_date);
  gantt.config.end_date = new Date(to_date);
  console.log(from_date);
  $.ajax({
    type: "GET",
    url: base_url + '/drivers/gantt_drivers',
    data: data,
    dataType: 'json',
    cache: false,
    success: function (response) {
      console.log(response);
      var demo_tasks = {"data":
        response.gantt_arr
      };
      // Defaults applied to any task the gantt creates on its own.
      gantt.attachEvent("onTaskCreated", function (obj) {
        obj.duration = 4;
        obj.progress = 0.25;
      });
      gantt.config.readonly = true;
      gantt.init("gantt_here");
      modSampleHeight();
      gantt.clearAll();
      gantt.parse(demo_tasks);
    }
  });
});
|
module.exports = {
prompt: ({ inquirer }) =>
inquirer
.prompt([
{
type: 'list',
name: 'kind',
message: 'What kind of component are you generating?',
choices: ['Base', 'Modules']
}
])
.then(({ kind }) =>
inquirer.prompt([
{
type: 'input',
name: 'path',
message: 'Select a directory to store your component.',
default: `src/components/${kind}`
},
{
type: 'input',
name: 'name',
message: 'Choose a name for your new component.'
}
])
)
}
|
/**
 * Prepares the search form before submission: percent-encodes characters in
 * the query field that would otherwise be mangled in a URL (commas, plus
 * signs and whitespace runs) and points the form at /search while carrying
 * over the current page's query string.
 * @param {HTMLFormElement} form - the search form being submitted
 */
function goSearch(form) {
  const queryField = document.getElementById("query_element");
  queryField.value = queryField.value
    .replaceAll(",", "%2C")
    .replaceAll("+", "%2B")
    .replaceAll(/\s+/g, "%20");
  const params = new URLSearchParams(window.location.search);
  form.action = "/search?" + params.toString();
}
|
import os
import sys
import socket
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
from simple_host_target.definition import OP_HT_DATA_BEGIN, OP_HT_DATA_END,\
OP_SH_DATA_PREFIX, OP_SH_DATA_POSTFIX,\
OP_SH_DATA_MIDFIX
class Client(object):
    """Thin TCP client that frames and sends messages to a host/target server."""

    def __init__(self, ip="127.0.0.1", port=5000):
        # Connect the underlying TCP socket immediately on construction.
        self.socket = socket.socket()
        self.socket.connect((ip, port))

    def shutdown(self):
        """Close the connection; safe to call once (socket is set to None)."""
        if self.socket:
            print(" Client goes down ... ")
            self.socket.shutdown(socket.SHUT_RDWR)
            self.socket.close()
            self.socket = None

    def send_sh_data(self, ip_port="", serialized_task=""):
        """Send a framed scheduler/host message: prefix, address, midfix, payload, postfix."""
        self.send(OP_SH_DATA_PREFIX)
        self.send(ip_port)
        self.send(OP_SH_DATA_MIDFIX)
        self.send(serialized_task)
        self.send(OP_SH_DATA_POSTFIX)

    def send_ht_data(self, info_package):
        """Send a framed host/target message: begin marker, payload, end marker."""
        self.send(OP_HT_DATA_BEGIN)
        self.send(info_package)
        self.send(OP_HT_DATA_END)

    def send(self, msg):
        """Fully send ``msg`` (str encoded as ASCII, or a bytes-like object).

        ``None`` is silently ignored.  Loops because socket.send may transmit
        only part of the buffer.  Raises RuntimeError if the peer disconnects
        mid-transfer.
        """
        # Idiom fixes: identity comparisons with None and isinstance() instead
        # of `!= None` / `type(msg) == str`.
        assert self.socket is not None
        data = bytearray(msg, "ASCII") if isinstance(msg, str) else msg
        if data is not None:
            totalsent = 0
            while totalsent < len(data):
                sent = self.socket.send(data[totalsent:])
                if sent == 0:
                    raise RuntimeError("socket connection broken")
                totalsent = totalsent + sent
            print("%d bytes data has been sent successfully !" % (totalsent))
if __name__ == "__main__":
    # Smoke test: open two client connections, push a test payload through
    # each, then shut both down.
    tc = Client()
    tc2 = Client()
    # Bug fix: send_ht_data() requires an info_package argument; the original
    # call passed nothing and raised TypeError before any data was sent.
    tc.send_ht_data("THIS IS A TEST !!!")
    tc2.send_ht_data("THIS IS A TEST !!!")
    tc.shutdown()
    tc2.shutdown()
|
from django.utils.translation import gettext as _
from datetime import datetime
from .models import *
def client_dashboard_ctx():
    """Return dashboard widget context describing this year's clients."""
    year = datetime.today().year
    # Count clients created in the current calendar year.
    clients_this_year = Client.objects.filter(date__year=year).values('id').count()
    return {
        'client': {
            'title': _("Clients"),
            'count': clients_this_year,
            'icon': None,
            'icon_class': None,
        },
    }
|
import React, {Component} from 'react'
import PropTypes from 'prop-types'
import {connect} from 'react-redux'
import {NavLink as Link} from 'react-router-dom'
import {logout} from '../store'
import NavbarBurger from './navbar-burger'
import playSound from '../../script/utility-functions'
/**
 * Top navigation bar. Tracks the window width in local state so it can
 * switch between the full horizontal link bar (wide screens) and the
 * burger menu (narrow screens).
 */
class Navbar extends Component {
  constructor() {
    super()
    // width starts at 0 and is corrected on mount from window.innerWidth
    this.state = {
      width: 0
    }
    this.updateWindowDimensions = this.updateWindowDimensions.bind(this)
  }
  componentDidMount() {
    // Measure once on mount, then keep re-measuring as the window resizes.
    this.updateWindowDimensions()
    window.addEventListener('resize', this.updateWindowDimensions)
  }
  componentWillUnmount() {
    // Remove the listener registered in componentDidMount to avoid leaks.
    window.removeEventListener('resize', this.updateWindowDimensions)
  }
  updateWindowDimensions() {
    this.setState({width: window.innerWidth})
  }
  render() {
    const curWindowWidth = this.state.width
    const {handleClick, isLoggedIn, isAdmin, user} = this.props
    // 1007px is the breakpoint between the full navbar and the burger menu.
    return (
      <React.Fragment>
        <nav className="flex-row-container-navbar">
          <div className="flex-containee-navbar">
            <Link
              to="/home"
              onClick={() => playSound('pipe')}
              style={{textDecoration: 'none'}}
            >
              {curWindowWidth > 1007 ? (
                <h3>Vintage Vidya Games</h3>
              ) : (
                <h3>
                  Vintage
                  <br />
                  Vidya
                  <br />
                  Games
                </h3>
              )}
            </Link>
          </div>
          {curWindowWidth > 1007 ? (
            <div className="flex-containee-navbar">
              <Link
                to="/home"
                className="nes-btn is-warning"
                onClick={() => playSound('jump')}
              >
                Home
              </Link>
              <Link
                to="/cart"
                className="nes-btn is-warning"
                onClick={() => playSound('jump')}
              >
                Cart
              </Link>
              {/* show link to profile after logged in */}
              {isLoggedIn && (
                // React.Fragment wraps the child elements but does not add another dom element
                <React.Fragment>
                  <Link
                    to="orders"
                    className="nes-btn is-warning"
                    onClick={() => playSound('jump')}
                  >
                    Orders
                  </Link>
                  {user.googleId ? null : (
                    <Link
                      to="profile"
                      className="nes-btn is-warning"
                      onClick={() => playSound('jump')}
                    >
                      Profile
                    </Link>
                  )}
                </React.Fragment>
              )}
              {isAdmin && (
                <React.Fragment>
                  <Link
                    to="inventory"
                    className="nes-btn is-warning"
                    onClick={() => playSound('jump')}
                  >
                    Inventory
                  </Link>
                </React.Fragment>
              )}
              {isLoggedIn ? (
                <React.Fragment>
                  {/* The navbar will show these links after you login */}
                  <Link
                    to="#"
                    className="nes-btn is-warning"
                    onClick={() => {
                      handleClick()
                      playSound('death')
                    }}
                  >
                    Logout
                  </Link>
                </React.Fragment>
              ) : (
                <React.Fragment>
                  {/* The navbar will show these links before you login */}
                  <Link
                    to="/login"
                    className="nes-btn is-warning"
                    onClick={() => playSound('jump')}
                  >
                    Login
                  </Link>
                  <Link
                    to="/signup"
                    className="nes-btn is-warning"
                    onClick={() => playSound('jump')}
                  >
                    Sign Up
                  </Link>
                </React.Fragment>
              )}
            </div>
          ) : (
            <NavbarBurger props={this.props} />
          )}
        </nav>
        <br />
        <progress
          className="nes-progress is-success"
          value="0"
          max="100"
          style={{height: '0.5vh'}}
        />
        <br />
        <br />
      </React.Fragment>
    )
  }
}
/**
* CONTAINER
*/
// Derive presentational flags from the redux store: a truthy user id means
// the visitor is logged in, and the admin flag gates admin-only links.
const mapState = state => ({
  isLoggedIn: Boolean(state.user.id),
  isAdmin: Boolean(state.user.admin),
  user: state.user
})
// Dispatch mapping: logging out also empties localStorage so the persisted
// cart does not leak into the next session.
const mapDispatch = dispatch => ({
  handleClick() {
    dispatch(logout())
    // clear cart on log out
    localStorage.clear()
  }
})
// Wire Navbar to the redux store via the state/dispatch mappings above.
export default connect(mapState, mapDispatch)(Navbar)
/**
 * PROP TYPES
 */
Navbar.propTypes = {
  handleClick: PropTypes.func.isRequired,
  isLoggedIn: PropTypes.bool.isRequired,
  isAdmin: PropTypes.bool.isRequired,
  user: PropTypes.object.isRequired
}
|
goog.provide('gfd.WebGlCanvas');
goog.provide('gfd.WebGlCanvas.createWebGlCanvas');
goog.provide('gfd.WebGlCanvas.releaseWebGlCanvas');
goog.provide('gfd.WebGlCanvasListener');
goog.require('gfd.Constants');
goog.require('goog.dom');
goog.require('goog.events');
/**
 * A listener to a webgl canvas. Implementors are notified when the
 * underlying WebGL context is lost or restored so they can drop and
 * rebuild their GPU resources.
 * @interface
 */
gfd.WebGlCanvasListener = function(){};
/**
 * Called when webgl context is lost.
 */
gfd.WebGlCanvasListener.prototype.lostContext = function() {};
/**
 * Called when webgl context is restored.
 */
gfd.WebGlCanvasListener.prototype.restoredContext = function() {};
/**
 * A reusable webgl canvas that can be pooled to minimize the number of webgl
 * contexts.
 * @param {Boolean} cacheable whether the drawing buffer is preserved so the
 *     canvas can later be copied to a 2d canvas or image.
 * @constructor
 */
gfd.WebGlCanvas = function(cacheable)
{
  /**
   * The WebGl context for all drawing commands. Null when creation failed.
   * @type {Object}
   * @private
   */
  this.gl_ = null;

  /**
   * The canvas element we need to draw to.
   * @type {Element}
   * @private
   */
  this.glCanvas_ = goog.dom.createDom('canvas');

  /**
   * The programs created by the current user of the context.
   * @type {Array.<Object>}
   * @private
   */
  this.glPrograms_ = [];

  /**
   * The buffers created by the current user of the context.
   * @type {Array.<Object>}
   * @private
   */
  this.glBuffers_ = [];

  /**
   * Whether this has or has not lost its gl context.
   * @type {boolean}
   * @private
   */
  this.valid_ = true;

  /**
   * The textures created by the current user of the context.
   * @type {Array.<Object>}
   * @private
   */
  this.glTextures_ = [];

  /**
   * The arguments used to initialize the context.
   * @type {Object}
   * @private
   */
  this.glArgs_ = {preserveDrawingBuffer: !!cacheable,
                  premultipliedAlpha: true/*!cacheable*/,
                  stencil: true};

  /**
   * An object using this.
   * @type {WebGlCanvasListener}
   * @private
   */
  this.listener_ = null;

  try {
    // Prefer the standardized "webgl" context id and fall back to the legacy
    // "experimental-webgl" alias used by older browsers.
    this.gl_ = this.glCanvas_.getContext("webgl", this.glArgs_) ||
               this.glCanvas_.getContext("experimental-webgl", this.glArgs_);
  } catch (e)
  {} // getContext may throw on some platforms; a null gl_ marks this invalid.

  if (this.gl_)
  {
    goog.events.listen(this.glCanvas_, 'webglcontextlost', this.lostContext_, false, this);
    goog.events.listen(this.glCanvas_, 'webglcontextrestored', this.restoredContext_, false, this);
  }
};
/**
 * Handles the 'webglcontextlost' DOM event: marks this canvas invalid and
 * forwards the notification to the registered listener, if any.
 * @param {goog.events.Event} e
 */
gfd.WebGlCanvas.prototype.lostContext_ = function(e)
{
  // Preventing the default allows the browser to restore the context later.
  e.preventDefault();
  this.valid_ = false;
  var listener = this.listener_;
  if (listener)
  {
    listener.lostContext();
  }
};
/**
 * Handles the 'webglcontextrestored' DOM event: marks this canvas valid
 * again and forwards the notification to the registered listener, if any.
 * @param {goog.events.Event} e
 */
gfd.WebGlCanvas.prototype.restoredContext_ = function(e)
{
  this.valid_ = true;
  var listener = this.listener_;
  if (listener)
  {
    listener.restoredContext();
  }
};
/**
 * Sets the listener that gets updated when context gets lost/restored.
 * Pass null to detach the current listener.
 * @param {gfd.WebGlCanvasListener} l
 */
gfd.WebGlCanvas.prototype.setListener = function(l)
{
  this.listener_ = l;
};
/**
 * Whether the canvas can be cached (drawn to a 2d canvas or image) so
 * another element can use the GL context.
 * @returns {boolean}
 */
gfd.WebGlCanvas.prototype.isCacheable = function()
{
  // Decided at construction time via the preserveDrawingBuffer context arg.
  return this.glArgs_.preserveDrawingBuffer ? true : false;
};
/**
 * Returns the webgl context (null when context creation failed).
 * @returns {Object}
 */
gfd.WebGlCanvas.prototype.getGl = function()
{
  return this.gl_;
};
/**
 * Returns the canvas element backing this context.
 * @returns {Element}
 */
gfd.WebGlCanvas.prototype.getCanvas = function()
{
  return this.glCanvas_;
};
/**
 * Creates a webgl program and returns it. The program is tracked so it can
 * be deleted when this canvas is released. Returns undefined without a
 * context.
 * @returns {Object}
 */
gfd.WebGlCanvas.prototype.createProgram = function()
{
  if (!this.gl_)
  {
    return undefined;
  }
  var program = this.gl_.createProgram();
  this.glPrograms_.push(program);
  return program;
};
/**
 * Creates a webgl texture and returns it. The texture is tracked so it can
 * be deleted when this canvas is released. Returns undefined without a
 * context.
 * @returns {Object}
 */
gfd.WebGlCanvas.prototype.createTexture = function()
{
  if (!this.gl_)
  {
    return undefined;
  }
  var texture = this.gl_.createTexture();
  this.glTextures_.push(texture);
  return texture;
};
/**
 * Creates a webgl buffer and returns it. The buffer is tracked so it can be
 * deleted when this canvas is released. Returns undefined without a context.
 * @returns {Object}
 */
gfd.WebGlCanvas.prototype.createBuffer = function()
{
  if (!this.gl_)
  {
    return undefined;
  }
  var buffer = this.gl_.createBuffer();
  this.glBuffers_.push(buffer);
  return buffer;
};
/**
 * Whether a gl context was successfully created and has not been lost.
 * @return {boolean}
 */
gfd.WebGlCanvas.prototype.isValid = function()
{
  if (this.gl_ == null)
  {
    return false;
  }
  return this.valid_;
};
/**
 * Releases any gl objects created through this canvas and resets the gl
 * state so the next user of the context starts from a clean slate.
 */
gfd.WebGlCanvas.prototype.release = function()
{
  var gl = this.gl_;
  if (gl)
  {
    // Delete tracked resources in reverse creation order, emptying each list.
    while (this.glPrograms_.length > 0)
    {
      gl.deleteProgram(this.glPrograms_.pop());
    }
    while (this.glBuffers_.length > 0)
    {
      gl.deleteBuffer(this.glBuffers_.pop());
    }
    while (this.glTextures_.length > 0)
    {
      gl.deleteTexture(this.glTextures_.pop());
    }
    // Reset the clear state and wipe color/depth/stencil buffers.
    gl.clearColor(0, 0, 0, 0);
    gl.clearStencil(0);
    gl.clearDepth(0);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT);
  }
};
/**
 * The number of canvases currently allocated (pooled or in use).
 * @type {number}
 */
gfd.WebGlCanvas.numCanvases = 0;
/**
 * A static pool so WebGlArtElements can render when necessary.
 * Released canvases are pushed here for reuse by createWebGlCanvas.
 * @type {Array.<gfd.WebGlCanvas>}
 * @private
 */
gfd.WebGlCanvas.pool_ = [];
/**
 * Whether this can create a webgl canvas without going over max canvases —
 * true when either a pooled canvas exists or the allocation cap is not yet
 * reached.
 * @returns {boolean}
 */
gfd.WebGlCanvas.canCreateWebGlCanvas = function()
{
  var pooled = gfd.WebGlCanvas.pool_.length;
  var belowCap = gfd.WebGlCanvas.numCanvases < gfd.Constants.MAX_WEBGL_CANVASES;
  return pooled || belowCap;
};
/**
 * A static factory that hands out a webgl canvas, preferring to recycle one
 * from the pool with a matching cacheable flag before allocating a new one
 * (subject to the MAX_WEBGL_CANVASES cap).
 * @param {gfd.WebGlCanvasListener} listener
 * @param {boolean} cacheable
 * @returns {gfd.WebGlCanvas} a valid canvas, or null if none is available.
 */
gfd.WebGlCanvas.createWebGlCanvas = function(listener, cacheable)
{
  var reclaimed = null;
  // Scan the pool from the back for a canvas with the right cacheability.
  var idx = gfd.WebGlCanvas.pool_.length;
  while (--idx >= 0)
  {
    if (gfd.WebGlCanvas.pool_[idx].isCacheable() == cacheable)
    {
      reclaimed = gfd.WebGlCanvas.pool_.splice(idx, 1)[0];
      break;
    }
  }
  // Nothing pooled: allocate a fresh one if the cap allows it.
  if (!reclaimed && gfd.WebGlCanvas.numCanvases < gfd.Constants.MAX_WEBGL_CANVASES)
  {
    gfd.WebGlCanvas.numCanvases++;
    reclaimed = new gfd.WebGlCanvas(cacheable);
  }
  if (reclaimed && reclaimed.isValid())
  {
    reclaimed.setListener(listener);
    return reclaimed;
  }
  return null;
};
/**
 * A static method to release a webglcanvas by putting it back in the pool.
 * Detaches the listener and frees all GPU objects before recycling.
 * @param {gfd.WebGlCanvas} canvas
 */
gfd.WebGlCanvas.releaseWebGlCanvas = function(canvas)
{
  canvas.setListener(null);
  canvas.release();
  gfd.WebGlCanvas.pool_.push(canvas);
};
|
# Entry point: start the item_catalog Flask application.
from item_catalog import app
# NOTE(review): binds to all interfaces (0.0.0.0) on port 5000 — fine for a
# dev container, but confirm this script is not used as-is in production.
app.run(host='0.0.0.0', port=5000)
|
import time
import gurobipy as grb
from tqdm import tqdm
def translate_loco_type(in_type):
    """Map a raw locomotive type code to its canonical "<type> - <number>" label.

    Exact codes win; BR232*/ES64F* families match by prefix; anything else
    falls back to the 59E label.
    """
    exact_labels = {
        "3E": "3E - 7",
        "DE6400": "DE6400 - 6495",
        "JT42C": "JT42C - 166",
        "TEM2": "TEM2 - 083",
        "X4EC": "X4EC - 52",
    }
    if in_type in exact_labels:
        return exact_labels[in_type]
    if in_type.startswith("BR232"):
        return "BR232 - 1003"
    if in_type.startswith("ES64F"):
        return "ES64F - 1006"
    return "59E - 1001"
def generate_master_matrix(drivers_l_arg, f_index_list_arg, train_driver_combinations):
    """Build {(train, loco_type): {"drivers": eligible}} for every non-ALPHA row.

    A driver is eligible for a row when qualified for the loco type
    (``drivers_l_arg``, keyed by the translated label) AND allowed on the
    train (``train_driver_combinations``).
    """
    rows = {(train, loco) for [train, _, loco, _] in f_index_list_arg if train != "ALPHA"}
    matrix = dict()
    for (train, loco) in rows:
        qualified_for_loco = set(drivers_l_arg[translate_loco_type(loco)])
        allowed_on_train = train_driver_combinations[train]
        matrix[(train, loco)] = {"drivers": qualified_for_loco.intersection(allowed_on_train)}
    return matrix
def generate_conflict_matrix(master_matrix, clique):
    """Extract the rows of ``master_matrix`` whose train belongs to ``clique``.

    Returns a triple:
      * conflict_matrix — {(train, loco_type): driver-set} for clique trains,
      * drivers_in_matrix — union of all those driver sets,
      * common_drivers_in_clq — intersection of all those driver sets.

    Bug fix: the original used ``i = 0`` but never incremented it, so the
    "common drivers" accumulator was overwritten by every row instead of
    being intersected — it ended up equal to the LAST row's drivers.
    """
    conflict_matrix = dict()
    drivers_in_matrix = set()
    common_drivers_in_clq = set()
    first_row = True
    for t_id in clique:
        for k, v in master_matrix.items():
            if k[0] == t_id:
                conflict_matrix[k] = v["drivers"]
                if first_row:
                    # Seed the intersection with the first matching row.
                    common_drivers_in_clq = v["drivers"]
                    first_row = False
                else:
                    common_drivers_in_clq = common_drivers_in_clq.intersection(v["drivers"])
    for drivers in conflict_matrix.values():
        drivers_in_matrix = drivers_in_matrix.union(drivers)
    return conflict_matrix, drivers_in_matrix, common_drivers_in_clq
def generate_limited_master_matrix(master_matrix, assignments):
    """Restrict ``master_matrix`` to the (train, loco) rows used by f variables.

    ``assignments`` are solver variables whose names look like
    ``f_<train>_<...>_<loco>``; ALPHA rows and non-f variables are ignored.
    """
    chosen_rows = set()
    for var in assignments:
        if var.varName[0] != "f":
            continue
        [_, t1, _, loco] = var.varName.split("_")
        if t1 != "ALPHA":
            chosen_rows.add((t1, loco))
    return {key: row for key, row in master_matrix.items() if key in chosen_rows}
def generate_locos_available_for_driver(t_argument, d, sets, foreign_locos):
    """List loco base types driver ``d`` can use, plus reachable foreign locos.

    Starts from the deduplicated base types of the driver's own locos
    (``sets["locos_d"]``) and, for each candidate train, appends the concrete
    foreign loco of the ES64F / 59E / BR232 families when the driver is
    qualified for that family and the train has such a loco.
    """
    base_types = {entry.split(" - ")[0] for entry in sets["locos_d"][d]}
    available = list(base_types)
    for t in t_argument:
        # Same family order as before: ES64F, then 59E, then BR232.
        for family in ("ES64F", "59E", "BR232"):
            if family in base_types and t in foreign_locos[family].keys():
                available.append(foreign_locos[family][t])
    return available
def generate_cuts_c11(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations):
    """Project constraint C11 onto one (train, loco_type) column of the model.

    When no driver is simultaneously qualified for ``loco_type`` and allowed
    on train ``t2``: forbid all f variables routing this loco type into ``t2``
    if no feasible follow-up train exists, otherwise bound them by the f
    variables of the feasible next trains. Mutates ``model`` in place and
    always returns 0.
    """
    if t2 != "OMEGA":
        # Drivers qualified for this loco type AND allowed on this train.
        drivers_l = set(sets["drivers_l"][translate_loco_type(loco_type)])
        drivers_t = train_driver_combinations[t2]
        drivers_available = drivers_l.intersection(drivers_t)
        # Condition 1 - no v variables
        # v_counter = len([i for i in v_index_list if i[0] == t2 and i[1] in drivers_available])
        v_counter = len(drivers_available)
        v_condition = v_counter == 0
        # Early exit: some driver can take the train, so no cut is needed.
        # (Past this point v_condition is necessarily True.)
        if v_counter > 0:
            return 0
        # Condition 2 - next trains
        feasible_next_trains_counter = 0
        rhs_variables_list = []
        for d in drivers_available:
            next_trains = sets["trains_next_t_d"][d][t2]
            feasible_next_trains = set()
            for t_i in next_trains:
                if d in train_driver_combinations[t_i]:
                    feasible_next_trains.add(t_i)
            feasible_next_trains_counter += len(feasible_next_trains)
            if feasible_next_trains:
                locos_available_for_driver = generate_locos_available_for_driver(feasible_next_trains, d, sets,
                                                                                foreign_locos)
                # Collect the f-variable names (index 3) of feasible pairs.
                rhs_variables_list += [i[3] for i in f_index_list
                                       if i[1] in feasible_next_trains and i[2] in locos_available_for_driver]
        rhs_variables_list = set(rhs_variables_list)
        if v_condition and feasible_next_trains_counter == 0:
            # No driver and no feasible continuation: forbid this column.
            prohibited_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
            coefficients = [1 for _ in range(len(prohibited_vars_names))]
            prohibited_vars = [model.getVarByName(i) for i in prohibited_vars_names]
            expr = grb.LinExpr(coefficients, prohibited_vars)
            model.addConstr(expr <= 0)
            return 0
        if v_condition and feasible_next_trains_counter != 0:
            # Bound the column by the feasible follow-up f variables.
            lhs_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
            coefficients = [1 for _ in range(len(lhs_vars_names))]
            lhs_vars = [model.getVarByName(i) for i in lhs_vars_names]
            lhs = grb.LinExpr(coefficients, lhs_vars)
            variables = [model.getVarByName(i) for i in rhs_variables_list]
            coefficients = [1 for _ in range(len(variables))]
            rhs = grb.LinExpr(coefficients, variables)
            model.addConstr(lhs <= rhs, name=f"c11_proj_{t2}_{loco_type}")
            return 0
def generate_cuts_c12(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations):
    """Project constraint C12 — the predecessor-side mirror of C11.

    When no driver is qualified for ``loco_type`` and allowed on ``t2``,
    either forbid the column's f variables (no feasible previous train) or
    bound them by the f variables of the feasible previous trains.
    Mutates ``model`` in place and always returns 0.
    """
    if t2 != "OMEGA":
        # Drivers qualified for this loco type AND allowed on this train.
        drivers_l = set(sets["drivers_l"][translate_loco_type(loco_type)])
        drivers_t = train_driver_combinations[t2]
        drivers_available = drivers_l.intersection(drivers_t)
        # Condition 1 - no y variables
        # y_counter = len([i for i in y_index_list if i[0] == t2 and i[1] in drivers_available])
        y_counter = len(drivers_available)
        y_condition = y_counter == 0
        # Early exit: some driver can take the train, so no cut is needed.
        if y_counter > 0:
            return 0
        # Condition 2 - previous trains
        feasible_prev_trains_counter = 0
        rhs_variables_list = []
        for d in drivers_available:
            prev_trains = sets["trains_previous_t_d"][d][t2]
            feasible_prev_trains = set()
            for t_i in prev_trains:
                if d in train_driver_combinations[t_i]:
                    feasible_prev_trains.add(t_i)
            # feasible_prev_trains = [i[0] for i in delta_index_list if i[0] in prev_trains and i[1] == d]
            feasible_prev_trains_counter += len(feasible_prev_trains)
            if feasible_prev_trains:
                locos_available_for_driver = generate_locos_available_for_driver(feasible_prev_trains, d, sets,
                                                                                foreign_locos)
                rhs_variables_list += [i[3] for i in f_index_list
                                       if i[1] in feasible_prev_trains and i[2] in locos_available_for_driver]
        rhs_variables_list = set(rhs_variables_list)
        if y_condition and feasible_prev_trains_counter == 0:
            # No driver and no feasible predecessor: forbid this column.
            prohibited_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
            coefficients = [1 for _ in range(len(prohibited_vars_names))]
            prohibited_vars = [model.getVarByName(i) for i in prohibited_vars_names]
            expr = grb.LinExpr(coefficients, prohibited_vars)
            model.addConstr(expr <= 0, name=f"C12_proj_{t2}_{loco_type}")
            return 0
        if y_condition and feasible_prev_trains_counter != 0:
            # Bound the column by the feasible predecessor f variables.
            lhs_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
            coefficients = [1 for _ in range(len(lhs_vars_names))]
            lhs_vars = [model.getVarByName(i) for i in lhs_vars_names]
            lhs = grb.LinExpr(coefficients, lhs_vars)
            variables = [model.getVarByName(i) for i in rhs_variables_list]
            coefficients = [1 for _ in range(len(variables))]
            rhs = grb.LinExpr(coefficients, variables)
            model.addConstr(lhs <= rhs, name=f"c12_proj_{t2}_{loco_type}")
            return 0
def generate_cuts_c13(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations, omega_dict):
    """Project constraint C13 using omega (end-of-duty) variables.

    Applies only when drivers are available for the column but none has an
    omega variable on ``t2``: either forbid the column's f variables (no
    feasible train after the break) or bound them by the f variables of the
    feasible after-break trains. Mutates ``model`` in place; returns 0.
    """
    if t2 == "OMEGA":
        return 0
    drivers_l = set(sets["drivers_l"][translate_loco_type(loco_type)])
    drivers_t = train_driver_combinations[t2]
    drivers_available = drivers_l.intersection(drivers_t)
    # Unlike C11/C12, an empty driver pool means this cut does not apply.
    if len(drivers_available) == 0:
        return 0
    # Condition 1 - no omega variables
    if t2 not in omega_dict.keys():
        omega_counter = 0
    else:
        omegas = omega_dict[t2].intersection(drivers_available)
        omega_counter = len(omegas)
    omega_condition = omega_counter == 0
    # Early exit: some driver may end their duty here; no cut needed.
    if omega_counter > 0:
        return 0
    # Condition 2 - trains after break
    feasible_trains_after_break_counter = 0
    trains_to_enforce = []
    locos_to_enforce = []
    for d in drivers_available:
        trains_after_break = sets["trains_after_break_t_d"][d][t2]
        feasible_trains_after_break = set()
        for t_i in trains_after_break:
            if d in train_driver_combinations[t_i]:
                feasible_trains_after_break.add(t_i)
        # feasible_trains_after_break = set([i[0] for i in y_index_list if i[0] in trains_after_break and i[1] == d])
        feasible_trains_after_break_counter += len(feasible_trains_after_break)
        if feasible_trains_after_break:
            trains_to_enforce.extend(feasible_trains_after_break)
            locos_available_for_driver = set(generate_locos_available_for_driver(feasible_trains_after_break, d, sets,
                                                                                foreign_locos))
            locos_to_enforce.extend(locos_available_for_driver)
    if omega_condition and feasible_trains_after_break_counter == 0:
        # No omega and no feasible continuation after break: forbid column.
        prohibited_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
        coefficients = [1 for _ in range(len(prohibited_vars_names))]
        prohibited_vars = [model.getVarByName(i) for i in prohibited_vars_names]
        expr = grb.LinExpr(coefficients, prohibited_vars)
        model.addConstr(expr <= 0, name=f"C13_proj_{t2}_{loco_type}")
        return 0
    if omega_condition and feasible_trains_after_break_counter != 0:
        # Bound the column by the feasible after-break (train, loco) pairs.
        lhs_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
        coefficients = [1 for _ in range(len(lhs_vars_names))]
        lhs_vars = [model.getVarByName(i) for i in lhs_vars_names]
        lhs = grb.LinExpr(coefficients, lhs_vars)
        trains_to_enforce = set(trains_to_enforce)
        locos_to_enforce = set(locos_to_enforce)
        rhs_variables_list = [i[3] for i in f_index_list if i[1] in trains_to_enforce and i[2] in locos_to_enforce]
        variables = [model.getVarByName(i) for i in rhs_variables_list]
        coefficients = [1 for _ in range(len(variables))]
        rhs = grb.LinExpr(coefficients, variables)
        model.addConstr(lhs <= rhs, name=f"c13_proj_{t2}_{loco_type}")
        return 0
def generate_cuts_c14(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations, alpha_dict):
    """Project constraint C14 using alpha (start-of-duty) variables — the
    predecessor-side mirror of C13.

    NOTE(review): unlike C13 there is no ``t2 == "OMEGA"`` guard here;
    confirm OMEGA never reaches this function (a missing key in
    ``train_driver_combinations`` would raise KeyError).
    """
    drivers_l = set(sets["drivers_l"][translate_loco_type(loco_type)])
    # dont loop below, use a dict! Same holds for all the other places when you loop like this!
    drivers_t = train_driver_combinations[t2]
    drivers_available = drivers_l.intersection(drivers_t)
    # Condition 1 - no alpha variables available
    if t2 not in alpha_dict.keys():
        alpha_counter = 0
    else:
        alphas = alpha_dict[t2].intersection(drivers_available)
        alpha_counter = len(alphas)
    alpha_condition = alpha_counter == 0
    # Early exit: some driver may start their duty here; no cut needed.
    if alpha_counter > 0:
        return 0
    # Condition 2 - trains before break
    feasible_trains_before_break_counter = 0
    trains_to_enforce = []
    locos_to_enforce = []
    for d in drivers_available:
        trains_before_break = sets["trains_before_break_t_d"][d][t2]
        feasible_trains_before_break = set()
        for t_i in trains_before_break:
            if d in train_driver_combinations[t_i]:
                feasible_trains_before_break.add(t_i)
        # feasible_trains_before_break = [i[0] for i in v_index_list if i[0] in trains_before_break and i[1] == d]
        feasible_trains_before_break_counter += len(feasible_trains_before_break)
        if feasible_trains_before_break:
            trains_to_enforce.extend(feasible_trains_before_break)
            locos_available_for_driver = set(generate_locos_available_for_driver(feasible_trains_before_break, d, sets,
                                                                                foreign_locos))
            locos_to_enforce.extend(locos_available_for_driver)
    if alpha_condition and feasible_trains_before_break_counter == 0:
        # No alpha and no feasible train before the break: forbid column.
        prohibited_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
        coefficients = [1 for _ in range(len(prohibited_vars_names))]
        prohibited_vars = [model.getVarByName(i) for i in prohibited_vars_names]
        expr = grb.LinExpr(coefficients, prohibited_vars)
        model.addConstr(expr <= 0, name=f"C14_proj_{t2}_{loco_type}")
        return 0
    if alpha_condition and feasible_trains_before_break_counter != 0:
        # Bound the column by the feasible before-break (train, loco) pairs.
        lhs_vars_names = [i[3] for i in f_index_list if i[1] == t2 and i[2] == loco_type]
        trains_to_enforce = set(trains_to_enforce)
        locos_to_enforce = set(locos_to_enforce)
        rhs_variables_list = [i[3] for i in f_index_list if i[1] in trains_to_enforce and i[2] in locos_to_enforce]
        coefficients = [1 for _ in range(len(lhs_vars_names))]
        lhs_vars = [model.getVarByName(i) for i in lhs_vars_names]
        lhs = grb.LinExpr(coefficients, lhs_vars)
        variables = [model.getVarByName(i) for i in rhs_variables_list]
        coefficients = [1 for _ in range(len(variables))]
        rhs = grb.LinExpr(coefficients, variables)
        model.addConstr(lhs <= rhs, name=f"C14_proj_{t2}_{loco_type}")
        return 0
def extend_loco_model(model, f_index_list, sets, master_matrix, train_driver_combinations):
    """Add projected C13/C14 cuts for every (train, loco_type) row of the
    master matrix, skipping foreign loco families (ES64F/BR232/59E).

    C11/C12 generation is currently disabled (kept as commented-out code).
    Prints per-constraint timing and returns the mutated Gurobi model.
    """
    alpha_index_list = sets["alpha_index_list"]
    omega_index_list = sets["omega_index_list"]
    # Group alpha/omega variable indices by train id for O(1) lookups.
    alpha_dict = dict()
    omega_dict = dict()
    for (ta, da, _) in alpha_index_list:
        if ta not in alpha_dict.keys():
            alpha_dict[ta] = {da}
        else:
            alpha_dict[ta].add(da)
    for (to, do, _) in omega_index_list:
        if to not in omega_dict.keys():
            omega_dict[to] = {do}
        else:
            omega_dict[to].add(do)
    # Map each train to the concrete foreign loco id reachable on it.
    # NOTE(review): "59E" is filled from BR232 rows and "BR232" from 59E rows —
    # this may be deliberate cross-qualification, but confirm the keys are not
    # accidentally swapped.
    foreign_locos = dict()
    foreign_locos["ES64F"] = {i[1]: i[2].replace(" - ", "-") for i in f_index_list
                              if i[2][0:5] == "ES64F" and i[1] != "OMEGA"}
    foreign_locos["59E"] = {i[1]: i[2].replace(" - ", "-") for i in f_index_list
                            if i[2][0:5] == "BR232" and i[1] != "OMEGA"}
    foreign_locos["BR232"] = {i[1]: i[2].replace(" - ", "-") for i in f_index_list
                              if i[2][0:3] == "59E" and i[1] != "OMEGA"}
    time_c11 = 0
    time_c12 = 0
    time_c13 = 0
    time_c14 = 0
    for (t2, loco_type) in tqdm(master_matrix.keys()):
        # Cuts are only projected for domestic loco types.
        if loco_type[0:5] != "ES64F" and loco_type[0:5] != "BR232" and loco_type[0:3] != "59E":
            # c11_beginning_time = time.time()
            # generate_cuts_c11(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations)
            # c11_end_time = time.time()
            # time_c11 += c11_end_time - c11_beginning_time
            #
            # c12_beginning_time = time.time()
            # generate_cuts_c12(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations)
            # c12_end_time = time.time()
            # time_c12 += c12_end_time - c12_beginning_time
            c13_beginning_time = time.time()
            generate_cuts_c13(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations, omega_dict)
            c13_end_time = time.time()
            time_c13 += c13_end_time - c13_beginning_time
            c14_beginning_time = time.time()
            generate_cuts_c14(model, t2, loco_type, foreign_locos, f_index_list, sets, train_driver_combinations, alpha_dict)
            c14_end_time = time.time()
            time_c14 += c14_end_time - c14_beginning_time
    # print(f"projection of C11: {time_c11:.2f} s.")
    # print(f"projection of C12: {time_c12:.2f} s.")
    print(f"projection of C13: {time_c13:.2f} s.")
    print(f"projection of C14: {time_c14:.2f} s.")
    return model
|
/*eslint-env node */
/*eslint no-console: 0*/
// Test bootstrap: builds a jsdom DOM and exposes browser globals so
// browser-targeted code can run under plain node. Statement order matters:
// globals derived from `document` must be assigned after jsdom is created.
require('babel-polyfill');
var jsdom = require('jsdom');
//used to log internal jsdom-errors to the console
var virtualConsole = jsdom.createVirtualConsole();
virtualConsole.on('jsdomError', function (error) {
  console.error(error.stack, error.detail);
});
global.document = jsdom.jsdom('<!doctype html><html><body></body></html>', {
  url: 'http://localhost',
  virtualConsole
});
global.window = document.defaultView;
// Minimal in-memory sessionStorage stand-in.
// NOTE(review): getItem returns undefined for never-set keys (real Storage
// returns null) and removeItem stores null instead of deleting — confirm the
// tests don't rely on the spec behavior.
window.sessionStorage = {
  getItem: function (key) {
    return this[key];
  },
  setItem: function (key, value) {
    this[key] = value;
  },
  removeItem: function(key) {
    this[key] = null;
  }
};
global.navigator = {userAgent: 'node.js'};
global.FormData = document.defaultView.FormData;
global.Element = window.Element;
//run tests in production environment
global.__DEVELOPMENT__ = false;
global.__DEVTOOLS__ = false;
global.__TEST__ = true;
// Stylesheet imports are irrelevant in tests; stub them out.
require.extensions['.less'] = () => null;
require.extensions['.css'] = () => null;
|
// Babel helper: shallow-merge every source argument (index 1..n) into
// `target`, copying enumerable own string keys and enumerable own symbols.
function _objectSpread(target) {
  for (let i = 1; i < arguments.length; i++) {
    const source = arguments[i] != null ? arguments[i] : {};
    let ownKeys = Object.keys(source);
    if (typeof Object.getOwnPropertySymbols === 'function') {
      // Only enumerable symbol properties take part in spread semantics.
      const enumerableSymbols = Object.getOwnPropertySymbols(source).filter(function (sym) {
        return Object.getOwnPropertyDescriptor(source, sym).enumerable;
      });
      ownKeys = ownKeys.concat(enumerableSymbols);
    }
    ownKeys.forEach(function (key) {
      _defineProperty(target, key, source[key]);
    });
  }
  return target;
}
// Babel helper: assign `value` to `obj[key]`. An existing key is redefined via
// Object.defineProperty (enumerable/configurable/writable); a new key is set
// by plain assignment. Returns `obj` for chaining.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
// Babel helper for rest-destructuring: copy `source` minus `excluded` keys,
// including enumerable own symbol properties (the Loose variant handles the
// string keys). Returns {} for null/undefined sources.
function _objectWithoutProperties(source, excluded) {
  if (source == null) {
    return {};
  }
  const target = _objectWithoutPropertiesLoose(source, excluded);
  if (Object.getOwnPropertySymbols) {
    for (const sym of Object.getOwnPropertySymbols(source)) {
      if (excluded.indexOf(sym) >= 0) continue;
      if (!Object.prototype.propertyIsEnumerable.call(source, sym)) continue;
      target[sym] = source[sym];
    }
  }
  return target;
}
// Babel helper: copy `source`'s own enumerable string keys into a new object,
// skipping any key listed in `excluded`. Returns {} for null/undefined.
function _objectWithoutPropertiesLoose(source, excluded) {
  if (source == null) {
    return {};
  }
  const target = {};
  for (const key of Object.keys(source)) {
    if (excluded.indexOf(key) < 0) {
      target[key] = source[key];
    }
  }
  return target;
}
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import PropTypes from 'prop-types';
import React from 'react';
/**
* Link is a custom component that allows us to supporting rendering elements
* other than `a` in our markup. The goal is to allow users to support passing
* in their own components to support use-cases like `react-router` or
* `@reach/router`
*/
// Render the caller-supplied `element` (tag name or component), forwarding
// every other prop plus the ref. This lets consumers plug in router links.
var Link = React.forwardRef(function Link(props, ref) {
  var element = props.element;
  var rest = _objectWithoutProperties(props, ["element"]);
  var forwardedProps = _objectSpread({}, rest, { ref: ref });
  return React.createElement(element, forwardedProps);
});
// Runtime prop validation, exported so wrapper components can reuse it.
var LinkPropTypes = {
  /**
   * The base element to use to build the link. Defaults to `a`, can also accept
   * alternative tag names or custom components like `Link` from `react-router`.
   */
  element: PropTypes.elementType
};
Link.displayName = 'Link';
Link.propTypes = LinkPropTypes;
// Without an explicit element, render a plain anchor.
Link.defaultProps = {
  element: 'a'
};
export { LinkPropTypes };
export default Link;
|
// @flow strict
import $ from 'lib/$';
import config from 'lib/config';
import mediator from 'lib/mediator';
import fastdom from 'lib/fastdom-promise';
import { addSlot } from 'commercial/modules/dfp/add-slot';
import { adSizes } from 'commercial/modules/ad-sizes';
import { isUserLoggedIn } from 'common/modules/identity/api';
import { commercialFeatures } from 'common/modules/commercial/commercial-features';
import { createSlots } from 'commercial/modules/dfp/create-slots';
import { getAdvertById } from 'commercial/modules/dfp/get-advert-by-id';
import { refreshAdvert } from 'commercial/modules/dfp/load-advert';
import type { Advert } from 'commercial/modules/dfp/Advert';
import type bonzo from 'bonzo';
const createCommentSlots = (
canBeDmpu: boolean
): Array<HTMLDivElement | HTMLSpanElement> => {
const sizes = canBeDmpu ? { desktop: [adSizes.halfPage] } : {};
const adSlots = createSlots('comments', { sizes });
adSlots.forEach(adSlot => {
adSlot.classList.add('js-sticky-mpu');
});
return adSlots;
};
const insertCommentAd = (
$commentMainColumn: bonzo,
$adSlotContainer: bonzo,
canBeDmpu: boolean
): Promise<void> => {
const commentSlots = createCommentSlots(canBeDmpu);
return (
fastdom
.write(() => {
$commentMainColumn.addClass('discussion__ad-wrapper');
if (
!config.get('page.isLiveBlog') &&
!config.get('page.isMinuteArticle')
) {
$commentMainColumn.addClass('discussion__ad-wrapper-wider');
}
// Append each slot into the adslot container...
commentSlots.forEach(adSlot => {
$adSlotContainer.append(adSlot);
});
return commentSlots[0];
})
// Add only the fist slot (DFP slot) to GTP
.then((adSlot: HTMLElement) => {
addSlot(adSlot, false);
Promise.resolve(mediator.emit('page:commercial:comments'));
})
);
};
const containsDMPU = (ad: Advert): boolean =>
ad.sizes.desktop.some(el => el[0] === 300 && el[1] === 600);
// Upgrade an existing comments MPU to additionally accept a 300x600 DMPU.
// Mutates the passed advert in place and returns it for chaining.
const maybeUpgradeSlot = (ad: Advert, $adSlot: bonzo): Advert => {
    if (!containsDMPU(ad)) {
        ad.sizes.desktop.push([300, 600]);
        ad.slot.defineSizeMapping([[[0, 0], ad.sizes.desktop]]);
        // Keep the DOM size attribute in sync with the new mapping.
        // NOTE(review): fire-and-forget — the fastdom promise is not awaited.
        fastdom.write(() => {
            $adSlot[0].setAttribute(
                'data-desktop',
                '1,1|2,2|300,250|300,274|fluid|300,600'
            );
        });
    }
    return ad;
};
const runSecondStage = (
$commentMainColumn: bonzo,
$adSlotContainer: bonzo
): void => {
const $adSlot: bonzo = $('.js-ad-slot', $adSlotContainer);
const commentAdvert = getAdvertById('dfp-ad--comments');
if (commentAdvert && $adSlot.length) {
// when we refresh the slot, the sticky behavior runs again
// this means the sticky-scroll height is corrected!
refreshAdvert(maybeUpgradeSlot(commentAdvert, $adSlot));
}
if (!commentAdvert) {
insertCommentAd($commentMainColumn, $adSlotContainer, true);
}
};
// Entry point: once comments render, measure the column and insert an
// MPU/DMPU sized to fit; re-run on "get-more-replies".
export const initCommentAdverts = (): Promise<boolean> => {
    const $adSlotContainer: bonzo = $('.js-discussion__ad-slot');
    if (!commercialFeatures.commentAdverts || !$adSlotContainer.length) {
        return Promise.resolve(false);
    }
    mediator.once(
        'modules:comments:renderComments:rendered',
        (): void => {
            const isLoggedIn: boolean = isUserLoggedIn();
            const $commentMainColumn: bonzo = $(
                '.js-comments .content__main-column'
            );
            fastdom
                .read(() => $commentMainColumn.dim().height)
                .then((mainColHeight: number) => {
                    // always insert an MPU/DMPU if the user is logged in, since the
                    // containers are reordered, and comments are further from most-pop
                    if (
                        mainColHeight >= 800 ||
                        (isLoggedIn && mainColHeight >= 600)
                    ) {
                        // Tall enough column: allow the half-page (DMPU) size.
                        insertCommentAd(
                            $commentMainColumn,
                            $adSlotContainer,
                            true
                        );
                    } else if (isLoggedIn) {
                        insertCommentAd(
                            $commentMainColumn,
                            $adSlotContainer,
                            false
                        );
                    }
                    // When extra replies arrive the slot may need a refresh/upgrade.
                    mediator.on('discussion:comments:get-more-replies', () => {
                        runSecondStage($commentMainColumn, $adSlotContainer);
                    });
                });
        }
    );
    return Promise.resolve(true);
};
// Internals exposed for unit tests only.
export const _ = {
    maybeUpgradeSlot,
    createCommentSlots,
    insertCommentAd,
    runSecondStage,
    containsDMPU,
};
|
'use strict'

// Config fixture: exercises top-level and nested key lookups.
module.exports = {
  name: 'Supercharge Config',
  nested: {
    key: 'nested-value'
  }
}
|
'use strict'

// Load .env only when NODE_ENV was not supplied by the environment.
if (!process.env.NODE_ENV) { require('dotenv').config() }

// Use native promises for mongoose queries.
require('mongoose').Promise = global.Promise

const path = require('path')
const express = require('express')
const mongoose = require('mongoose')
const http = require('http')
const util = require('util')
const mkdirp = util.promisify(require('mkdirp'))
const log = require('./services/logService')
const config = require('./config/environment')

process.env.NODE_ENV = process.env.NODE_ENV || 'development'

mongoose.connect(config.mongo.uri, config.mongo.options)

// Optionally populate the database with seed data.
if (config.seedDB) {
  require('./config/seed')
}

const app = express()
app.enable('trust proxy')
require('./config/express')(app)
require('./api')(app)

// Ensure the log directory exists, then start the HTTP server.
// NOTE(review): if mkdirp fails, the error is logged and the server is never
// started — confirm that silent non-start is intended.
async function startServer() {
  const logPath = path.join(__dirname, '../log/')
  try {
    await mkdirp(logPath)
    http.createServer(app)
      .listen(config.port, config.ip, () => {
        log.info(`Express server listening on ${config.port}, in ${app.get('env')} mode`)
      })
  }
  catch (err) {
    log.error('mkdirp:' + logPath, err)
  }
}

startServer()

module.exports = { app }
|
from bayes_implicit_solvent.marginal_likelihood.single_type_forward_ais import \
annealed_log_posterior_at_multiple_values_of_beta, annealed_log_posterior, dataset
import numpy as np
# Load previously generated posterior samples for this dataset.
posterior_sample_result = np.load('single_type_posterior_samples_{}.npz'.format(dataset))
posterior_samples = posterior_sample_result['traj']


def sample_from_posterior():
    """Draw one sample uniformly at random from the stored posterior samples."""
    return posterior_samples[np.random.randint(len(posterior_samples))]
#from numpy import load
#optimized_reverse_betas = load('../../notebooks/optimized_reverse_betas.npz')['optimized_reverse_betas_1000']
from bayes_implicit_solvent.samplers import random_walk_mh
from tqdm import tqdm
if __name__ == "__main__":
    # Reverse annealed importance sampling: anneal from the posterior (beta=1)
    # back to the prior (beta=0), accumulating log importance weights.
    N_trajectories = 1000
    N_annealing_steps = 10000

    trajectories = []
    log_weight_trajs = []
    for k in range(N_trajectories):
        # Each trajectory starts from an (approximate) posterior sample.
        theta = sample_from_posterior()
        traj = [theta]
        log_weights = [0]
        betas = np.linspace(1, 0, N_annealing_steps)
        #betas = optimized_betas
        #betas = optimized_reverse_betas
        trange = tqdm(range(1, N_annealing_steps))
        for t in trange:
            # Evaluate the current state under the previous and current betas
            # in one call; their difference is the incremental AIS log weight.
            log_pdf_tminus1, log_pdf_t = annealed_log_posterior_at_multiple_values_of_beta(traj[-1],
                                                                                           [betas[t - 1],
                                                                                            betas[t]])
            log_weights.append(log_weights[t - 1] + (log_pdf_t - log_pdf_tminus1))
            # One MH step targeting the current annealed distribution.
            log_prob_fun = lambda theta: annealed_log_posterior(theta, betas[t])
            mh_traj, _, _ = random_walk_mh(traj[-1], log_prob_fun, n_steps=1, stepsize=0.015, progress_bar=False)
            traj.append(mh_traj[-1])
            trange.set_postfix(running_log_Z_estimate=-log_weights[-1])
        trajectories.append(np.array(traj))
        log_weight_trajs.append(np.array(log_weights))

    # (Removed a redundant `import numpy as np` that shadowed the module-level
    # import already in scope.)
    np.savez('single_type_reverse_ais_long_protocol_{}.npz'.format(dataset),
             trajectories=trajectories,
             log_weight_trajectories=log_weight_trajs)
|
module.exports = {
exe: async (message, args, modules, prefix, command) => {
message.channel.send(message.content.slice(prefix.length + command.length + 1))
},
config: {
name: "say", aliases: [], category: "moderation", handler: { deleteInvoke: true, staff: true, dev: false, inPogress: false }, help: { desc: "Permet à Lana d'envoyer un message à votre place", use: "=say <message>" }, modules: []
}
};
|
import pandas as pd
from nltk import StanfordPOSTagger
from common.word_format.df_utils import Nlp_util, Df_util
class PosTagger:
    """Adds Stanford POS tags to a token DataFrame, then applies rule-based
    corrections suited to informal chat-style text."""

    @classmethod
    def add_pos_tag(cls, df):
        """Tag df.word with POS labels and fix common mis-tags.

        :param df: DataFrame with a 'word' column (plus 'sidx'/'widx' used by
            the 'love' correction).
        :returns: the same DataFrame with a corrected 'pos' column.
        """
        df = cls.__add_basic_pos_tag(df)
        # Hand-curated overrides for words the tagger gets wrong on informal text.
        # NOTE(review): 'did\'t' looks like a typo for "didn't" — confirm against data.
        dic_for_correction = {"feel": "VB", "talk": "VB", 'u': 'PRP', 'i': 'PRP', 'know': 'VB', 'move': 'VB',
                              'ask': 'VB', 'dont': 'RB', 'don`t': 'RB', 'didnt': 'RB', 'did\'t': 'RB', "am": 'VB',
                              "get": 'VB', "ex": "NN", "hate": "VB", "wish": "VB", "not": "NO", "never": "NO"}
        df = cls.__correct_pos_tag(df, dic_for_correction)
        df = cls.__convert_pos_of_love(df)
        return df

    @staticmethod
    def __add_basic_pos_tag(df):
        """Run the Stanford POS tagger over the word column; attach a 'pos' column."""
        pos_path_jar = "./nlp_library/stanford-postagger-full-2017-06-09/stanford-postagger.jar"
        pos_path_model = "./nlp_library/stanford-postagger-full-2017-06-09/models/english-left3words-distsim.tagger"
        pos_tagger = StanfordPOSTagger(pos_path_model, pos_path_jar)
        # Tag the whole word column as a single sequence, then keep the tags.
        pos = [pos_tagger.tag(s) for s in [df.word]]
        pos = [i[1] for i in pos[0]]
        pos = pd.DataFrame(pos)
        df['pos'] = pos
        return df

    @staticmethod
    def __correct_pos_tag(df, dic):
        """Apply dictionary overrides plus context-sensitive fixes in place."""
        if any(df["word"].isin(dic.keys())):
            df.loc[df["word"].isin(dic.keys()), "pos"] = df[df["word"].isin(dic.keys())].apply(
                lambda row: dic[row["word"]], axis=1)
        if any(df["word"].isin(["that", "it", "this"])):
            idx_list_of_kws = Nlp_util.get_idx_list_of_word_list(["that", "it", "this"], df["word"])
            for idx_of_kw in idx_list_of_kws:
                # Bug fix: the lookahead df.loc[idx_of_kw + 1, "pos"] used to be
                # evaluated unconditionally *before* the bounds check, raising a
                # KeyError whenever the keyword was the final row. Inlining it
                # lets the `or` short-circuit guard the out-of-range access.
                if idx_of_kw == len(df) - 1 or \
                        df.loc[idx_of_kw + 1, "pos"] not in Nlp_util.pos_PRPs + Nlp_util.pos_NOUNs:
                    df.loc[idx_of_kw, "pos"] = "NN"
        if Df_util.anything_isin(["like", "care", "guess", "need"], df["word"]):
            idx_list_of_like = Nlp_util.get_idx_list_of_word_list(["like", "care", "guess", "need"], df["word"])
            for idx_of_like in idx_list_of_like:
                # Verb reading when preceded by a noun/pronoun ("i like ...").
                if not idx_of_like == 0 and df.loc[idx_of_like - 1, "pos"] in Nlp_util.pos_NOUNs + Nlp_util.pos_PRPs:
                    df.loc[idx_of_like, "pos"] = "VB"
        if Df_util.anything_isin(["work"], df["word"]):
            idx_list_of_work = Nlp_util.get_idx_list_of_word_list(["work"], df["word"])
            for idx_of_work in idx_list_of_work:
                # "this work(s)" → verb reading.
                if not idx_of_work == 0 and df.loc[idx_of_work - 1, "word"] in ["this"]:
                    df.loc[idx_of_work, "pos"] = "VB"
        return df

    @staticmethod
    def __convert_pos_of_love(df):
        """Retag 'love' as VBP when an 'i' occurs earlier in the same sentence."""
        if any(df.word == 'love'):
            love_rows = df[df.word == 'love']
            for row_idx, row in love_rows.iterrows():
                # Words in the same sentence (sidx) that precede this one (widx).
                search_range = df[(df.sidx == row.sidx) & (df.widx < row.widx)].word
                if any(i == 'i' for i in search_range):
                    df.at[row_idx, 'pos'] = 'VBP'
        return df
|
const express = require('express');
const livereload = require('livereload');
const connectLivereload = require('connect-livereload');
const path = require('path');
const fs = require('fs');
const { STYLES_DEST_DIR, DEMO_SRC_DIR } = require('./constants');

// Local demo server with live reload of the demo sources and built styles.
const app = express();
app.use(connectLivereload());
app.use(express.urlencoded({ extended: false }));
app.use(express.static(DEMO_SRC_DIR));

const livereloadServer = livereload.createServer();
livereloadServer.watch([DEMO_SRC_DIR, STYLES_DEST_DIR]);

// Serve "<name>.css" from the styles build dir, otherwise "<name>.html" from
// the demo sources; 404 for anything else.
// NOTE(review): `file` comes straight from the URL into path.join — fine for a
// local dev tool, but confirm this is never exposed beyond localhost.
app.get('/:file', (req, res) => {
  const { file = 'index' } = req.params;
  if (/\.css$/.test(file)) {
    res.sendFile(path.join(STYLES_DEST_DIR, file));
    return;
  }
  const htmlFile = path.join(DEMO_SRC_DIR, `${file}.html`);
  if (fs.existsSync(htmlFile)) {
    res.sendFile(htmlFile);
    return;
  }
  res.sendStatus(404);
});

app.listen(8000, () => {
  console.log('Demo site live at http://localhost:8000');
});
|
import os
import sys
import time
import logging
import multiprocessing
import pytest
from clitest import CmdlineInterfaceTest
# Make the package under test importable when running from the tests directory.
sys.path.insert(0, os.path.abspath('..'))

# If we want to measure code coverage across CLI invocations then we can do
# "coverage -x goeffel". Just need to figure out where to let coverage to
# store its state.

# Verbose root logger so CLI test failures carry full context.
logging.basicConfig(
    format='%(asctime)s,%(msecs)-6.1f %(name)s %(funcName)s# %(message)s',
    datefmt='%H:%M:%S')
log = logging.getLogger()
log.setLevel(logging.DEBUG)
@pytest.fixture
def clitest(tmp_path, request):
    """Per-test CLI harness running in an isolated temp directory.

    The harness is named after the test so artifacts are attributable.
    """
    testname = request.node.name
    c = CmdlineInterfaceTest(
        name=testname,
        rundir=str(tmp_path),
        # Force UTF-8 stdio in the child so output comparisons are stable.
        preamble_lines=['export PYTHONIOENCODING="utf-8"']
    )
    yield c
@pytest.fixture(scope='session', autouse=True)
def testprocess():
    """Session-wide dummy process whose PID the CLI under test can monitor."""

    def _run():
        # Idle loop: the process only needs to exist, not to do work.
        while True:
            time.sleep(0.1)

    log.info('Start test process')
    p = multiprocessing.Process(target=_run)
    # Make it less likely for the test runner to leave behind an orphaned child.
    p.daemon = True
    try:
        p.start()
        yield p
    finally:
        p.terminate()
        p.join()
        log.info('Test process terminated cleanly')
def test_pid(clitest, testprocess):
    # Smoke test: monitor an existing PID for one second.
    clitest.run(f"goeffel --pid {testprocess.pid} -t 1")


def test_pid_command_simple(clitest, testprocess):
    # --pid-command runs a shell command whose stdout is the PID to monitor.
    clitest.run(f"goeffel --pid-command 'echo {testprocess.pid}' -t 1")


def test_hdf5_path_prefix_default(clitest, testprocess):
    # Default output name: goeffel-timeseries__<start>-<end>.hdf5
    clitest.run(f"goeffel --pid {testprocess.pid} -i 0.3 -t 1")
    clitest.expect_filename_pattern(
        r'^goeffel-timeseries__[0-9]+-[0-9]+\.hdf5$')


def test_hdf5_path_with_label(clitest, testprocess):
    # --label is embedded between the prefix and the timestamps.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 "
        "--label custom-label"
    )
    clitest.expect_filename_pattern(
        r'^goeffel-timeseries_custom-label_[0-9]+-[0-9]+\.hdf5$')


def test_hdf5_path_prefix_custom(clitest, testprocess):
    # A custom prefix replaces the default "goeffel-timeseries".
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 "
        "--outfile-hdf5-path-prefix custom_prefix"
    )
    clitest.expect_filename_pattern(
        r'^custom_prefix__[0-9]+-[0-9]+\.hdf5$')


def test_hdf5_path_prefix_custom_and_label(clitest, testprocess):
    # Prefix and label combine in the expected order.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 "
        "--outfile-hdf5-path-prefix custom_prefix --label custom-label"
    )
    clitest.expect_filename_pattern(
        r'^custom_prefix_custom-label_[0-9]+-[0-9]+\.hdf5$')


def test_hdf5_opt_collision(clitest, testprocess):
    # Mutually exclusive output options must fail argument parsing (rc 2).
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 "
        "--outfile-hdf5-path-prefix a --outfile-hdf5-path b",
        expect_rc=2
    )
    clitest.assert_in_stderr('not allowed with argument')


def test_hdf5_path(clitest, testprocess):
    # An explicit output path is used verbatim.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 "
        "--outfile-hdf5-path out.hdf5"
    )
    clitest.expect_filename_pattern(
        r'^out\.hdf5$')


def test_a_number_of_features_together(clitest, testprocess):
    # Exercise several CLI options in a single invocation.
    clitest.run(
        f"goeffel --pid-command 'echo {testprocess.pid}' "
        "--diskstats sda --sampling-interval 0.3 -t 1"
    )
def test_analysis_inspect(clitest, testprocess):
    # Record a short time series, then verify `inspect` describes the file.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 --outfile-hdf5-path out.hdf5"
    )
    clitest.run(
        f"goeffel-analysis inspect out.hdf5"
    )
    clitest.assert_in_stdout([
        'Created with: Goeffel',
        'Table properties:',
        'Column names:'
    ])


def test_analysis_plot(clitest, testprocess):
    # Default plot command should emit a PNG.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 --outfile-hdf5-path out.hdf5"
    )
    clitest.run(
        f"goeffel-analysis plot out.hdf5"
    )
    clitest.assert_in_stderr(['Writing figure as PNG to'])


def test_analysis_plot_additional_metric(clitest, testprocess):
    # Plot with an extra --metric column.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 --outfile-hdf5-path out.hdf5"
    )
    clitest.run(
        f"goeffel-analysis plot out.hdf5 --metric system_loadavg1"
    )
    clitest.assert_in_stderr(['Writing figure as PNG to'])


def test_analysis_flexplot(clitest, testprocess):
    # flexplot takes a labeled series plus an explicit column specification.
    clitest.run(
        f"goeffel --pid {testprocess.pid} -i 0.3 -t 1 --outfile-hdf5-path out.hdf5"
    )
    clitest.run(
        "goeffel-analysis flexplot --series out.hdf5 'label' "
        "--column proc_num_ip_sockets_open 'ylabel' 'plottitle' 5"
    )
    clitest.assert_in_stderr(['Writing figure as PNG to'])
|
// Screenshot paste support: captures images pasted into the task popover.
Kanboard.Screenshot = function(app) {
    this.app = app;
    this.pasteCatcher = null;
    // Bug fix: the previous code called `.bind(this)` both when adding AND
    // when removing listeners. Each bind() returns a new function object, so
    // removeEventListener never matched and handlers accumulated on every
    // initialize()/destroy() cycle (createImage re-initializes). Binding once
    // here gives stable references that can actually be removed.
    this.onFocusClick = this.setFocus.bind(this);
    this.onPaste = this.pasteHandler.bind(this);
};

// Initialize the paste zone when the popover containing it opens.
Kanboard.Screenshot.prototype.onPopoverOpened = function() {
    if (this.app.hasId("screenshot-zone")) {
        this.initialize();
    }
};

// Setup event listener and workarounds
Kanboard.Screenshot.prototype.initialize = function() {
    this.destroy();

    if (! window.Clipboard) {
        // Create a contenteditable element
        this.pasteCatcher = document.createElement("div");
        this.pasteCatcher.id = "screenshot-pastezone";
        this.pasteCatcher.contentEditable = "true";

        // Insert the content editable at the top to avoid scrolling down in the board view
        this.pasteCatcher.style.opacity = 0;
        this.pasteCatcher.style.position = "fixed";
        this.pasteCatcher.style.top = 0;
        this.pasteCatcher.style.right = 0;
        this.pasteCatcher.style.width = 0;

        document.body.insertBefore(this.pasteCatcher, document.body.firstChild);

        // Set focus on the contenteditable element
        this.pasteCatcher.focus();

        // Set the focus when clicked anywhere in the document
        document.addEventListener("click", this.onFocusClick);

        // Set the focus when clicked in screenshot dropzone (popover)
        document.getElementById("screenshot-zone").addEventListener("click", this.onFocusClick);
    }

    window.addEventListener("paste", this.onPaste);
};

// Destroy the contentEditable element and detach everything initialize() added
Kanboard.Screenshot.prototype.destroy = function() {
    // Remove listeners using the exact bound references used to add them.
    window.removeEventListener("paste", this.onPaste);
    document.removeEventListener("click", this.onFocusClick);

    var zone = document.getElementById("screenshot-zone");
    if (zone) {
        zone.removeEventListener("click", this.onFocusClick);
    }

    if (this.pasteCatcher != null) {
        document.body.removeChild(this.pasteCatcher);
    }
    else if (document.getElementById("screenshot-pastezone")) {
        document.body.removeChild(document.getElementById("screenshot-pastezone"));
    }

    this.pasteCatcher = null;
};

// Set focus on contentEditable element
Kanboard.Screenshot.prototype.setFocus = function() {
    if (this.pasteCatcher !== null) {
        this.pasteCatcher.focus();
    }
};

// Paste event callback
Kanboard.Screenshot.prototype.pasteHandler = function(e) {
    // Firefox doesn't have the property e.clipboardData.items (only Chrome)
    if (e.clipboardData && e.clipboardData.items) {
        var items = e.clipboardData.items;

        if (items) {
            for (var i = 0; i < items.length; i++) {
                // Find an image in pasted elements
                if (items[i].type.indexOf("image") !== -1) {
                    var blob = items[i].getAsFile();

                    // Get the image as base64 data
                    var reader = new FileReader();
                    var self = this;
                    reader.onload = function(event) {
                        self.createImage(event.target.result);
                    };

                    reader.readAsDataURL(blob);
                }
            }
        }
    }
    else {
        // Handle Firefox: wait for the paste to land in the catcher element.
        setTimeout(this.checkInput.bind(this), 100);
    }
};

// Parse the input in the paste catcher element
Kanboard.Screenshot.prototype.checkInput = function() {
    var child = this.pasteCatcher.childNodes[0];

    if (child) {
        // If the user pastes an image, the src attribute
        // will represent the image as a base64 encoded string.
        if (child.tagName === "IMG") {
            this.createImage(child.src);
        }
    }

    this.pasteCatcher.innerHTML = "";
};

// Creates a new image from a given source (base64 data URL)
Kanboard.Screenshot.prototype.createImage = function(blob) {
    var pastedImage = new Image();
    pastedImage.src = blob;

    // Send the image content to the form variable
    pastedImage.onload = function() {
        var sourceSplit = blob.split("base64,");
        var sourceString = sourceSplit[1];
        $("input[name=screenshot]").val(sourceString);
    };

    var zone = document.getElementById("screenshot-zone");
    zone.innerHTML = "";
    zone.className = "screenshot-pasted";
    zone.appendChild(pastedImage);

    // Re-arm the paste zone for a subsequent screenshot.
    this.destroy();
    this.initialize();
};
|
const Sequelize = require('sequelize')
const db = require('../db')

// Join model for cart line items: quantity of a cheese plus shipping cost.
const CheeseCart = db.define('CheeseCarts', {
  quantity: {
    type: Sequelize.INTEGER,
    defaultValue: 0
  },
  shippingCost: {
    type: Sequelize.INTEGER
  }
})

module.exports = CheeseCart
|
"""
.. Necessary to reject for documentation building
* Copyright 2019 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); You may not use this file except
* in compliance with the License.
* A copy of the License is included in the distribution package with this file.
* You also may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* File name :channel.py
* Created on: 5/15/2019
* Created by: suresh
*
* SVN Id: $Id: channel.py 3256 2019-06-10 03:31:30Z ssubrama $
*
* This file encapsulates channel interfaces
"""
import enum
from typing import *
import abc
from tgdb.utils import *
import tgdb.pdu as tgpdu
import tgdb.exception as tgexception
class Status(enum.Enum):
    """Connection status to the server."""
    Waiting = 0
    Ok = 1
    Pushed = 2
    Resend = 3
    # Bug fix: a stray trailing comma previously made this member's value the
    # tuple (4,) instead of the int 4, unlike every other member.
    Disconnected = 4
    Closed = 5
class LinkState(enum.Enum):
    """Socket link state: connected/disconnected plus failure reasons."""
    NotConnected = 0
    Connected = 1
    Closing = 2
    Closed = 3
    FailedOnSend = 4
    FailedOnRecv = 5
    FailedOnProcessing = 6
    Reconnecting = 7
    Terminated = 8

    @classmethod
    def fromId(cls, id):
        """Return the member whose value equals `id`; Terminated if unknown."""
        return next((state for state in cls if state.value == id), cls.Terminated)
class ResendMode(enum.Enum):
    """The resend mode when disconnected: whether to reconnect and what to do
    with the pending operation afterwards."""
    DontReconnectAndIgnore = 0
    ReconnectAndResend = 1
    ReconnectAndRaiseException = 2
    ReconnectAndIgnore = 3
class ProtocolType(enum.Enum):
    """Transport protocol for the channel: plain TCP or TLS."""
    Tcp = 0
    Ssl = 1
class TGChannelResponseWaiter(abc.ABC):
    """Keeps track of a single request/response from the server."""

    class WaiterStatus(enum.Enum):
        # Lifecycle of one pending reply; mirrors Status, scoped to a request.
        Waiting = 1
        Ok = 2
        Pushed = 3
        Resend = 4
        Disconnected = 5
        Closed = 6

    @property
    @abc.abstractmethod
    def isBlocking(self):
        """Is this waiter blocking?

        :returns: True if this waiter blocks the thread, else False.
        """

    @property
    @abc.abstractmethod
    def status(self) -> WaiterStatus:
        """Gets the status of this ChannelResponseWaiter."""

    @property
    @abc.abstractmethod
    def requestid(self):
        """The request id corresponding to the response that this waiter is for."""

    @abc.abstractmethod
    def awaiting(self, status: WaiterStatus):
        """Thread-safe check of whether the waiter is in the given status."""

    @property
    @abc.abstractmethod
    def reply(self):
        """Gets the reply message (thread-safe)."""

    @reply.setter
    def reply(self, msg: tgpdu.TGMessage):
        """Sets the reply message (thread-safe)."""
class TGChannelStopMethod(enum.Enum):
    """Way that the channel was stopped: graceful shutdown, a client-side
    forceful stop, or a kill initiated by the remote side."""
    Graceful = 0
    ClientForceful = 1
    RemoteKill = 2
class ExceptionHandleResult(enum.Enum):
    """Outcome of TGChannel.handleException: rethrow, retry, or treat the
    channel as disconnected."""
    RethrowException = 0
    RetryOperation = 1
    Disconnected = 2
class TGSocket(abc.ABC):
    """Represents a single lower-level socket."""

    @property
    @abc.abstractmethod
    def handle(self):
        """Gets the lower-level socket."""

    @property
    @abc.abstractmethod
    def inboxaddr(self):
        """Gets the client-side address."""

    @abc.abstractmethod
    def connect(self):
        """Connect to the server."""

    @abc.abstractmethod
    def close(self):
        """Disconnect from the server."""

    @abc.abstractmethod
    def send(self, msg: tgpdu.TGMessage) -> int:
        """Send a message, returning the number of bytes sent."""

    @abc.abstractmethod
    def recvMsg(self) -> tgpdu.TGMessage:
        """Reads a message from the socket."""

    def tryRead(self) -> tgpdu.TGMessage:
        """Try to read a message.

        :returns: None if no message was available, otherwise the message that was available.
        """
        # NOTE(review): intentionally not abstract; the docstring-only body
        # means the base implementation always returns None — presumably
        # concrete sockets override this. Confirm against the impl module.
class TGChannel(abc.ABC):
    """Represents a request-reply session.

    Handles all initialization, tear down, and request-reply.
    """

    @property
    @abc.abstractmethod
    def linkstate(self) -> LinkState:
        """Gets whether the channel is connected, or any reason that it is not."""

    @property
    @abc.abstractmethod
    def properties(self) -> TGProperties:
        """Gets the properties for this channel."""

    @property
    @abc.abstractmethod
    def outboxaddr(self):
        """Gets the server-side address."""

    @property
    @abc.abstractmethod
    def protocolversions(self) -> tuple:
        """Gets the protocol version for both the client and server."""

    @property
    @abc.abstractmethod
    def authtoken(self) -> int:
        """Gets the authentication token that the server gave to us."""

    @property
    @abc.abstractmethod
    def sessionid(self) -> int:
        """Gets the session id for this session."""

    @property
    @abc.abstractmethod
    def clientid(self) -> str:
        """Gets this client's identifier."""

    @abc.abstractmethod
    def createSocket(self) -> TGSocket:
        """Creates and returns a default socket."""

    @abc.abstractmethod
    def connect(self):
        """Connect this channel to the server."""

    @abc.abstractmethod
    def start(self):
        """Starts the thread responsible for reading."""

    @abc.abstractmethod
    def disconnect(self):
        """Disconnect from the server."""

    @abc.abstractmethod
    def stop(self, stopmethod=TGChannelStopMethod.Graceful, msg=None):
        """Stop the listening thread."""

    @abc.abstractmethod
    def send(self, msg: tgpdu.TGMessage, response: TGChannelResponseWaiter = None) -> tgpdu.TGMessage:
        """Sends a message and waits for a response."""

    @abc.abstractmethod
    def readMessage(self) -> tgpdu.TGMessage:
        """Reads the message on the socket."""

    @property
    @abc.abstractmethod
    def waiters(self) -> Dict[int, TGChannelResponseWaiter]:
        """The waiters currently active.

        :returns: A dictionary with keys representing the request identifier and values of that channel response waiter.
        """

    @abc.abstractmethod
    def handleException(self, e: Exception) -> ExceptionHandleResult:
        """ Handles the exception.

        :param e: The exception to handle.
        :return: Returns how the exception was handled.
        """

    @property
    def isClosed(self):
        """Whether the channel is closed.

        :returns: True if the channel is closed, closing, or terminated.
        """
        return self.linkstate in (LinkState.Closed, LinkState.Closing, LinkState.Terminated)

    def inError(self):
        """Whether the channel failed on send, receive, or processing."""
        return self.linkstate in (LinkState.FailedOnSend, LinkState.FailedOnRecv, LinkState.FailedOnProcessing)

    @property
    def isConnected(self):
        """Whether the channel is connected."""
        return self.linkstate == LinkState.Connected

    @classmethod
    def createChannel(cls, urlpath: str, username=None, password=None, dbName: str = None, props: TGProperties=None):
        """Creates a channel instance.

        :param urlpath: The URL for the database to connect to. When running on localhost, it should look like
            tcp://localhost:8222 for an insecure connection.
        :param username: The user's name of the database to use to connect to the database.
        :param password: The user's password for the database server.
        :param dbName: The database name to connect to.
        :param props: The properties to set up the connection with, contains any additional properties.
        :returns: The connection object requested.
        """
        klazz = None
        # Precedence: defaults < URL-derived properties < caller overrides.
        chprops = TGProperties(ConfigName.asMap())
        if props is not None:
            chprops.update(props)
        url = TGChannelUrl.parseUrl(urlpath)
        chprops.update(url.properties)
        if username is not None:
            chprops[ConfigName.ChannelUserID] = username
        if dbName is not None:
            chprops[ConfigName.ConnectionDatabaseName] = dbName
        if password is not None:
            chprops[ConfigName.ChannelPassword] = password
        # Resolve the concrete implementation class by name (Class.forName
        # helper from tgdb.utils) based on the URL's protocol.
        if url.protocol == ProtocolType.Tcp:
            klazz = Class.forName('tgdb.impl.channelimpl.TcpChannel')
        elif url.protocol == ProtocolType.Ssl:
            klazz = Class.forName('tgdb.impl.channelimpl.SslChannel')
        else:
            raise tgexception.TGException("Invalid Url specified")
        return klazz(url, chprops)
class TGChannelUrl(abc.ABC):
    """Represents a parsed URL."""

    @property
    @abc.abstractmethod
    def protocol(self) -> ProtocolType:
        """Gets the protocol corresponding with this URL."""

    @property
    @abc.abstractmethod
    def host(self) -> str:
        """The host for this URL."""

    @property
    @abc.abstractmethod
    def user(self) -> str:
        """The user for this URL."""

    @property
    @abc.abstractmethod
    def port(self) -> int:
        """The port for this URL."""

    @property
    @abc.abstractmethod
    def properties(self) -> dict:
        """Any properties specified."""

    @property
    @abc.abstractmethod
    def url(self) -> str:
        """The full URL string."""

    @classmethod
    def parseUrl(cls, url):
        """Parse the URL and return a corresponding instance."""
        # Imported here rather than at module top, presumably to avoid a
        # circular import with the impl module — confirm before moving.
        import tgdb.impl.channelimpl as tgchannelimpl
        return tgchannelimpl.LinkUrl.parse(url)
|
import React from 'react';
import Link from './Link';
// Top navigation bar linking to each widget demo route.
const Header = () => {
  return (
    <div className="ui secondary pointing menu">
      <Link href="/" className="item">
        Accordion
      </Link>
      <Link href="/list" className="item">
        Search
      </Link>
      <Link href="/dropdown" className="item">
        Dropdown
      </Link>
      <Link href="/translate" className="item">
        Translate
      </Link>
    </div>
  );
};

export default Header;
|
module.exports = {
env:{
},
head: {
title: 'ROBOGRAM',
meta: [
{ charset: 'utf-8' },
{ name: 'viewport', content: 'width=device-width, initial-scale=1' },
{ hid: 'description', name: 'description', content: '' }
],
link: [
{ rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }
],
script: [
{ src: 'https://code.jquery.com/jquery-3.3.1.min.js' },
{ src: 'https://cdnjs.cloudflare.com/ajax/libs/velocity/2.0.5/velocity.min.js' },
{ src: 'https://unpkg.com/ionicons@4.1.2/dist/ionicons.js' }
]
},
loading: {
color: '#3B8070'
},
plugins: [
'~plugins/element-ui'
],
css: [
'element-ui/lib/theme-chalk/index.css'
],
modules: [
'@nuxtjs/pwa'
],
workbox: {
dev: true, //開発環境でもPWAできるように
},
manifest: {
name: 'ROBOGRAM',
short_name: 'ROBOGRAM',
title: 'ROBOGRAM',
'og:title': 'ROBOGRAM',
description: 'ROBOGRAM',
'og:description': 'ROBOGRAM',
lang: 'ja',
theme_color: '#192734',
background_color: '#192734'
},
router: {
scrollBehavior: function (to, from, savedPosition) {
let position = {}
if (to.matched.length < 2) {
position = { x: 0, y: 0 }
} else if (to.matched.some(r => r.components.default.options.scrollToTop)) {
position = { x: 0, y: 0 }
}
if (to.hash) {
position = { selector: to.hash }
}
return position
},
},
build: {
vendor: ['axios','element-ui'],
extend (config, { isDev, isClient }) {
if (isDev && isClient) {
config.module.rules.push({
enforce: 'pre',
test: /\.(js|vue)$/,
loader: 'eslint-loader',
exclude: /(node_modules)/
})
}
}
},
}
|
import numpy as np
import time
from mujoco_py import const, MjViewer, ignore_mujoco_warnings
import glfw
from gym.spaces import Box
from gym.spaces import MultiDiscrete
class EnvViewer(MjViewer):
    def __init__(self, env):
        """Interactive viewer for a gym env backed by a mujoco-py simulation."""
        self.env = env
        # Running list of reset durations; seeded with 0 so mean is defined.
        self.elapsed = [0]
        self.env.reset()
        # Capture the seed so the overlay can display the value used to reset.
        self.seed = self.env.seed()
        super().__init__(self.env.unwrapped.sim)
        self.num_action = self.env.action_space.shape[0]
        # Index of the action component currently edited via keyboard.
        self.action_mod_index = 0
        self.action = self.zero_action(self.env.action_space)
def zero_action(self, action_space):
if isinstance(action_space, Box):
return np.zeros(action_space.shape[0])
elif isinstance(action_space, MultiDiscrete):
return action_space.nvec // 2 # assume middle element is "no action" action
def env_reset(self):
start = time.time()
# get the seed before calling env.reset(), so we display the one
# that was used for the reset.
self.seed = self.env.seed()
self.env.reset()
self.elapsed.append(time.time() - start)
self.update_sim(self.env.unwrapped.sim)
def key_callback(self, window, key, scancode, action, mods):
# Trigger on keyup only:
if action != glfw.RELEASE:
return
if key == glfw.KEY_ESCAPE:
self.env.close()
# Increment experiment seed
elif key == glfw.KEY_N:
self.seed[0] += 1
self.env.seed(self.seed)
self.env_reset()
self.action = self.zero_action(self.env.action_space)
# Decrement experiment trial
elif key == glfw.KEY_P:
self.seed = [max(self.seed[0] - 1, 0)]
self.env.seed(self.seed)
self.env_reset()
self.action = self.zero_action(self.env.action_space)
if key == glfw.KEY_A:
if isinstance(self.env.action_space, Box):
self.action[self.action_mod_index] -= 0.05
elif key == glfw.KEY_Z:
if isinstance(self.env.action_space, Box):
self.action[self.action_mod_index] += 0.05
elif key == glfw.KEY_K:
self.action_mod_index = (self.action_mod_index + 1) % self.num_action
elif key == glfw.KEY_J:
self.action_mod_index = (self.action_mod_index - 1) % self.num_action
super().key_callback(window, key, scancode, action, mods)
def render(self):
super().render()
# Display applied external forces.
self.vopt.flags[8] = 1
def run(self, once=False):
while True:
with ignore_mujoco_warnings():
self.env.step(self.action)
self.add_overlay(const.GRID_TOPRIGHT, "Reset env; (current seed: {})".format(self.seed), "N - next / P - previous ")
self.add_overlay(const.GRID_TOPRIGHT, "Apply action", "A (-0.05) / Z (+0.05)")
self.add_overlay(const.GRID_TOPRIGHT, "on action index %d out %d" % (self.action_mod_index, self.num_action), "J / K")
self.add_overlay(const.GRID_BOTTOMRIGHT, "Reset took", "%.2f sec." % (sum(self.elapsed) / len(self.elapsed)))
self.add_overlay(const.GRID_BOTTOMRIGHT, "Action", str(self.action))
self.render()
if once:
return
|
import axios from 'axios';
import { useContext, useEffect, useState } from 'react';
import { Button, Card, Container, Form, Modal } from 'react-bootstrap';
import { useHistory, useParams } from 'react-router';
import { JacuzziContext } from '../../contexts/JacuzziContext';
import { ReviewInvContext } from '../../contexts/ReviewInvContext';
import StarRating from '../StarRating';
import UseForm from '../webpanel/UseForm';
const AddReview = () => {
const { id } = useParams();
const { invites } = useContext(ReviewInvContext);
const { jacuzzis, updateData, onSuccess } = useContext(JacuzziContext);
const [validInv, setValid] = useState(false);
const [rating, setRating] = useState(0);
const [show, setShow] = useState(false);
const { handleChange, setValues, values } = UseForm({});
let history = useHistory();
useEffect(() => {
const arrInv = invites.find((inv) => inv._id === id);
if (arrInv) {
setValid(true);
const jacuzzi = jacuzzis.find((item) => item._id === arrInv.product);
setValues({ ...jacuzzi });
}
}, [invites, jacuzzis]);
const handleUpdate = () => {
const review = {
author: values.author,
header: values.header,
text: values.text,
rating: rating,
};
setValues({ ...values, userReviews: [...values.userReviews, review] });
setShow(true);
};
const handleRequest = () => {
axios
.delete(`/reviewinvites/${id}`)
.then((res) => console.log(res.data))
.catch((err) => console.log(err));
updateData(values);
};
useEffect(
() => {
if (onSuccess) history.push(`/spabad/${values._id}`);
},
[onSuccess],
[]
);
return (
<Container>
{validInv ? (
<Card>
<Card.Body>
<Card.Title>Opprett ny anmeldelse</Card.Title>
<Form>
<Form.Group>
<Form.Label>Navn</Form.Label>
<Form.Control
placeholder="Anonym/Ole Nordmann"
onChange={handleChange}
name="author"
/>
</Form.Group>
<Form.Group>
<Form.Label>Tittel</Form.Label>
<Form.Control
placeholder="Perfekt for meg/Skuffende opplevelse"
onChange={handleChange}
name="header"
/>
</Form.Group>
<Form.Group>
<Form.Label>Hva syntes du om produktet?</Form.Label>
<textarea
className="form-control"
rows={5}
onChange={handleChange}
name="text"
/>
</Form.Group>
</Form>
<Form.Group>
<Form.Label>Stjerne rangering</Form.Label>
<div>
<Button
onClick={() => setRating(rating !== 0 ? rating - 1 : rating)}>
-
</Button>
<StarRating rating={rating} size={2} />
<Button
onClick={() => setRating(rating !== 5 ? rating + 1 : rating)}>
+
</Button>
</div>
</Form.Group>
</Card.Body>
<Card.Footer>
<Button onClick={handleUpdate}>Send inn</Button>
</Card.Footer>
<Modal show={show} onHide={() => setShow(false)} centered>
<Modal.Header closeButton>
<Modal.Title>Modal heading</Modal.Title>
</Modal.Header>
<Modal.Body>
Du er nå i ferd med å sende inn din anmeldelse. Når du først har
sendt din anmeldelse kan du ikke endre på den.
</Modal.Body>
<Modal.Footer>
<Button variant="warning" onClick={() => setShow(false)}>
avbryt
</Button>
<Button variant="primary" onClick={handleRequest}>
Send inn
</Button>
<Card.Text className="text-success">
{onSuccess ? 'Anmeldelse sendt inn' : null}
</Card.Text>
</Modal.Footer>
</Modal>
</Card>
) : (
<h1>Denne linken er ikke lenger gylid eller er allerede brukt</h1>
)}
</Container>
);
};
export default AddReview;
|
/*
* Re-structure JS
* */
(function($) {
'use strict';
/*
* Helper vars
* */
/*
* Helper functions
* */
function thim_get_url_parameters(sParam) {
var sPageURL = window.location.search.substring(1);
var sURLVariables = sPageURL.split('&');
for (var i = 0; i < sURLVariables.length; i++) {
var sParameterName = sURLVariables[i].split('=');
if (sParameterName[0] === sParam) {
return sParameterName[1];
}
}
}
var thim_eduma = {
ready: function() {
this.register_ajax();
this.login_ajax();
this.login_form_popup();
this.form_submission_validate();
this.thim_TopHeader();
this.ctf7_input_effect();
this.thim_course_filter();
this.mobile_menu_toggle();
},
load: function() {
this.thim_menu();
this.thim_carousel();
this.thim_contentslider();
this.counter_box();
},
    resize: function() {
        // Intentionally empty hook: resize work is handled by the
        // per-feature $(window).resize handlers installed elsewhere.
    },
validate_form: function(form) {
var valid = true,
email_valid = /[A-Z0-9._%+-]+@[A-Z0-9.-]+.[A-Z]{2,4}/igm;
form.find('input.required').each(function() {
// Check empty value
if (!$(this).val()) {
$(this).addClass('invalid');
valid = false;
}
// Uncheck
if ($(this).is(':checkbox') && !$(this).is(':checked')) {
$(this).addClass('invalid');
valid = false;
}
// Check email format
if ('email' === $(this).attr('type')) {
if (!email_valid.test($(this).val())) {
$(this).addClass('invalid');
valid = false;
}
}
// Check captcha
if ($(this).hasClass('captcha-result')) {
let captcha_1 = parseInt($(this).data('captcha1')),
captcha_2 = parseInt($(this).data('captcha2'));
if ((captcha_1 + captcha_2) !== parseInt($(this).val())) {
$(this).addClass('invalid').val('');
valid = false;
}
}
});
// Check repeat password
if (form.hasClass('auto_login')) {
let $pw = form.find('input[name=password]'),
$repeat_pw = form.find('input[name=repeat_password]');
if ($pw.val() !== $repeat_pw.val()) {
$pw.addClass('invalid');
$repeat_pw.addClass('invalid');
valid = false;
}
}
$('form input.required').on('focus', function() {
$(this).removeClass('invalid');
});
return valid;
},
login_form_popup: function() {
$(document).on('click', 'body:not(".loggen-in") .thim-button-checkout',
function(e) {
if ($(window).width() > 767) {
e.preventDefault();
if ($('#thim-popup-login').length) {
$('body').addClass('thim-popup-active');
$('#thim-popup-login').addClass('active');
} else {
var redirect = $(this).data('redirect');
window.location = redirect;
}
} else {
e.preventDefault();
var redirect = $(this).data('redirect');
window.location = redirect;
}
});
$(document).on('click', '#thim-popup-login .close-popup', function(event) {
event.preventDefault();
$('body').removeClass('thim-popup-active');
$('#thim-popup-login').removeClass();
});
$('body .thim-login-popup a.js-show-popup').on('click', function(event) {
event.preventDefault();
let $popup = $('#thim-popup-login');
$('body').addClass('thim-popup-active');
$popup.addClass('active');
if ($(this).hasClass('login')) {
$popup.addClass('sign-in');
} else {
$popup.addClass('sign-up');
}
});
$('#thim-popup-login .link-bottom a').on('click', function(e) {
e.preventDefault();
if ($(this).hasClass('login')) {
$('#thim-popup-login').removeClass('sign-up').addClass('sign-in');
} else {
$('#thim-popup-login').removeClass('sign-in').addClass('sign-up');
}
});
// Show login popup when click to LP buttons
$('body:not(".logged-in") .enroll-course .button-enroll-course, body:not(".logged-in") form.purchase-course:not(".guest_checkout") .button:not(.button-add-to-cart)').
on('click', function(e) {
e.preventDefault();
// if ($(window).width() > 1024) {
if ($('body').hasClass('thim-popup-feature')) {
$('.thim-link-login.thim-login-popup .login').trigger('click');
} else {
window.location.href = $(this).parent().find('input[name=redirect_to]').val();
}
// } else {
// window.location.href = $(this).parent().find('input[name=redirect_to]').val();
// }
});
$(document).on('click', '#thim-popup-login', function(e) {
if ($(e.target).attr('id') === 'thim-popup-login') {
$('body').removeClass('thim-popup-active');
$('#thim-popup-login').removeClass();
}
});
},
    register_ajax: function() {
        // AJAX-submit the popup registration form via WordPress admin-ajax
        // (action `thim_register_ajax`); on success with auto-login enabled,
        // redirect to the form's redirect_to target.
        $('#thim-popup-login form[name=registerformpopup]').on('submit', function(e) {
            e.preventDefault();
            // Client-side validation first; abort the AJAX call on failure.
            if (!thim_eduma.validate_form($(this))) {
                return false;
            }
            var $form = $(this),
                data = {
                    action : 'thim_register_ajax',
                    data : $form.serialize() + '&wp-submit=' +
                        $form.find('input[type=submit]').val(),
                    register_security: $form.find('#register_security').
                        val(),
                },
                redirect_url = $form.find('input[name=redirect_to]').val(),
                $elem = $('#thim-popup-login .thim-login-container');
            // Show the spinner and clear any message from a previous attempt.
            $elem.addClass('loading');
            $elem.find('.message').slideDown().remove();
            $.ajax({
                type : 'POST',
                url : ajaxurl,
                data : data,
                success: function(response) {
                    $elem.removeClass('loading');
                    if (typeof response.data !== 'undefined') {
                        $elem.find('.popup-message').html(response.data.message);
                    }
                    if (response.success === true) {
                        if ($form.hasClass('auto_login')) {
                            window.location.href = redirect_url;
                        }
                    } else {
                        // On failure, reload the Google-captcha iframe so the
                        // user gets a fresh challenge.
                        var $captchaIframe = $('#thim-popup-login .gglcptch iframe');
                        if ($captchaIframe.length > 0) {
                            $captchaIframe.attr('src', $captchaIframe.attr('src')); // reload iframe
                        }
                    }
                },
            });
        });
    },
    login_ajax: function() {
        // AJAX-submit the popup login form via WordPress admin-ajax
        // (action `thim_login_ajax`); on success either redirect to the
        // server-provided URL or reload the current page.
        $('#thim-popup-login form[name=loginpopopform]').submit(function(event) {
            event.preventDefault();
            if (!thim_eduma.validate_form($(this))) {
                return false;
            }
            var form = $(this),
                $elem = $('#thim-popup-login .thim-login-container'),
                wp_submit = $elem.find('input[type=submit]').val();
            $elem.addClass('loading');
            $elem.find('.message').slideDown().remove();
            var data = {
                action: 'thim_login_ajax',
                data : form.serialize() + '&wp-submit=' + wp_submit,
            };
            $.post(ajaxurl, data, function(response) {
                // The endpoint returns a JSON string; bail out quietly if it
                // cannot be parsed (e.g. a PHP error page came back).
                try {
                    response = JSON.parse(response);
                    $elem.find('.thim-login').append(response.message);
                    if (response.code == '1') {
                        // code '1' == successful login.
                        if (response.redirect) {
                            // Redirecting to the current URL would be a no-op,
                            // so force a reload in that case.
                            if (window.location.href == response.redirect) {
                                location.reload();
                            } else {
                                window.location.href = response.redirect;
                            }
                        } else {
                            location.reload();
                        }
                    } else {
                        // Failed login: refresh the captcha iframe.
                        var $captchaIframe = $('#thim-popup-login .gglcptch iframe');
                        if ($captchaIframe.length > 0) {
                            $captchaIframe.attr('src', $captchaIframe.attr('src')); // reload iframe
                        }
                    }
                } catch (e) {
                    return false;
                }
                $elem.removeClass('loading');
            });
            return false;
        });
    },
form_submission_validate: function() {
// Form login
$('.form-submission-login form[name=loginform]').on('submit', function(e) {
if (!thim_eduma.validate_form($(this))) {
e.preventDefault();
return false;
}
});
// Form register
$('.form-submission-register form[name=registerform]').on('submit', function(e) {
if (!thim_eduma.validate_form($(this))) {
e.preventDefault();
return false;
}
});
// Form lost password
$('.form-submission-lost-password form[name=lostpasswordform]').on('submit', function(e) {
if (!thim_eduma.validate_form($(this))) {
e.preventDefault();
return false;
}
});
},
thim_TopHeader: function() {
var header = $('#masthead'),
height_sticky_header = header.outerHeight(true),
content_pusher = $('#wrapper-container .content-pusher'),
top_site_main = $('#wrapper-container .top_site_main');
if (header.hasClass('header_overlay')) { // Header overlay
top_site_main.css({'padding-top': height_sticky_header + 'px'});
$(window).resize(function() {
let height_sticky_header = header.outerHeight(true);
top_site_main.css({'padding-top': height_sticky_header + 'px'});
});
} else { // Header default
content_pusher.css({'padding-top': height_sticky_header + 'px'});
$(window).resize(function() {
let height_sticky_header = header.outerHeight(true);
content_pusher.css({'padding-top': height_sticky_header + 'px'});
});
}
},
ctf7_input_effect: function() {
let $ctf7_edtech = $('.form_developer_course'),
$item_input = $ctf7_edtech.find('.field_item input'),
$submit_wrapper = $ctf7_edtech.find('.submit_row');
$item_input.focus(function() {
$(this).parent().addClass('focusing');
}).blur(function() {
$(this).parent().removeClass('focusing');
});
$submit_wrapper.on('click', function() {
$(this).closest('form').submit();
});
},
    thim_course_filter: function() {
        // AJAX-driven filtering/sorting/pagination for the LearnPress course
        // archive: re-fetches the archive HTML and swaps in the new results.
        // Only active on LearnPress archive pages.
        let $body = $('body');
        if (!$body.hasClass('learnpress') || !$body.hasClass('archive')) {
            return;
        }
        // POST the filter state and splice the returned markup into the page.
        let ajaxCall = function(data) {
            return $.ajax({
                url       : $('#lp-archive-courses').data('allCoursesUrl'), //using for course category page
                type      : 'POST',
                data      : data,
                dataType  : 'html',
                beforeSend: function() {
                    $('#thim-course-archive').addClass('loading');
                },
            }).fail(function() {
                $('#thim-course-archive').removeClass('loading');
            }).done(function(data) {
                /*if (typeof history.pushState === 'function') {
                 history.pushState(orderby, null, url);
                 }*/
                // Parse the response and replace the archive, pagination and
                // result-count fragments in place.
                let $document = $($.parseHTML(data));
                $('#thim-course-archive').replaceWith($document.find('#thim-course-archive'));
                $('.learn-press-pagination ul.page-numbers').
                    replaceWith($document.find('.learn-press-pagination ul.page-numbers'));
                $('.thim-course-top .course-index span').
                    replaceWith($document.find('.thim-course-top .course-index span'));
            });
        };
        // Current filter state, mutated by the handlers below.
        let sendData = {
            s             : '',
            ref           : 'course',
            post_type     : 'lp_course',
            course_orderby: 'newly-published',
            course_paged  : 1,
        };
        /*
         * Handle courses sort ajax
         * */
        $(document).on('change', '.thim-course-order > select', function() {
            sendData.s = $('.courses-searching .course-search-filter').val();
            sendData.course_orderby = $(this).val();
            sendData.course_paged = 1;
            ajaxCall(sendData);
        });
        /*
         * Handle pagination ajax
         * */
        $(document).on('click', '#lp-archive-courses > .learn-press-pagination a.page-numbers', function(e) {
            e.preventDefault();
            // Scroll back up to the result list before loading the new page.
            $('html, body').animate({
                'scrollTop': $('.site-content').offset().top - 140,
            }, 1000);
            let pageNum = parseInt($(this).text()),
                paged = pageNum ? pageNum : 1,
                cateArr = [], instructorArr = [],
                cpage = $('.learn-press-pagination.navigation.pagination ul.page-numbers li span.page-numbers.current').text(),
                isNext = $(this).hasClass('next') && $(this).hasClass('page-numbers'),
                isPrev = $(this).hasClass('prev') && $(this).hasClass('page-numbers');
            // Prev/Next arrows have no numeric text; derive the target page
            // from the currently highlighted page number.
            if(!pageNum){
                if(isNext){
                    paged = parseInt(cpage)+1;
                }
                if(isPrev){
                    paged = parseInt(cpage)-1;
                }
            }
            // Re-collect the currently applied filters so pagination keeps them.
            $('form.thim-course-filter').find('input.filtered').each(function() {
                switch ($(this).attr('name')) {
                    case 'course-cate-filter':
                        cateArr.push($(this).val());
                        break;
                    case 'course-instructor-filter':
                        instructorArr.push($(this).val());
                        break;
                    case 'course-price-filter':
                        sendData.course_price_filter = $(this).val();
                        break;
                    default:
                        break;
                }
            });
            // On a category archive without a category filter widget, pin the
            // category id parsed from the body class (e.g. "category-12").
            if ($body.hasClass('category') && $('.list-cate-filter').length <= 0) {
                let bodyClass = $body.attr('class'),
                    cateClass = bodyClass.match(/category\-\d+/gi)[0],
                    cateID = cateClass.split('-').pop();
                cateArr.push(cateID);
            }
            sendData.course_cate_filter = cateArr;
            sendData.course_instructor_filter = instructorArr;
            sendData.s = $('.courses-searching .course-search-filter').val();
            sendData.course_orderby = $('.thim-course-order > select').val();
            sendData.course_paged = paged;
            ajaxCall(sendData);
        });
        /*
         * Handle filter form click ajax
         * */
        $('form.thim-course-filter').on('submit', function(e) {
            e.preventDefault();
            let formData = $(this).serializeArray(),
                cateArr = [], instructorArr = [];
            if (!formData.length) {
                return;
            }
            $('html, body').animate({
                'scrollTop': $('.site-content').offset().top - 140,
            }, 1000);
            // Tag checked inputs with `.filtered` so the pagination handler
            // above can reconstruct the active filter set.
            $(this).find('input').each(function() {
                let form_input = $(this);
                form_input.removeClass('filtered');
                if (form_input.is(':checked')) {
                    form_input.addClass('filtered');
                }
            });
            $.each(formData, function(index, filter) {
                switch (filter.name) {
                    case 'course-cate-filter':
                        cateArr.push(filter.value);
                        break;
                    case 'course-instructor-filter':
                        instructorArr.push(filter.value);
                        break;
                    case 'course-price-filter':
                        sendData.course_price_filter = filter.value;
                        break;
                    default:
                        break;
                }
            });
            if ($body.hasClass('category') && $('.list-cate-filter').length <= 0) {
                let bodyClass = $body.attr('class'),
                    cateClass = bodyClass.match(/category\-\d+/gi)[0],
                    cateID = cateClass.split('-').pop();
                cateArr.push(cateID);
            }
            sendData.course_cate_filter = cateArr;
            sendData.course_instructor_filter = instructorArr;
            sendData.course_paged = 1;
            ajaxCall(sendData);
        });
    },
mobile_menu_toggle: function() {
$(document).on('click', '.menu-mobile-effect', function(e) {
e.stopPropagation();
$('body').toggleClass('mobile-menu-open');
});
$(document).on('click', '.mobile-menu-wrapper', function(e) {
$('body').removeClass('mobile-menu-open');
});
$(document).on('click', '.mobile-menu-inner', function(e) {
e.stopPropagation();
});
},
    thim_menu: function() {
        // Sticky-header behaviour, sub-menu edge detection and the animated
        // "magic line" under the active top-level menu item.
        //Add class for masthead
        var $header = $('#masthead.sticky-header'),
            off_Top = ($('.content-pusher').length > 0) ? $('.content-pusher').
                offset().top : 0,
            menuH = $header.outerHeight(),
            latestScroll = 0;
        // Apply the stuck state immediately when the page loads pre-scrolled.
        if ($(window).scrollTop() > 2) {
            $header.removeClass('affix-top').addClass('affix');
        }
        $(window).scroll(function() {
            var current = $(this).scrollTop();
            if (current > 2) {
                $header.removeClass('affix-top').addClass('affix');
            } else {
                $header.removeClass('affix').addClass('affix-top');
            }
            // Hide the header when scrolling down past it; reveal on scroll up.
            if (current > latestScroll && current > menuH + off_Top) {
                if (!$header.hasClass('menu-hidden')) {
                    $header.addClass('menu-hidden');
                }
            } else {
                if ($header.hasClass('menu-hidden')) {
                    $header.removeClass('menu-hidden');
                }
            }
            latestScroll = current;
        });
        //Submenu position
        // Flip sub-menus that would overflow the nav to open on the other side.
        $('.wrapper-container:not(.mobile-menu-open) .site-header .navbar-nav > .menu-item').each(function() {
            if ($('>.sub-menu', this).length <= 0) {
                return;
            }
            let elm = $('>.sub-menu', this),
                off = elm.offset(),
                left = off.left,
                width = elm.width();
            let navW = $('.thim-nav-wrapper').width(),
                isEntirelyVisible = (left + width <= navW);
            if (!isEntirelyVisible) {
                elm.addClass('dropdown-menu-right');
            } else {
                // Second-level sub-menus get the same overflow check.
                let subMenu2 = elm.find('>.menu-item>.sub-menu');
                if (subMenu2.length <= 0) {
                    return;
                }
                let off = subMenu2.offset(),
                    left = off.left,
                    width = subMenu2.width();
                let isEntirelyVisible = (left + width <= navW);
                if (!isEntirelyVisible) {
                    elm.addClass('dropdown-left-side');
                }
            }
        });
        //Show submenu when hover
        // var $menuItem = $(
        //     '.wrapper-container:not(.mobile-menu-open) .site-header .navbar-nav >li,' +
        //     '.wrapper-container:not(.mobile-menu-open) .site-header .navbar-nav li,' +
        //     '.site-header .navbar-nav li ul li');
        //
        // $menuItem.on({
        //     'mouseenter': function() {
        //         $(this).children('.sub-menu').stop(true, false).fadeIn(250);
        //     },
        //     'mouseleave': function() {
        //         $(this).
        //             children('.sub-menu').
        //             stop(true, false).
        //             fadeOut(250);
        //     },
        // });
        let $headerLayout = $('header#masthead');
        // Build the underline element and animate it to the hovered item;
        // it rests under the current menu item (or collapses when none).
        let magicLine = function() {
            if ($(window).width() > 768) {
                //Magic Line
                var menu_active = $(
                    '#masthead .navbar-nav>li.menu-item.current-menu-item,#masthead .navbar-nav>li.menu-item.current-menu-parent, #masthead .navbar-nav>li.menu-item.current-menu-ancestor');
                if (menu_active.length > 0) {
                    menu_active.before('<span id="magic-line"></span>');
                    var menu_active_child = menu_active.find(
                        '>a,>span.disable_link,>span.tc-menu-inner'),
                        menu_left = menu_active.position().left,
                        menu_child_left = parseInt(menu_active_child.css('padding-left')),
                        magic = $('#magic-line');
                    // Remember the resting geometry so mouseleave can return to it.
                    magic.width(menu_active_child.width()).
                        css('left', Math.round(menu_child_left + menu_left)).
                        data('magic-width', magic.width()).
                        data('magic-left', magic.position().left);
                } else {
                    var first_menu = $(
                        '#masthead .navbar-nav>li.menu-item:first-child');
                    first_menu.before('<span id="magic-line"></span>');
                    var magic = $('#magic-line');
                    magic.data('magic-width', 0);
                }
                var nav_H = parseInt($('.site-header .navigation').outerHeight());
                magic.css('bottom', nav_H - (nav_H - 90) / 2 - 64);
                $('#masthead .navbar-nav>li.menu-item').on({
                    'mouseenter': function() {
                        var elem = $(this).
                            find('>a,>span.disable_link,>span.tc-menu-inner'),
                            new_width = elem.width(),
                            parent_left = elem.parent().position().left,
                            left = parseInt(elem.css('padding-left'));
                        if (!magic.data('magic-left')) {
                            magic.css('left', Math.round(parent_left + left));
                            magic.data('magic-left', 'auto');
                        }
                        magic.stop().animate({
                            left : Math.round(parent_left + left),
                            width: new_width,
                        });
                    },
                    'mouseleave': function() {
                        magic.stop().animate({
                            left : magic.data('magic-left'),
                            width: magic.data('magic-width'),
                        });
                    },
                });
            }
        };
        // Header layout v4 has no underline.
        if (!$headerLayout.hasClass('header_v4')) {
            magicLine();
        }
        // NOTE(review): this helper only computes local values and is never
        // called — apparently dead code kept from an earlier revision.
        var subMenuPosition = function(menuItem) {
            var $menuItem = menuItem,
                $container = $menuItem.closest('.container, .header_full'),
                $subMenu = $menuItem.find('>.sub-menu'),
                $menuItemWidth = $menuItem.width(),
                $containerWidth = $container.width(),
                $subMenuWidth = $subMenu.width(),
                $subMenuDistance = $subMenuWidth / 2,
                paddingContainer = 15;
        };
    },
    thim_carousel: function($scope) {
        // Initialise every Owl Carousel on the page. Per-carousel options are
        // read from data-* attributes with sensible fall-backs. `$scope` is
        // accepted for Elementor's frontend hooks but is not used directly.
        if (jQuery().owlCarousel) {
            $('.thim-gallery-images').owlCarousel({
                autoPlay   : false,
                singleItem : true,
                stopOnHover: true,
                pagination : true,
                autoHeight : false,
            });
            $('.thim-carousel-wrapper').each(function() {
                var item_visible = $(this).data('visible') ? parseInt(
                    $(this).data('visible')) : 4,
                    item_desktopsmall = $(this).data('desktopsmall') ? parseInt(
                        $(this).data('desktopsmall')) : item_visible,
                    itemsTablet = $(this).data('itemtablet') ? parseInt(
                        $(this).data('itemtablet')) : 2,
                    itemsMobile = $(this).data('itemmobile') ? parseInt(
                        $(this).data('itemmobile')) : 1,
                    pagination = !!$(this).data('pagination'),
                    navigation = !!$(this).data('navigation'),
                    autoplay = $(this).data('autoplay') ? parseInt(
                        $(this).data('autoplay')) : false,
                    // data-navigation-text="2" selects long arrows instead of chevrons.
                    navigation_text = ($(this).data('navigation-text') &&
                        $(this).data('navigation-text') === '2') ? [
                        '<i class=\'fa fa-long-arrow-left \'></i>',
                        '<i class=\'fa fa-long-arrow-right \'></i>',
                    ] : [
                        '<i class=\'fa fa-chevron-left \'></i>',
                        '<i class=\'fa fa-chevron-right \'></i>',
                    ];
                $(this).owlCarousel({
                    items            : item_visible,
                    itemsDesktop     : [1200, item_visible],
                    itemsDesktopSmall: [1024, item_desktopsmall],
                    itemsTablet      : [768, itemsTablet],
                    itemsMobile      : [480, itemsMobile],
                    navigation       : navigation,
                    pagination       : pagination,
                    lazyLoad         : true,
                    autoPlay         : autoplay,
                    navigationText   : navigation_text,
                });
            });
            // Category sliders use wider default item counts.
            $('.thim-carousel-course-categories .thim-course-slider, .thim-carousel-course-categories-tabs .thim-course-slider').
                each(function() {
                    var item_visible = $(this).data('visible') ? parseInt($(this).data('visible')) : 7,
                        item_desktop = $(this).data('desktop') ? parseInt($(this).data('desktop')) : 7,
                        item_desktopsmall = $(this).data('desktopsmall')
                            ? parseInt($(this).data('desktopsmall'))
                            : 6,
                        item_tablet = $(this).data('tablet') ? parseInt($(this).data('tablet')) : 4,
                        item_mobile = $(this).data('mobile') ? parseInt($(this).data('mobile')) : 2,
                        pagination = !!$(this).data('pagination'),
                        navigation = !!$(this).data('navigation'),
                        autoplay = $(this).data('autoplay') ? parseInt($(this).data('autoplay')) : false,
                        is_rtl = $('body').hasClass('rtl');
                    $(this).owlCarousel({
                        items            : item_visible,
                        itemsDesktop     : [1800, item_desktop],
                        itemsDesktopSmall: [1024, item_desktopsmall],
                        itemsTablet      : [768, item_tablet],
                        itemsMobile      : [480, item_mobile],
                        navigation       : navigation,
                        pagination       : pagination,
                        autoPlay         : autoplay,
                        navigationText   : [
                            '<i class=\'fa fa-chevron-left \'></i>',
                            '<i class=\'fa fa-chevron-right \'></i>',
                        ],
                    });
                });
        }
    },
    thim_contentslider: function($scope) {
        // Initialise testimonial sliders via the thimContentSlider plugin.
        // Options come from data-* attributes with defaults. `$scope` is
        // accepted for Elementor's frontend hooks but is not used directly.
        $('.thim-testimonial-slider').each(function() {
            var elem = $(this),
                item_visible = parseInt(elem.data('visible')),
                item_time = parseInt(elem.data('time')),
                autoplay = elem.data('auto') ? true : false,
                item_ratio = elem.data('ratio') ? elem.data('ratio') : 1.18,
                item_padding = elem.data('padding') ? elem.data('padding') : 15,
                item_activepadding = elem.data('activepadding') ? elem.data(
                    'activepadding') : 0,
                item_width = elem.data('width') ? elem.data('width') : 100,
                mousewheel = !!elem.data('mousewheel');
            var testimonial_slider = $(this).thimContentSlider({
                items            : elem,
                itemsVisible     : item_visible,
                mouseWheel       : mousewheel,
                autoPlay         : autoplay,
                pauseTime        : item_time,
                itemMaxWidth     : item_width,
                itemMinWidth     : item_width,
                activeItemRatio  : item_ratio,
                activeItemPadding: item_activepadding,
                itemPadding      : item_padding,
            });
        });
    },
    counter_box: function() {
        // Animate .counter-box percentage counters from 0 to their
        // data-percentage value the first time they scroll into view
        // (80% offset). Requires the jQuery Waypoints + countTo plugins.
        if (jQuery().waypoint) {
            jQuery('.counter-box').waypoint(function() {
                // `this` is supplied by the Waypoints plugin for the
                // triggered element.
                jQuery(this).find('.display-percentage').each(function() {
                    var percentage = jQuery(this).data('percentage');
                    jQuery(this).
                        countTo({
                            from           : 0,
                            to             : percentage,
                            refreshInterval: 40,
                            speed          : 2000,
                        });
                });
            }, {
                triggerOnce: true,
                offset     : '80%',
            });
        }
    }
};
$(document).ready(function() {
thim_eduma.ready();
});
$(window).load(function() {
thim_eduma.load();
});
$(window).resize(function() {
thim_eduma.resize();
});
$(window).on('elementor/frontend/init', function() {
elementorFrontend.hooks.addAction('frontend/element_ready/thim-carousel-post.default',
thim_eduma.thim_carousel);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-courses.default',
thim_eduma.thim_carousel);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-course-categories.default',
thim_eduma.thim_carousel);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-our-team.default',
thim_eduma.thim_carousel);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-gallery-images.default',
thim_eduma.thim_carousel);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-list-instructors.default',
thim_eduma.thim_carousel);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-testimonials.default',
thim_eduma.thim_contentslider);
elementorFrontend.hooks.addAction('frontend/element_ready/thim-counters-box.default',
thim_eduma.counter_box);
});
})(jQuery);
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _typeof3 = require("@babel/runtime/helpers/typeof");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = useMergedConfig;
var _typeof2 = _interopRequireDefault(require("@babel/runtime/helpers/typeof"));
var _extends2 = _interopRequireDefault(require("@babel/runtime/helpers/extends"));
var React = _interopRequireWildcard(require("react"));
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof3(obj) !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
/**
 * React hook: merge a per-component prop config over template-level defaults.
 *
 * @param propConfig component-level value; a falsy value disables the feature
 * @param templateConfig defaults supplied by the surrounding template
 * @returns [support, merged] — `support` is whether the feature is enabled;
 *   when propConfig is an object its keys override templateConfig's.
 *   Memoised on propConfig only, so templateConfig changes alone do not
 *   recompute the result.
 */
function useMergedConfig(propConfig, templateConfig) {
  return React.useMemo(function () {
    var support = !!propConfig;
    // Spread templateConfig first, then overlay propConfig when it is an object.
    return [support, (0, _extends2["default"])((0, _extends2["default"])({}, templateConfig), support && (0, _typeof2["default"])(propConfig) === 'object' ? propConfig : null)];
  }, [propConfig]);
}
|
// RequireJS bootstrap: load the 'config' module first (which sets up
// require.config paths), then start the main application module 'app'.
define(['config'], function() {
    require(['app']);
})
|
import unittest
import copy
import numpy
import scipy.linalg
import pyscf
from pyscf import semiempirical
class KnownValues(unittest.TestCase):
    """Regression tests pinning MINDO/3 heats of formation to known values."""
    def test_rmindo(self):
        # Water-like geometry: O at origin, H atoms on the x and y axes.
        mol = pyscf.M(atom=[(8,(0,0,0)),(1,(1.,0,0)),(1,(0,1.,0))])
        mf = semiempirical.RMINDO3(mol).run(conv_tol=1e-6)
        self.assertAlmostEqual(mf.e_heat_formation, -48.82621264564841, 6)
        # Methane-like geometry: C at origin with four axial H atoms.
        mol = pyscf.M(atom=[(6,(0,0,0)),(1,(1.,0,0)),(1,(0,1.,0)),
                            (1,(0,0,1.)),(1,(0,0,-1.))])
        mf = semiempirical.RMINDO3(mol).run(conv_tol=1e-6)
        self.assertAlmostEqual(mf.e_heat_formation, 75.76019731515225, 6)
    def test_umindo(self):
        # Open-shell OH radical (spin=1) via unrestricted MINDO/3.
        mol = pyscf.M(atom=[(8,(0,0,0)),(1,(1.,0,0))], spin=1)
        mf = semiempirical.UMINDO3(mol).run(conv_tol=1e-6)
        # NOTE(review): no `places` argument here, so the default of 7
        # applies — stricter than the 6 used above; confirm intentional.
        self.assertAlmostEqual(mf.e_heat_formation, 18.08247965492137)
if __name__ == "__main__":
print("Full Tests for addons")
unittest.main()
|
const defaultTheme = require('tailwindcss/defaultTheme')
const colors = require('tailwindcss/colors')
// Tailwind CSS configuration (content scanning, theme extensions,
// typography-plugin styles for light and dark mode).
module.exports = {
  experimental: {
    optimizeUniversalDefaults: true,
  },
  // Files scanned for class names during the build.
  content: [
    './pages/**/*.js',
    './components/**/*.js',
    './layouts/**/*.js',
    './lib/**/*.js',
    './data/**/*.mdx',
  ],
  // Dark mode is toggled by adding the `dark` class, not by media query.
  darkMode: 'class',
  theme: {
    extend: {
      spacing: {
        // 16:9 aspect-ratio padding helper.
        '9/16': '56.25%',
      },
      lineHeight: {
        11: '2.75rem',
        12: '3rem',
        13: '3.25rem',
        14: '3.5rem',
      },
      fontFamily: {
        sans: ['InterVariable', ...defaultTheme.fontFamily.sans],
      },
      colors: {
        primary: colors.teal,
        gray: colors.neutral,
        purplePrimary: '#2400FF',
      },
      // Styles applied by @tailwindcss/typography to `prose` content.
      typography: (theme) => ({
        DEFAULT: {
          css: {
            color: theme('colors.gray.700'),
            a: {
              color: theme('colors.primary.500'),
              '&:hover': {
                color: `${theme('colors.primary.600')} !important`,
              },
              code: { color: theme('colors.primary.400') },
            },
            h1: {
              fontWeight: '700',
              letterSpacing: theme('letterSpacing.tight'),
              color: theme('colors.gray.900'),
            },
            h2: {
              fontWeight: '700',
              letterSpacing: theme('letterSpacing.tight'),
              color: theme('colors.gray.900'),
            },
            h3: {
              fontWeight: '600',
              color: theme('colors.gray.900'),
            },
            'h4,h5,h6': {
              color: theme('colors.gray.900'),
            },
            pre: {
              backgroundColor: theme('colors.gray.800'),
            },
            code: {
              color: theme('colors.pink.500'),
              backgroundColor: theme('colors.gray.100'),
              paddingLeft: '4px',
              paddingRight: '4px',
              paddingTop: '2px',
              paddingBottom: '2px',
              borderRadius: '0.25rem',
            },
            // Suppress the default backtick pseudo-elements around inline code.
            'code::before': {
              content: 'none',
            },
            'code::after': {
              content: 'none',
            },
            details: {
              backgroundColor: theme('colors.gray.100'),
              paddingLeft: '4px',
              paddingRight: '4px',
              paddingTop: '2px',
              paddingBottom: '2px',
              borderRadius: '0.25rem',
            },
            hr: { borderColor: theme('colors.gray.200') },
            'ol li::marker': {
              fontWeight: '600',
              color: theme('colors.gray.500'),
            },
            'ul li::marker': {
              backgroundColor: theme('colors.gray.500'),
            },
            strong: { color: theme('colors.gray.600') },
            blockquote: {
              color: theme('colors.gray.900'),
              borderLeftColor: theme('colors.gray.200'),
            },
          },
        },
        // Dark-mode overrides (used as `prose dark:prose-dark`).
        dark: {
          css: {
            color: theme('colors.gray.300'),
            a: {
              color: theme('colors.primary.500'),
              '&:hover': {
                color: `${theme('colors.primary.400')} !important`,
              },
              code: { color: theme('colors.primary.400') },
            },
            h1: {
              fontWeight: '700',
              letterSpacing: theme('letterSpacing.tight'),
              color: theme('colors.gray.100'),
            },
            h2: {
              fontWeight: '700',
              letterSpacing: theme('letterSpacing.tight'),
              color: theme('colors.gray.100'),
            },
            h3: {
              fontWeight: '600',
              color: theme('colors.gray.100'),
            },
            'h4,h5,h6': {
              color: theme('colors.gray.100'),
            },
            pre: {
              backgroundColor: theme('colors.gray.800'),
            },
            code: {
              backgroundColor: theme('colors.gray.800'),
            },
            details: {
              backgroundColor: theme('colors.gray.800'),
            },
            hr: { borderColor: theme('colors.gray.700') },
            'ol li::marker': {
              fontWeight: '600',
              color: theme('colors.gray.400'),
            },
            'ul li::marker': {
              backgroundColor: theme('colors.gray.400'),
            },
            strong: { color: theme('colors.gray.100') },
            thead: {
              th: {
                color: theme('colors.gray.100'),
              },
            },
            tbody: {
              tr: {
                borderBottomColor: theme('colors.gray.700'),
              },
            },
            blockquote: {
              color: theme('colors.gray.100'),
              borderLeftColor: theme('colors.gray.700'),
            },
          },
        },
      }),
    },
  },
  plugins: [require('@tailwindcss/forms'), require('@tailwindcss/typography')],
}
|
/*! Copyright (c) 2013 Brandon Aaron (http://brandonaaron.net)
* Licensed under the MIT License (LICENSE.txt).
*
* Thanks to: http://adomas.org/javascript-mouse-wheel/ for some pointers.
* Thanks to: Mathias Bank(http://www.mathias-bank.de) for a scope bug fix.
* Thanks to: Seamus Leahy for adding deltaX and deltaY
*
* Version: 3.1.3
*
* Requires: 1.2.2+
*/
// jQuery mousewheel shim, based on Brandon Aaron's jquery-mousewheel 3.1.3.
// Normalizes the various browser wheel events into a single 'mousewheel'
// jQuery event whose handlers receive (event, delta, deltaX, deltaY).
LBF.define('util.mouseWheel', function(require){
    var $ = require('lib.jQuery');
    (function ($) {
        // Event types whose jQuery event objects should be normalized with the
        // standard mouse fix-hooks (pageX/pageY etc.).
        var toFix = ['wheel', 'mousewheel', 'DOMMouseScroll', 'MozMousePixelScroll'];
        // Browsers with the standard 'wheel' event (and IE9+) only need that
        // one; legacy browsers need the older vendor-specific events.
        // FIX: upstream 3.1.3 shipped the misspelled 'DomMouseScroll' here.
        // DOM event names are case-sensitive, so the misspelled name could
        // never fire; the correct spelling is 'DOMMouseScroll' (as in toFix).
        var toBind = 'onwheel' in document || document.documentMode >= 9 ? ['wheel'] : ['mousewheel', 'DOMMouseScroll', 'MozMousePixelScroll'];
        // Smallest absolute deltas observed so far; used to normalize wheel
        // speed across devices/browsers.
        var lowestDelta, lowestDeltaXY;
        if ( $.event.fixHooks ) {
            for ( var i = toFix.length; i; ) {
                $.event.fixHooks[ toFix[--i] ] = $.event.mouseHooks;
            }
        }
        // Special event: bind the low-level events once per element and
        // re-dispatch them through the unified handler below.
        $.event.special.mousewheel = {
            setup: function() {
                if ( this.addEventListener ) {
                    for ( var i = toBind.length; i; ) {
                        this.addEventListener( toBind[--i], handler, false );
                    }
                } else {
                    // Fallback for IE8 and below (no addEventListener).
                    this.onmousewheel = handler;
                }
            },
            teardown: function() {
                if ( this.removeEventListener ) {
                    for ( var i = toBind.length; i; ) {
                        this.removeEventListener( toBind[--i], handler, false );
                    }
                } else {
                    this.onmousewheel = null;
                }
            }
        };
        // Convenience plugin methods: .mousewheel(fn) binds (or triggers when
        // called without arguments), .unmousewheel(fn) unbinds.
        $.fn.extend({
            mousewheel: function(fn) {
                return fn ? this.bind("mousewheel", fn) : this.trigger("mousewheel");
            },
            unmousewheel: function(fn) {
                return this.unbind("mousewheel", fn);
            }
        });
        // Normalize a native wheel event and dispatch it as 'mousewheel'.
        function handler(event) {
            var orgEvent = event || window.event,
                args = [].slice.call(arguments, 1),
                delta = 0,
                deltaX = 0,
                deltaY = 0,
                absDelta = 0,
                absDeltaXY = 0,
                fn;
            event = $.event.fix(orgEvent);
            event.type = "mousewheel";
            // Old school scrollwheel delta
            if ( orgEvent.wheelDelta ) { delta = orgEvent.wheelDelta; }
            if ( orgEvent.detail ) { delta = orgEvent.detail * -1; }
            // New school wheel delta (wheel event)
            if ( orgEvent.deltaY ) {
                deltaY = orgEvent.deltaY * -1;
                delta = deltaY;
            }
            if ( orgEvent.deltaX ) {
                deltaX = orgEvent.deltaX;
                delta = deltaX * -1;
            }
            // Webkit exposes per-axis wheel deltas separately.
            if ( orgEvent.wheelDeltaY !== undefined ) { deltaY = orgEvent.wheelDeltaY; }
            if ( orgEvent.wheelDeltaX !== undefined ) { deltaX = orgEvent.wheelDeltaX * -1; }
            // Track the lowest delta seen to normalize the delta values.
            absDelta = Math.abs(delta);
            if ( !lowestDelta || absDelta < lowestDelta ) { lowestDelta = absDelta; }
            absDeltaXY = Math.max(Math.abs(deltaY), Math.abs(deltaX));
            if ( !lowestDeltaXY || absDeltaXY < lowestDeltaXY ) { lowestDeltaXY = absDeltaXY; }
            // Round toward zero so every tick is a whole number.
            fn = delta > 0 ? 'floor' : 'ceil';
            delta = Math[fn](delta / lowestDelta);
            deltaX = Math[fn](deltaX / lowestDeltaXY);
            deltaY = Math[fn](deltaY / lowestDeltaXY);
            // Add event and deltas to the front of the handler arguments.
            args.unshift(event, delta, deltaX, deltaY);
            return ($.event.dispatch || $.event.handle).apply(this, args);
        }
    })($);
})
|
// Prettier configuration shared by the project.
module.exports = {
  bracketSpacing: true, // print spaces inside object braces: { foo: bar }
  singleQuote: true, // prefer 'single' quotes over "double" quotes
  trailingComma: 'all', // trailing commas wherever syntactically valid
};
|
import torch
from experiments.link_prediction import link_prediction
import argparse
import matplotlib.pylab as plt
import os
import seaborn as sns
if __name__ == "__main__":
    # Plot the training/validation loss curves stored in a saved checkpoint.
    sns.set()
    # Arg parsing: --pt is the checkpoint path, --n names the output plot.
    parser = argparse.ArgumentParser()
    parser.add_argument('--pt', nargs=1,
                        help="absolute path to pytorch model",
                        type=str)
    parser.add_argument('--n', nargs=1,
                        help="name to save plot",
                        type=str)
    arguments = parser.parse_args()
    pt_path = arguments.pt[0]
    plot_name = arguments.n[0]
    # Load on CPU so plotting does not require a GPU.
    device = 'cpu'
    loaded = torch.load(pt_path, map_location=torch.device(device))
    loss_dict = loaded['loss_log']
    fig, ax = plt.subplots()
    # Sort by epoch key so the curves are drawn left-to-right.
    ax.plot(*list(zip(*sorted(loss_dict['val'].items()))), 'g', label='Validation loss')
    ax.plot(*list(zip(*sorted(loss_dict['train'].items()))), 'b', label='Training loss')
    plt.legend(loc='upper right')
    plt.title(plot_name)
    plt.xlabel('Epoch')
    plt.ylabel('Elbo')
    # Ensure the output directory exists; savefig does not create it.
    plot_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'plots')
    os.makedirs(plot_dir, exist_ok=True)
    plt.savefig(os.path.join(plot_dir, '{}.png'.format(plot_name)))
|
// Send the CSRF token with every jQuery AJAX request (read from the page's
// meta tag, as the backend requires).
$.ajaxSetup({
    headers: {
        'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')
    }
});
$.ajax({ //----------------------------------------------------------- render existing todo items
    url: "/show",
    type: "get",
    dataType: "json",
})
.done(function(data) {
    for (let value of data) {
        // Hand each record to the matching render function:
        // finish == 0 means still pending, anything else means done.
        if (value.finish == 0) {
            addPost(value);
        } else {
            addfinishPost(value);
        }
    }
})
.fail(function(err) {
    alert("錯誤");
})
$(document).ready(function() { //------------------------------------------------ add a new todo item
    $("#submitmsg").click(function() {
        $.ajax({
            url: "/new",
            type: "post",
            dataType: "json",
            data: {
                title: $("#title").val(),
                tododate: $("#todolist").val(),
            }
        })
        .done(function(data) {
            if (data.id) {
                // Creation succeeded: clear the inputs and render the item.
                $("input").val("");
                addPost(data);
            } else { // otherwise show the backend's errorMsg from the JSON response
                alert(data.msg);
            }
        })
        .fail(function(err) {
            alert("错誤");
        })
    })
    $("#modify").click(function() { //---------------------------------------------------------- update an item
        // The record id was stashed in the button's name attribute by updat().
        var id = $(this).attr("name");
        $.ajax({
            type: "PUT", // HTTP method
            url: '/update', // endpoint
            dataType: "json", // response format
            data: { // payload
                id: id,
                title: $("#modaltitle").val(),
                tododate: $("#modaldate").val()
            }
        })
        .done(function(returnmsg) {
            if (returnmsg.status) {
                // Non-zero status means the update was rejected.
                alert(returnmsg.msg);
            } else {
                // Clear the modal's text and date inputs.
                $("#modifymsg").find(":text,datetime-local").each(function() {
                    $(this).val("");
                });
                // Re-render the item's heading in place with the new values.
                var el = returnmsg.id;
                document.getElementById('show' + el).innerHTML = ' 帳號 : ' + returnmsg.email + ' , 應完成時間時間 :' + returnmsg.tododate +
                    '<br><br> 標題 : ' + returnmsg.title + '';
                alert("已修改")
            }
        })
        .fail(function(err) {
            alert("错誤");
        })
    })
    $("#delemodify").click(function() { //---------------------------------------------------------- delete an item
        var id = $(this).attr("name");
        delet(id);
    })
});
// Log the current user out, then go back to the login page. The redirect
// happens on success and on failure alike; failure also shows a notice.
function Logout(item) {
    var goLogin = function () { location.href = "/login"; };
    $.ajax({
        type: "get",
        url: '/logout',
        dataType: "json",
    }).done(function (res) {
        goLogin();
    }).fail(function (res) {
        goLogin();
        alert("已登出");
    });
}
// Delete the todo item with the given id on the server; on success remove
// its rendered panel from the page.
function delet(item) {
    $.ajax({
        type: "post",
        url: '/delet',
        dataType: "json",
        data: { id: item }
    }).done(function (res) {
        if (res.status) {
            // Non-zero status: server refused the deletion.
            alert(res.msg);
            return;
        }
        // Drop the panel rendered for this item.
        var panel = document.getElementById(res.id);
        panel.remove();
    }).fail(function (res) {
        alert("错誤");
    });
}
//--------------------------------------------------- open the edit modal
// Fetch the item's current values and show the edit dialog pre-filled.
function updat(item) {
    $.ajax({
        url: "/updat",
        type: "post",
        dataType: "json",
        data: { id: item }
    }).done(function (update) {
        if (!update.id) {
            alert(update.msg);
            return;
        }
        // Remember which record the modal's buttons act on.
        document.getElementById('delemodify').name = update.id;
        document.getElementById('modify').name = update.id;
        // Show the current values as placeholders in the inputs.
        document.getElementById('modaltitle').placeholder = update.title;
        document.getElementById('modaldate').placeholder = update.tododate;
        $('#updatModal').modal('show');
    }).fail(function (res) {
        alert("错誤");
    });
}
//--------------------------------------------------- "finished" button
// Mark the todo item with the given id as finished; on success the item
// moves from the pending list to the finished list.
function finish(item) {
    $.ajax({
        url: "/finish",
        type: "post",
        dataType: "json",
        data: {
            id: item
        }
    })
    .done(function(finish) {
        if (!finish.id) {
            alert(finish.msg);
        } else {
            // BUG FIX: `item` is already the numeric id (addPost renders
            // onclick="finish(<id>)"), so the old `item.id` was undefined,
            // getElementById returned null, and `el.remove()` threw.
            // Use the server-confirmed id, consistent with delet().
            var el = document.getElementById(finish.id);
            el.remove();
            addfinishPost(finish);
            alert("已完成該事項");
        }
    })
    .fail(function(err) {
        alert("错誤");
    })
}
// Render a pending todo item as a Bootstrap panel and append it to
// #showownlist. `returnmsg` is the record from the backend
// (id / email / tododate / title).
function addPost(returnmsg) {
    var item = '' +
        // FIX: quote the id attribute; the original also omitted the space
        // between the class and style attributes on the buttons below,
        // producing malformed HTML.
        '<div class="panel panel-default" id="' + returnmsg.id + '">' +
        '<div class="panel-heading">' +
        '<h3 class="panel-title" id="show' + returnmsg.id + '">' + ' 帳號 : ' + returnmsg.email + ' , 應完成時間時間 :' + returnmsg.tododate + '<br><br>' +
        ' 標題 : ' + returnmsg.title +
        '</h3>' +
        '<button type="button" class="btn btn-success pull-right" style="margin-top:-45px;" onclick="updat(' + returnmsg.id + ');">' + '編輯' + '</button>' +
        '<button type="button" class="btn btn-primary pull-right" style="margin-right:80px;margin-top:-45px;" onclick="finish(' + returnmsg.id + ');">' + '已完成' + '</button>' +
        '</div>' +
        '</div>';
    $('#showownlist').append(item);
}
// Render a finished todo item as a Bootstrap panel and append it to
// #showfinishownlist. Finished items only offer deletion.
function addfinishPost(returnmsg) {
    var item = '' +
        '<div class="panel panel-default" id="' + returnmsg.id + '">' +
        '<div class="panel-heading">' +
        '<h3 class="panel-title" id="show' + returnmsg.id + '">' + ' 帳號 : ' + returnmsg.email + ' , 應完成時間時間 :' + returnmsg.tododate + '<br><br>' +
        ' 標題 : ' + returnmsg.title +
        '</h3>' +
        '<button type="button" class="btn btn-danger pull-right" style="margin-top:-45px;" id="delmodify" onclick="delet(' + returnmsg.id + ')">' + '刪除' + '</button>' +
        // BUG FIX: removed a stray `"` that broke this button's markup and
        // added the missing space before the style attribute.
        '<button type="button" class="btn btn-primary pull-right" style="margin-right:80px;margin-top:-45px;">' + '已完成' + '</button>' +
        '</div>' +
        '</div>';
    $('#showfinishownlist').append(item);
}
function newownlist(item) { //--------------------------------------------------- show the "new item" modal dialog
    $('#myModal').modal('show')
}
|
# ----------------------------------------------------------------------
# phpast.py
#
# PHP abstract syntax node definitions.
# ----------------------------------------------------------------------
class Node(object):
    """Base class for all PHP AST nodes.

    Subclasses declare ``fields``; positional constructor arguments are
    bound to those field names in order. An optional ``lineno`` keyword
    records the source line.
    """
    fields = []

    def __init__(self, *args, **kwargs):
        # Exactly one positional argument per declared field.
        assert len(self.fields) == len(args), \
            '%s takes %d arguments' % (self.__class__.__name__,
                                       len(self.fields))
        self.lineno = kwargs.get('lineno')
        for name, value in zip(self.fields, args):
            setattr(self, name, value)

    def __repr__(self):
        parts = ', '.join(repr(getattr(self, name)) for name in self.fields)
        return "%s(%s)" % (self.__class__.__name__, parts)

    def __eq__(self, other):
        # Equal iff same class and all declared fields compare equal.
        if not isinstance(other, self.__class__):
            return False
        return all(getattr(self, name) == getattr(other, name)
                   for name in self.fields)

    def accept(self, visitor):
        """Call *visitor* on this node, then recurse into Node children
        (including Nodes inside list-valued fields)."""
        visitor(self)
        for name in self.fields:
            child = getattr(self, name)
            if isinstance(child, Node):
                child.accept(visitor)
            elif isinstance(child, list):
                for element in child:
                    if isinstance(element, Node):
                        element.accept(visitor)

    def generic(self, with_lineno=False):
        """Return a plain ``(classname, {field: value})`` representation,
        converting nested nodes recursively."""
        out = {}
        if with_lineno:
            out['lineno'] = self.lineno
        for name in self.fields:
            value = getattr(self, name)
            if hasattr(value, 'generic'):
                value = value.generic(with_lineno)
            elif isinstance(value, list):
                value = [element.generic(with_lineno)
                         if hasattr(element, 'generic') else element
                         for element in value]
            out[name] = value
        return (self.__class__.__name__, out)
def node(name, fields):
    """Create and return a new Node subclass named *name* whose declared
    ``fields`` are *fields*."""
    return type(name, (Node,), {'fields': fields})
# -- Factory-generated AST node classes ---------------------------------
# Markup and simple statements
InlineHTML = node('InlineHTML', ['data'])
Block = node('Block', ['nodes'])
Assignment = node('Assignment', ['node', 'expr', 'is_ref'])
ListAssignment = node('ListAssignment', ['nodes', 'expr'])
New = node('New', ['name', 'params'])
Clone = node('Clone', ['node'])
Break = node('Break', ['node'])
Continue = node('Continue', ['node'])
Return = node('Return', ['node'])
Yield = node('Yield', ['node'])
Global = node('Global', ['nodes'])
Static = node('Static', ['nodes'])
Echo = node('Echo', ['nodes'])
Print = node('Print', ['node'])
Unset = node('Unset', ['nodes'])
# Exception handling and declare blocks
Try = node('Try', ['nodes', 'catches', 'finally'])
Catch = node('Catch', ['class_', 'var', 'nodes'])
Finally = node('Finally', ['nodes'])
Throw = node('Throw', ['node'])
Declare = node('Declare', ['directives', 'node'])
Directive = node('Directive', ['name', 'node'])
# Function / class / trait / interface declarations
Function = node('Function', ['name', 'params', 'nodes', 'is_ref'])
Method = node('Method', ['name', 'modifiers', 'params', 'nodes', 'is_ref'])
Closure = node('Closure', ['params', 'vars', 'nodes', 'is_ref'])
Class = node('Class', ['name', 'type', 'extends', 'implements', 'traits', 'nodes'])
Trait = node('Trait', ['name', 'traits', 'nodes'])
ClassConstants = node('ClassConstants', ['nodes'])
ClassConstant = node('ClassConstant', ['name', 'initial'])
ClassVariables = node('ClassVariables', ['modifiers', 'nodes'])
ClassVariable = node('ClassVariable', ['name', 'initial'])
Interface = node('Interface', ['name', 'extends', 'nodes'])
# Operators and expressions
AssignOp = node('AssignOp', ['op', 'left', 'right'])
BinaryOp = node('BinaryOp', ['op', 'left', 'right'])
UnaryOp = node('UnaryOp', ['op', 'expr'])
TernaryOp = node('TernaryOp', ['expr', 'iftrue', 'iffalse'])
PreIncDecOp = node('PreIncDecOp', ['op', 'expr'])
PostIncDecOp = node('PostIncDecOp', ['op', 'expr'])
Cast = node('Cast', ['type', 'expr'])
IsSet = node('IsSet', ['nodes'])
Empty = node('Empty', ['expr'])
Eval = node('Eval', ['expr'])
Include = node('Include', ['expr', 'once'])
Require = node('Require', ['expr', 'once'])
Exit = node('Exit', ['expr', 'type'])
Silence = node('Silence', ['expr'])
# Variables, constants, and member access
MagicConstant = node('MagicConstant', ['name', 'value'])
Constant = node('Constant', ['name'])
Variable = node('Variable', ['name'])
StaticVariable = node('StaticVariable', ['name', 'initial'])
LexicalVariable = node('LexicalVariable', ['name', 'is_ref'])
FormalParameter = node('FormalParameter', ['name', 'default', 'is_ref', 'type'])
Parameter = node('Parameter', ['node', 'is_ref'])
FunctionCall = node('FunctionCall', ['name', 'params'])
Array = node('Array', ['nodes'])
ArrayElement = node('ArrayElement', ['key', 'value', 'is_ref'])
ArrayOffset = node('ArrayOffset', ['node', 'expr'])
StringOffset = node('StringOffset', ['node', 'expr'])
ObjectProperty = node('ObjectProperty', ['node', 'name'])
StaticProperty = node('StaticProperty', ['node', 'name'])
MethodCall = node('MethodCall', ['node', 'name', 'params'])
StaticMethodCall = node('StaticMethodCall', ['class_', 'name', 'params'])
# Control flow
If = node('If', ['expr', 'node', 'elseifs', 'else_'])
ElseIf = node('ElseIf', ['expr', 'node'])
Else = node('Else', ['node'])
While = node('While', ['expr', 'node'])
DoWhile = node('DoWhile', ['node', 'expr'])
For = node('For', ['start', 'test', 'count', 'node'])
Foreach = node('Foreach', ['expr', 'keyvar', 'valvar', 'node'])
ForeachVariable = node('ForeachVariable', ['name', 'is_ref'])
Switch = node('Switch', ['expr', 'nodes'])
Case = node('Case', ['expr', 'nodes'])
Default = node('Default', ['nodes'])
# Namespaces, use-declarations, and traits
Namespace = node('Namespace', ['name', 'nodes'])
UseDeclarations = node('UseDeclarations', ['nodes'])
UseDeclaration = node('UseDeclaration', ['name', 'alias'])
ConstantDeclarations = node('ConstantDeclarations', ['nodes'])
ConstantDeclaration = node('ConstantDeclaration', ['name', 'initial'])
TraitUse = node('TraitUse', ['name', 'renames'])
TraitModifier = node('TraitModifier', ['from', 'to', 'visibility'])
def resolve_magic_constants(nodes):
    """Walk *nodes* and fill in the values of PHP magic constants
    (__NAMESPACE__, __CLASS__, __FUNCTION__, __METHOD__) from the most
    recently visited namespace/class/function/method.

    NOTE(review): ``current`` tracks the last scope *visited*, not a proper
    scope stack, so a magic constant after a scope ends still sees that
    scope -- confirm this matches the parser's traversal order.
    """
    current = {}
    def visitor(node):
        # Record scope-introducing nodes as the walk passes them.
        if isinstance(node, Namespace):
            current['namespace'] = node.name
        elif isinstance(node, Class):
            current['class'] = node.name
        elif isinstance(node, Function):
            current['function'] = node.name
        elif isinstance(node, Method):
            current['method'] = node.name
        elif isinstance(node, MagicConstant):
            # Resolve the constant from the recorded scopes, qualifying
            # with namespace (and class for __METHOD__) when known.
            if node.name == '__NAMESPACE__':
                node.value = current.get('namespace')
            elif node.name == '__CLASS__':
                node.value = current.get('class')
                if current.get('namespace'):
                    node.value = '%s\\%s' % (current.get('namespace'),
                                             node.value)
            elif node.name == '__FUNCTION__':
                node.value = current.get('function')
                if current.get('namespace'):
                    node.value = '%s\\%s' % (current.get('namespace'),
                                             node.value)
            elif node.name == '__METHOD__':
                node.value = current.get('method')
                if current.get('class'):
                    node.value = '%s::%s' % (current.get('class'),
                                             node.value)
                if current.get('namespace'):
                    node.value = '%s\\%s' % (current.get('namespace'),
                                             node.value)
    for node in nodes:
        if isinstance(node, Node):
            node.accept(visitor)
|
import path from "path"
import resolve from "@rollup/plugin-node-resolve"
import commonjs from "@rollup/plugin-commonjs"
import peerDepsExternal from "rollup-plugin-peer-deps-external"
import postcss from "rollup-plugin-postcss"
import typescript from "rollup-plugin-typescript2"
import copy from "rollup-plugin-copy"
/**
 * Build a Rollup config entry for one TypeScript entry point. Each entry
 * is emitted twice into ./dist -- as CommonJS and as an ES module, both
 * with sourcemaps -- and the tailwind config is copied alongside.
 * @param {string} inputPath - path to the .ts entry file
 */
function bundle(inputPath) {
  const name = path.basename(inputPath, ".ts")
  const output = [
    {
      file: `./dist/${name}.js`,
      format: "cjs",
      sourcemap: true,
    },
    {
      file: `./dist/${name}.esm.js`,
      format: "esm",
      sourcemap: true,
    },
  ]
  const plugins = [
    peerDepsExternal(),
    postcss(),
    resolve(),
    commonjs(),
    typescript(),
    copy({
      targets: [
        { src: "tailwind.config.js", dest: "dist", rename: "tailwind.js" },
      ],
    }),
  ]
  return { input: inputPath, output, plugins }
}

export default [bundle("./src/lib.ts"), bundle("./src/web.ts")]
|
"use strict"
var util = require("util");
var fs = require("fs");
var path = require("path");
var EventEmitter = require("events").EventEmitter;
var livelyDAVPlugin = require('./jsDAV-plugin');
var VersionedFileSystem = require('./VersionedFileSystem');
var d = require('./domain');
var log = require('./util').log;
// Monotonically increasing id generator for pending-change records.
var counter = 0;
function newID() {
    counter += 1;
    return counter;
}
// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
// Repo
// Repository constructor: wires up EventEmitter state, then runs the
// initialize() method from the prototype. A synchronous setup failure is
// surfaced as an 'error' event rather than thrown at the caller.
function Repository(options) {
    try {
        EventEmitter.call(this);
        this.initialize(options);
    } catch (err) {
        this.emit('error', err);
    }
}
util._extend(Repository.prototype, EventEmitter.prototype);
util._extend(Repository.prototype, d.bindMethods({

    // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    // initialize-release

    // Set up the versioned file system and the pending-change machinery.
    initialize: function(options) {
        if (global.lively) lively.repository = this;
        this.fs = new VersionedFileSystem(options);
        // we keep a queue for changes b/c they should be committed to the
        // versioned file system in their incoming order. Before they can be
        // committed async work has to be done, though, which might intermix the
        // change order
        this.pendingChangeQueue = [];
        this.fs.once('initialized', function() { this.emit('initialized'); }.bind(this));
        // Watchdog timer: periodically discards stuck changes (see
        // commitPendingChangesWatcher below).
        this._commitPendingChangesWatcherTimer = setInterval(this.commitPendingChangesWatcher.bind(this), 1000);
        Object.freeze(this);
    },

    // Load (or reset) the stored state from disk.
    start: function(resetDatabase, thenDo) {
        // resetDatabase = drop what was stored previously
        this.fs.initializeFromDisk(resetDatabase, thenDo);
    },

    // Stop the watchdog and announce shutdown.
    close: function(thenDo) {
        clearInterval(this._commitPendingChangesWatcherTimer);
        this.emit('closed');
        thenDo && thenDo(null);
    },

    // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    // DAV

    // Create a DAV plugin wired to this repository's change hooks.
    getDAVPlugin: function() {
        return livelyDAVPlugin.onNew(this.attachToDAVPlugin.bind(this));
    },

    // Subscribe to the plugin's file events so each DAV write / create /
    // delete is recorded as a versioned change.
    attachToDAVPlugin: function(plugin) {
        plugin.on('fileChanged', this.onFileChange.bind(this));
        plugin.on('afterFileChanged', this.onAfterWrite.bind(this));
        plugin.on('fileCreated', this.onFileCreation.bind(this));
        plugin.on('afterFileCreated', this.onAfterWrite.bind(this));
        plugin.on('fileDeleted', this.onFileDeletion.bind(this));
    },

    // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    // change recording

    // True when no change is waiting to be committed.
    isSynchronized: function() { return this.pendingChangeQueue.length === 0; },

    // Watchdog body (runs every second): warns about queued changes that
    // are still missing their stat/content after a minute and discards the
    // head change once it is that old, so the queue cannot stall forever.
    commitPendingChangesWatcher: function() {
        if (!this.pendingChangeQueue.length) return;
        var timeToWorry = 60*1000;
        this.pendingChangeQueue.forEach(function(change) {
            if (Date.now() - change.startTime < timeToWorry) return;
            if (!change.statRead) console.warn('Change for %s has no file stat', change.record.path);
            if (!change.requestDataRead) console.warn('Change for %s has no content', change.record.path);
        });
        var change = this.pendingChangeQueue[0];
        if (Date.now() - change.startTime > timeToWorry) {
            console.warn('Took too long to process change for %s, discarding it', change.record.path);
            this.discardPendingChange(change);
        }
    },

    // Commit the longest committable prefix of the queue to the versioned
    // file system, preserving the order in which the changes arrived.
    commitPendingChanges: function() {
        var repo = this,
            q = this.pendingChangeQueue,
            toCommit = [];
        for (var i = 0; i < q.length; i++) {
            if (!q[i].canBeCommitted()) break;
            toCommit.push(q[i]);
        }
        log("Commiting %s (change ids %s) changes to DB", toCommit.length, toCommit.map(function(change) { return change.id; }).join(','));
        if (!toCommit.length) return;
        repo.pendingChangeQueue.splice(0, toCommit.length);
        repo.fs.addVersions(toCommit.map(function(elem) { return elem.record; }), {}, function(err, version) {
            if (err) {
                console.error('error in addVersions for records ', toCommit);
            }
            toCommit.forEach(function(change) {
                // FIXME: should check that change and version correlate
                if (change && change.callback instanceof Function)
                    change.callback(version);
            });
            if (!repo.pendingChangeQueue.length) {
                log("all pending changes processed");
                repo.emit('synchronized');
            }
        });
    },

    // Drop a change from the queue; when it was the head, try to commit
    // the changes that were queued behind it.
    discardPendingChange: function(change) {
        var idx = this.pendingChangeQueue.indexOf(change);
        if (idx === -1) return;
        this.pendingChangeQueue.splice(idx, 1);
        if (idx === 0) this.commitPendingChanges();
    },

    // After a DAV write finished: find the queued change for that path and
    // read the file's stat to complete it.
    onAfterWrite: function(evt) {
        log('after write: ', evt.uri);
        if (!evt.uri) return;
        var q = this.pendingChangeQueue, change;
        for (var i = 0; i < q.length; i++)
            if (q[i].record.path === evt.uri) { change = q[i]; break; }
        if (!change) return;
        this.readFileStat(change);
    },

    // Turn a DAV event into a queued change record. readBody/readStat say
    // which async pieces (request content, file stat) must arrive before
    // the change may be committed.
    captureDAVEvt: function(changeType, readBody, readStat, evt) {
        if (!evt.uri) { console.error('Error recording file change, no path', evt); return; }
        var taskData = {
            id: newID(),
            record: {
                version: undefined,
                change: changeType,
                author: evt.username || 'unknown',
                date: evt.stat ? evt.stat.mtime :
                    (readStat ? '' :
                    // don't record the ms
                    new Date().toISOString().replace(/[0-9]{3}Z/, '000Z')),
                content: evt.req && evt.req.body ? evt.req.body : null,
                path: evt.uri,
                stat: evt.stat
            },
            canBeCommitted: function() {
                var waitForStat = readStat && !this.statRead,
                    waitForBody = readBody && !this.requestDataRead;
                // NOTE(review): the two log messages below appear swapped
                // (the body case mentions the file stat and vice versa).
                waitForBody && log("%s (change %s) cannot yet be committed because no file stat was read", evt.uri, this.id);
                waitForStat && log("%s (change %s) cannot yet be committed because no request data was read", evt.uri, this.id);
                return !waitForBody && !waitForStat;
            },
            startTime: Date.now(),
            requestDataRead: false,
            statRead: !!evt.stat || false,
            request: evt.req,
            incomingContent: evt.content
        }
        log("capturing DAV event %s (%s, %s)", taskData.id , taskData.request.method, taskData.record.path)
        this.pendingChangeQueue.push(taskData);
        readBody && this.startReadingRequestContent(taskData);
        if (!readBody && !readStat) this.commitPendingChanges();
    },

    onFileChange: function(evt) {
        console.log('file change: ', evt.uri);
        this.captureDAVEvt('contentChange', true, true, evt);
    },

    onFileCreation: function(evt) {
        console.log('file created: ', evt.uri);
        this.captureDAVEvt('created', true, true, evt);
    },

    onFileDeletion: function(evt) {
        console.log('file deleted: ', evt.uri);
        // Deletions need neither request body nor file stat.
        this.captureDAVEvt('deletion', false, false, evt);
    },

    // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    // change processing

    // Poll (every 500ms, up to a 60s timeout) until the request content of
    // a change has fully arrived, then mark it read and try to commit.
    startReadingRequestContent: function(change) {
        log("startReadingRequestContent for change %s", change.id);
        var repo = this;
        if (!change.incomingContent) change.requestDataRead = true;
        if (change.requestDataRead) { this.commitPendingChanges(); return; }
        var timeout = 60*1000, ts = Date.now();
        if (ts-change.startTime > timeout) {
            console.warn("reading content for %s timed out", change.record.path);
            change.requestDataRead = true;
            this.commitPendingChanges();
            return;
        }
        log("waiting for content of %s", change.record.path);
        if (!change.incomingContent.isDone) {
            setTimeout(this.startReadingRequestContent.bind(
                this, change), 500);
            return;
        }
        change.record.content = (change.incomingContent.buffer || '').toString();
        change.requestDataRead = true;
        log("content for %s read", change.record.path);
        repo.commitPendingChanges();
    },

    // Read the on-disk stat for a change's path; the change is discarded
    // when the file cannot be stat'ed.
    readFileStat: function(change) {
        var repo = this;
        log("start reading file stat for %s", change.record.path);
        fs.stat(path.join(repo.getRootDirectory(), change.record.path), function(err, stat) {
            if (err || !stat) {
                console.error('readFileStat: ', err);
                repo.discardPendingChange(change);
                return;
            }
            log("file stat for %s read", change.record.path, stat);
            change.record.stat = stat;
            change.record.date = stat.mtime.toISOString();
            change.statRead = true;
            repo.commitPendingChanges();
        });
    },

    // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    // accessors (thin delegates to the versioned file system)
    getRootDirectory: function() { return this.fs.getRootDirectory(); },
    getFiles: function(thenDo) { this.fs.getFiles(thenDo); },
    getFileRecord: function(options, thenDo) { return this.fs.getFileRecord(options, thenDo); },
    getRecords: function(options, thenDo) { return this.fs.getRecords(options, thenDo); },
    getVersionsFor: function(path, thenDo) { return this.getRecords({paths: [path]}, thenDo); },

    // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
    // debugging
    logState: function() {
        console.log('log repo state:');
        console.log("versionedFileInfos: ");
        console.dir(this.fs, 1);
    }
}));

// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
// exports
module.exports = Repository;
|
from skeleton_parsing import skeleton_parser_str
from itertools import chain
import networkx as nx
def get_deppath_list(question_normal, ungrounded_nodes, isSkeletonorDep='Skeleton'):
    """Return the abstract dependency-path strings for a question.

    Tokens are first placeheld around the ungrounded nodes, then the
    dependency paths from the answer anchor to each link anchor are
    extracted. Parsing failures fall back to an empty list (best effort).
    """
    final_ph_tok_list, link_anchor_list, ans_anchor = placeholding_node(
        question_normal.split(' '), ungrounded_nodes=ungrounded_nodes)
    try:
        path_tok_lists = dep_path_seq(ph_tok_list=final_ph_tok_list,
                                      link_anchor_list=link_anchor_list,
                                      ans_anchor=ans_anchor,
                                      isSkeletonorDep=isSkeletonorDep)  # 'Skeleton' or 'Dep'
    except Exception:
        # Best effort: a parse error yields no paths rather than aborting.
        path_tok_lists = []
    return [' '.join(path_toks).strip() for path_toks in path_tok_lists]
def triples(dep_parse, node=None):
    """
    Yield dependency triples of the form
    (head address, relation, dependent address),
    walking the graph depth-first from the root (or from *node*),
    visiting dependents in address order.
    """
    if not node:
        node = dep_parse.root
    head = node['address']
    for addr in sorted(chain.from_iterable(node['deps'].values())):
        dep = dep_parse.get_by_address(addr)
        yield (head, dep['rel'], dep['address'])
        yield from triples(dep_parse=dep_parse, node=dep)
def get_redundancy_continuous_index(token_nodes, redundancy):
    '''Locate the phrase *redundancy* as a contiguous token run.

    Scans the whole token list; returns the inclusive (start_index,
    end_index) token span of the match found by the scan, or (-1, -1)
    when the phrase does not occur.

    BUG FIX: the original indexed token_nodes[j] without a bounds check,
    raising IndexError when a partial match reached the end of the
    sentence (e.g. tokens ['a', 'b'] vs phrase 'b c'). A guard now makes
    that case a mismatch instead.
    '''
    redundancy_tokens = redundancy.split(' ')
    j = 0
    start_index = -1
    end_index = -1
    while j < len(token_nodes):
        common = 0
        for redundancy_index in range(len(redundancy_tokens)):
            # Guard j against running past the end of token_nodes when a
            # partial match starts near the end of the sentence.
            if j < len(token_nodes) and redundancy_tokens[redundancy_index] == token_nodes[j]:
                common = common + 1
                j = j + 1
            else:
                # Mismatch: rewind to the token after the attempted start.
                j = j - common
                break
        if common == len(redundancy_tokens):
            start_index = j - common
            end_index = j - 1
        j = j + 1
    return start_index, end_index
def placeholding_node(tok_list, ungrounded_nodes):
    """ Step 1: Shrink each E/Tm link, occupying only one token """
    ph_tok_list = list(tok_list)
    # link_pos_list[i] = index of the linked node whose phrase ends at token i,
    # or -1 when token i ends no link.
    link_pos_list = [-1] * len(tok_list)
    for link_idx, gl_data in enumerate(ungrounded_nodes):
        # Only entity / literal nodes are anchored to tokens here.
        if gl_data.node_type != 'entity' and gl_data.node_type != 'literal':
            continue
        # st = gl_data.start_position #start index
        # ed = gl_data.end_position #end index
        st, ed = get_redundancy_continuous_index(token_nodes=tok_list, redundancy=gl_data.friendly_name)
        # NOTE(review): when the phrase is not found, ed is -1 and this marks
        # the *last* token -- confirm friendly_name always occurs in tok_list.
        link_pos_list[ed] = link_idx
        # identifying the anchor word of the current linking #[-1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1]
        # for tok_idx in range(st, ed):
        #     ph_tok_list[tok_idx] = ''
    # print (ph_tok_list) #['Who', 'is', 'the', 'office', 'holder', 'with', 'deputies', 'as', '', 'Brown', '?']
    tok_link_tups = []
    # keep only the root (trailing) token of each link phrase
    for ph_tok, link_idx in zip(ph_tok_list, link_pos_list):
        if ph_tok != '':
            tok_link_tups.append([ph_tok, link_idx])
    # remove non-trailing words of E/Tm [['Who', -1], ['is', -1], ['the', -1], ['office', -1], ['holder', -1], ['with', -1], ['deputies', -1],
    # ['as', -1], ['Brown', 0], ['?', -1]]
    link_anchor_list = [-1] * len(ungrounded_nodes)  # the anchor position
    for anchor_idx, tup in enumerate(tok_link_tups):
        link_idx = tup[-1]
        if link_idx != -1:
            link_anchor_list[link_idx] = anchor_idx
    # Drop nodes that never got an anchor (e.g. non-entity/literal nodes).
    link_anchor_list_update = []
    for link_idx in link_anchor_list:
        if link_idx != -1:
            link_anchor_list_update.append(link_idx)
    link_anchor_list = link_anchor_list_update
    """ Step 2: Determine answer's anchor point """
    ans_anchor = 0  # find the first wh- word in the sentence, otherwise picking the first word
    # for tok_idx, (ph_tok, link_idx) in enumerate(tok_link_tups):
    #     if ph_tok.startswith('wh') or ph_tok == 'how' or ph_tok.startswith('Wh') or ph_tok == 'How':
    #         ans_anchor = tok_idx
    #         break
    # Current strategy: anchor the answer at the end position of the
    # question node (falls back to token 0 when none is marked).
    for link_idx, gl_data in enumerate(ungrounded_nodes):
        if gl_data.question_node != 1:
            continue
        # st = gl_data.start_position #start index
        ed = gl_data.end_position  #end index
        ans_anchor = ed
    """ Step 3: Dynamic replacement """
    # for anchor_idx in range(len(tok_link_tups)):
    #     ph_tok, link_idx = tok_link_tups[anchor_idx]
    #     if link_idx == -1:
    #         continue
    #     tok_link_tups[anchor_idx][0] = 'XYZ'+str(link_idx) # default value
    final_ph_tok_list = [tup[0] for tup in tok_link_tups]
    return final_ph_tok_list, link_anchor_list, ans_anchor
def dep_path_seq(ph_tok_list, link_anchor_list, ans_anchor, isSkeletonorDep='Skeleton'):
    """Compute dependency-path token sequences from the answer anchor to
    every link anchor.

    :param ph_tok_list: placeholder token list (one token per kept word)
    :param link_anchor_list: token index of each link's anchor word
    :param ans_anchor: token index of the answer anchor
    :param isSkeletonorDep: 'Skeleton' for the hybrid tree, 'Dep' for the
        plain dependency tree
    :return: one token/relation path list per link anchor
    """
    # Map each anchor position to a placeholder label <E0>, <E1>, ...
    placeholder_dict = {}
    for i, link_anchor in enumerate(link_anchor_list):
        placeholder_dict[link_anchor] = '<E'+str(i)+'>'
    utterance = ' '.join(ph_tok_list)
    assert isSkeletonorDep in ['Dep', 'Skeleton']
    if isSkeletonorDep == 'Dep':
        dependency_graph = skeleton_parser_str.get_dependency_tree(question=utterance)
    elif isSkeletonorDep == 'Skeleton':
        dependency_graph = skeleton_parser_str.get_hybrid_dependency_tree(question=utterance)
    # Edge lookup: "head-dep" -> rel and the reverse "dep-head" -> !rel.
    edge_dict = {}
    for head_position, rel, dep_position in triples(dep_parse=dependency_graph):
        fwd_key = '%d-%d' % (head_position, dep_position)  # head -> dependent
        bkwd_key = '%d-%d' % (dep_position, head_position)  # dependent -> head
        edge_dict[fwd_key] = rel  # head -> dependent ----> relation
        edge_dict[bkwd_key] = '!%s' % rel  # dependent -> head ----> !relation
    path_tok_lists = []
    for _, link_anchor in enumerate(link_anchor_list):
        path_tok_list = find_path(ph_tok_list=ph_tok_list, dep_parse=dependency_graph, link_anchor=link_anchor,
                                  ans_anchor=ans_anchor, edge_dict=edge_dict, ph_dict=placeholder_dict)
        path_tok_lists.append(path_tok_list)
    return path_tok_lists
def find_path(ph_tok_list, dep_parse, link_anchor, ans_anchor, edge_dict, ph_dict):
    """
    Build the token/relation sequence along the shortest dependency path
    from the answer anchor to a link anchor.

    :param ph_tok_list: placeholder token list
    :param dep_parse: dependency graph
    :param link_anchor: token index of the focus word (0-based)
    :param ans_anchor: token index of the answer (0-based)
    :param edge_dict: <head-dep, rel> dict
    :param ph_dict: <token_idx, ph> dict
    :return: list of tokens/placeholders interleaved with relation labels
    """
    if ans_anchor != link_anchor:
        # Treat the dependency tree as an undirected graph and take the
        # shortest path; graph nodes are 1-based addresses, hence the +1.
        edges = []
        for head, rel, dep in triples(dep_parse=dep_parse):
            edges.append((head, dep))
        graph = nx.Graph(edges)
        path_nodes = nx.shortest_path(graph, source=ans_anchor+1, target=link_anchor+1)  #[0, 1, 2, 3, 4]
    else:
        path_nodes = [link_anchor]
    path_tok_list = []
    path_len = len(path_nodes)
    if path_len > 0:
        # Emit each hop's token (or its placeholder) followed by the edge
        # label connecting it to the next node on the path.
        for position in range(path_len-1):
            edge = edge_dict['%d-%d' % (path_nodes[position], path_nodes[position+1])]
            cur_token_idx = path_nodes[position] - 1
            if cur_token_idx in ph_dict:
                path_tok_list.append(ph_dict[cur_token_idx])
            else:
                path_tok_list.append(ph_tok_list[cur_token_idx])
            path_tok_list.append(edge)
        # Close with the focus word's placeholder (or a generic <E>).
        if link_anchor in ph_dict:
            path_tok_list.append(ph_dict[link_anchor])
        else:
            path_tok_list.append('<E>')
    return path_tok_list
|
// plugin_node.js
const PluginNode = {
'初期化': {
type: 'func',
josi: [],
fn: function (sys) {
sys.__varslist[0]['コマンドライン'] = process.argv
}
},
// @ファイル入出力
'開': { // @ファイルSを開く // @ひらく
type: 'func',
josi: [['を', 'から']],
fn: function (s) {
const fs = require('fs')
return fs.readFileSync(s, 'utf-8')
}
},
'読': { // @ファイルSを開く // @よむ
type: 'func',
josi: [['を', 'から']],
fn: function (s) {
const fs = require('fs')
return fs.readFileSync(s, 'utf-8')
}
},
'保存': { // @ファイルFヘSを書き込む // @ほぞん
type: 'func',
josi: [['へ', 'に'], ['を']],
fn: function (f, s) {
const fs = require('fs')
fs.writeFileSync(f, s, 'utf-8')
},
return_none: true
},
'起動': { // @シェルコマンドSを起動 // @きどう
type: 'func',
josi: [['を']],
fn: function (s) {
const execSync = require('child_process').execSync
const r = execSync(s)
return r.toString()
}
},
'カレントディレクトリ取得': { // @カレントディレクトリを返す // @かれんとでぃれくとりしゅとく
type: 'func',
josi: [],
fn: function () {
const cwd = process.cwd()
const path = require('path')
return path.resolve(cwd)
}
},
'カレントディレクトリ変更': { // @カレントディレクトリをDIRに変更する // @かれんとでぃれくとりへんこう
type: 'func',
josi: [['に', 'へ']],
fn: function (dir) {
process.chdir(dir)
},
return_none: true
},
'作業フォルダ取得': { // @カレントディレクトリを返す // @さぎょうふぉるだしゅとく
type: 'func',
josi: [],
fn: function () {
const cwd = process.cwd()
const path = require('path')
return path.resolve(cwd)
}
},
'作業フォルダ変更': { // @カレントディレクトリをDIRに変更する // @さぎょうふぉるだへんこう
type: 'func',
josi: [['に', 'へ']],
fn: function (dir) {
process.chdir(dir)
},
return_none: true
},
'母艦パス取得': { // @スクリプトのあるディレクトリを返す // @ぼかんぱすしゅとく
type: 'func',
josi: [],
fn: function () {
const path = require('path')
let nakofile
const cmd = path.basename(process.argv[1])
if (cmd.indexOf('cnako3') < 0) {
nakofile = process.argv[1]
} else {
nakofile = process.argv[2]
}
return path.dirname(path.resolve(nakofile))
}
},
'環境変数取得': { // @環境変数の一覧を返す // @かんきょうへんすうしゅとく
type: 'func',
josi: [],
fn: function () {
return process.env
}
},
'ファイル列挙': { // @パスSのファイル名(フォルダ名)一覧を取得する。ワイルドカード可能。「*.jpg;*.png」など複数の拡張子を指定可能。 // @ふぁいるれっきょ
  // List file/folder names in path S. Supports '*' wildcards and multiple
  // ';'-separated patterns (e.g. "*.jpg;*.png"); matching is case-insensitive.
  type: 'func',
  josi: [['の', 'を', 'で']],
  fn: function (s) {
    const fs = require('fs')
    const path = require('path')
    if (s.indexOf('*') >= 0) { // a wildcard mask is present
      const searchPath = path.dirname(s)
      // Build a regex from the mask: escape '.', turn '*' into '.*'.
      // NOTE(review): other regex metacharacters (e.g. '+', '(') are not
      // escaped — masks containing them may misbehave; confirm inputs.
      const mask1 = path.basename(s)
        .replace(/\./g, '\\.')
        .replace(/\*/g, '.*')
      // Multiple ';'-separated masks become an alternation anchored at the end.
      const mask2 = (mask1.indexOf(';') < 0)
        ? mask1 + '$' : '(' + mask1.replace(/;/g, '|') + ')$'
      const maskRE = new RegExp(mask2, 'i')
      const list = fs.readdirSync(searchPath)
      const list2 = list.filter((n) => maskRE.test(n))
      return list2
    } else {
      const list = fs.readdirSync(s)
      return list
    }
  }
},
'全ファイル列挙': { // @パスS以下の全ファイル名を取得する。ワイルドカード可能。「*.jpg;*.png」のように複数の拡張子を指定可能。 // @ぜんふぁいるれっきょ
  // Recursively collect every file path under S; directories themselves are
  // not included in the result. An optional '*' mask (with ';'-separated
  // alternatives) restricts which file names are collected.
  type: 'func',
  josi: [['の', 'を', 'で']],
  fn: function (s) {
    const fs = require('fs')
    const path = require('path')
    const result = []
    // Check whether S carries a wildcard mask; default matches everything.
    let mask = '.*'
    let basepath = s
    if (s.indexOf('*') >= 0) {
      basepath = path.dirname(s)
      const mask1 = path.basename(s)
        .replace(/\./g, '\\.')
        .replace(/\*/g, '.*')
      mask = (mask1.indexOf(';') < 0)
        ? mask1 + '$' : '(' + mask1.replace(/;/g, '|') + ')$'
    }
    basepath = path.resolve(basepath)
    const maskRE = new RegExp(mask, 'i')
    // Recursive directory walker.
    // NOTE(review): statSync follows symlinks, so a symlink cycle recurses
    // forever, and a broken link makes statSync throw — confirm inputs are
    // plain directory trees.
    const enumR = (base) => {
      const list = fs.readdirSync(base)
      for (const f of list) {
        if (f === '.' || f === '..') continue // readdirSync never returns these; kept as a safety net
        const fullpath = path.join(base, f)
        const st = fs.statSync(fullpath)
        if (st.isDirectory()) {
          enumR(fullpath)
          continue
        }
        if (maskRE.test(f)) result.push(fullpath)
      }
    }
    // Run the search from the resolved base path.
    enumR(basepath)
    return result
  }
},
'ファイル名抽出': { // @フルパスのファイル名Sからファイル名部分を抽出して返す // @ふぁいるめいちゅうしゅつ
type: 'func',
josi: [['から', 'の']],
fn: function (s) {
const path = require('path')
return path.basename(s)
}
},
'パス抽出': { // @ファイル名Sからパス部分を抽出して返す // @ぱすちゅうしゅつ
type: 'func',
josi: [['から', 'の']],
fn: function (s) {
const path = require('path')
return path.dirname(s)
}
},
'存在': { // @ファイルPATHが存在するか確認して返す // @そんざい
type: 'func',
josi: [['が', 'の']],
fn: function (path) {
const fs = require('fs')
try {
fs.statSync(path)
return true
} catch (err) {
return false
}
}
},
// @Nodeプロセス
'終': { // @Nodeでプログラム実行を強制終了する // @終わる
  // Terminate the Node process immediately (default exit code 0).
  type: 'func',
  josi: [],
  fn: function () {
    process.exit()
  }
},
// @コマンドライン
// Placeholder constant; the real value (process.argv) is assigned at startup.
'コマンドライン': {type: 'const', value: ''}, // @こまんどらいん
'標準入力取得時': { // @標準入力を一行取得した時に、無名関数(あるいは、文字列で関数名を指定)F(s)を実行する // @ひょうじゅんにゅうりょくしゅとくしたとき
  // Invoke callback(line) for every line read from standard input.
  // NOTE(review): a new readline interface is created on every call and is
  // never closed — confirm this command is only used once per program.
  type: 'func',
  josi: [['を']],
  fn: function (callback) {
    const reader = require('readline').createInterface({
      input: process.stdin,
      output: process.stdout
    })
    reader.on('line', function (line) {
      callback(line)
    })
  }
},
// @ASSERTテスト
'ASSERT等': { // @ mochaによるテストで、ASSERTでAとBが正しいことを報告する // @ASSERTひとしい
  // Assert that A equals B; throws AssertionError when they differ.
  // NOTE(review): assert.equal compares loosely (==) and is a legacy Node
  // API — switching to strictEqual would be stricter but could break
  // existing user scripts that rely on '1' == 1; confirm before changing.
  type: 'func',
  josi: [['と'], ['が']],
  fn: function (a, b, sys) {
    const assert = require('assert')
    assert.equal(a, b)
  }
}
}
module.exports = PluginNode
|
const express = require("express");
const router = express.Router();
const add = require("../../api/addFaceset.js");
const get = require("../../api/getFaceset.js");
const del = require("../../api/deleteFace.js");
const search = require("../../api/searchByFace.js");
const match = require("../../api/match.js");
module.exports = key => {
router.post("/add", function(req, res) {
if (req.user) {
if (req.user.system === "student") {
add(req, key)
.then(data => {
res.json(JSON.parse(data));
})
.catch(err => {
res.send(JSON.stringify(err));
});
} else {
res.send({ success: false, message: "您无权访问本系统" });
}
} else {
res.send({ success: false, message: "请求失败" });
}
});
router.post("/get", function(req, res) {
if (req.user) {
if (req.user.system === "student") {
get(req, key)
.then(data => {
res.json(JSON.parse(data));
})
.catch(err => {
res.send(JSON.stringify(err));
});
} else {
res.send({ success: false, message: "您无权访问本系统" });
}
} else {
res.send({ success: false, message: "请求失败" });
}
});
router.post("/delete", function(req, res) {
if (req.user) {
if (req.user.system === "student") {
del(req, key)
.then(data => {
res.json(JSON.parse(data));
})
.catch(err => {
res.send(JSON.stringify(err));
});
} else {
res.send({ success: false, message: "您无权访问本系统" });
}
} else {
res.send({ success: false, message: "请求失败" });
}
});
router.post("/search", function(req, res) {
if (req.user) {
if (req.user.system === "student") {
search(req, key)
.then(data => {
res.json(JSON.parse(data));
})
.catch(err => {
res.send(JSON.stringify(err));
});
} else {
res.send({ success: false, message: "您无权访问本系统" });
}
} else {
res.send({ success: false, message: "请求失败" });
}
});
router.post("/match", function(req, res) {
if (req.user) {
if (req.user.system === "student") {
match(req, key)
.then(data => {
res.send({ success: true, data });
})
.catch(err => {
res.send({ success: false, data: err });
});
} else {
res.send({ success: false, message: "您无权访问本系统" });
}
} else {
res.send({ success: false, message: "请求失败" });
}
});
return router;
};
|
// JSONP payload: invokes the page-supplied `mycallback` with a single FEC
// Schedule A (form type SA11AI) itemized-contribution record. Pure data —
// field values must not be edited by hand.
mycallback( {"CONTRIBUTOR OCCUPATION": "Retired", "CONTRIBUTION AMOUNT (F3L Bundled)": "100.00", "ELECTION CODE": "G2010", "MEMO CODE": "", "CONTRIBUTOR EMPLOYER": "n/a", "DONOR CANDIDATE STATE": "", "CONTRIBUTOR STREET 1": "7440 Labranza Street", "CONTRIBUTOR MIDDLE NAME": "", "DONOR CANDIDATE FEC ID": "", "DONOR CANDIDATE MIDDLE NAME": "", "CONTRIBUTOR STATE": "CA", "DONOR CANDIDATE FIRST NAME": "", "CONTRIBUTOR FIRST NAME": "Bronwyn D.", "BACK REFERENCE SCHED NAME": "", "DONOR CANDIDATE DISTRICT": "", "CONTRIBUTION DATE": "20101021", "DONOR COMMITTEE NAME": "", "MEMO TEXT/DESCRIPTION": "", "Reference to SI or SL system code that identifies the Account": "", "FILER COMMITTEE ID NUMBER": "C00461061", "DONOR CANDIDATE LAST NAME": "", "CONTRIBUTOR LAST NAME": "Anthony", "_record_type": "fec.version.v7_0.SA", "CONDUIT STREET2": "", "CONDUIT STREET1": "", "DONOR COMMITTEE FEC ID": "", "CONTRIBUTION PURPOSE DESCRIP": "", "CONTRIBUTOR ZIP": "95683", "CONTRIBUTOR STREET 2": "", "CONDUIT CITY": "", "ENTITY TYPE": "IND", "CONTRIBUTOR CITY": "Sloughhouse", "CONTRIBUTOR SUFFIX": "", "TRANSACTION ID": "INCA7270", "DONOR CANDIDATE SUFFIX": "", "DONOR CANDIDATE OFFICE": "", "CONTRIBUTION PURPOSE CODE": "15", "ELECTION OTHER DESCRIPTION": "", "_src_file": "2011/20110504/727407.fec_1.yml", "CONDUIT STATE": "", "CONTRIBUTOR ORGANIZATION NAME": "", "BACK REFERENCE TRAN ID NUMBER": "", "DONOR CANDIDATE PREFIX": "", "CONTRIBUTOR PREFIX": "", "CONDUIT ZIP": "", "CONDUIT NAME": "", "CONTRIBUTION AGGREGATE F3L Semi-annual Bundled": "222.50", "FORM TYPE": "SA11AI"});
|
document.addEventListener('DOMContentLoaded', function () {
  // Load the user's cart via the page-shared XHR object (`http`) and render
  // the product rows plus the order-summary footer.
  // Fixes vs. previous version: the lightbox <a href> template contained
  // PHP-style concatenation residue ('.${...}.' and a stray paren), and
  // `total.innerHTML` was assigned on a string primitive (a no-op) — the
  // total is rendered once in the summary instead.
  const car_user = document.getElementById('car_user');
  const user_id = car_user.getAttribute('user_id');
  const car_products = document.getElementById('car_products');
  const car_total_container = document.getElementById('car_total_container');
  const url = base + '/md/api/car/products/' + user_id;
  http.open('GET', url, true);
  http.setRequestHeader('X-CSRF-TOKEN', csrfToken);
  http.send();
  http.onreadystatechange = function () {
    if (this.readyState == 4 && this.status == 200) {
      const data = JSON.parse(this.responseText);
      const products = data.products;
      // Values for the order summary
      let costo_total = 0;
      const cantidad_productos = products.length;
      let rows = '';
      products.forEach(product => {
        // NOTE(review): product.amount is not factored into the total —
        // confirm whether costo_total should be price * amount.
        const price = parseInt(product.price, 10);
        costo_total += price;
        rows += `<tr>
          <td width="">${product.id}</td>
          <td>
            <a href="/uploads/${product.file_path}/${product.image}" data-fancybox="gallery">
              <img src="/uploads/${product.file_path}/t_${product.image}" width="64">
            </a>
          </td>
          <td>${product.code}</td>
          <td width="300">${product.name}</td>
          <td class="price_united">$ ${product.price}</td>
          <td width="150">
            <div class="opts">
              <a href="#" onclick="delete_product_car(${product.id}); return false;" data-action="delete" title="Eliminar" class="btn btn-danger action-delete"><i class="fas fa-trash-alt"></i></a>
            </div>
          </td>
        </tr>`;
      });
      // Single DOM write instead of one innerHTML append per product.
      car_products.innerHTML += rows;
      car_total_container.innerHTML = ` <h2 class="car_total">Total: <span id="total"> $${costo_total}.00</span></h2>
        <div class="car_options">
          <a href="/account/orders/${user_id}" class="btn btn-success"><i class=" fas fa-clipboard"></i> Mis ordenes</a>
          <a href="#" onclick="order_now('${user_id}', '1', '${cantidad_productos}', '${costo_total}', '0'); return false" class="btn btn-success">Ordenar</a>
        </div>`;
    }
  }
});
/**
 * Remove product `id` from the user's cart, then reload the page so the
 * cart listing refreshes. Uses the page-level shared `http` XHR object and
 * the `base`/`csrfToken` globals.
 * Fix: `url` was an implicit global; it is now a local constant.
 * @param {number|string} id - product id to remove
 */
function delete_product_car(id) {
  const url = base + '/account/car/delete/' + id;
  http.open('GET', url, true);
  http.setRequestHeader('X-CSRF-TOKEN', csrfToken);
  http.send();
  http.onreadystatechange = function () {
    if (this.readyState == 4 && this.status == 200) {
      const data = JSON.parse(this.responseText);
      if (data.status == "success") {
        window.location.reload();
      }
    }
  }
}
/**
 * Prompt the user for a shipping address, then create an order.
 * Fix: the free-text address is sent as a URL path segment, so it is now
 * escaped with encodeURIComponent — spaces, '/', '#', '?' previously broke
 * the request URL.
 * @param {string} id_user   owner of the order
 * @param {string} module    module code (cart = '1')
 * @param {string} cantidad  number of products in the order
 * @param {string} total     total amount to pay
 * @param {string} paid_out  initial paid flag ('0' = unpaid)
 */
async function order_now(id_user, module, cantidad, total, paid_out) {
  const { value: direction } = await Swal.fire({
    input: 'textarea',
    required: true,
    title: 'Ingrese una dirección',
    text: 'Indique la dirección a la cual se enviara el pedido',
    inputPlaceholder: 'Escriba su dirección aqui...',
    inputAttributes: {
      'aria-label': 'Escriba su dirección aqui'
    },
    showCancelButton: true
  })
  if (direction) {
    const url = base + '/md/api/orders/add/' + id_user + '/' + module + '/' + total + '/' + cantidad + '/' + encodeURIComponent(direction) + '/' + paid_out;
    http.open('POST', url, true);
    http.setRequestHeader('X-CSRF-TOKEN', csrfToken);
    http.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
    http.send();
    http.onreadystatechange = function () {
      if (this.readyState == 4 && this.status == 200) {
        const data = JSON.parse(this.responseText);
        if (data.status == "success") {
          Swal.fire({
            icon: 'success',
            title: 'Orden realizada',
            close: false,
            text: 'Puedes realizar el proceso de pago dentro del modulo de tus ordenes',
            footer: `<a href="/account/orders/${id_user}" class="enlace-orden">Ver ordenes</a>`
          })
        }
      }
    }
  } else {
    Swal.fire({
      icon: 'warning',
      title: 'Advertencia',
      text: 'Es necesario que ingreses una dirección a la cual se enviará el pedido'
    })
  }
  // `direction` is undefined here when the dialog was cancelled.
  console.log(`ID usuario: ${id_user} - Cantidad de productos: ${cantidad} - Total a pagar: ${total} - Dirección ${direction}`);
}
/**
 * Empty the whole cart for user `id_user`.
 * Fix: the URL was built from an undefined variable `id` instead of the
 * `id_user` parameter, throwing a ReferenceError before any request was sent.
 * @param {number|string} id_user - owner of the cart to empty
 */
function car_delete_all(id_user) {
  const url = base + '/md/api/car/delete_all/' + id_user;
  http.open('GET', url, true);
  http.setRequestHeader('X-CSRF-TOKEN', csrfToken);
  http.send();
  http.onreadystatechange = function () {
    if (this.readyState == 4 && this.status == 200) {
      const data = JSON.parse(this.responseText);
      // NOTE(review): no action is taken on success (the reload was
      // commented out in the original) — confirm the desired behavior.
    }
  }
}
|
'use strict'
// Development build environment: starts from the production env definitions
// and overrides the values below.
const merge = require('webpack-merge')
const prodEnv = require('./prod.env')
module.exports = merge(prodEnv, {
  NODE_ENV: '"development"', // double-quoted: values are injected verbatim by webpack DefinePlugin
  TOKENAPI:'"http://172.16.4.237:8090"', // token service endpoint used during development
  LOCALTOKEN:false // use the remote token API rather than a local stub
})
|
// Copy the dcrdex site assets into bin/site using the platform's native
// shell tools. Fixes vs. previous version: the unused (and long-deprecated)
// `require('sys')` import is removed, and exec failures are now reported
// instead of being silently dropped (exec without a callback ignores errors).
var exec = require('child_process').exec;
var os = require('os');

// Shared completion handler: surface any copy failure and fail the build.
var report = function (err, stdout, stderr) {
  if (err) {
    console.error(stderr || err);
    process.exitCode = 1;
  }
};

if (os.type() === 'Linux' || os.type() === 'Darwin' )
  exec("rm -rf bin/site && cp -R ./node_modules/dcrdex-assets/dexc/site bin/", report);
else if (os.type() === 'Windows_NT')
  exec("rd /s /q \"bin/site\" && Xcopy /E /I \"./node_modules/dcrdex-assets/dexc/site\" \"bin/site\"", report);
else
  throw new Error("Unsupported OS found: " + os.type());
|
/**
* Contaxy API
* Functionality to create and manage projects, services, jobs, and files.
*
* The version of the OpenAPI document: 0.0.6
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*
*/
import ApiClient from '../ApiClient';
import DeploymentCompute from './DeploymentCompute';
/**
* The JobInput model module.
* @module model/JobInput
* @version 0.0.6
*/
class JobInput {
  /**
   * Constructs a new <code>JobInput</code>.
   * @alias module:model/JobInput
   * @param containerImage {String} The container image used for this deployment.
   */
  constructor(containerImage) {
    JobInput.initialize(this, containerImage);
  }

  /**
   * Initializes the fields of this object.
   * This method is used by the constructors of any subclasses, in order to implement multiple inheritance (mix-ins).
   * Only for internal use.
   */
  static initialize(obj, containerImage) {
    obj['container_image'] = containerImage;
  }

  /**
   * Constructs a <code>JobInput</code> from a plain JavaScript object, optionally creating a new instance.
   * Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
   * @param {Object} data The plain JavaScript object bearing properties of interest.
   * @param {module:model/JobInput} obj Optional instance to populate.
   * @return {module:model/JobInput} The populated <code>JobInput</code> instance.
   */
  static constructFromObject(data, obj) {
    if (!data) {
      return obj;
    }
    const instance = obj || new JobInput();
    // Field name -> type descriptor understood by ApiClient.convertToType;
    // iteration order mirrors the original per-field if-chain.
    const fieldTypes = {
      container_image: 'String',
      parameters: { String: 'String' },
      compute: DeploymentCompute,
      command: ['String'],
      args: ['String'],
      requirements: ['String'],
      endpoints: ['String'],
      display_name: 'String',
      description: 'String',
      icon: 'String',
      metadata: { String: 'String' },
      disabled: 'Boolean',
    };
    for (const [field, type] of Object.entries(fieldTypes)) {
      if (data.hasOwnProperty(field)) {
        instance[field] = ApiClient.convertToType(data[field], type);
      }
    }
    return instance;
  }
}
/**
 * The container image used for this deployment.
 * @member {String} container_image
 */
JobInput.prototype['container_image'] = undefined;
/**
 * Parameters (environment variables) for this deployment.
 * @member {Object.<String, String>} parameters
 */
JobInput.prototype['parameters'] = undefined;
/**
 * Compute instructions and limitations for this deployment.
 * @member {module:model/DeploymentCompute} compute
 */
JobInput.prototype['compute'] = undefined;
/**
 * Command to run within the deployment. This overwrites the existing docker ENTRYPOINT.
 * @member {Array.<String>} command
 */
JobInput.prototype['command'] = undefined;
/**
 * Arguments to the command/entrypoint. This overwrites the existing docker CMD.
 * @member {Array.<String>} args
 */
JobInput.prototype['args'] = undefined;
/**
 * Additional requirements for deployment.
 * @member {Array.<String>} requirements
 */
JobInput.prototype['requirements'] = undefined;
/**
 * A list of HTTP endpoints that can be accessed. This should always have an internal port and can include additional instructions, such as the URL path.
 * @member {Array.<String>} endpoints
 */
JobInput.prototype['endpoints'] = undefined;
/**
 * A user-defined human-readable name of the resource. The name can be up to 128 characters long and can consist of any UTF-8 character.
 * @member {String} display_name
 */
JobInput.prototype['display_name'] = undefined;
/**
 * A user-defined short description about the resource. Can consist of any UTF-8 character.
 * @member {String} description
 */
JobInput.prototype['description'] = undefined;
/**
 * Identifier or image URL used for displaying this resource.
 * @member {String} icon
 */
JobInput.prototype['icon'] = undefined;
/**
 * A collection of arbitrary key-value pairs associated with this resource that does not need predefined structure. Enable third-party integrations to decorate objects with additional metadata for their own use.
 * @member {Object.<String, String>} metadata
 */
JobInput.prototype['metadata'] = undefined;
/**
 * Allows to disable a resource without requiring deletion. A disabled resource is not shown and not accessible.
 * @member {Boolean} disabled
 * @default false
 */
JobInput.prototype['disabled'] = false;
export default JobInput;
|
"""wargame.hut
This module contains the Hut class implementation.
This module is compatible with Python 3.5.x. It contains
supporting code for the book, Learning Python Application Development,
Packt Publishing.
.. todo::
The code comments and function descriptions in this file are
intentionally kept to a minimum! See Chapter 4 of the book to
learn about the code documentation and best practices!
:copyright: 2016, Ninad Sathaye
:license: The MIT License (MIT) . See LICENSE file for further details.
"""
from __future__ import print_function
from gameutils import print_bold
class Hut:
    """A hut in the game Attack of the Orcs.

    Tracks its number, its current occupant (a unit object or None) and
    whether the player has acquired it.
    """

    def __init__(self, number, occupant):
        # `occupant` is the unit living here (or None for an empty hut).
        self.occupant = occupant
        self.number = number
        self.is_acquired = False

    def acquire(self, new_occupant):
        """Install ``new_occupant`` in this hut and mark it as acquired."""
        self.occupant = new_occupant
        self.is_acquired = True
        print_bold("GOOD JOB! Hut %d acquired" % self.number)

    def get_occupant_type(self):
        """Describe the occupant: 'ACQUIRED', 'unoccupied', or the unit type."""
        if self.is_acquired:
            return 'ACQUIRED'
        if self.occupant is None:
            return 'unoccupied'
        return self.occupant.unit_type
|
// https://docs.meteor.com/api/mobile-config.html#App-icons
// Maps Meteor icon keys to the required image size ("width x height" in px).
const ICON_RESOURCE_TABLE = {
  app_store: '1024x1024',
  iphone_2x: '120x120',
  iphone_3x: '180x180',
  ipad_2x: '152x152',
  ipad_pro: '167x167',
  ios_settings_2x: '58x58',
  ios_settings_3x: '87x87',
  ios_spotlight_2x: '80x80',
  ios_spotlight_3x: '120x120',
  ios_notification_2x: '40x40',
  ios_notification_3x: '60x60',
  ipad: '76x76',
  ios_settings: '29x29',
  ios_spotlight: '40x40',
  ios_notification: '20x20',
  iphone_legacy: '57x57',
  iphone_legacy_2x: '114x114',
  ipad_spotlight_legacy: '50x50',
  ipad_spotlight_legacy_2x: '100x100',
  ipad_app_legacy: '72x72',
  ipad_app_legacy_2x: '144x144',
  android_mdpi: '48x48',
  android_hdpi: '72x72',
  android_xhdpi: '96x96',
  android_xxhdpi: '144x144',
  android_xxxhdpi: '192x192'
}
// https://docs.meteor.com/api/mobile-config.html#App-launchScreens
// Maps Meteor launch-screen keys to the required image size ("width x height" in px).
const LAUNCH_SCREEN_RESOURCE_TABLE = {
  iphone5: '640x1136',
  iphone6: '750x1334',
  iphone6p_portrait: '1242x2208',
  iphone6p_landscape: '2208x1242',
  iphoneX_portrait: '1125x2436',
  iphoneX_landscape: '2436x1125',
  ipad_portrait_2x: '1536x2048',
  ipad_landscape_2x: '2048x1536',
  iphone: '320x480',
  iphone_2x: '640x960',
  ipad_portrait: '768x1024',
  ipad_landscape: '1024x768',
  android_mdpi_portrait: '320x480',
  android_mdpi_landscape: '480x320',
  android_hdpi_portrait: '480x800',
  android_hdpi_landscape: '800x480',
  android_xhdpi_portrait: '720x1280',
  android_xhdpi_landscape: '1280x720',
  android_xxhdpi_portrait: '960x1600',
  android_xxhdpi_landscape: '1600x960',
  android_xxxhdpi_portrait: '1280x1920',
  android_xxxhdpi_landscape: '1920x1280'
}
module.exports = {
  ICON_RESOURCE_TABLE,
  LAUNCH_SCREEN_RESOURCE_TABLE
}
|