max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
db/migrations/0085_remove_student_job_position.py | matchd-ch/matchd-backend | 1 | 6624451 | # Generated by Django 3.1.5 on 2021-04-06 13:27
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the obsolete Student.job_position field (companion to 0084, which removed the Company side)."""
    dependencies = [
        ('db', '0084_remove_company_job_positions'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='student',
            name='job_position',
        ),
    ]
| # Generated by Django 3.1.5 on 2021-04-06 13:27
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('db', '0084_remove_company_job_positions'),
]
operations = [
migrations.RemoveField(
model_name='student',
name='job_position',
),
]
| en | 0.830319 | # Generated by Django 3.1.5 on 2021-04-06 13:27 | 1.502858 | 2 |
zentral/contrib/monolith/forms.py | VegarM/zentral | 1 | 6624452 | import json
from django import forms
from django.db import IntegrityError, transaction
from django.db.models import F, Max, Q
from django.urls import reverse
from zentral.conf import settings
from zentral.contrib.inventory.models import MetaBusinessUnit, Tag
from zentral.utils.api_views import make_secret
from .attachments import MobileconfigFile, PackageFile
from .exceptions import AttachmentError
from .models import (CacheServer, Catalog, Configuration, Enrollment,
Manifest, ManifestCatalog, ManifestSubManifest,
Printer, PrinterPPD,
PkgInfoName, SubManifest,
SubManifestPkgInfo, SubManifestAttachment)
from .ppd import get_ppd_information
from .releases import DEPNotifyReleases, MunkiReleases
class PkgInfoSearchForm(forms.Form):
    """Search form for pkg infos, filtering on name and (non-archived) catalog."""
    name = forms.CharField(label="Name", required=False,
                           widget=forms.TextInput(attrs={"placeholder": "name"}))
    catalog = forms.ModelChoiceField(queryset=Catalog.objects.filter(archived_at__isnull=True),
                                     required=False)

    def is_initial(self):
        """Return True when no search criterion carries a truthy value."""
        return not any(value for value in self.cleaned_data.values())
class ManifestForm(forms.ModelForm):
    """Create/update form for a Manifest (one manifest per business unit)."""

    class Meta:
        model = Manifest
        fields = ('meta_business_unit',)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Offer only the business units available for API enrollment that are
        # not yet tied to a manifest; when editing, keep the currently linked
        # business unit selectable.
        qs = MetaBusinessUnit.objects.available_for_api_enrollment()
        if self.instance.pk:
            qs = qs.filter(Q(manifest=None) | Q(pk=self.instance.meta_business_unit.id))
        else:
            qs = qs.filter(manifest=None)
        self.fields['meta_business_unit'].queryset = qs
class ManifestSearchForm(forms.Form):
    """Search manifests by (partial, case-insensitive) business unit name."""
    meta_business_unit_name = forms.CharField(label="Business unit name", required=False,
                                              widget=forms.TextInput(attrs={"placeholder": "Business unit name…"}))

    def get_queryset(self):
        """Return the manifests matching the cleaned search criteria."""
        queryset = Manifest.objects.select_related("meta_business_unit").all()
        name = self.cleaned_data.get("meta_business_unit_name")
        if name:
            queryset = queryset.filter(meta_business_unit__name__icontains=name)
        return queryset
class SubManifestSearchForm(forms.Form):
    """Keyword search over sub manifests and their linked objects."""
    keywords = forms.CharField(label="Keywords", required=False,
                               widget=forms.TextInput(attrs={"placeholder": "Keywords…"}))

    def get_queryset(self):
        """Return the sub manifests matching the cleaned keywords."""
        queryset = SubManifest.objects.select_related("meta_business_unit").all()
        keywords = self.cleaned_data.get("keywords")
        if not keywords:
            return queryset
        # Match on the sub manifest itself, its business unit, and its items.
        lookup = (Q(name__icontains=keywords)
                  | Q(description__icontains=keywords)
                  | Q(meta_business_unit__name__icontains=keywords)
                  | Q(submanifestpkginfo__pkg_info_name__name__icontains=keywords)
                  | Q(submanifestattachment__name__icontains=keywords))
        # distinct() because the joins can yield duplicate rows
        return queryset.distinct().filter(lookup)
class SubManifestForm(forms.ModelForm):
    """Create/update form for a SubManifest."""

    class Meta:
        model = SubManifest
        fields = ('meta_business_unit', 'name', 'description')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields['meta_business_unit'].queryset = MetaBusinessUnit.objects.available_for_api_enrollment()

    def clean_meta_business_unit(self):
        """Refuse to restrict the sub manifest when other business units already use it."""
        mbu = self.cleaned_data.get("meta_business_unit")
        if mbu and self.instance.pk:
            # Business units of all the manifests currently including this sub manifest.
            linked_mbu = set()
            for _, manifest in self.instance.manifests_with_tags():
                linked_mbu.add(manifest.meta_business_unit)
            if linked_mbu - {mbu}:
                raise forms.ValidationError(
                    "Cannot restrict this sub manifest to this business unit. "
                    "It is already included in some other business units."
                )
        return mbu
class SubManifestPkgInfoForm(forms.ModelForm):
    """Add a pkg info name to a sub manifest."""

    class Meta:
        model = SubManifestPkgInfo
        fields = ('key', 'condition', 'featured_item', 'pkg_info_name')

    def __init__(self, *args, **kwargs):
        self.sub_manifest = kwargs.pop('sub_manifest')
        super().__init__(*args, **kwargs)
        # Only names with at least one active (non-archived, non-update) pkg info,
        # excluding those already attached to this sub manifest.
        queryset = (PkgInfoName.objects.distinct()
                    .filter(pkginfo__id__isnull=False,
                            pkginfo__archived_at__isnull=True,
                            pkginfo__update_for=None)
                    .exclude(submanifestpkginfo__sub_manifest=self.sub_manifest))
        self.fields['pkg_info_name'].queryset = queryset
class SubManifestAttachmentForm(forms.ModelForm):
    """Add an attachment (configuration profile or package) to a sub manifest."""

    def __init__(self, *args, **kwargs):
        self.sub_manifest = kwargs.pop('sub_manifest')
        super().__init__(*args, **kwargs)

    class Meta:
        model = SubManifestAttachment
        fields = ('key', 'condition', 'featured_item', 'file',)

    def clean_file(self):
        """Validate the upload as a mobileconfig or a package.

        Keeps the parsed wrapper in self.attachment_file for save().
        """
        f = self.cleaned_data["file"]
        if not f:
            raise forms.ValidationError("You need to select a file.")
        error_messages = []
        for file_class in (MobileconfigFile, PackageFile):
            try:
                af = file_class(f)
            except AttachmentError as e:
                error_messages.append(e.message)
            else:
                break
        else:
            # no file class could parse the file: report all the errors
            raise forms.ValidationError(", ".join(error_messages))
        self.attachment_file = af
        return f

    def save(self, *args, **kwargs):
        """Save the attachment with the next version number, retrying on races."""
        sma = super().save(commit=False)
        sma.sub_manifest = self.sub_manifest
        sma.type = self.attachment_file.type
        sma.name = self.attachment_file.name
        sma.identifier = self.attachment_file.identifier
        for i in range(10):  # 10 trials max
            max_version = SubManifestAttachment.objects.filter(
                sub_manifest=self.sub_manifest,
                name=sma.name
            ).aggregate(Max("version"))["version__max"]
            sma.version = (max_version or 0) + 1
            sma.pkg_info = self.attachment_file.make_package_info(sma)
            try:
                with transaction.atomic():
                    sma.save()
            except IntegrityError:
                # Version race with a concurrent upload: recompute the version
                # and retry instead of failing the whole upload. (The previous
                # `raise` here made the retry loop and its `else` clause dead.)
                continue
            else:
                break
        else:
            raise Exception("Could not find valid version #")
        # trash other versions
        for other_version in (SubManifestAttachment.objects.filter(
                sub_manifest=self.sub_manifest,
                name=sma.name
        ).exclude(version=sma.version)):
            other_version.mark_as_trashed()
        return sma
class SubManifestScriptForm(forms.Form):
    """Create or update a Munki 'nopkg' script item attached to a sub manifest."""
    DEFAULT_INSTALL_CHECK_SCRIPT = (
        "#!/bin/bash\n\n"
        "# WARNING: executed at every Munki run!\n\n"
        "exit 0"
    )
    name = forms.CharField(max_length=256, required=True)
    key = forms.ChoiceField(choices=(("managed_installs", "Managed Installs"),
                                     ("managed_uninstalls", "Managed Uninstalls")),
                            required=True)
    description = forms.CharField(required=True, widget=forms.Textarea())
    installcheck_script = forms.CharField(
        label="install check script",
        help_text="This script is executed to determine if an item needs to be installed. "
                  "A return code of 0 means install is needed.",
        required=True,
        initial=DEFAULT_INSTALL_CHECK_SCRIPT,
        widget=forms.Textarea(),
    )
    postinstall_script = forms.CharField(
        label="post install script",
        help_text="The main script.",
        required=True,
        widget=forms.Textarea(),
    )
    uninstall_script = forms.CharField(
        label="uninstall script",
        help_text="Script that performs an uninstall.",
        required=False,
        widget=forms.Textarea(),
    )

    def __init__(self, *args, **kwargs):
        # script: existing SubManifestAttachment to update, or None to create one
        self.sub_manifest = kwargs.pop('sub_manifest')
        self.script = kwargs.pop('script', None)
        super().__init__(*args, **kwargs)

    def clean(self):
        super().clean()
        key = self.cleaned_data["key"]
        # a managed uninstall without an uninstall script would be a no-op
        if key == "managed_uninstalls" and not self.cleaned_data["uninstall_script"]:
            self.add_error("uninstall_script", "Can't be empty if managed uninstalls")
        return self.cleaned_data

    def save(self, *args, **kwargs):
        """Create or version-bump the SubManifestAttachment carrying the script pkg info."""
        name = self.cleaned_data["name"]
        key = self.cleaned_data["key"]
        pkg_info = {
            'display_name': name,
            'description': self.cleaned_data["description"],
            'autoremove': False,
            'unattended_install': True,
            'installer_type': 'nopkg',
            'uninstallable': True,
            'unattended_uninstall': True,
            'minimum_munki_version': '2.2',
            'minimum_os_version': '10.6.0',  # TODO: HARDCODED !!!
            'installcheck_script': self.cleaned_data["installcheck_script"],
            'postinstall_script': self.cleaned_data["postinstall_script"],
        }
        uninstall_script = self.cleaned_data["uninstall_script"]
        if uninstall_script:
            pkg_info["uninstall_method"] = "uninstall_script"
            pkg_info["uninstall_script"] = uninstall_script
        if not self.script:
            self.script = SubManifestAttachment(
                sub_manifest=self.sub_manifest,
                type="script",
                key=key,
                name=name,
                pkg_info=pkg_info,
                version=1,
            )
            self.script.save()
        else:
            self.script.name = name
            self.script.key = key
            # DB-side atomic increment; refresh_from_db() below resolves the
            # F() expression to the new integer value.
            self.script.version = F("version") + 1
            self.script.pkg_info = pkg_info
            self.script.save()
            self.script.refresh_from_db()
        # Second save on purpose: the pkg info version string depends on the
        # version number that was just persisted.
        self.script.pkg_info["version"] = "{}.0".format(self.script.version)
        self.script.save()
        return self.script
class AddManifestCatalogForm(forms.Form):
    """Attach a catalog (with optional scoping tags) to a manifest."""
    catalog = forms.ModelChoiceField(queryset=Catalog.objects.filter(archived_at__isnull=True))
    tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.none(), required=False)

    def __init__(self, *args, **kwargs):
        self.manifest = kwargs.pop('manifest')
        super().__init__(*args, **kwargs)
        # only the catalogs not already attached to the manifest
        field = self.fields['catalog']
        field.queryset = field.queryset.exclude(id__in=[c.id for c in self.manifest.catalogs()])
        # only the tags visible to the manifest's business unit
        field = self.fields['tags']
        field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)

    def save(self):
        mc = ManifestCatalog(manifest=self.manifest,
                             catalog=self.cleaned_data['catalog'])
        mc.save()
        # NOTE(review): direct m2m assignment only works on Django < 2.0;
        # newer versions require mc.tags.set(...) — confirm target Django version.
        mc.tags = self.cleaned_data['tags']
        self.manifest.save()  # updated_at
        return mc
class DeleteManifestCatalogForm(forms.Form):
    """Detach a catalog from a manifest."""
    catalog = forms.ModelChoiceField(queryset=Catalog.objects.all(),
                                     widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        self.manifest = kwargs.pop('manifest')
        super().__init__(*args, **kwargs)
        # restrict choices to the catalogs currently attached to the manifest
        attached_ids = [mc.catalog_id for mc in self.manifest.manifestcatalog_set.all()]
        self.fields['catalog'].queryset = self.fields['catalog'].queryset.filter(id__in=attached_ids)

    def save(self):
        deleted_count, _ = ManifestCatalog.objects.filter(
            manifest=self.manifest,
            catalog=self.cleaned_data['catalog']
        ).delete()
        if deleted_count:
            self.manifest.save()  # updated_at
class AddManifestEnrollmentPackageForm(forms.Form):
    """Pick the tags scoping an enrollment package in a manifest."""
    tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.none(), required=False)

    def __init__(self, *args, **kwargs):
        self.manifest = kwargs.pop('manifest')
        super().__init__(*args, **kwargs)
        # only the tags visible to the manifest's business unit
        tags_field = self.fields['tags']
        tags_field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)
class ManifestPrinterForm(forms.ModelForm):
    """Create/update a printer scoped to a manifest (with tags)."""
    def __init__(self, *args, **kwargs):
        self.manifest = kwargs.pop('manifest')
        super().__init__(*args, **kwargs)
        # only the tags visible to the manifest's business unit
        field = self.fields['tags']
        field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)

    class Meta:
        model = Printer
        fields = ["tags",
                  "name", "location",
                  "scheme", "address",
                  "shared", "error_policy", "ppd",
                  "required_package"]
class AddManifestSubManifestForm(forms.Form):
    """Attach a sub manifest (with optional scoping tags) to a manifest."""
    sub_manifest = forms.ModelChoiceField(queryset=SubManifest.objects.all())
    tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.none(), required=False)

    def __init__(self, *args, **kwargs):
        self.manifest = kwargs.pop('manifest')
        super().__init__(*args, **kwargs)
        # candidates: unrestricted sub manifests or those of the same business
        # unit, minus the ones already included in the manifest
        field = self.fields['sub_manifest']
        field.queryset = (field.queryset.filter(Q(meta_business_unit__isnull=True)
                                                | Q(meta_business_unit=self.manifest.meta_business_unit))
                          .exclude(id__in=[sm.id for sm in self.manifest.sub_manifests()]))
        # only the tags visible to the manifest's business unit
        field = self.fields['tags']
        field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)

    def save(self):
        msn = ManifestSubManifest(manifest=self.manifest,
                                  sub_manifest=self.cleaned_data['sub_manifest'])
        msn.save()
        # NOTE(review): direct m2m assignment only works on Django < 2.0;
        # newer versions require msn.tags.set(...) — confirm target Django version.
        msn.tags = self.cleaned_data['tags']
        self.manifest.save()  # updated_at
        return msn
class DeleteManifestSubManifestForm(forms.Form):
    """Detach a sub manifest from a manifest."""
    sub_manifest = forms.ModelChoiceField(queryset=SubManifest.objects.all(),
                                          widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        self.manifest = kwargs.pop('manifest')
        super().__init__(*args, **kwargs)
        # restrict choices to the sub manifests currently attached to the manifest
        attached_ids = [msm.sub_manifest_id for msm in self.manifest.manifestsubmanifest_set.all()]
        self.fields['sub_manifest'].queryset = self.fields['sub_manifest'].queryset.filter(id__in=attached_ids)

    def save(self):
        deleted_count, _ = ManifestSubManifest.objects.filter(
            manifest=self.manifest,
            sub_manifest=self.cleaned_data['sub_manifest']
        ).delete()
        if deleted_count:
            self.manifest.save()  # updated_at
class CacheServerBaseForm(forms.Form):
    """Common fields shared by the cache server forms."""
    name = forms.CharField(max_length=256)
    base_url = forms.URLField(label="base URL")
class CacheServersPostForm(CacheServerBaseForm):
    """Form backing the cache server registration endpoint."""

    def save(self, manifest, public_ip_address):
        """Upsert the cache server keyed on (name, manifest)."""
        cleaned = self.cleaned_data
        defaults = {"public_ip_address": public_ip_address,
                    "base_url": cleaned["base_url"]}
        cache_server, _created = CacheServer.objects.update_or_create(
            name=cleaned["name"],
            manifest=manifest,
            defaults=defaults
        )
        return cache_server
class ConfigureCacheServerForm(CacheServerBaseForm):
    """Build the curl command an admin can run to register a cache server."""

    def build_curl_command(self, manifest):
        """Return a shell-safe curl command posting this form's cleaned data.

        The API secret is derived from the first API enrollment business unit
        of the manifest's meta business unit.
        """
        import shlex  # local import: only used to quote the shell arguments
        business_unit = manifest.meta_business_unit.api_enrollment_business_units()[0]
        api_secret = make_secret('zentral.contrib.monolith', business_unit)
        json_payload = json.dumps(self.cleaned_data)
        tls_hostname = settings["api"]["tls_hostname"]
        path = reverse("monolith:cache_servers")
        # shlex.quote protects against quotes or spaces in the payload/secret
        # (fixes the previous TODO about a ' in the json payload).
        return ("curl -XPOST "
                "-H {header} "
                "-d {payload} "
                "{tls_hostname}{path}").format(
                    header=shlex.quote("Zentral-API-Secret: {}".format(api_secret)),
                    payload=shlex.quote(json_payload),
                    tls_hostname=tls_hostname,
                    path=path)
class UploadPPDForm(forms.ModelForm):
    """Upload and register a printer PPD file."""

    class Meta:
        model = PrinterPPD
        fields = ['file']

    def clean_file(self):
        f = self.cleaned_data["file"]
        try:
            # parsed attributes are reused in save() to create the PrinterPPD
            self.cleaned_data["ppd_info"] = get_ppd_information(f)
        except Exception:
            raise forms.ValidationError("Could not parse PPD file %s." % f.name)
        return f

    def save(self, *args, **kwargs):
        # NOTE(review): bypasses ModelForm.save() — *args/**kwargs (e.g. commit)
        # are ignored and the object is always created from the parsed PPD info.
        ppd = PrinterPPD.objects.create(**self.cleaned_data["ppd_info"])
        uploaded_file = self.cleaned_data["file"]
        ppd.file.save(uploaded_file.name, uploaded_file)
        return ppd
class ConfigurationForm(forms.ModelForm):
    """Create/update a monolith Configuration, with a DEPNotify release picker."""
    depnotify_release = forms.ChoiceField(
        label="DEPNotify release",
        choices=[],
        initial="",
        help_text="Choose a DEPNotify release to be installed",
        required=False
    )

    class Meta:
        model = Configuration
        fields = "__all__"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # DEPNotify release: populate the choices from the available releases
        choices = [("", "---")]
        depnotify_releases = DEPNotifyReleases()
        for filename, version, created_at, download_url, is_local in depnotify_releases.get_versions():
            choices.append((filename, filename))
        self.fields["depnotify_release"].choices = choices

    def clean(self):
        super().clean()
        # the DEPNotify commands and the EULA are only usable with a release
        if self.cleaned_data.get("depnotify_commands") and not self.cleaned_data.get("depnotify_release"):
            self.add_error("depnotify_release",
                           "You need to pick a DEPNotify release to use the commands.")
        if self.cleaned_data.get("eula") and not self.cleaned_data.get("depnotify_release"):
            self.add_error("depnotify_release",
                           "You need to pick a DEPNotify release to display the EULA.")

    def clean_depnotify_commands(self):
        # normalize line endings
        depnotify_commands = self.cleaned_data.get("depnotify_commands")
        if depnotify_commands:
            depnotify_commands = depnotify_commands.strip().replace("\r\n", "\n")
        return depnotify_commands

    def clean_setup_script(self):
        # normalize line endings
        setup_script = self.cleaned_data.get("setup_script")
        if setup_script:
            setup_script = setup_script.strip().replace("\r\n", "\n")
        return setup_script

    def clean_eula(self):
        # normalize line endings
        eula = self.cleaned_data.get("eula")
        if eula:
            eula = eula.strip().replace("\r\n", "\n")
        return eula
class EnrollmentForm(forms.ModelForm):
    """Create/update a monolith Enrollment, with a munki release picker."""
    munki_release = forms.ChoiceField(
        label="Munki release",
        choices=[],
        initial="",
        help_text="Choose a munki release to be installed with the enrollment package.",
        required=False
    )

    class Meta:
        model = Enrollment
        fields = "__all__"

    def __init__(self, *args, **kwargs):
        # meta_business_unit: when set, the manifest is fixed (see clean_manifest)
        # standalone: when true, the empty "Do not include munki" choice is hidden
        self.meta_business_unit = kwargs.pop("meta_business_unit", None)
        self.standalone = kwargs.pop("standalone", False)
        super().__init__(*args, **kwargs)
        # hide manifest dropdown if manifest/mbu is fixed
        # the value will be set in the clean_manifest method
        # TODO: kind of a hack
        if self.meta_business_unit:
            self.fields["manifest"].widget = forms.HiddenInput()
            self.fields["manifest"].required = False
            self.fields["taxonomies"].queryset = self.fields["taxonomies"].queryset.filter(
                meta_business_unit=self.meta_business_unit
            )
        # munki release: populate the choices from the available releases
        choices = []
        if not self.standalone:
            choices.append(("", "Do not include munki"))
        munki_releases = MunkiReleases()
        for filename, version, created_at, download_url, is_local in munki_releases.get_versions():
            choices.append((filename, filename))
        self.fields["munki_release"].choices = choices

    def clean_manifest(self):
        # when the business unit is fixed, force its manifest
        if self.meta_business_unit:
            return self.meta_business_unit.manifest
        else:
            return self.cleaned_data.get("manifest")
| import json
from django import forms
from django.db import IntegrityError, transaction
from django.db.models import F, Max, Q
from django.urls import reverse
from zentral.conf import settings
from zentral.contrib.inventory.models import MetaBusinessUnit, Tag
from zentral.utils.api_views import make_secret
from .attachments import MobileconfigFile, PackageFile
from .exceptions import AttachmentError
from .models import (CacheServer, Catalog, Configuration, Enrollment,
Manifest, ManifestCatalog, ManifestSubManifest,
Printer, PrinterPPD,
PkgInfoName, SubManifest,
SubManifestPkgInfo, SubManifestAttachment)
from .ppd import get_ppd_information
from .releases import DEPNotifyReleases, MunkiReleases
class PkgInfoSearchForm(forms.Form):
name = forms.CharField(label="Name", required=False,
widget=forms.TextInput(attrs={"placeholder": "name"}))
catalog = forms.ModelChoiceField(queryset=Catalog.objects.filter(archived_at__isnull=True),
required=False)
def is_initial(self):
return not {k: v for k, v in self.cleaned_data.items() if v}
class ManifestForm(forms.ModelForm):
class Meta:
model = Manifest
fields = ('meta_business_unit',)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
mbu_qs = MetaBusinessUnit.objects.available_for_api_enrollment()
if self.instance.pk:
mbu_qs = mbu_qs.filter(Q(manifest=None) | Q(pk=self.instance.meta_business_unit.id))
else:
mbu_qs = mbu_qs.filter(manifest=None)
self.fields['meta_business_unit'].queryset = mbu_qs
class ManifestSearchForm(forms.Form):
meta_business_unit_name = forms.CharField(label="Business unit name", required=False,
widget=forms.TextInput(attrs={"placeholder": "Business unit name…"}))
def get_queryset(self):
qs = Manifest.objects.select_related("meta_business_unit").all()
meta_business_unit_name = self.cleaned_data.get("meta_business_unit_name")
if meta_business_unit_name:
qs = qs.filter(meta_business_unit__name__icontains=meta_business_unit_name)
return qs
class SubManifestSearchForm(forms.Form):
keywords = forms.CharField(label="Keywords", required=False,
widget=forms.TextInput(attrs={"placeholder": "Keywords…"}))
def get_queryset(self):
qs = SubManifest.objects.select_related("meta_business_unit").all()
keywords = self.cleaned_data.get("keywords")
if keywords:
qs = qs.distinct().filter(Q(name__icontains=keywords)
| Q(description__icontains=keywords)
| Q(meta_business_unit__name__icontains=keywords)
| Q(submanifestpkginfo__pkg_info_name__name__icontains=keywords)
| Q(submanifestattachment__name__icontains=keywords))
return qs
class SubManifestForm(forms.ModelForm):
class Meta:
model = SubManifest
fields = ('meta_business_unit', 'name', 'description')
def clean_meta_business_unit(self):
mbu = self.cleaned_data.get("meta_business_unit")
if mbu and self.instance.pk:
linked_mbu = {manifest.meta_business_unit
for _, manifest in self.instance.manifests_with_tags()}
if linked_mbu - {mbu}:
raise forms.ValidationError(
"Cannot restrict this sub manifest to this business unit. "
"It is already included in some other business units."
)
return mbu
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['meta_business_unit'].queryset = MetaBusinessUnit.objects.available_for_api_enrollment()
class SubManifestPkgInfoForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.sub_manifest = kwargs.pop('sub_manifest')
super().__init__(*args, **kwargs)
pin_qs = PkgInfoName.objects.distinct().filter(pkginfo__id__isnull=False,
pkginfo__archived_at__isnull=True,
pkginfo__update_for=None).exclude(
submanifestpkginfo__sub_manifest=self.sub_manifest)
self.fields['pkg_info_name'].queryset = pin_qs
class Meta:
model = SubManifestPkgInfo
fields = ('key', 'condition', 'featured_item', 'pkg_info_name')
class SubManifestAttachmentForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.sub_manifest = kwargs.pop('sub_manifest')
super().__init__(*args, **kwargs)
class Meta:
model = SubManifestAttachment
fields = ('key', 'condition', 'featured_item', 'file',)
def clean_file(self):
f = self.cleaned_data["file"]
if not f:
raise forms.ValidationError("You need to select a file.")
error_messages = []
for file_class in (MobileconfigFile, PackageFile):
try:
af = file_class(f)
except AttachmentError as e:
error_messages.append(e.message)
else:
break
else:
raise forms.ValidationError(", ".join(error_messages))
self.attachment_file = af
return f
def save(self, *args, **kwargs):
sma = super().save(commit=False)
sma.sub_manifest = self.sub_manifest
sma.type = self.attachment_file.type
sma.name = self.attachment_file.name
sma.identifier = self.attachment_file.identifier
for i in range(10): # 10 trials max
max_version = SubManifestAttachment.objects.filter(
sub_manifest=self.sub_manifest,
name=sma.name
).aggregate(Max("version"))["version__max"]
sma.version = (max_version or 0) + 1
sma.pkg_info = self.attachment_file.make_package_info(sma)
try:
with transaction.atomic():
sma.save()
except IntegrityError:
raise
else:
break
else:
raise Exception("Could not find valid version #")
# trash other versions
for sma_with_different_version in (SubManifestAttachment.objects.filter(
sub_manifest=self.sub_manifest,
name=sma.name
).exclude(version=sma.version)):
sma_with_different_version.mark_as_trashed()
return sma
class SubManifestScriptForm(forms.Form):
DEFAULT_INSTALL_CHECK_SCRIPT = (
"#!/bin/bash\n\n"
"# WARNING: executed at every Munki run!\n\n"
"exit 0"
)
name = forms.CharField(max_length=256, required=True)
key = forms.ChoiceField(choices=(("managed_installs", "Managed Installs"),
("managed_uninstalls", "Managed Uninstalls")),
required=True)
description = forms.CharField(required=True, widget=forms.Textarea())
installcheck_script = forms.CharField(
label="install check script",
help_text="This script is executed to determine if an item needs to be installed. "
"A return code of 0 means install is needed.",
required=True,
initial=DEFAULT_INSTALL_CHECK_SCRIPT,
widget=forms.Textarea(),
)
postinstall_script = forms.CharField(
label="post install script",
help_text="The main script.",
required=True,
widget=forms.Textarea(),
)
uninstall_script = forms.CharField(
label="uninstall script",
help_text="Script that performs an uninstall.",
required=False,
widget=forms.Textarea(),
)
def __init__(self, *args, **kwargs):
self.sub_manifest = kwargs.pop('sub_manifest')
self.script = kwargs.pop('script', None)
super().__init__(*args, **kwargs)
def clean(self):
super().clean()
key = self.cleaned_data["key"]
if key == "managed_uninstalls" and not self.cleaned_data["uninstall_script"]:
self.add_error("uninstall_script", "Can't be empty if managed uninstalls")
return self.cleaned_data
def save(self, *args, **kwargs):
name = self.cleaned_data["name"]
key = self.cleaned_data["key"]
pkg_info = {
'display_name': name,
'description': self.cleaned_data["description"],
'autoremove': False,
'unattended_install': True,
'installer_type': 'nopkg',
'uninstallable': True,
'unattended_uninstall': True,
'minimum_munki_version': '2.2',
'minimum_os_version': '10.6.0', # TODO: HARDCODED !!!
'installcheck_script': self.cleaned_data["installcheck_script"],
'postinstall_script': self.cleaned_data["postinstall_script"],
}
uninstall_script = self.cleaned_data["uninstall_script"]
if uninstall_script:
pkg_info["uninstall_method"] = "uninstall_script"
pkg_info["uninstall_script"] = uninstall_script
if not self.script:
self.script = SubManifestAttachment(
sub_manifest=self.sub_manifest,
type="script",
key=key,
name=name,
pkg_info=pkg_info,
version=1,
)
self.script.save()
else:
self.script.name = name
self.script.key = key
self.script.version = F("version") + 1
self.script.pkg_info = pkg_info
self.script.save()
self.script.refresh_from_db()
self.script.pkg_info["version"] = "{}.0".format(self.script.version)
self.script.save()
return self.script
class AddManifestCatalogForm(forms.Form):
catalog = forms.ModelChoiceField(queryset=Catalog.objects.filter(archived_at__isnull=True))
tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.none(), required=False)
def __init__(self, *args, **kwargs):
self.manifest = kwargs.pop('manifest')
super().__init__(*args, **kwargs)
field = self.fields['catalog']
field.queryset = field.queryset.exclude(id__in=[c.id for c in self.manifest.catalogs()])
field = self.fields['tags']
field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)
def save(self):
mc = ManifestCatalog(manifest=self.manifest,
catalog=self.cleaned_data['catalog'])
mc.save()
mc.tags = self.cleaned_data['tags']
self.manifest.save() # updated_at
return mc
class DeleteManifestCatalogForm(forms.Form):
catalog = forms.ModelChoiceField(queryset=Catalog.objects.all(),
widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.manifest = kwargs.pop('manifest')
super().__init__(*args, **kwargs)
field = self.fields['catalog']
field.queryset = field.queryset.filter(id__in=[mc.catalog_id
for mc in self.manifest.manifestcatalog_set.all()])
def save(self):
number_deleted, _ = ManifestCatalog.objects.filter(manifest=self.manifest,
catalog=self.cleaned_data['catalog']).delete()
if number_deleted:
self.manifest.save() # updated_at
class AddManifestEnrollmentPackageForm(forms.Form):
tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.none(), required=False)
def __init__(self, *args, **kwargs):
self.manifest = kwargs.pop('manifest')
super().__init__(*args, **kwargs)
field = self.fields['tags']
field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)
class ManifestPrinterForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.manifest = kwargs.pop('manifest')
super().__init__(*args, **kwargs)
field = self.fields['tags']
field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)
class Meta:
model = Printer
fields = ["tags",
"name", "location",
"scheme", "address",
"shared", "error_policy", "ppd",
"required_package"]
class AddManifestSubManifestForm(forms.Form):
sub_manifest = forms.ModelChoiceField(queryset=SubManifest.objects.all())
tags = forms.ModelMultipleChoiceField(queryset=Tag.objects.none(), required=False)
def __init__(self, *args, **kwargs):
self.manifest = kwargs.pop('manifest')
super().__init__(*args, **kwargs)
field = self.fields['sub_manifest']
field.queryset = (field.queryset.filter(Q(meta_business_unit__isnull=True)
| Q(meta_business_unit=self.manifest.meta_business_unit))
.exclude(id__in=[sm.id for sm in self.manifest.sub_manifests()]))
field = self.fields['tags']
field.queryset = Tag.objects.available_for_meta_business_unit(self.manifest.meta_business_unit)
def save(self):
msn = ManifestSubManifest(manifest=self.manifest,
sub_manifest=self.cleaned_data['sub_manifest'])
msn.save()
msn.tags = self.cleaned_data['tags']
self.manifest.save() # updated_at
return msn
class DeleteManifestSubManifestForm(forms.Form):
sub_manifest = forms.ModelChoiceField(queryset=SubManifest.objects.all(),
widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.manifest = kwargs.pop('manifest')
super().__init__(*args, **kwargs)
field = self.fields['sub_manifest']
field.queryset = field.queryset.filter(id__in=[msm.sub_manifest_id
for msm in self.manifest.manifestsubmanifest_set.all()])
def save(self):
number_deleted, _ = ManifestSubManifest.objects.filter(manifest=self.manifest,
sub_manifest=self.cleaned_data['sub_manifest']).delete()
if number_deleted:
self.manifest.save() # updated_at
class CacheServerBaseForm(forms.Form):
name = forms.CharField(max_length=256)
base_url = forms.URLField(label="base URL")
class CacheServersPostForm(CacheServerBaseForm):
def save(self, manifest, public_ip_address):
cd = self.cleaned_data
cache_server, _ = CacheServer.objects.update_or_create(
name=cd["name"],
manifest=manifest,
defaults={"public_ip_address": public_ip_address,
"base_url": cd["base_url"]}
)
return cache_server
class ConfigureCacheServerForm(CacheServerBaseForm):
def build_curl_command(self, manifest):
business_unit = manifest.meta_business_unit.api_enrollment_business_units()[0]
api_secret = make_secret('zentral.contrib.monolith', business_unit)
json_payload = json.dumps(self.cleaned_data)
tls_hostname = settings["api"]["tls_hostname"]
path = reverse("monolith:cache_servers")
# TODO: what if there is a ' in the json payload ?
return ("curl -XPOST "
"-H 'Zentral-API-Secret: {api_secret}' "
"-d '{json_payload}' "
"{tls_hostname}{path}").format(api_secret=api_secret,
json_payload=json_payload,
tls_hostname=tls_hostname,
path=path)
class UploadPPDForm(forms.ModelForm):
class Meta:
model = PrinterPPD
fields = ['file']
def clean_file(self):
f = self.cleaned_data["file"]
try:
self.cleaned_data["ppd_info"] = get_ppd_information(f)
except Exception:
raise forms.ValidationError("Could not parse PPD file %s." % f.name)
return f
def save(self, *args, **kwargs):
ppd = PrinterPPD.objects.create(**self.cleaned_data["ppd_info"])
uploaded_file = self.cleaned_data["file"]
ppd.file.save(uploaded_file.name, uploaded_file)
return ppd
class ConfigurationForm(forms.ModelForm):
depnotify_release = forms.ChoiceField(
label="DEPNotify release",
choices=[],
initial="",
help_text="Choose a DEPNotify release to be installed",
required=False
)
class Meta:
model = Configuration
fields = "__all__"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# DEPNotifiy release
choices = [("", "---")]
depnotify_releases = DEPNotifyReleases()
for filename, version, created_at, download_url, is_local in depnotify_releases.get_versions():
choices.append((filename, filename))
self.fields["depnotify_release"].choices = choices
def clean(self):
super().clean()
if self.cleaned_data.get("depnotify_commands") and not self.cleaned_data.get("depnotify_release"):
self.add_error("depnotify_release",
"You need to pick a DEPNotify release to use the commands.")
if self.cleaned_data.get("eula") and not self.cleaned_data.get("depnotify_release"):
self.add_error("depnotify_release",
"You need to pick a DEPNotify release to display the EULA.")
def clean_depnotify_commands(self):
depnotify_commands = self.cleaned_data.get("depnotify_commands")
if depnotify_commands:
depnotify_commands = depnotify_commands.strip().replace("\r\n", "\n")
return depnotify_commands
def clean_setup_script(self):
setup_script = self.cleaned_data.get("setup_script")
if setup_script:
setup_script = setup_script.strip().replace("\r\n", "\n")
return setup_script
def clean_eula(self):
eula = self.cleaned_data.get("eula")
if eula:
eula = eula.strip().replace("\r\n", "\n")
return eula
class EnrollmentForm(forms.ModelForm):
    """ModelForm for Monolith enrollments, with an extra munki release picker."""

    # Virtual field: which munki release to bundle with the enrollment package.
    munki_release = forms.ChoiceField(
        label="Munki release",
        choices=[],
        initial="",
        help_text="Choose a munki release to be installed with the enrollment package.",
        required=False
    )

    class Meta:
        model = Enrollment
        fields = "__all__"

    def __init__(self, *args, **kwargs):
        # meta_business_unit: when given, the manifest is implied by the MBU
        # and the manifest field is hidden (value restored in clean_manifest).
        # standalone: when True, munki must be included, so the
        # "Do not include munki" choice is omitted.
        self.meta_business_unit = kwargs.pop("meta_business_unit", None)
        self.standalone = kwargs.pop("standalone", False)
        super().__init__(*args, **kwargs)
        # hide manifest dropdown if manifest/mbu is fixed
        # the value will be set in the clean_manifest method
        # TODO: kind of a hack
        if self.meta_business_unit:
            self.fields["manifest"].widget = forms.HiddenInput()
            self.fields["manifest"].required = False
            # restrict the selectable taxonomies to the business unit's own
            self.fields["taxonomies"].queryset = self.fields["taxonomies"].queryset.filter(
                meta_business_unit=self.meta_business_unit
            )
        # munki release
        choices = []
        if not self.standalone:
            choices.append(("", "Do not include munki"))
        munki_releases = MunkiReleases()
        for filename, version, created_at, download_url, is_local in munki_releases.get_versions():
            choices.append((filename, filename))
        self.fields["munki_release"].choices = choices

    def clean_manifest(self):
        # when the form is bound to a meta business unit, ignore the (hidden)
        # posted value and use the MBU's manifest instead
        if self.meta_business_unit:
            return self.meta_business_unit.manifest
        else:
            return self.cleaned_data.get("manifest")
MedTARSQI/src/main/resources/ttk/docmodel/metadata_parser.py | CDCgov/DCPC | 6 | 6624453 | """Metadata Parsers.
This module contains metadata parsers, that is, parsers that pull out the
metadata and add it to a TarsqiDocument. The only requirements on each parser is
that it defines an __init__() method that takes a dictionary of options and a
parse() method that takes a TarsqiDocument instance.
Current parsers only deal with the DCT.
"""
import re, time, os, sqlite3
from docmodel.document import TarsqiDocument
import utilities.logger as logger
from library.main import LIBRARY
class MetadataParser:
    """This is the minimal metadata parser that is used as a default. It selects
    the DCT from all available sources and picks one of them, or it uses today's
    date if no DCT's are available. Subclasses should override the get_dct()
    method to define specific DCT extraction methods for the document source."""

    def __init__(self, options):
        """At the moment, initialization only uses the --dct option if it is
        present, but this could change. Note that the TarsqiDocument does not
        exist yet when the MetadataParser is initialized."""
        self.options = options
        self.tarsqidoc = None  # added in by the parse() method

    def parse(self, tarsqidoc):
        """Adds metadata to the TarsqiDocument. The only thing it adds to the
        metadata dictionary is the DCT, which is set to today."""
        self.tarsqidoc = tarsqidoc
        self.tarsqidoc.metadata['dct'] = self.get_dct()
        self._moderate_dct_vals()

    def _moderate_dct_vals(self):
        """There are five places where a DCT can be expressed: the DCT handed in
        with the --dct option or defined in the config file, the DCT from the
        metadata on the TarsqiDocument, the DCT from the metadata on the
        SourceDoc, DCTs from the TagRepository on the TarsqiDocument and DCTs
        from the TagRepository on the SourceDoc. The first three are single
        values or None, the other two are lists of any length. The order of
        these five is significant in that a DCT earlier on the list if given
        precedence over a DCT later on the list. Collects all the DCT values and
        picks the very first one, or today's date if no DCTs are available. Logs
        a warning if the DCTs do not all have the same value."""
        dcts = []
        # candidate sources, in decreasing order of precedence
        for dct_val in [self.tarsqidoc.options.dct,
                        self.tarsqidoc.metadata.get('dct'),
                        self.tarsqidoc.sourcedoc.metadata.get('dct'),
                        _get_dct_values(self.tarsqidoc.sourcedoc.tags),
                        _get_dct_values(self.tarsqidoc.tags)]:
            if dct_val is None:
                # the case where there is no DCT in the options or metadata
                continue
            elif isinstance(dct_val, list):
                # TagRepository sources yield a (possibly empty) list of DCTs
                dcts.extend(dct_val)
            else:
                dcts.append(dct_val)
        if len(set(dcts)) > 1:
            logger.warn("WARNING: more than one DCT value available")
        # first element has the highest precedence; default to today's date
        dct = dcts[0] if dcts else _get_today()
        self.tarsqidoc.metadata['dct'] = dct

    def get_dct(self):
        # default: no DCT available from the document; subclasses override this
        return None

    def _get_source(self):
        """A convenience method to lift the SourceDoc out of the tarsqi
        instance."""
        return self.tarsqidoc.sourcedoc

    def _get_tag_content(self, tagname):
        """Return the text content of the first tag with name tagname, return
        None if there is no such tag."""
        try:
            tag = self._get_source().tags.find_tags(tagname)[0]
            content = self._get_source().text[tag.begin:tag.end].strip()
            return content
        except IndexError:
            # find_tags returned an empty list: the tag is absent
            logger.warn("Cannot get the %s tag in this document" % tagname)
            return None
class MetadataParserTTK(MetadataParser):
    """Metadata parser for the ttk format; currently identical to the
    default parser and kept as an extension point."""
class MetadataParserText(MetadataParser):
    """Metadata parser for the plain text format; currently identical to the
    default parser and kept as an extension point."""
class MetadataParserTimebank(MetadataParser):
    """The parser for Timebank documents. All it does is to overwrite the
    get_dct() method."""

    def get_dct(self):
        """Extracts the document creation time, and returns it as a string of
        the form YYYYMMDD. Depending on the source, the DCT can be found in one
        of the following tags: DOCNO, DATE_TIME, PUBDATE or FILEID."""
        result = self._get_doc_source()
        if result is None:
            # dct defaults to today if we cannot find the DOCNO tag in the
            # document
            return _get_today()
        source_identifier, content = result
        if source_identifier in ('ABC', 'CNN', 'PRI', 'VOA'):
            # chars 3-10 of the DOCNO hold the date -- presumably YYYYMMDD;
            # verify against the corpus
            return content[3:11]
        elif source_identifier == 'AP':
            dct = self._parse_tag_content("(?:AP-NR-)?(\d+)-(\d+)-(\d+)",
                                          'FILEID')
            # the DCT format is YYYYMMDD or YYMMDD
            return dct if len(dct) == 8 else '19' + dct
        elif source_identifier in ('APW', 'NYT'):
            # DATE_TIME content is a month/day/year date
            return self._parse_tag_content("(\d+)/(\d+)/(\d+)", 'DATE_TIME')
        elif source_identifier == 'SJMN':
            # two digit year in PUBDATE; all these documents are from the 1900s
            pubdate_content = self._get_tag_content('PUBDATE')
            return '19' + pubdate_content
        elif source_identifier == 'WSJ':
            return '19' + content[3:9]
        elif source_identifier in ('ea', 'ed'):
            return '19' + content[2:8]

    def _get_doc_source(self):
        """Return the name of the content provider as well as the content of the
        DOCNO tag that has that information."""
        content = self._get_tag_content('DOCNO')
        content = str(content)  # in case the above returned None
        # note: 'APW' is tested before 'AP' since 'AP' is a prefix of 'APW'
        for source_identifier in ('ABC', 'APW', 'AP', 'CNN', 'NYT', 'PRI',
                                  'SJMN', 'VOA', 'WSJ', 'ea', 'ed'):
            if content.startswith(source_identifier):
                return (source_identifier, content)
        logger.warn("Could not determine document source from DOCNO tag")
        return None

    def _parse_tag_content(self, regexpr, tagname):
        """Return the DCT part of the tag content of tagname, requires a regular
        expression with month, day and year groups as one of the arguments."""
        content_string = self._get_tag_content(tagname)
        result = re.compile(regexpr).match(content_string)
        if result:
            (month, day, year) = result.groups()
            # reorder the matched month/day/year groups into YYYYMMDD order
            return "%s%s%s" % (year, month, day)
        else:
            logger.warn("Could not get date from %s tag" % tagname)
            return _get_today()
class MetadataParserATEE(MetadataParser):
    """Parser for ATEE documents."""

    def get_dct(self):
        """Every ATEE document carries a DATE tag whose "value" attribute
        holds the DCT; return that attribute."""
        tag = self.tarsqidoc.sourcedoc.tags.find_tag('DATE')
        return tag.attrs['value']
class MetadataParserRTE3(MetadataParser):
    """Parser for RTE3 documents; behaves exactly like the default parser."""
class MetadataParserDB(MetadataParser):
    """A minimal example parser for cases where the DCT is retrieved from a
    database. It is identical to MetadataParser except for how it gets the
    DCT. This is done by lookup in a database. This here is the simplest
    possible case, and it is quite inefficient. It assumes there is an sqlite
    database at 'TTK_ROOT/data/in/va/dct.sqlite' which was created as
    follows:

    $ sqlite3 dct.sqlite
    sqlite> create table dct (filename TEXT, dct TEXT)
    sqlite> insert into dct values ("test.xml", "1999-12-31");

    The get_dct() method uses this database and the location of the database is
    specified in the config.txt file. The first use case for this were VA
    documents where the DCT was stored externally. To see this in action run

    $ python tarsqi.py --source=db data/in/va/test.xml out.xml
    """

    def get_dct(self):
        """Look up the DCT for the source file name in the sqlite database
        configured with the 'dct-database' option. Returns None when the file
        has no entry (the caller then falls back to today's date)."""
        fname = os.path.basename(self._get_source().filename)
        db_location = self.options.getopt('dct-database')
        db_connection = sqlite3.connect(db_location)
        try:
            db_cursor = db_connection.cursor()
            db_cursor.execute('SELECT dct FROM dct WHERE filename=?', (fname,))
            row = db_cursor.fetchone()
            # guard against a missing entry instead of crashing on None[0]
            return row[0] if row is not None else None
        finally:
            # always release the sqlite connection (it was leaked before)
            db_connection.close()
def _get_today():
    """Today's date (local time) formatted as YYYYMMDD."""
    return time.strftime("%Y%m%d", time.localtime())
def _get_dct_values(tag_repository):
    """Return the list of normalized values of all TIMEX3 tags in the
    TagRepository that are marked as the document creation time."""
    creation_times = [
        timex for timex in tag_repository.find_tags('TIMEX3')
        if timex.attrs.get('functionInDocument') == 'CREATION_TIME']
    return [timex.attrs.get(LIBRARY.timeml.VALUE) for timex in creation_times]
| """Metadata Parsers.
This module contains metadata parsers, that is, parsers that pull out the
metadata and add it to a TarsqiDocument. The only requirements on each parser is
that it defines an __init__() method that takes a dictionary of options and a
parse() method that takes a TarsqiDocument instance.
Current parsers only deal with the DCT.
"""
import re, time, os, sqlite3
from docmodel.document import TarsqiDocument
import utilities.logger as logger
from library.main import LIBRARY
class MetadataParser:
    """This is the minimal metadata parser that is used as a default. It selects
    the DCT from all available sources and picks one of them, or it uses today's
    date if no DCT's are available. Subclasses should override the get_dct()
    method to define specific DCT extraction methods for the document source."""

    def __init__(self, options):
        """At the moment, initialization only uses the --dct option if it is
        present, but this could change. Note that the TarsqiDocument does not
        exist yet when the MetadataParser is initialized."""
        self.options = options
        self.tarsqidoc = None  # added in by the parse() method

    def parse(self, tarsqidoc):
        """Adds metadata to the TarsqiDocument. The only thing it adds to the
        metadata dictionary is the DCT, which is set to today."""
        self.tarsqidoc = tarsqidoc
        self.tarsqidoc.metadata['dct'] = self.get_dct()
        self._moderate_dct_vals()

    def _moderate_dct_vals(self):
        """There are five places where a DCT can be expressed: the DCT handed in
        with the --dct option or defined in the config file, the DCT from the
        metadata on the TarsqiDocument, the DCT from the metadata on the
        SourceDoc, DCTs from the TagRepository on the TarsqiDocument and DCTs
        from the TagRepository on the SourceDoc. The first three are single
        values or None, the other two are lists of any length. The order of
        these five is significant in that a DCT earlier on the list if given
        precedence over a DCT later on the list. Collects all the DCT values and
        picks the very first one, or today's date if no DCTs are available. Logs
        a warning if the DCTs do not all have the same value."""
        dcts = []
        # candidate sources, in decreasing order of precedence
        for dct_val in [self.tarsqidoc.options.dct,
                        self.tarsqidoc.metadata.get('dct'),
                        self.tarsqidoc.sourcedoc.metadata.get('dct'),
                        _get_dct_values(self.tarsqidoc.sourcedoc.tags),
                        _get_dct_values(self.tarsqidoc.tags)]:
            if dct_val is None:
                # the case where there is no DCT in the options or metadata
                continue
            elif isinstance(dct_val, list):
                # TagRepository sources yield a (possibly empty) list of DCTs
                dcts.extend(dct_val)
            else:
                dcts.append(dct_val)
        if len(set(dcts)) > 1:
            logger.warn("WARNING: more than one DCT value available")
        # first element has the highest precedence; default to today's date
        dct = dcts[0] if dcts else _get_today()
        self.tarsqidoc.metadata['dct'] = dct

    def get_dct(self):
        # default: no DCT available from the document; subclasses override this
        return None

    def _get_source(self):
        """A convenience method to lift the SourceDoc out of the tarsqi
        instance."""
        return self.tarsqidoc.sourcedoc

    def _get_tag_content(self, tagname):
        """Return the text content of the first tag with name tagname, return
        None if there is no such tag."""
        try:
            tag = self._get_source().tags.find_tags(tagname)[0]
            content = self._get_source().text[tag.begin:tag.end].strip()
            return content
        except IndexError:
            # find_tags returned an empty list: the tag is absent
            logger.warn("Cannot get the %s tag in this document" % tagname)
            return None
class MetadataParserTTK(MetadataParser):
    """Metadata parser for the ttk format; currently identical to the
    default parser and kept as an extension point."""
class MetadataParserText(MetadataParser):
    """Metadata parser for the plain text format; currently identical to the
    default parser and kept as an extension point."""
class MetadataParserTimebank(MetadataParser):
    """The parser for Timebank documents. All it does is to overwrite the
    get_dct() method."""

    def get_dct(self):
        """Extracts the document creation time, and returns it as a string of
        the form YYYYMMDD. Depending on the source, the DCT can be found in one
        of the following tags: DOCNO, DATE_TIME, PUBDATE or FILEID."""
        result = self._get_doc_source()
        if result is None:
            # dct defaults to today if we cannot find the DOCNO tag in the
            # document
            return _get_today()
        source_identifier, content = result
        if source_identifier in ('ABC', 'CNN', 'PRI', 'VOA'):
            # chars 3-10 of the DOCNO hold the date -- presumably YYYYMMDD;
            # verify against the corpus
            return content[3:11]
        elif source_identifier == 'AP':
            dct = self._parse_tag_content("(?:AP-NR-)?(\d+)-(\d+)-(\d+)",
                                          'FILEID')
            # the DCT format is YYYYMMDD or YYMMDD
            return dct if len(dct) == 8 else '19' + dct
        elif source_identifier in ('APW', 'NYT'):
            # DATE_TIME content is a month/day/year date
            return self._parse_tag_content("(\d+)/(\d+)/(\d+)", 'DATE_TIME')
        elif source_identifier == 'SJMN':
            # two digit year in PUBDATE; all these documents are from the 1900s
            pubdate_content = self._get_tag_content('PUBDATE')
            return '19' + pubdate_content
        elif source_identifier == 'WSJ':
            return '19' + content[3:9]
        elif source_identifier in ('ea', 'ed'):
            return '19' + content[2:8]

    def _get_doc_source(self):
        """Return the name of the content provider as well as the content of the
        DOCNO tag that has that information."""
        content = self._get_tag_content('DOCNO')
        content = str(content)  # in case the above returned None
        # note: 'APW' is tested before 'AP' since 'AP' is a prefix of 'APW'
        for source_identifier in ('ABC', 'APW', 'AP', 'CNN', 'NYT', 'PRI',
                                  'SJMN', 'VOA', 'WSJ', 'ea', 'ed'):
            if content.startswith(source_identifier):
                return (source_identifier, content)
        logger.warn("Could not determine document source from DOCNO tag")
        return None

    def _parse_tag_content(self, regexpr, tagname):
        """Return the DCT part of the tag content of tagname, requires a regular
        expression with month, day and year groups as one of the arguments."""
        content_string = self._get_tag_content(tagname)
        result = re.compile(regexpr).match(content_string)
        if result:
            (month, day, year) = result.groups()
            # reorder the matched month/day/year groups into YYYYMMDD order
            return "%s%s%s" % (year, month, day)
        else:
            logger.warn("Could not get date from %s tag" % tagname)
            return _get_today()
class MetadataParserATEE(MetadataParser):
    """Parser for ATEE documents."""

    def get_dct(self):
        """Every ATEE document carries a DATE tag whose "value" attribute
        holds the DCT; return that attribute."""
        tag = self.tarsqidoc.sourcedoc.tags.find_tag('DATE')
        return tag.attrs['value']
class MetadataParserRTE3(MetadataParser):
    """Parser for RTE3 documents; behaves exactly like the default parser."""
class MetadataParserDB(MetadataParser):
    """A minimal example parser for cases where the DCT is retrieved from a
    database. It is identical to MetadataParser except for how it gets the
    DCT. This is done by lookup in a database. This here is the simplest
    possible case, and it is quite inefficient. It assumes there is an sqlite
    database at 'TTK_ROOT/data/in/va/dct.sqlite' which was created as
    follows:

    $ sqlite3 dct.sqlite
    sqlite> create table dct (filename TEXT, dct TEXT)
    sqlite> insert into dct values ("test.xml", "1999-12-31");

    The get_dct() method uses this database and the location of the database is
    specified in the config.txt file. The first use case for this were VA
    documents where the DCT was stored externally. To see this in action run

    $ python tarsqi.py --source=db data/in/va/test.xml out.xml
    """

    def get_dct(self):
        """Look up the DCT for the source file name in the sqlite database
        configured with the 'dct-database' option. Returns None when the file
        has no entry (the caller then falls back to today's date)."""
        fname = os.path.basename(self._get_source().filename)
        db_location = self.options.getopt('dct-database')
        db_connection = sqlite3.connect(db_location)
        try:
            db_cursor = db_connection.cursor()
            db_cursor.execute('SELECT dct FROM dct WHERE filename=?', (fname,))
            row = db_cursor.fetchone()
            # guard against a missing entry instead of crashing on None[0]
            return row[0] if row is not None else None
        finally:
            # always release the sqlite connection (it was leaked before)
            db_connection.close()
def _get_today():
    """Today's date (local time) formatted as YYYYMMDD."""
    return time.strftime("%Y%m%d", time.localtime())
def _get_dct_values(tag_repository):
    """Return the list of normalized values of all TIMEX3 tags in the
    TagRepository that are marked as the document creation time."""
    creation_times = [
        timex for timex in tag_repository.find_tags('TIMEX3')
        if timex.attrs.get('functionInDocument') == 'CREATION_TIME']
    return [timex.attrs.get(LIBRARY.timeml.VALUE) for timex in creation_times]
| en | 0.804198 | Metadata Parsers. This module contains metadata parsers, that is, parsers that pull out the metadata and add it to a TarsqiDocument. The only requirements on each parser is that it defines an __init__() method that takes a dictionary of options and a parse() method that takes a TarsqiDocument instance. Current parsers only deal with the DCT. This is the minimal metadata parser that is used as a default. It selects the DCT from all available sources and picks one of them, or it uses today's date if no DCT's are available. Subclasses should override the get_dct() method to define specific DCT extraction methods for the document source. At the moment, initialization only uses the --dct option if it is present, but this could change. Note that the TarsqiDocument does not exist yet when the MetadataParser is initialized. # added in by the parse() method Adds metadata to the TarsqiDocument. The only thing it adds to the metadata dictionary is the DCT, which is set to today. There are five places where a DCT can be expressed: the DCT handed in with the --dct option or defined in the config file, the DCT from the metadata on the TarsqiDocument, the DCT from the metadata on the SourceDoc, DCTs from the TagRepository on the TarsqiDocument and DCTs from the TagRepository on the SourceDoc. The first three are single values or None, the other two are lists of any length. The order of these five is significant in that a DCT earlier on the list if given precedence over a DCT later on the list. Collects all the DCT values and picks the very first one, or today's date if no DCTs are available. Logs a warning if the DCTs do not all have the same value. # the case where there is no DCT in the options or metadata A convenience method to lift the SourceDoc out of the tarsqi instance. Return the text content of the first tag with name tagname, return None if there is no such tag. The metadata parser for the ttk format. 
For now this one adds nothing to the default metadata parser. The metadata parser for the text format. For now this one adds nothing to the default metadata parser. The parser for Timebank documents. All it does is to overwrite the get_dct() method. Extracts the document creation time, and returns it as a string of the form YYYYMMDD. Depending on the source, the DCT can be found in one of the following tags: DOCNO, DATE_TIME, PUBDATE or FILEID. # dct defaults to today if we cannot find the DOCNO tag in the # document # the DCT format is YYYYMMDD or YYMMDD Return the name of the content provider as well as the content of the DOCNO tag that has that information. # in case the above returned None Return the DCT part of the tag content of tagname, requires a reqular expression as one of the arguments. The parser for ATEE document. All ATEE documents have a DATE tag with a value attribute, the value of that attribute is returned. The parser for RTE3 documents, no differences with the default parser. A minimal example parser for cases where the DCT is retrieved from a database. It is identical to MetadataParser except for how it gets the DCT. This is done by lookup in a database. This here is the simplest possible case, and it is quite inefficient. It assumes there is an sqlite database at 'TTK_ROOT/data/in/va/dct.sqlite' which was created as follows: $ sqlite3 dct.sqlite sqlite> create table dct (filename TEXT, dct TEXT) sqlite> insert into dct values ("test.xml", "1999-12-31"); The get_dct() method uses this database and the location of the database is specified in the config.txt file. The first use case for this were VA documents where the DCT was stored externally. To see this in action run $ python tarsqi.py --source=db data/in/va/test.xml out.xml Return today's date in YYYYMMDD format. Return the list of nromalized values from all TIMEX3 tags in the TagRepository. | 2.642568 | 3 |
tubify.py | teejaytiger/Tubify | 0 | 6624454 | from genericpath import exists
import os, subprocess, sys
from posixpath import splitext
import spotipy
import yaml
from spotipy.oauth2 import SpotifyOAuth
from youtube_search import YoutubeSearch
from difflib import SequenceMatcher
from subprocess import Popen, PIPE
## configuration
class config:
    """Simple attribute bag: exposes every keyword argument as an attribute."""
    def __init__(self, **entries):
        for key, value in entries.items():
            setattr(self, key, value)
## import the config file as config properties
with open("config.yaml", 'r') as stream:
    data_loaded = yaml.safe_load(stream)
# s: global settings object; attributes mirror the config.yaml keys
s = config(**data_loaded)

# configure Spotipy
# NOTE(review): OAuth client credentials are hard-coded in source control --
# they should be rotated and loaded from config.yaml / the environment.
sp = spotipy.Spotify(auth_manager=SpotifyOAuth(
    client_id="d18509bdbb4d498e89dc2f56a6b7e8e6",
    client_secret="84db0e51115e483398e237961a12bbbb",
    redirect_uri="https://spotify.com",
    scope="playlist-read-private playlist-read-collaborative")
)
# capture the top result for a song title
def get_youtube_uri(title):
    """Return the YouTube watch URL of the top search result for *title*,
    or None when the search returns nothing."""
    hits = YoutubeSearch(title, max_results=1).to_dict()
    if not hits:
        return None
    return "https://www.youtube.com/watch?v=" + hits[0]["id"]
# get the tracks of a specific spotify playlist by id
def get_playlist_tracks(username, playlist_id):
    """Collect every track of the playlist, following Spotify pagination."""
    page = sp.user_playlist_tracks(username, playlist_id)
    tracks = page['items']
    while page['next']:
        page = sp.next(page)
        tracks.extend(page['items'])
    return tracks
# collect a users playlists
def get_playlists(username):
    """Walk the user's playlists, keep the ones whose name is similar enough
    to a configured playlist name, and look up a YouTube URI for every track.

    Returns (result, ytdl, folder_titles):
    - result: [track, youtube_uri] pairs for all matched tracks
    - ytdl: one list of YouTube URIs per matched playlist
    - folder_titles: the names of the matched playlists
    """
    result = []
    ytdl = []
    folder_titles = []
    playlists = sp.user_playlists(username)
    while playlists:
        for i, playlist in enumerate(playlists['items']):
            if s.show_playlists:
                # listing mode: only print the names, never download
                print(playlist['name'])
                continue
            # how close is this playlist's name to the configured names?
            similarity = [SequenceMatcher(None, pl, playlist['name']).ratio()
                          for pl in s.playlists]
            # guard against an empty playlist list in the config
            # (max() on an empty sequence would raise ValueError)
            if not similarity or max(similarity) < s.pl_similarity:
                continue
            print("%4d %s" % (i + 1 + playlists['offset'], playlist['name']))
            ytdlpl = []
            pluri = playlist['uri'].split(":")[2]
            for track in get_playlist_tracks(username, pluri):
                track_info = track.get("track")
                if not track_info:
                    # local/removed tracks come back with a null track payload
                    continue
                title = track_info["album"]["artists"][0]["name"] + " - " + track_info["name"]
                if s.assert_music:
                    title += " " + s.assert_music_string
                yturi = get_youtube_uri(title)
                result.append([track, yturi])
                if yturi:
                    ytdlpl.append(yturi)
                print(" {1} :: {0}".format(title, yturi))
            ytdl.append(ytdlpl)
            folder_titles.append(playlist['name'])
        playlists = sp.next(playlists) if playlists['next'] else None
    return result, ytdl, folder_titles
# collect everything up for youtube-dl
tracks, uris, folder_titles = get_playlists(s.uname)

# create folders to store playlist items and download music
if not s.show_playlists:
    for i in range(len(folder_titles)):
        # keep only alphanumerics and spaces so the name is a safe folder name
        title = "".join([c for c in folder_titles[i] if c.isalpha() or c.isdigit() or c==' ']).rstrip()
        # create the music folder path
        # case for named playlist folders
        if s.playlist_folders:
            p = os.path.join(os.path.normpath(s.output_path), title)
            p3 = title
            p2 = os.path.join(p, p3+".ytdl")
            if not os.path.isdir(p):
                os.mkdir(p)
        # case for aggregated download
        else:
            p = os.path.normpath(s.output_path)
            p3 = "playlist"
            p2 = os.path.join(p, p3+".ytdl")
            if not os.path.isdir(p):
                os.mkdir(p)
        # append untracked songs
        with open(p2, 'a+') as f:
            f.seek(0)  # 'a+' opens at EOF; rewind before reading existing URIs
            songs = [i.rstrip() for i in f.readlines()]
            for k, uri in enumerate(uris[i]):
                if not uri in songs:
                    f.write('%s\n' % uri)
        # download!
        # case for numbering tracks to preserve order
        if s.download:
            if s.number_tracks:
                r = subprocess.Popen(
                    [
                        s.youtube_dl_path,
                        "-ciw",
                        "--download-archive", os.path.join(p, p3+".txt"),
                        "--batch-file", p2,
                        "--restrict-filenames",
                        "--format", "140",  # audio-only format (converter below expects .m4a)
                        "-o", p+"//"+"%(autonumber)s-%(title)s.%(ext)s"
                    ],
                    stdout=subprocess.PIPE)
            # case for not numbering tracks
            else:
                r = subprocess.Popen(
                    [
                        s.youtube_dl_path,
                        "-ciw",
                        "--download-archive", os.path.join(p, p3+".txt"),
                        "--batch-file", p2,
                        "--restrict-filenames",
                        "--format", "140",
                        "-o", p+"//"+"%(title)s.%(ext)s"
                    ],
                    stdout=subprocess.PIPE)
            # stream youtube-dl output to the console as it arrives
            # NOTE(review): cp1252 decoding assumes a Windows console -- confirm elsewhere
            for line in iter(r.stdout.readline, b''):
                print(line.decode('cp1252'), end="\r", file=sys.stdout, flush=True)
            print("[DONE]\n")

if s.convert_to_mp3:
    print("[POST] Converting files to mp3. See dirname/dirname for converted files.")
    count = 0
    cmds = []
    convert_p = os.path.join(s.output_path, "CONVERTED")
    if not os.path.exists(convert_p): os.mkdir(convert_p)
    for subdir, dirs, files in os.walk(s.output_path):
        # mirror the directory layout under CONVERTED
        # NOTE(review): dirs[1:] skips the first subdirectory of every walked
        # folder -- presumably to skip CONVERTED itself; verify
        for d in dirs[1:]:
            p = os.path.join(convert_p, d)
            if not os.path.exists(p): os.mkdir(p)
        for file in files:
            parent_folder = os.path.split(subdir)[1]
            bname, ext = os.path.splitext(file)
            if "m4a" in ext:
                fname = bname+".mp3"
                cmds.append( [
                    s.ffmpeg_install_location,
                    '-i', "\""+os.path.join(subdir, file)+"\"",
                    '-acodec', 'libmp3lame',
                    '-ac', '2',
                    '-q:a', '2',  # libmp3lame VBR quality level
                    "\""+os.path.join(convert_p, parent_folder, fname)+"\""] )
                count+=1
    i = 1
    for command in cmds:
        print("converting {0}/{1} - {2}".format(i, count, os.path.split(command[2])[1]))
        #print(" ".join(command)) # debug
        ffmpeg = subprocess.Popen(" ".join(command), stderr=subprocess.PIPE, stdout = subprocess.PIPE )
        stdout, stderr = ffmpeg.communicate()
        i+=1
import os, subprocess, sys
from posixpath import splitext
import spotipy
import yaml
from spotipy.oauth2 import SpotifyOAuth
from youtube_search import YoutubeSearch
from difflib import SequenceMatcher
from subprocess import Popen, PIPE
## configuration
class config:
    """Simple attribute bag: exposes every keyword argument as an attribute."""
    def __init__(self, **entries):
        for key, value in entries.items():
            setattr(self, key, value)
## import the config file as config properties
with open("config.yaml", 'r') as stream:
    data_loaded = yaml.safe_load(stream)
# s: global settings object; attributes mirror the config.yaml keys
s = config(**data_loaded)

# configure Spotipy
# NOTE(review): OAuth client credentials are hard-coded in source control --
# they should be rotated and loaded from config.yaml / the environment.
sp = spotipy.Spotify(auth_manager=SpotifyOAuth(
    client_id="d18509bdbb4d498e89dc2f56a6b7e8e6",
    client_secret="84db0e51115e483398e237961a12bbbb",
    redirect_uri="https://spotify.com",
    scope="playlist-read-private playlist-read-collaborative")
)
# capture the top result for a song title
def get_youtube_uri(title):
    """Return the YouTube watch URL of the top search result for *title*,
    or None when the search returns nothing."""
    hits = YoutubeSearch(title, max_results=1).to_dict()
    if not hits:
        return None
    return "https://www.youtube.com/watch?v=" + hits[0]["id"]
# get the tracks of a specific spotify playlist by id
def get_playlist_tracks(username, playlist_id):
    """Collect every track of the playlist, following Spotify pagination."""
    page = sp.user_playlist_tracks(username, playlist_id)
    tracks = page['items']
    while page['next']:
        page = sp.next(page)
        tracks.extend(page['items'])
    return tracks
# collect a users playlists
def get_playlists(username):
    """Walk the user's playlists, keep the ones whose name is similar enough
    to a configured playlist name, and look up a YouTube URI for every track.

    Returns (result, ytdl, folder_titles):
    - result: [track, youtube_uri] pairs for all matched tracks
    - ytdl: one list of YouTube URIs per matched playlist
    - folder_titles: the names of the matched playlists
    """
    result = []
    ytdl = []
    folder_titles = []
    playlists = sp.user_playlists(username)
    while playlists:
        for i, playlist in enumerate(playlists['items']):
            if s.show_playlists:
                # listing mode: only print the names, never download
                print(playlist['name'])
                continue
            # how close is this playlist's name to the configured names?
            similarity = [SequenceMatcher(None, pl, playlist['name']).ratio()
                          for pl in s.playlists]
            # guard against an empty playlist list in the config
            # (max() on an empty sequence would raise ValueError)
            if not similarity or max(similarity) < s.pl_similarity:
                continue
            print("%4d %s" % (i + 1 + playlists['offset'], playlist['name']))
            ytdlpl = []
            pluri = playlist['uri'].split(":")[2]
            for track in get_playlist_tracks(username, pluri):
                track_info = track.get("track")
                if not track_info:
                    # local/removed tracks come back with a null track payload
                    continue
                title = track_info["album"]["artists"][0]["name"] + " - " + track_info["name"]
                if s.assert_music:
                    title += " " + s.assert_music_string
                yturi = get_youtube_uri(title)
                result.append([track, yturi])
                if yturi:
                    ytdlpl.append(yturi)
                print(" {1} :: {0}".format(title, yturi))
            ytdl.append(ytdlpl)
            folder_titles.append(playlist['name'])
        playlists = sp.next(playlists) if playlists['next'] else None
    return result, ytdl, folder_titles
# collect everything up for youtube-dl
tracks, uris, folder_titles = get_playlists(s.uname)

# create folders to store playlist items and download music
if not s.show_playlists:
    for i in range(len(folder_titles)):
        # keep only alphanumerics and spaces so the name is a safe folder name
        title = "".join([c for c in folder_titles[i] if c.isalpha() or c.isdigit() or c==' ']).rstrip()
        # create the music folder path
        # case for named playlist folders
        if s.playlist_folders:
            p = os.path.join(os.path.normpath(s.output_path), title)
            p3 = title
            p2 = os.path.join(p, p3+".ytdl")
            if not os.path.isdir(p):
                os.mkdir(p)
        # case for aggregated download
        else:
            p = os.path.normpath(s.output_path)
            p3 = "playlist"
            p2 = os.path.join(p, p3+".ytdl")
            if not os.path.isdir(p):
                os.mkdir(p)
        # append untracked songs
        with open(p2, 'a+') as f:
            f.seek(0)  # 'a+' opens at EOF; rewind before reading existing URIs
            songs = [i.rstrip() for i in f.readlines()]
            for k, uri in enumerate(uris[i]):
                if not uri in songs:
                    f.write('%s\n' % uri)
        # download!
        # case for numbering tracks to preserve order
        if s.download:
            if s.number_tracks:
                r = subprocess.Popen(
                    [
                        s.youtube_dl_path,
                        "-ciw",
                        "--download-archive", os.path.join(p, p3+".txt"),
                        "--batch-file", p2,
                        "--restrict-filenames",
                        "--format", "140",  # audio-only format (converter below expects .m4a)
                        "-o", p+"//"+"%(autonumber)s-%(title)s.%(ext)s"
                    ],
                    stdout=subprocess.PIPE)
            # case for not numbering tracks
            else:
                r = subprocess.Popen(
                    [
                        s.youtube_dl_path,
                        "-ciw",
                        "--download-archive", os.path.join(p, p3+".txt"),
                        "--batch-file", p2,
                        "--restrict-filenames",
                        "--format", "140",
                        "-o", p+"//"+"%(title)s.%(ext)s"
                    ],
                    stdout=subprocess.PIPE)
            # stream youtube-dl output to the console as it arrives
            # NOTE(review): cp1252 decoding assumes a Windows console -- confirm elsewhere
            for line in iter(r.stdout.readline, b''):
                print(line.decode('cp1252'), end="\r", file=sys.stdout, flush=True)
            print("[DONE]\n")

if s.convert_to_mp3:
    print("[POST] Converting files to mp3. See dirname/dirname for converted files.")
    count = 0
    cmds = []
    convert_p = os.path.join(s.output_path, "CONVERTED")
    if not os.path.exists(convert_p): os.mkdir(convert_p)
    for subdir, dirs, files in os.walk(s.output_path):
        # mirror the directory layout under CONVERTED
        # NOTE(review): dirs[1:] skips the first subdirectory of every walked
        # folder -- presumably to skip CONVERTED itself; verify
        for d in dirs[1:]:
            p = os.path.join(convert_p, d)
            if not os.path.exists(p): os.mkdir(p)
        for file in files:
            parent_folder = os.path.split(subdir)[1]
            bname, ext = os.path.splitext(file)
            if "m4a" in ext:
                fname = bname+".mp3"
                cmds.append( [
                    s.ffmpeg_install_location,
                    '-i', "\""+os.path.join(subdir, file)+"\"",
                    '-acodec', 'libmp3lame',
                    '-ac', '2',
                    '-q:a', '2',  # libmp3lame VBR quality level
                    "\""+os.path.join(convert_p, parent_folder, fname)+"\""] )
                count+=1
    i = 1
    for command in cmds:
        print("converting {0}/{1} - {2}".format(i, count, os.path.split(command[2])[1]))
        #print(" ".join(command)) # debug
        ffmpeg = subprocess.Popen(" ".join(command), stderr=subprocess.PIPE, stdout = subprocess.PIPE )
        stdout, stderr = ffmpeg.communicate()
        i+=1
2020/Python/day03.py | SDragon42/AdventOfCode2019 | 0 | 6624455 | <reponame>SDragon42/AdventOfCode2019
from typing import List
import helper
import inputHelper
from puzzleBase import PuzzleBase
class InputData:
    """Loads a day-3 puzzle input set: the grid lines, the slopes to check,
    and the expected answer (when an answer file exists)."""

    input: List[str]
    slopes: List[str]
    expectedAnswer: int

    def __init__(self, name: str, part: int) -> None:
        day = 3
        self.input = inputHelper.load_file(day, name).splitlines()
        # slopes come as comma separated two-digit strings, e.g. "31" = right 3, down 1
        self.slopes = inputHelper.load_file(day, f'slopes{part}').split(',')
        answer = inputHelper.load_file(day, f"{name}-answer{part}")
        self.expectedAnswer = int(answer) if answer is not None else None
class Puzzle(PuzzleBase):
    """Advent of Code 2020, day 3: count trees hit while tobogganing down a map."""

    def count_trees(self, input: List[str], slopeX: int, slopeY: int) -> int:
        """Count '#' cells hit stepping (slopeX right, slopeY down) from the top-left.

        The map repeats horizontally, so the column index wraps at the row width.
        """
        x = 0
        y = 0
        numTrees = 0
        inputWidth = len(input[0])
        # Walk until the bottom row of the map is reached.
        while y < len(input) - 1:
            x += slopeX
            y += slopeY
            # Wrap around the right edge.
            # NOTE(review): a single subtraction only handles x < 2 * inputWidth,
            # i.e. it assumes slopeX < inputWidth -- true for this puzzle's slopes.
            if x >= inputWidth:
                x -= inputWidth
            val = input[y][x]
            if val == '#':
                numTrees += 1
        return numTrees

    def run_part(self, data: InputData) -> str:
        """Multiply the tree counts over all slopes and validate the product."""
        result = 1
        for sl in data.slopes:
            # Each slope is a two-character code: sl[0] = right step, sl[1] = down step.
            result *= self.count_trees(data.input, int(sl[0]), int(sl[1]))
        return helper.validate_result("How many trees would you encounter?", result, data.expectedAnswer)

    def solve(self):
        """Run the day-3 example and puzzle inputs for both parts."""
        print("Day 3: Toboggan Trajectory")
        print("")
        self.run_example(lambda: "P1 Ex1) " + self.run_part(InputData('example1', 1)))
        self.run_problem(lambda: "Part 1) " + self.run_part(InputData('input', 1)))
        print("")
        self.run_example(lambda: "P2 Ex1) " + self.run_part(InputData('example1', 2)))
self.run_problem(lambda: "Part 2) " + self.run_part(InputData('input', 2))) | from typing import List
import helper
import inputHelper
from puzzleBase import PuzzleBase
class InputData:
input: List[str]
slopes: List[str]
expectedAnswer: int
def __init__(self, name: str, part: int) -> None:
day = 3
self.input = inputHelper.load_file(day, name).splitlines()
self.slopes = inputHelper.load_file(day, f'slopes{part}').split(',') #lines[0].split(',')
answer = inputHelper.load_file(day, f"{name}-answer{part}")
self.expectedAnswer = int(answer) if answer is not None else None
class Puzzle(PuzzleBase):
def count_trees(self, input: List[str], slopeX: int, slopeY: int) -> int:
x = 0
y = 0
numTrees = 0
inputWidth = len(input[0])
while y < len(input) - 1:
x += slopeX
y += slopeY
if x >= inputWidth:
x -= inputWidth
val = input[y][x]
if val == '#':
numTrees += 1
return numTrees
def run_part(self, data: InputData) -> str:
result = 1
for sl in data.slopes:
result *= self.count_trees(data.input, int(sl[0]), int(sl[1]))
return helper.validate_result("How many trees would you encounter?", result, data.expectedAnswer)
def solve(self):
print("Day 3: Toboggan Trajectory")
print("")
self.run_example(lambda: "P1 Ex1) " + self.run_part(InputData('example1', 1)))
self.run_problem(lambda: "Part 1) " + self.run_part(InputData('input', 1)))
print("")
self.run_example(lambda: "P2 Ex1) " + self.run_part(InputData('example1', 2)))
self.run_problem(lambda: "Part 2) " + self.run_part(InputData('input', 2))) | zh | 0.425303 | #lines[0].split(',') | 3.747589 | 4 |
project/settings.py | kunugoda/jobbrd | 0 | 6624456 | <reponame>kunugoda/jobbrd
"""
Django settings for activcar project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from decouple import config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG')
ALLOWED_HOSTS = []
SITE_NAME = 'dopejob'
SITE_ID = 1
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.humanize',
'dj_pagination',
'storages',
'accounts',
'jobboard',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'dj_pagination.middleware.PaginationMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
from django.utils.translation import gettext_lazy as _
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LANGUAGES = (
('en', _('English')),
('fr', _('Français')),
)
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale/'),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = 'staticfiles'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static/'),
)
DEFAULT_FROM_EMAIL = '<EMAIL>'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Dango Allauth settings
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
)
AUTH_USER_MODEL = 'accounts.User'
| """
Django settings for activcar project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from decouple import config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG')
ALLOWED_HOSTS = []
SITE_NAME = 'dopejob'
SITE_ID = 1
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.humanize',
'dj_pagination',
'storages',
'accounts',
'jobboard',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'dj_pagination.middleware.PaginationMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
from django.utils.translation import gettext_lazy as _
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LANGUAGES = (
('en', _('English')),
('fr', _('Français')),
)
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale/'),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = 'staticfiles'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static/'),
)
DEFAULT_FROM_EMAIL = '<EMAIL>'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Dango Allauth settings
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
)
AUTH_USER_MODEL = 'accounts.User' | en | 0.62065 | Django settings for activcar project. Generated by 'django-admin startproject' using Django 1.10.6. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! # SECURITY WARNING: don't run with debug turned on in production! # Application definition # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ # Dango Allauth settings # Needed to login by username in Django admin, regardless of `allauth` | 1.79376 | 2 |
tests/gcp/hooks/test_cloud_build.py | suensummit/airflow | 1 | 6624457 | <gh_stars>1-10
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Tests for Google Cloud Build Hook
"""
import unittest
from typing import Optional
from unittest import mock
from airflow import AirflowException
from airflow.gcp.hooks.cloud_build import CloudBuildHook
from tests.compat import PropertyMock
from tests.gcp.utils.base_gcp_mock import (
GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id,
mock_base_gcp_hook_no_default_project_id,
)
# --- Canned fixtures shared by the test cases below ---

# Representative Cloud Build request body: a Docker build from a GCS tarball.
TEST_CREATE_BODY = {
    "source": {"storageSource": {"bucket": "cloud-build-examples", "object": "node-docker-example.tar.gz"}},
    "steps": [
        {"name": "gcr.io/cloud-builders/docker", "args": ["build", "-t", "gcr.io/$PROJECT_ID/my-image", "."]}
    ],
    "images": ["gcr.io/$PROJECT_ID/my-image"],
}
# Build resource returned by the mocked builds().create()/get() calls.
TEST_BUILD = {"name": "build-name", "metadata": {"build": {"id": "AAA"}}}
# Long-running-operation payloads used to drive the hook's polling loop.
TEST_WAITING_OPERATION = {"done": False, "response": "response"}
TEST_DONE_OPERATION = {"done": True, "response": "response"}
TEST_ERROR_OPERATION = {"done": True, "response": "response", "error": "error"}
# Explicit project id used when testing the keyword-argument path.
TEST_PROJECT_ID = "cloud-build-project-id"
class TestCloudBuildHookWithPassedProjectId(unittest.TestCase):
    """Tests for CloudBuildHook when the project id is passed explicitly to each call."""

    hook = None  # type: Optional[CloudBuildHook]

    def setUp(self):
        # Patch the base hook constructor so no real GCP connection is required.
        with mock.patch(
            "airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__",
            new=mock_base_gcp_hook_default_project_id,
        ):
            self.hook = CloudBuildHook(gcp_conn_id="test")

    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook._authorize")
    @mock.patch("airflow.gcp.hooks.cloud_build.build")
    def test_cloud_build_client_creation(self, mock_build, mock_authorize):
        """get_conn() builds the discovery client once and caches it on the hook."""
        result = self.hook.get_conn()
        mock_build.assert_called_once_with(
            'cloudbuild', 'v1', http=mock_authorize.return_value, cache_discovery=False
        )
        self.assertEqual(mock_build.return_value, result)
        self.assertEqual(self.hook._conn, result)

    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    def test_build_immediately_complete(self, get_conn_mock):
        """create_build() returns the build when the operation is already done on the first poll."""
        service_mock = get_conn_mock.return_value

        # builds().create() starts the build ...
        service_mock.projects.return_value\
            .builds.return_value\
            .create.return_value\
            .execute.return_value = TEST_BUILD

        # ... builds().get() fetches the finished build ...
        service_mock.projects.return_value.\
            builds.return_value.\
            get.return_value.\
            execute.return_value = TEST_BUILD

        # ... and the operation reports done immediately.
        service_mock.operations.return_value.\
            get.return_value.\
            execute.return_value = TEST_DONE_OPERATION

        result = self.hook.create_build(body={}, project_id=TEST_PROJECT_ID)

        # The explicit project id must be forwarded to the API call.
        service_mock.projects.return_value.builds.return_value.create.assert_called_once_with(
            body={}, projectId=TEST_PROJECT_ID
        )

        self.assertEqual(result, TEST_BUILD)

    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    @mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
    def test_waiting_operation(self, _, get_conn_mock):
        """create_build() keeps polling the operation until it reports done."""
        service_mock = get_conn_mock.return_value
        service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
            TEST_BUILD
        )
        service_mock.projects.return_value.builds.return_value.get.return_value.execute.return_value = (
            TEST_BUILD
        )

        # First poll: still running; subsequent polls: done.
        execute_mock = mock.Mock(
            **{"side_effect": [TEST_WAITING_OPERATION, TEST_DONE_OPERATION, TEST_DONE_OPERATION]}
        )
        service_mock.operations.return_value.get.return_value.execute = execute_mock
        result = self.hook.create_build(body={}, project_id=TEST_PROJECT_ID)

        self.assertEqual(result, TEST_BUILD)

    @mock.patch(
        'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    @mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
    def test_error_operation(self, _, get_conn_mock, mock_project_id):
        """create_build() raises AirflowException when the operation finishes with an error."""
        service_mock = get_conn_mock.return_value
        service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
            TEST_BUILD
        )
        execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]})
        service_mock.operations.return_value.get.return_value.execute = execute_mock
        with self.assertRaisesRegex(AirflowException, "error"):
            self.hook.create_build(body={})
class TestGcpComputeHookWithDefaultProjectIdFromConnection(unittest.TestCase):
    """Tests for CloudBuildHook when the project id falls back to the connection default."""

    hook = None  # type: Optional[CloudBuildHook]

    def setUp(self):
        # Patch the base hook constructor so no real GCP connection is required.
        with mock.patch(
            "airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__",
            new=mock_base_gcp_hook_default_project_id,
        ):
            self.hook = CloudBuildHook(gcp_conn_id="test")

    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook._authorize")
    @mock.patch("airflow.gcp.hooks.cloud_build.build")
    def test_cloud_build_client_creation(self, mock_build, mock_authorize):
        """get_conn() builds the discovery client once and caches it on the hook."""
        result = self.hook.get_conn()
        mock_build.assert_called_once_with(
            'cloudbuild', 'v1', http=mock_authorize.return_value, cache_discovery=False
        )
        self.assertEqual(mock_build.return_value, result)
        self.assertEqual(self.hook._conn, result)

    @mock.patch(
        'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    def test_build_immediately_complete(self, get_conn_mock, mock_project_id):
        """create_build() without project_id uses the connection's default project."""
        service_mock = get_conn_mock.return_value
        service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
            TEST_BUILD
        )
        service_mock.projects.return_value.builds.return_value.get.return_value.execute.return_value = (
            TEST_BUILD
        )
        service_mock.operations.return_value.get.return_value.execute.return_value = TEST_DONE_OPERATION

        result = self.hook.create_build(body={})

        # 'example-project' comes from the mocked connection default.
        service_mock.projects.return_value.builds.return_value.create.assert_called_once_with(
            body={}, projectId='example-project'
        )

        self.assertEqual(result, TEST_BUILD)

    @mock.patch(
        'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    @mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
    def test_waiting_operation(self, _, get_conn_mock, mock_project_id):
        """create_build() keeps polling the operation until it reports done."""
        service_mock = get_conn_mock.return_value
        service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
            TEST_BUILD
        )
        service_mock.projects.return_value.builds.return_value.get.return_value.execute.return_value = (
            TEST_BUILD
        )

        # First poll: still running; subsequent polls: done.
        execute_mock = mock.Mock(
            **{"side_effect": [TEST_WAITING_OPERATION, TEST_DONE_OPERATION, TEST_DONE_OPERATION]}
        )
        service_mock.operations.return_value.get.return_value.execute = execute_mock
        result = self.hook.create_build(body={})

        self.assertEqual(result, TEST_BUILD)

    @mock.patch(
        'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    @mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
    def test_error_operation(self, _, get_conn_mock, mock_project_id):
        """create_build() raises AirflowException when the operation finishes with an error."""
        service_mock = get_conn_mock.return_value
        service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
            TEST_BUILD
        )
        execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]})
        service_mock.operations.return_value.get.return_value.execute = execute_mock
        with self.assertRaisesRegex(AirflowException, "error"):
            self.hook.create_build(body={})
class TestCloudBuildHookWithoutProjectId(unittest.TestCase):
    """Tests for CloudBuildHook when no project id is available at all."""

    hook = None  # type: Optional[CloudBuildHook]

    def setUp(self):
        # Patch the base hook constructor with the *no default project* variant.
        with mock.patch(
            "airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__",
            new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = CloudBuildHook(gcp_conn_id="test")

    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook._authorize")
    @mock.patch("airflow.gcp.hooks.cloud_build.build")
    def test_cloud_build_client_creation(self, mock_build, mock_authorize):
        """get_conn() builds the discovery client once and caches it on the hook."""
        result = self.hook.get_conn()
        mock_build.assert_called_once_with(
            'cloudbuild', 'v1', http=mock_authorize.return_value, cache_discovery=False
        )
        self.assertEqual(mock_build.return_value, result)
        self.assertEqual(self.hook._conn, result)

    @mock.patch(
        'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
    def test_create_build(self, mock_get_conn, mock_project_id):
        """create_build() fails fast with a clear message when no project id is set anywhere."""
        with self.assertRaises(AirflowException) as e:
            self.hook.create_build(body={})

        self.assertEqual(
            "The project id must be passed either as keyword project_id parameter or as project_id extra in "
            "GCP connection definition. Both are not set!",
            str(e.exception),
        )
| # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Tests for Google Cloud Build Hook
"""
import unittest
from typing import Optional
from unittest import mock
from airflow import AirflowException
from airflow.gcp.hooks.cloud_build import CloudBuildHook
from tests.compat import PropertyMock
from tests.gcp.utils.base_gcp_mock import (
GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id,
mock_base_gcp_hook_no_default_project_id,
)
TEST_CREATE_BODY = {
"source": {"storageSource": {"bucket": "cloud-build-examples", "object": "node-docker-example.tar.gz"}},
"steps": [
{"name": "gcr.io/cloud-builders/docker", "args": ["build", "-t", "gcr.io/$PROJECT_ID/my-image", "."]}
],
"images": ["gcr.io/$PROJECT_ID/my-image"],
}
TEST_BUILD = {"name": "build-name", "metadata": {"build": {"id": "AAA"}}}
TEST_WAITING_OPERATION = {"done": False, "response": "response"}
TEST_DONE_OPERATION = {"done": True, "response": "response"}
TEST_ERROR_OPERATION = {"done": True, "response": "response", "error": "error"}
TEST_PROJECT_ID = "cloud-build-project-id"
class TestCloudBuildHookWithPassedProjectId(unittest.TestCase):
hook = None # type: Optional[CloudBuildHook]
def setUp(self):
with mock.patch(
"airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__",
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudBuildHook(gcp_conn_id="test")
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook._authorize")
@mock.patch("airflow.gcp.hooks.cloud_build.build")
def test_cloud_build_client_creation(self, mock_build, mock_authorize):
result = self.hook.get_conn()
mock_build.assert_called_once_with(
'cloudbuild', 'v1', http=mock_authorize.return_value, cache_discovery=False
)
self.assertEqual(mock_build.return_value, result)
self.assertEqual(self.hook._conn, result)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
def test_build_immediately_complete(self, get_conn_mock):
service_mock = get_conn_mock.return_value
service_mock.projects.return_value\
.builds.return_value\
.create.return_value\
.execute.return_value = TEST_BUILD
service_mock.projects.return_value.\
builds.return_value.\
get.return_value.\
execute.return_value = TEST_BUILD
service_mock.operations.return_value.\
get.return_value.\
execute.return_value = TEST_DONE_OPERATION
result = self.hook.create_build(body={}, project_id=TEST_PROJECT_ID)
service_mock.projects.return_value.builds.return_value.create.assert_called_once_with(
body={}, projectId=TEST_PROJECT_ID
)
self.assertEqual(result, TEST_BUILD)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
@mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
def test_waiting_operation(self, _, get_conn_mock):
service_mock = get_conn_mock.return_value
service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
TEST_BUILD
)
service_mock.projects.return_value.builds.return_value.get.return_value.execute.return_value = (
TEST_BUILD
)
execute_mock = mock.Mock(
**{"side_effect": [TEST_WAITING_OPERATION, TEST_DONE_OPERATION, TEST_DONE_OPERATION]}
)
service_mock.operations.return_value.get.return_value.execute = execute_mock
result = self.hook.create_build(body={}, project_id=TEST_PROJECT_ID)
self.assertEqual(result, TEST_BUILD)
@mock.patch(
'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
new_callable=PropertyMock,
return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
@mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
def test_error_operation(self, _, get_conn_mock, mock_project_id):
service_mock = get_conn_mock.return_value
service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
TEST_BUILD
)
execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]})
service_mock.operations.return_value.get.return_value.execute = execute_mock
with self.assertRaisesRegex(AirflowException, "error"):
self.hook.create_build(body={})
class TestGcpComputeHookWithDefaultProjectIdFromConnection(unittest.TestCase):
hook = None # type: Optional[CloudBuildHook]
def setUp(self):
with mock.patch(
"airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__",
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudBuildHook(gcp_conn_id="test")
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook._authorize")
@mock.patch("airflow.gcp.hooks.cloud_build.build")
def test_cloud_build_client_creation(self, mock_build, mock_authorize):
result = self.hook.get_conn()
mock_build.assert_called_once_with(
'cloudbuild', 'v1', http=mock_authorize.return_value, cache_discovery=False
)
self.assertEqual(mock_build.return_value, result)
self.assertEqual(self.hook._conn, result)
@mock.patch(
'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
new_callable=PropertyMock,
return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
def test_build_immediately_complete(self, get_conn_mock, mock_project_id):
service_mock = get_conn_mock.return_value
service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
TEST_BUILD
)
service_mock.projects.return_value.builds.return_value.get.return_value.execute.return_value = (
TEST_BUILD
)
service_mock.operations.return_value.get.return_value.execute.return_value = TEST_DONE_OPERATION
result = self.hook.create_build(body={})
service_mock.projects.return_value.builds.return_value.create.assert_called_once_with(
body={}, projectId='example-project'
)
self.assertEqual(result, TEST_BUILD)
@mock.patch(
'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
new_callable=PropertyMock,
return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
@mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
def test_waiting_operation(self, _, get_conn_mock, mock_project_id):
service_mock = get_conn_mock.return_value
service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
TEST_BUILD
)
service_mock.projects.return_value.builds.return_value.get.return_value.execute.return_value = (
TEST_BUILD
)
execute_mock = mock.Mock(
**{"side_effect": [TEST_WAITING_OPERATION, TEST_DONE_OPERATION, TEST_DONE_OPERATION]}
)
service_mock.operations.return_value.get.return_value.execute = execute_mock
result = self.hook.create_build(body={})
self.assertEqual(result, TEST_BUILD)
@mock.patch(
'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
new_callable=PropertyMock,
return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
@mock.patch("airflow.gcp.hooks.cloud_build.time.sleep")
def test_error_operation(self, _, get_conn_mock, mock_project_id):
service_mock = get_conn_mock.return_value
service_mock.projects.return_value.builds.return_value.create.return_value.execute.return_value = (
TEST_BUILD
)
execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]})
service_mock.operations.return_value.get.return_value.execute = execute_mock
with self.assertRaisesRegex(AirflowException, "error"):
self.hook.create_build(body={})
class TestCloudBuildHookWithoutProjectId(unittest.TestCase):
hook = None # type: Optional[CloudBuildHook]
def setUp(self):
with mock.patch(
"airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__",
new=mock_base_gcp_hook_no_default_project_id,
):
self.hook = CloudBuildHook(gcp_conn_id="test")
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook._authorize")
@mock.patch("airflow.gcp.hooks.cloud_build.build")
def test_cloud_build_client_creation(self, mock_build, mock_authorize):
result = self.hook.get_conn()
mock_build.assert_called_once_with(
'cloudbuild', 'v1', http=mock_authorize.return_value, cache_discovery=False
)
self.assertEqual(mock_build.return_value, result)
self.assertEqual(self.hook._conn, result)
@mock.patch(
'airflow.gcp.hooks.base.GoogleCloudBaseHook.project_id',
new_callable=PropertyMock,
return_value=None
)
@mock.patch("airflow.gcp.hooks.cloud_build.CloudBuildHook.get_conn")
def test_create_build(self, mock_get_conn, mock_project_id):
with self.assertRaises(AirflowException) as e:
self.hook.create_build(body={})
self.assertEqual(
"The project id must be passed either as keyword project_id parameter or as project_id extra in "
"GCP connection definition. Both are not set!",
str(e.exception),
) | en | 0.817941 | # -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. Tests for Google Cloud Build Hook # type: Optional[CloudBuildHook] # type: Optional[CloudBuildHook] # type: Optional[CloudBuildHook] | 1.738491 | 2 |
jwql/utils/mast_utils.py | falkben/jwql | 42 | 6624458 | """Various utility functions for interacting with MAST
Authors
-------
- <NAME>
Use
---
This module can be imported as such:
>>> import mast_utils
results = mast_utils.mast_query('nircam', 'NRCA1_FULL', 'NRC_DARK', 53005.1, 53005.2)
"""
from jwql.jwql_monitors import monitor_mast
from jwql.utils.constants import JWST_DATAPRODUCTS, JWST_INSTRUMENT_NAMES_MIXEDCASE
def mast_query(instrument, templates, start_date, end_date, aperture=None, detector=None, filter_name=None,
               pupil=None, grating=None, readpattern=None, lamp=None):
    """Use ``astroquery`` to search MAST for data for given observation
    templates over a given time range

    Parameters
    ----------
    instrument : str
        Instrument name (e.g. ``nircam``)
    templates : str or list
        Single, or list of, templates for the query (e.g. ``NRC_DARK``,
        ``MIR_FLATMRS``)
    start_date : float
        Starting date for the search in MJD
    end_date : float
        Ending date for the search in MJD
    aperture : str
        Detector aperture to search for (e.g. ``NRCA1_FULL``)
    detector : str
        Detector name (e.g. ``MIRIMAGE``)
    filter_name : str
        Filter element (e.g. ``F200W``)
    pupil : str
        Pupil element (e.g. ``F323N``)
    grating : str
        Grating element (e.g. ``MIRROR``)
    readpattern : str
        Detector read out pattern (e.g. ``NISRAPID``)
    lamp : str
        Lamp name (e.g. ``LINE2``)

    Returns
    -------
    query_results : list
        List of dictionaries containing the query results
    """
    # If a single template name is input as a string, put it in a list
    if isinstance(templates, str):
        templates = [templates]

    # Make sure instrument is correct case
    instrument = JWST_INSTRUMENT_NAMES_MIXEDCASE[instrument.lower()]

    # Optional query filters keyed by the MAST field name; only the ones the
    # caller actually supplied are added to the query parameters below.
    optional_filters = {"detector": detector, "apername": aperture, "filter": filter_name,
                        "pupil": pupil, "grating": grating, "readpatt": readpattern, "lamp": lamp}

    # monitor_mast.instrument_inventory does not allow list inputs to
    # the added_filters input (or at least if you do provide a list, then
    # it becomes a nested list when it sends the query to MAST. The
    # nested list is subsequently ignored by MAST.)
    # So query once for each template, and combine outputs into a
    # single list.
    query_results = []
    for template_name in templates:

        # Create dictionary of parameters to add
        parameters = {"date_obs_mjd": {"min": start_date, "max": end_date},
                      "exp_type": template_name}
        parameters.update({key: value for key, value in optional_filters.items() if value is not None})

        query = monitor_mast.instrument_inventory(instrument, dataproduct=JWST_DATAPRODUCTS,
                                                  add_filters=parameters, return_data=True, caom=False)
        if len(query['data']) > 0:
            query_results.extend(query['data'])

    return query_results
def mast_query_miri(detector, aperture, templates, start_date, end_date):
"""Use ``astroquery`` to search MAST for data for given observation
templates over a given time range for MIRI. MIRI is different than
the other instruments in that (to find full frame flats and darks at
least) you need to use the detector name rather than the aperture
name. There is no full frame aperture name for the MRS detectors.
Parameters
----------
detector : str
Name of the detector to search for. One of ``MIRIMAGE``,
``MIRIFULONG``, ``MIRIFUSHORT``.
aperture : str
Aperture name on the detector (e.g. ``MIRIM_FULL``)
templates : str or list
Single, or list of, templates for the query (e.g. ``NRC_DARK``,
``MIR_FLATMRS``)
start_date : float
Starting date for the search in MJD
end_date : float
Ending date for the search in MJD
Returns
-------
query_results : list
List of dictionaries containing the query results
"""
# If a single template name is input as a string, put it in a list
if isinstance(templates, str):
templates = [templates]
instrument = 'MIRI'
# monitor_mast.instrument_inventory does not allow list inputs to
# the added_filters input (or at least if you do provide a list, then
# it becomes a nested list when it sends the query to MAST. The
# nested list is subsequently ignored by MAST.)
# So query once for each flat template, and combine outputs into a
# single list.
query_results = []
for template_name in templates:
# Create dictionary of parameters to add
if aperture.lower() != 'none':
parameters = {"date_obs_mjd": {"min": start_date, "max": end_date},
"detector": detector, "apername": aperture, "exp_type": template_name}
else:
parameters = {"date_obs_mjd": {"min": start_date, "max": end_date},
"detector": detector, "exp_type": template_name}
query = monitor_mast.instrument_inventory(instrument, dataproduct=JWST_DATAPRODUCTS,
add_filters=parameters, return_data=True, caom=False)
if len(query['data']) > 0:
query_results.extend(query['data'])
return query_results
| """Various utility functions for interacting with MAST
Authors
-------
- <NAME>
Use
---
This module can be imported as such:
>>> import mast_utils
results = mast_utils.mast_query('nircam', 'NRCA1_FULL', 'NRC_DARK', 53005.1, 53005.2)
"""
from jwql.jwql_monitors import monitor_mast
from jwql.utils.constants import JWST_DATAPRODUCTS, JWST_INSTRUMENT_NAMES_MIXEDCASE
def mast_query(instrument, templates, start_date, end_date, aperture=None, detector=None, filter_name=None,
pupil=None, grating=None, readpattern=None, lamp=None):
"""Use ``astroquery`` to search MAST for data for given observation
templates over a given time range
Parameters
----------
instrument : str
Instrument name (e.g. ``nircam``)
templates : str or list
Single, or list of, templates for the query (e.g. ``NRC_DARK``,
``MIR_FLATMRS``)
start_date : float
Starting date for the search in MJD
end_date : float
Ending date for the search in MJD
aperture : str
Detector aperture to search for (e.g. ``NRCA1_FULL``)
detector : str
Detector name (e.g. ``MIRIMAGE``)
filter_name : str
Fitler element (e.g. ``F200W``)
pupil : str
Pupil element (e.g. ``F323N``)
grating : str
Grating element (e.g. ``MIRROR``)
readpattern : str
Detector read out pattern (e.g. ``NISRAPID``)
lamp : str
Lamp name (e.g. ``LINE2``)
Returns
-------
query_results : list
List of dictionaries containing the query results
"""
# If a single template name is input as a string, put it in a list
if isinstance(templates, str):
templates = [templates]
# Make sure instrument is correct case
instrument = JWST_INSTRUMENT_NAMES_MIXEDCASE[instrument.lower()]
# monitor_mast.instrument_inventory does not allow list inputs to
# the added_filters input (or at least if you do provide a list, then
# it becomes a nested list when it sends the query to MAST. The
# nested list is subsequently ignored by MAST.)
# So query once for each flat template, and combine outputs into a
# single list.
query_results = []
for template_name in templates:
# Create dictionary of parameters to add
parameters = {"date_obs_mjd": {"min": start_date, "max": end_date},
"exp_type": template_name}
if detector is not None:
parameters["detector"] = detector
if aperture is not None:
parameters["apername"] = aperture
if filter_name is not None:
parameters["filter"] = filter_name
if pupil is not None:
parameters["pupil"] = pupil
if grating is not None:
parameters["grating"] = grating
if readpattern is not None:
parameters["readpatt"] = readpattern
if lamp is not None:
parameters["lamp"] = lamp
query = monitor_mast.instrument_inventory(instrument, dataproduct=JWST_DATAPRODUCTS,
add_filters=parameters, return_data=True, caom=False)
if len(query['data']) > 0:
query_results.extend(query['data'])
return query_results
def mast_query_miri(detector, aperture, templates, start_date, end_date):
"""Use ``astroquery`` to search MAST for data for given observation
templates over a given time range for MIRI. MIRI is different than
the other instruments in that (to find full frame flats and darks at
least) you need to use the detector name rather than the aperture
name. There is no full frame aperture name for the MRS detectors.
Parameters
----------
detector : str
Name of the detector to search for. One of ``MIRIMAGE``,
``MIRIFULONG``, ``MIRIFUSHORT``.
aperture : str
Aperture name on the detector (e.g. ``MIRIM_FULL``)
templates : str or list
Single, or list of, templates for the query (e.g. ``NRC_DARK``,
``MIR_FLATMRS``)
start_date : float
Starting date for the search in MJD
end_date : float
Ending date for the search in MJD
Returns
-------
query_results : list
List of dictionaries containing the query results
"""
# If a single template name is input as a string, put it in a list
if isinstance(templates, str):
templates = [templates]
instrument = 'MIRI'
# monitor_mast.instrument_inventory does not allow list inputs to
# the added_filters input (or at least if you do provide a list, then
# it becomes a nested list when it sends the query to MAST. The
# nested list is subsequently ignored by MAST.)
# So query once for each flat template, and combine outputs into a
# single list.
query_results = []
for template_name in templates:
# Create dictionary of parameters to add
if aperture.lower() != 'none':
parameters = {"date_obs_mjd": {"min": start_date, "max": end_date},
"detector": detector, "apername": aperture, "exp_type": template_name}
else:
parameters = {"date_obs_mjd": {"min": start_date, "max": end_date},
"detector": detector, "exp_type": template_name}
query = monitor_mast.instrument_inventory(instrument, dataproduct=JWST_DATAPRODUCTS,
add_filters=parameters, return_data=True, caom=False)
if len(query['data']) > 0:
query_results.extend(query['data'])
return query_results
| en | 0.624465 | Various utility functions for interacting with MAST Authors ------- - <NAME> Use --- This module can be imported as such: >>> import mast_utils results = mast_utils.mast_query('nircam', 'NRCA1_FULL', 'NRC_DARK', 53005.1, 53005.2) Use ``astroquery`` to search MAST for data for given observation templates over a given time range Parameters ---------- instrument : str Instrument name (e.g. ``nircam``) templates : str or list Single, or list of, templates for the query (e.g. ``NRC_DARK``, ``MIR_FLATMRS``) start_date : float Starting date for the search in MJD end_date : float Ending date for the search in MJD aperture : str Detector aperture to search for (e.g. ``NRCA1_FULL``) detector : str Detector name (e.g. ``MIRIMAGE``) filter_name : str Fitler element (e.g. ``F200W``) pupil : str Pupil element (e.g. ``F323N``) grating : str Grating element (e.g. ``MIRROR``) readpattern : str Detector read out pattern (e.g. ``NISRAPID``) lamp : str Lamp name (e.g. ``LINE2``) Returns ------- query_results : list List of dictionaries containing the query results # If a single template name is input as a string, put it in a list # Make sure instrument is correct case # monitor_mast.instrument_inventory does not allow list inputs to # the added_filters input (or at least if you do provide a list, then # it becomes a nested list when it sends the query to MAST. The # nested list is subsequently ignored by MAST.) # So query once for each flat template, and combine outputs into a # single list. # Create dictionary of parameters to add Use ``astroquery`` to search MAST for data for given observation templates over a given time range for MIRI. MIRI is different than the other instruments in that (to find full frame flats and darks at least) you need to use the detector name rather than the aperture name. There is no full frame aperture name for the MRS detectors. Parameters ---------- detector : str Name of the detector to search for. 
One of ``MIRIMAGE``, ``MIRIFULONG``, ``MIRIFUSHORT``. aperture : str Aperture name on the detector (e.g. ``MIRIM_FULL``) templates : str or list Single, or list of, templates for the query (e.g. ``NRC_DARK``, ``MIR_FLATMRS``) start_date : float Starting date for the search in MJD end_date : float Ending date for the search in MJD Returns ------- query_results : list List of dictionaries containing the query results # If a single template name is input as a string, put it in a list # monitor_mast.instrument_inventory does not allow list inputs to # the added_filters input (or at least if you do provide a list, then # it becomes a nested list when it sends the query to MAST. The # nested list is subsequently ignored by MAST.) # So query once for each flat template, and combine outputs into a # single list. # Create dictionary of parameters to add | 2.715309 | 3 |
configs/nusc/pp/nusc_centerpoint_pp_02voxel_two_pfn_test.py | APVGITHUB/CenterPoint | 0 | 6624459 | import itertools
import logging
from importlib_metadata import version
from det3d.utils.config_tool import get_downsample_factor
tasks = [
dict(num_class=1, class_names=["car"]),
dict(num_class=2, class_names=["truck", "construction_vehicle"]),
dict(num_class=2, class_names=["bus", "trailer"]),
dict(num_class=1, class_names=["barrier"]),
dict(num_class=2, class_names=["motorcycle", "bicycle"]),
dict(num_class=2, class_names=["pedestrian", "traffic_cone"]),
]
class_names = list(itertools.chain(*[t["class_names"] for t in tasks]))
# training and testing settings
target_assigner = dict(
tasks=tasks,
)
# model settings
model = dict(
type="PointPillars",
pretrained=None,
reader=dict(
type="PillarFeatureNet",
num_filters=[64, 64],
num_input_features=5,
with_distance=False,
voxel_size=(0.2, 0.2, 8),
pc_range=(-51.2, -51.2, -5.0, 51.2, 51.2, 3.0),
),
backbone=dict(type="PointPillarsScatter", ds_factor=1),
neck=dict(
type="RPN",
layer_nums=[3, 5, 5],
ds_layer_strides=[2, 2, 2],
ds_num_filters=[64, 128, 256],
us_layer_strides=[0.5, 1, 2],
us_num_filters=[128, 128, 128],
num_input_features=64,
logger=logging.getLogger("RPN"),
),
bbox_head=dict(
# type='RPNHead',
type="CenterHead",
in_channels=sum([128, 128, 128]),
tasks=tasks,
dataset='nuscenes',
weight=0.25,
code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2, 1.0, 1.0],
common_heads={'reg': (2, 2), 'height': (1, 2), 'dim':(3, 2), 'rot':(2, 2), 'vel': (2, 2)}, # (output_channel, num_conv)
),
)
assigner = dict(
target_assigner=target_assigner,
out_size_factor=get_downsample_factor(model),
gaussian_overlap=0.1,
max_objs=500,
min_radius=2,
)
train_cfg = dict(assigner=assigner)
test_cfg = dict(
post_center_limit_range=[-61.2, -61.2, -10.0, 61.2, 61.2, 10.0],
max_per_img=500,
nms=dict(
nms_pre_max_size=1000,
nms_post_max_size=83,
nms_iou_threshold=0.2,
),
score_threshold=0.1,
pc_range=[-51.2, -51.2],
out_size_factor=get_downsample_factor(model),
voxel_size=[0.2, 0.2]
)
# dataset settings
dataset_type = "NuScenesDataset"
nsweeps = 10
data_root = "data/nuScenes"
data_root_test = "data/nuScenes/v1.0-test"
db_sampler = dict(
type="GT-AUG",
enable=False,
db_info_path="data/nuScenes/dbinfos_train_10sweeps_withvelo.pkl",
sample_groups=[
dict(car=2),
dict(truck=3),
dict(construction_vehicle=7),
dict(bus=4),
dict(trailer=6),
dict(barrier=2),
dict(motorcycle=6),
dict(bicycle=6),
dict(pedestrian=2),
dict(traffic_cone=2),
],
db_prep_steps=[
dict(
filter_by_min_num_points=dict(
car=5,
truck=5,
bus=5,
trailer=5,
construction_vehicle=5,
traffic_cone=5,
barrier=5,
motorcycle=5,
bicycle=5,
pedestrian=5,
)
),
dict(filter_by_difficulty=[-1],),
],
global_random_rotation_range_per_object=[0, 0],
rate=1.0,
)
train_preprocessor = dict(
mode="train",
shuffle_points=True,
global_rot_noise=[-0.3925, 0.3925],
global_scale_noise=[0.95, 1.05],
db_sampler=db_sampler,
class_names=class_names,
)
val_preprocessor = dict(
mode="val",
shuffle_points=False,
)
test_preprocessor = dict(
mode="test",
shuffle_points=False,
)
voxel_generator = dict(
range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0],
voxel_size=[0.2, 0.2, 8],
max_points_in_voxel=20,
max_voxel_num=[30000, 60000],
)
train_pipeline = [
dict(type="LoadPointCloudFromFile", dataset=dataset_type),
dict(type="LoadPointCloudAnnotations", with_bbox=True),
dict(type="Preprocess", cfg=train_preprocessor),
dict(type="Voxelization", cfg=voxel_generator),
dict(type="AssignLabel", cfg=train_cfg["assigner"]),
dict(type="Reformat"),
]
test_pipeline = [
dict(type="LoadPointCloudFromFile", dataset=dataset_type),
dict(type="LoadPointCloudAnnotations", with_bbox=True),
dict(type="Preprocess", cfg=test_preprocessor),
dict(type="Voxelization", cfg=voxel_generator),
dict(type="AssignLabel", cfg=train_cfg["assigner"]),
dict(type="Reformat"),
]
train_anno = "data/nuScenes/infos_train_10sweeps_withvelo_filter_True.pkl"
val_anno = "data/nuScenes/infos_val_10sweeps_withvelo_filter_True.pkl"
test_anno = "data/nuScenes/v1.0-test/infos_test_10sweeps_withvelo.pkl"
data = dict(
samples_per_gpu=2,
workers_per_gpu=4,
train=dict(
type=dataset_type,
root_path=data_root,
info_path=train_anno,
ann_file=train_anno,
nsweeps=nsweeps,
class_names=class_names,
pipeline=train_pipeline,
),
val=dict(
type=dataset_type,
root_path=data_root,
info_path=val_anno,
test_mode=True,
ann_file=val_anno,
nsweeps=nsweeps,
class_names=class_names,
pipeline=test_pipeline,
),
test=dict(
type=dataset_type,
root_path=data_root_test,
info_path=test_anno,
ann_file=test_anno,
test_mode=True,
nsweeps=nsweeps,
class_names=class_names,
pipeline=test_pipeline,
version='v1.0-test'
),
)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# optimizer
optimizer = dict(
type="adam", amsgrad=0.0, wd=0.01, fixed_wd=True, moving_average=False,
)
lr_config = dict(
type="one_cycle", lr_max=0.001, moms=[0.95, 0.85], div_factor=10.0, pct_start=0.4,
)
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=5,
hooks=[
dict(type="TextLoggerHook"),
# dict(type='TensorboardLoggerHook')
],
)
# yapf:enable
# runtime settings
total_epochs = 30
device_ids = range(8)
dist_params = dict(backend="nccl", init_method="env://")
log_level = "INFO"
work_dir = './work_dirs/{}/'.format(__file__[__file__.rfind('/') + 1:-3])
load_from = None
resume_from = None
workflow = [('train', 1)]
| import itertools
import logging
from importlib_metadata import version
from det3d.utils.config_tool import get_downsample_factor
tasks = [
dict(num_class=1, class_names=["car"]),
dict(num_class=2, class_names=["truck", "construction_vehicle"]),
dict(num_class=2, class_names=["bus", "trailer"]),
dict(num_class=1, class_names=["barrier"]),
dict(num_class=2, class_names=["motorcycle", "bicycle"]),
dict(num_class=2, class_names=["pedestrian", "traffic_cone"]),
]
class_names = list(itertools.chain(*[t["class_names"] for t in tasks]))
# training and testing settings
target_assigner = dict(
tasks=tasks,
)
# model settings
model = dict(
type="PointPillars",
pretrained=None,
reader=dict(
type="PillarFeatureNet",
num_filters=[64, 64],
num_input_features=5,
with_distance=False,
voxel_size=(0.2, 0.2, 8),
pc_range=(-51.2, -51.2, -5.0, 51.2, 51.2, 3.0),
),
backbone=dict(type="PointPillarsScatter", ds_factor=1),
neck=dict(
type="RPN",
layer_nums=[3, 5, 5],
ds_layer_strides=[2, 2, 2],
ds_num_filters=[64, 128, 256],
us_layer_strides=[0.5, 1, 2],
us_num_filters=[128, 128, 128],
num_input_features=64,
logger=logging.getLogger("RPN"),
),
bbox_head=dict(
# type='RPNHead',
type="CenterHead",
in_channels=sum([128, 128, 128]),
tasks=tasks,
dataset='nuscenes',
weight=0.25,
code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2, 1.0, 1.0],
common_heads={'reg': (2, 2), 'height': (1, 2), 'dim':(3, 2), 'rot':(2, 2), 'vel': (2, 2)}, # (output_channel, num_conv)
),
)
assigner = dict(
target_assigner=target_assigner,
out_size_factor=get_downsample_factor(model),
gaussian_overlap=0.1,
max_objs=500,
min_radius=2,
)
train_cfg = dict(assigner=assigner)
test_cfg = dict(
post_center_limit_range=[-61.2, -61.2, -10.0, 61.2, 61.2, 10.0],
max_per_img=500,
nms=dict(
nms_pre_max_size=1000,
nms_post_max_size=83,
nms_iou_threshold=0.2,
),
score_threshold=0.1,
pc_range=[-51.2, -51.2],
out_size_factor=get_downsample_factor(model),
voxel_size=[0.2, 0.2]
)
# dataset settings
dataset_type = "NuScenesDataset"
nsweeps = 10
data_root = "data/nuScenes"
data_root_test = "data/nuScenes/v1.0-test"
db_sampler = dict(
type="GT-AUG",
enable=False,
db_info_path="data/nuScenes/dbinfos_train_10sweeps_withvelo.pkl",
sample_groups=[
dict(car=2),
dict(truck=3),
dict(construction_vehicle=7),
dict(bus=4),
dict(trailer=6),
dict(barrier=2),
dict(motorcycle=6),
dict(bicycle=6),
dict(pedestrian=2),
dict(traffic_cone=2),
],
db_prep_steps=[
dict(
filter_by_min_num_points=dict(
car=5,
truck=5,
bus=5,
trailer=5,
construction_vehicle=5,
traffic_cone=5,
barrier=5,
motorcycle=5,
bicycle=5,
pedestrian=5,
)
),
dict(filter_by_difficulty=[-1],),
],
global_random_rotation_range_per_object=[0, 0],
rate=1.0,
)
train_preprocessor = dict(
mode="train",
shuffle_points=True,
global_rot_noise=[-0.3925, 0.3925],
global_scale_noise=[0.95, 1.05],
db_sampler=db_sampler,
class_names=class_names,
)
val_preprocessor = dict(
mode="val",
shuffle_points=False,
)
test_preprocessor = dict(
mode="test",
shuffle_points=False,
)
voxel_generator = dict(
range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0],
voxel_size=[0.2, 0.2, 8],
max_points_in_voxel=20,
max_voxel_num=[30000, 60000],
)
train_pipeline = [
dict(type="LoadPointCloudFromFile", dataset=dataset_type),
dict(type="LoadPointCloudAnnotations", with_bbox=True),
dict(type="Preprocess", cfg=train_preprocessor),
dict(type="Voxelization", cfg=voxel_generator),
dict(type="AssignLabel", cfg=train_cfg["assigner"]),
dict(type="Reformat"),
]
test_pipeline = [
dict(type="LoadPointCloudFromFile", dataset=dataset_type),
dict(type="LoadPointCloudAnnotations", with_bbox=True),
dict(type="Preprocess", cfg=test_preprocessor),
dict(type="Voxelization", cfg=voxel_generator),
dict(type="AssignLabel", cfg=train_cfg["assigner"]),
dict(type="Reformat"),
]
train_anno = "data/nuScenes/infos_train_10sweeps_withvelo_filter_True.pkl"
val_anno = "data/nuScenes/infos_val_10sweeps_withvelo_filter_True.pkl"
test_anno = "data/nuScenes/v1.0-test/infos_test_10sweeps_withvelo.pkl"
data = dict(
samples_per_gpu=2,
workers_per_gpu=4,
train=dict(
type=dataset_type,
root_path=data_root,
info_path=train_anno,
ann_file=train_anno,
nsweeps=nsweeps,
class_names=class_names,
pipeline=train_pipeline,
),
val=dict(
type=dataset_type,
root_path=data_root,
info_path=val_anno,
test_mode=True,
ann_file=val_anno,
nsweeps=nsweeps,
class_names=class_names,
pipeline=test_pipeline,
),
test=dict(
type=dataset_type,
root_path=data_root_test,
info_path=test_anno,
ann_file=test_anno,
test_mode=True,
nsweeps=nsweeps,
class_names=class_names,
pipeline=test_pipeline,
version='v1.0-test'
),
)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# optimizer
optimizer = dict(
type="adam", amsgrad=0.0, wd=0.01, fixed_wd=True, moving_average=False,
)
lr_config = dict(
type="one_cycle", lr_max=0.001, moms=[0.95, 0.85], div_factor=10.0, pct_start=0.4,
)
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=5,
hooks=[
dict(type="TextLoggerHook"),
# dict(type='TensorboardLoggerHook')
],
)
# yapf:enable
# runtime settings
total_epochs = 30
device_ids = range(8)
dist_params = dict(backend="nccl", init_method="env://")
log_level = "INFO"
work_dir = './work_dirs/{}/'.format(__file__[__file__.rfind('/') + 1:-3])
load_from = None
resume_from = None
workflow = [('train', 1)]
| en | 0.566061 | # training and testing settings # model settings # type='RPNHead', # (output_channel, num_conv) # dataset settings # optimizer # yapf:disable # dict(type='TensorboardLoggerHook') # yapf:enable # runtime settings | 1.752591 | 2 |
bin/funannotate-test.py | Ram-Nagar/funannotate | 1 | 6624460 | <gh_stars>1-10
#!/usr/bin/env python
import sys
import os
import subprocess
import urllib2
import socket
import argparse
import shutil
#setup menu with argparse
class MyFormatter(argparse.ArgumentDefaultsHelpFormatter):
def __init__(self,prog):
super(MyFormatter,self).__init__(prog,max_help_position=48)
parser=argparse.ArgumentParser(prog='funannotate-test.py',
description='''Script to download and then test funannotate installation''',
epilog="""Written by <NAME> (2016-2018) <EMAIL>""",
formatter_class = MyFormatter)
parser.add_argument('-t','--tests', required=True, nargs='+',
choices=['all', 'clean', 'mask', 'predict', 'annotate', 'busco', 'rna-seq', 'compare'],
help='select which tests to run')
parser.add_argument('--cpus', default=2, type=int, help='Number of CPUs to use')
args=parser.parse_args()
download_links = {'mask': 'https://osf.io/hbryz/download?version=1',
'clean': 'https://osf.io/8pjbe/download?version=1',
'predict': 'https://osf.io/te2pf/download?version=1',
'busco': 'https://osf.io/kyrd9/download?version=1',
'rna-seq': 'https://osf.io/t7j83/download?version=1',
'annotate': 'https://osf.io/97pyn/download?version=1',
'compare': 'https://osf.io/7s9xh/download?version=1'}
def checkFile(input):
def _getSize(filename):
st = os.stat(filename)
return st.st_size
if os.path.isfile(input):
filesize = _getSize(input)
if int(filesize) < 1:
return False
else:
return True
elif os.path.islink(input):
return True
else:
return False
def countfasta(input):
count = 0
with open(input, 'rU') as f:
for line in f:
if line.startswith (">"):
count += 1
return count
def countGFFgenes(input):
count = 0
with open(input, 'rU') as f:
for line in f:
if "\tgene\t" in line:
count += 1
return count
def runCMD(cmd, dir):
print('CMD: {:}'.format(' '.join(cmd)))
print("#########################################################")
subprocess.call(cmd, cwd=dir)
def download(url, name):
file_name = name
try:
u = urllib2.urlopen(url)
f = open(file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print("Downloading: {0} Bytes: {1}".format(url, file_size))
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
p = float(file_size_dl) / file_size
status = r"{0} [{1:.2%}]".format(file_size_dl, p)
status = status + chr(8)*(len(status)+1)
sys.stdout.write(status)
sys.stdout.flush()
f.close()
except socket.error as e:
if e.errno != errno.ECONNRESET:
raise
pass
def runMaskTest():
print("#########################################################")
print('Running `funannotate mask` unit testing: RepeatModeler --> RepeatMasker')
tmpdir = 'test-mask_'+pid
os.makedirs(tmpdir)
inputFasta = 'test.fa'
if not os.path.isfile(inputFasta):
if not os.path.isfile('test-mask.tar.gz'):
download(download_links.get('mask'), 'test-mask.tar.gz')
subprocess.call(['tar', '-zxf', 'test-mask.tar.gz'])
shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
runCMD(['funannotate', 'mask', '-i', inputFasta, '-o', 'test.masked.fa', '--cpus', str(args.cpus)], tmpdir)
#check that everything worked
assert checkFile(os.path.join(tmpdir, 'test.masked.fa'))
library = False
for file in os.listdir(tmpdir):
if file.startswith('repeatmodeler-library'):
assert checkFile(os.path.join(tmpdir, file))
library = True
print("#########################################################")
if library:
print('SUCCESS: `funannotate mask` test complete.')
shutil.rmtree(tmpdir)
else:
print('ERROR: `funannotate mask` test failed, RepeatModeler or RepeatMasker not properly installed.')
print("#########################################################\n")
def runCleanTest():
print("#########################################################")
print('Running `funannotate clean` unit testing: minimap2 mediated assembly duplications')
tmpdir = 'test-clean_'+pid
os.makedirs(tmpdir)
inputFasta = 'test.clean.fa'
if not os.path.isfile(inputFasta):
if not os.path.isfile('test-clean.tar.gz'):
download(download_links.get('clean'), 'test-clean.tar.gz')
subprocess.call(['tar', '-zxf', 'test-clean.tar.gz'])
shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
assert countfasta(os.path.join(tmpdir, inputFasta)) == 6
#run exhaustive
runCMD(['funannotate', 'clean', '-i', inputFasta, '-o', 'test.exhaustive.fa', '--exhaustive'], tmpdir)
print("#########################################################")
try:
assert countfasta(os.path.join(tmpdir, 'test.exhaustive.fa')) == 3
print('SUCCESS: `funannotate clean` test complete.')
shutil.rmtree(tmpdir)
except AssertionError:
print('ERROR: `funannotate clean` test failed.')
print("#########################################################\n")
def runPredictTest():
print("#########################################################")
print('Running `funannotate predict` unit testing')
tmpdir = 'test-predict_'+pid
os.makedirs(tmpdir)
inputFasta = 'test.softmasked.fa'
protEvidence = 'protein.evidence.fasta'
if not checkFile(inputFasta) or not checkFile(protEvidence):
if not os.path.isfile('test-predict.tar.gz'):
download(download_links.get('predict'), 'test-predict.tar.gz')
subprocess.call(['tar', '-zxf', 'test-predict.tar.gz'])
shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
shutil.copyfile(protEvidence, os.path.join(tmpdir, protEvidence))
#run predict
runCMD(['funannotate', 'predict', '-i', inputFasta,
'--protein_evidence', protEvidence,
'-o', 'annotate', '--augustus_species', 'yeast',
'--cpus', str(args.cpus), '--species', "Awesome testicus"], tmpdir)
print("#########################################################")
#check results
try:
assert 1500 <= countGFFgenes(os.path.join(tmpdir, 'annotate', 'predict_results', 'Awesome_testicus.gff3')) <= 1700
print('SUCCESS: `funannotate predict` test complete.')
shutil.rmtree(tmpdir)
except AssertionError:
print('ERROR: `funannotate predict` test failed - check logfiles')
print("#########################################################\n")
def runBuscoTest():
print("#########################################################")
print('Running `funannotate predict` BUSCO-mediated training unit testing')
#need to delete any pre-existing Augustus training data
try:
AUGUSTUS = os.environ["AUGUSTUS_CONFIG_PATH"]
except KeyError:
lib.log.error("$AUGUSTUS_CONFIG_PATH environmental variable not found, set to continue.")
return
if os.path.isdir(os.path.join(AUGUSTUS, 'species', 'awesome_busco')):
shutil.rmtree(os.path.join(AUGUSTUS, 'species', 'awesome_busco'))
tmpdir = 'test-busco_'+pid
os.makedirs(tmpdir)
inputFasta = 'test.softmasked.fa'
protEvidence = 'protein.evidence.fasta'
if not checkFile(inputFasta) or not checkFile(protEvidence):
if not os.path.isfile('test-busco.tar.gz'):
download(download_links.get('predict'), 'test-busco.tar.gz')
subprocess.call(['tar', '-zxf', 'test-busco.tar.gz'])
shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
shutil.copyfile(protEvidence, os.path.join(tmpdir, protEvidence))
#run predict
runCMD(['funannotate', 'predict', '-i', inputFasta,
'--protein_evidence', protEvidence,
'-o', 'annotate', '--cpus', str(args.cpus),
'--species', "Awesome busco"], tmpdir)
print("#########################################################")
#check results
try:
assert 1500 <= countGFFgenes(os.path.join(tmpdir, 'annotate', 'predict_results', 'Awesome_busco.gff3')) <= 1700
print('SUCCESS: `funannotate predict` BUSCO-mediated training test complete.')
shutil.rmtree(tmpdir)
except AssertionError:
print('ERROR: `funannotate predict` BUSCO-mediated training test failed - check logfiles')
print("#########################################################\n")
def runAnnotateTest():
print("#########################################################")
print('Running `funannotate annotate` unit testing')
tmpdir = 'test-annotate_'+pid
os.makedirs(tmpdir)
input = 'Genome_one.gbk'
iprscan = 'genome_one.iprscan.xml'
emapper = 'genome_one.emapper.annotations'
if not checkFile(input) or not checkFile(iprscan) or not checkFile(emapper):
if not os.path.isfile('test-annotate.tar.gz'):
download(download_links.get('annotate'), 'test-annotate.tar.gz')
subprocess.call(['tar', '-zxf', 'test-annotate.tar.gz'])
shutil.copyfile(input, os.path.join(tmpdir, input))
shutil.copyfile(iprscan, os.path.join(tmpdir, iprscan))
shutil.copyfile(emapper, os.path.join(tmpdir, emapper))
#run predict
runCMD(['funannotate', 'annotate', '--genbank', input,
'-o', 'annotate', '--cpus', str(args.cpus),
'--iprscan', iprscan,
'--eggnog', emapper], tmpdir)
print("#########################################################")
#check results
try:
assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.gbk'))
assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.sqn'))
assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.agp'))
assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.tbl'))
assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.annotations.txt'))
print('SUCCESS: `funannotate annotate` test complete.')
shutil.rmtree(tmpdir)
except AssertionError:
print('ERROR: `funannotate annotate` test failed - check logfiles')
print("#########################################################\n")
def runCompareTest():
    """Unit test for `funannotate compare` on three genomes, including
    dN/dS estimation with an outgroup; checks the report files exist."""
    print("#########################################################")
    print('Running `funannotate compare` unit testing')
    tmpdir = 'test-compare_'+pid
    os.makedirs(tmpdir)
    input1 = 'Genome_one.gbk'
    input2 = 'Genome_two.gbk'
    input3 = 'Genome_three.gbk'
    # Fetch and unpack the test data only if any piece is missing.
    if not checkFile(input1) or not checkFile(input2) or not checkFile(input3):
        if not os.path.isfile('test-compare.tar.gz'):
            download(download_links.get('compare'), 'test-compare.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-compare.tar.gz'])
    shutil.copyfile(input1, os.path.join(tmpdir, input1))
    shutil.copyfile(input2, os.path.join(tmpdir, input2))
    shutil.copyfile(input3, os.path.join(tmpdir, input3))
    # run compare
    runCMD(['funannotate', 'compare',
            '-i', input1, input2, input3,
            '-o', 'compare', '--cpus', str(args.cpus),
            '--run_dnds', 'estimate', '--outgroup', 'botrytis_cinerea.dikarya'], tmpdir)
    print("#########################################################")
    # check results: all report artifacts must exist and be non-empty
    try:
        assert checkFile(os.path.join(tmpdir, 'compare', 'index.html'))
        assert checkFile(os.path.join(tmpdir, 'compare', 'phylogeny.html'))
        assert checkFile(os.path.join(tmpdir, 'compare.tar.gz'))
        print('SUCCESS: `funannotate compare` test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: `funannotate compare` test failed - check logfiles')
    print("#########################################################\n")
def runRNAseqTest():
    """Exercise the funannotate RNA-seq pipeline: train -> predict -> update.

    Downloads the test dataset if needed, runs all three commands in a
    scratch directory, and checks the final gene count falls inside the
    expected window (1630-1830).
    """
    print("#########################################################")
    print('Running funannotate RNA-seq training/prediction unit testing')
    # Augustus keeps per-species training files; wipe any stale data for
    # this test species so training starts clean.
    try:
        AUGUSTUS = os.environ["AUGUSTUS_CONFIG_PATH"]
    except KeyError:
        # BUG FIX: the original called lib.log.error(), but `lib` is never
        # imported in this script, so the handler itself raised NameError.
        print("ERROR: $AUGUSTUS_CONFIG_PATH environmental variable not found, set to continue.")
        return
    if os.path.isdir(os.path.join(AUGUSTUS, 'species', 'awesome_rna')):
        shutil.rmtree(os.path.join(AUGUSTUS, 'species', 'awesome_rna'))
    tmpdir = 'test-rna_seq_'+pid
    os.makedirs(tmpdir)
    inputFasta = 'test.softmasked.fa'
    protEvidence = 'protein.evidence.fasta'
    illumina = 'rna-seq.illumina.fastq.gz'
    nanopore = 'rna-seq.nanopore.fastq.gz'
    # Fetch and unpack the test data only if any piece is missing.
    if not checkFile(inputFasta) or not checkFile(protEvidence) or not checkFile(illumina) or not checkFile(nanopore):
        if not os.path.isfile('test-rna_seq.tar.gz'):
            download(download_links.get('rna-seq'), 'test-rna_seq.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-rna_seq.tar.gz'])
    for f in [inputFasta, protEvidence, illumina, nanopore]:
        shutil.copyfile(f, os.path.join(tmpdir, f))
    # Step 1: train gene predictors from the RNA-seq evidence.
    runCMD(['funannotate', 'train', '-i', inputFasta,
            '--single', illumina, '--nanopore_mrna', nanopore,
            '-o', 'rna-seq', '--cpus', str(args.cpus), '--jaccard_clip',
            '--species', "Awesome rna"], tmpdir)
    # Step 2: predict genes using the freshly trained data.
    print("#########################################################")
    print('Now running `funannotate predict` using RNA-seq training data')
    runCMD(['funannotate', 'predict', '-i', inputFasta,
            '--protein_evidence', protEvidence,
            '-o', 'rna-seq', '--cpus', str(args.cpus),
            '--species', "Awesome rna"], tmpdir)
    # Step 3: PASA-mediated UTR addition and alternate transcripts.
    print("#########################################################")
    print('Now running `funannotate update` to run PASA-mediated UTR addition and multiple transcripts')
    runCMD(['funannotate', 'update', '-i', 'rna-seq',
            '--cpus', str(args.cpus)], tmpdir)
    print("#########################################################")
    # Check results: the gene count must fall inside the expected window.
    try:
        assert 1630 <= countGFFgenes(os.path.join(tmpdir, 'rna-seq', 'update_results', 'Awesome_rna.gff3')) <= 1830
        print('SUCCESS: funannotate RNA-seq training/prediction test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: funannotate RNA-seq training/prediction test failed - check logfiles')
    print("#########################################################\n")
# Unique suffix for scratch directories so concurrent runs don't collide.
pid = str(os.getpid())
# Dispatch: run each requested test suite (or all of them) in a fixed order.
if 'clean' in args.tests or 'all' in args.tests:
    runCleanTest()
if 'mask' in args.tests or 'all' in args.tests:
    runMaskTest()
if 'predict' in args.tests or 'all' in args.tests:
    runPredictTest()
if 'busco' in args.tests or 'all' in args.tests:
    runBuscoTest()
if 'rna-seq' in args.tests or 'all' in args.tests:
    runRNAseqTest()
if 'annotate' in args.tests or 'all' in args.tests:
    runAnnotateTest()
if 'compare' in args.tests or 'all' in args.tests:
    runCompareTest()
import sys
import os
import subprocess
import urllib2
import socket
import argparse
import shutil
#setup menu with argparse
class MyFormatter(argparse.ArgumentDefaultsHelpFormatter):
    """Help formatter that widens the option column so long flags fit on one line."""
    def __init__(self, prog):
        # max_help_position=48 keeps option strings and help text aligned.
        super(MyFormatter, self).__init__(prog, max_help_position=48)
# CLI definition: which test suites to run and how many CPUs to give them.
parser=argparse.ArgumentParser(prog='funannotate-test.py',
    description='''Script to download and then test funannotate installation''',
    epilog="""Written by <NAME> (2016-2018) <EMAIL>""",
    formatter_class = MyFormatter)
parser.add_argument('-t','--tests', required=True, nargs='+',
    choices=['all', 'clean', 'mask', 'predict', 'annotate', 'busco', 'rna-seq', 'compare'],
    help='select which tests to run')
parser.add_argument('--cpus', default=2, type=int, help='Number of CPUs to use')
args=parser.parse_args()
# OSF download URLs for each test dataset, keyed by test name.
download_links = {'mask': 'https://osf.io/hbryz/download?version=1',
                  'clean': 'https://osf.io/8pjbe/download?version=1',
                  'predict': 'https://osf.io/te2pf/download?version=1',
                  'busco': 'https://osf.io/kyrd9/download?version=1',
                  'rna-seq': 'https://osf.io/t7j83/download?version=1',
                  'annotate': 'https://osf.io/97pyn/download?version=1',
                  'compare': 'https://osf.io/7s9xh/download?version=1'}
def checkFile(input):
    """Return True when *input* is a non-empty regular file or a symlink."""
    if os.path.isfile(input):
        # A regular file only counts when it actually has content.
        return int(os.stat(input).st_size) >= 1
    # Symlinks are accepted as-is; anything else is treated as missing.
    return os.path.islink(input)
def countfasta(input):
    """Return the number of FASTA records (lines starting with '>') in *input*."""
    count = 0
    # BUG FIX: 'rU' mode is deprecated and was removed in Python 3.11;
    # plain 'r' already gives universal-newline handling. Use `with` so
    # the handle is always closed.
    with open(input, 'r') as f:
        for line in f:
            if line.startswith(">"):
                count += 1
    return count
def countGFFgenes(input):
    """Return the number of gene features (lines with a tab-delimited
    'gene' type column) in GFF file *input*."""
    count = 0
    # BUG FIX: 'rU' mode is deprecated and was removed in Python 3.11;
    # plain 'r' already gives universal-newline handling. Use `with` so
    # the handle is always closed.
    with open(input, 'r') as f:
        for line in f:
            if "\tgene\t" in line:
                count += 1
    return count
def runCMD(cmd, dir):
    """Echo *cmd* to stdout, then execute it with *dir* as the working directory."""
    rendered = ' '.join(cmd)
    print('CMD: {:}'.format(rendered))
    print("#########################################################")
    subprocess.call(cmd, cwd=dir)
def download(url, name):
    """Download *url* into local file *name*, printing a progress meter.

    Connection resets from the remote end are swallowed (best effort);
    any other socket error is re-raised.
    """
    import errno  # BUG FIX: errno was referenced below but never imported
    file_name = name
    try:
        u = urllib2.urlopen(url)
        f = open(file_name, 'wb')
        try:
            meta = u.info()
            file_size = int(meta.getheaders("Content-Length")[0])
            print("Downloading: {0} Bytes: {1}".format(url, file_size))
            file_size_dl = 0
            block_sz = 8192
            while True:
                buffer = u.read(block_sz)
                if not buffer:
                    break
                file_size_dl += len(buffer)
                f.write(buffer)
                # In-place progress line: backspaces rewind the cursor.
                p = float(file_size_dl) / file_size
                status = r"{0}  [{1:.2%}]".format(file_size_dl, p)
                status = status + chr(8)*(len(status)+1)
                sys.stdout.write(status)
                sys.stdout.flush()
        finally:
            # BUG FIX: close the output file even if the transfer fails
            # part-way (the original leaked the handle on error).
            f.close()
    except socket.error as e:
        if e.errno != errno.ECONNRESET:
            raise
        pass
def runMaskTest():
    """Unit test for `funannotate mask` (RepeatModeler --> RepeatMasker)."""
    print("#########################################################")
    print('Running `funannotate mask` unit testing: RepeatModeler --> RepeatMasker')
    tmpdir = 'test-mask_'+pid
    os.makedirs(tmpdir)
    inputFasta = 'test.fa'
    # Fetch and unpack the test data only if it is not already present.
    if not os.path.isfile(inputFasta):
        if not os.path.isfile('test-mask.tar.gz'):
            download(download_links.get('mask'), 'test-mask.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-mask.tar.gz'])
    shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
    runCMD(['funannotate', 'mask', '-i', inputFasta, '-o', 'test.masked.fa', '--cpus', str(args.cpus)], tmpdir)
    # check that everything worked
    assert checkFile(os.path.join(tmpdir, 'test.masked.fa'))
    # A repeatmodeler-library file is only written when RepeatModeler ran,
    # so its presence is the success signal for this test.
    library = False
    for file in os.listdir(tmpdir):
        if file.startswith('repeatmodeler-library'):
            assert checkFile(os.path.join(tmpdir, file))
            library = True
    print("#########################################################")
    if library:
        print('SUCCESS: `funannotate mask` test complete.')
        shutil.rmtree(tmpdir)
    else:
        print('ERROR: `funannotate mask` test failed, RepeatModeler or RepeatMasker not properly installed.')
    print("#########################################################\n")
def runCleanTest():
    """Unit test for `funannotate clean` (minimap2-mediated duplicate
    contig removal): 6 input contigs should collapse to 3 in exhaustive mode."""
    print("#########################################################")
    print('Running `funannotate clean` unit testing: minimap2 mediated assembly duplications')
    tmpdir = 'test-clean_'+pid
    os.makedirs(tmpdir)
    inputFasta = 'test.clean.fa'
    # Fetch and unpack the test data only if it is not already present.
    if not os.path.isfile(inputFasta):
        if not os.path.isfile('test-clean.tar.gz'):
            download(download_links.get('clean'), 'test-clean.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-clean.tar.gz'])
    shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
    # Sanity check: the input assembly must contain exactly 6 contigs.
    assert countfasta(os.path.join(tmpdir, inputFasta)) == 6
    # run exhaustive
    runCMD(['funannotate', 'clean', '-i', inputFasta, '-o', 'test.exhaustive.fa', '--exhaustive'], tmpdir)
    print("#########################################################")
    try:
        assert countfasta(os.path.join(tmpdir, 'test.exhaustive.fa')) == 3
        print('SUCCESS: `funannotate clean` test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: `funannotate clean` test failed.')
    print("#########################################################\n")
def runPredictTest():
    """Unit test for `funannotate predict` with a pre-trained Augustus
    species ('yeast'); checks the predicted gene count is plausible."""
    print("#########################################################")
    print('Running `funannotate predict` unit testing')
    tmpdir = 'test-predict_'+pid
    os.makedirs(tmpdir)
    inputFasta = 'test.softmasked.fa'
    protEvidence = 'protein.evidence.fasta'
    # Fetch and unpack the test data only if any piece is missing.
    if not checkFile(inputFasta) or not checkFile(protEvidence):
        if not os.path.isfile('test-predict.tar.gz'):
            download(download_links.get('predict'), 'test-predict.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-predict.tar.gz'])
    shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
    shutil.copyfile(protEvidence, os.path.join(tmpdir, protEvidence))
    # run predict
    runCMD(['funannotate', 'predict', '-i', inputFasta,
            '--protein_evidence', protEvidence,
            '-o', 'annotate', '--augustus_species', 'yeast',
            '--cpus', str(args.cpus), '--species', "Awesome testicus"], tmpdir)
    print("#########################################################")
    # check results: gene count must fall inside the expected window
    try:
        assert 1500 <= countGFFgenes(os.path.join(tmpdir, 'annotate', 'predict_results', 'Awesome_testicus.gff3')) <= 1700
        print('SUCCESS: `funannotate predict` test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: `funannotate predict` test failed - check logfiles')
    print("#########################################################\n")
def runBuscoTest():
    """Unit test for `funannotate predict` with BUSCO-mediated Augustus
    training (no --augustus_species flag); checks the predicted gene count."""
    print("#########################################################")
    print('Running `funannotate predict` BUSCO-mediated training unit testing')
    # Augustus keeps per-species training files; wipe any stale data for
    # this test species so training starts clean.
    try:
        AUGUSTUS = os.environ["AUGUSTUS_CONFIG_PATH"]
    except KeyError:
        # BUG FIX: the original called lib.log.error(), but `lib` is never
        # imported in this script, so the handler itself raised NameError.
        print("ERROR: $AUGUSTUS_CONFIG_PATH environmental variable not found, set to continue.")
        return
    if os.path.isdir(os.path.join(AUGUSTUS, 'species', 'awesome_busco')):
        shutil.rmtree(os.path.join(AUGUSTUS, 'species', 'awesome_busco'))
    tmpdir = 'test-busco_'+pid
    os.makedirs(tmpdir)
    inputFasta = 'test.softmasked.fa'
    protEvidence = 'protein.evidence.fasta'
    # Fetch and unpack the test data only if any piece is missing.
    # This test deliberately reuses the 'predict' dataset URL.
    if not checkFile(inputFasta) or not checkFile(protEvidence):
        if not os.path.isfile('test-busco.tar.gz'):
            download(download_links.get('predict'), 'test-busco.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-busco.tar.gz'])
    shutil.copyfile(inputFasta, os.path.join(tmpdir, inputFasta))
    shutil.copyfile(protEvidence, os.path.join(tmpdir, protEvidence))
    # Run predict; BUSCO-based training happens implicitly because no
    # pre-trained Augustus species is supplied.
    runCMD(['funannotate', 'predict', '-i', inputFasta,
            '--protein_evidence', protEvidence,
            '-o', 'annotate', '--cpus', str(args.cpus),
            '--species', "Awesome busco"], tmpdir)
    print("#########################################################")
    # Check results: the gene count must fall inside the expected window.
    try:
        assert 1500 <= countGFFgenes(os.path.join(tmpdir, 'annotate', 'predict_results', 'Awesome_busco.gff3')) <= 1700
        print('SUCCESS: `funannotate predict` BUSCO-mediated training test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: `funannotate predict` BUSCO-mediated training test failed - check logfiles')
    print("#########################################################\n")
def runAnnotateTest():
    """Unit test for `funannotate annotate` using pre-computed InterProScan
    and eggNOG-mapper results; checks all expected output files exist."""
    print("#########################################################")
    print('Running `funannotate annotate` unit testing')
    tmpdir = 'test-annotate_'+pid
    os.makedirs(tmpdir)
    input = 'Genome_one.gbk'
    iprscan = 'genome_one.iprscan.xml'
    emapper = 'genome_one.emapper.annotations'
    # Fetch and unpack the test data only if any piece is missing.
    if not checkFile(input) or not checkFile(iprscan) or not checkFile(emapper):
        if not os.path.isfile('test-annotate.tar.gz'):
            download(download_links.get('annotate'), 'test-annotate.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-annotate.tar.gz'])
    shutil.copyfile(input, os.path.join(tmpdir, input))
    shutil.copyfile(iprscan, os.path.join(tmpdir, iprscan))
    shutil.copyfile(emapper, os.path.join(tmpdir, emapper))
    # run annotate
    runCMD(['funannotate', 'annotate', '--genbank', input,
            '-o', 'annotate', '--cpus', str(args.cpus),
            '--iprscan', iprscan,
            '--eggnog', emapper], tmpdir)
    print("#########################################################")
    # check results: every expected artifact must exist and be non-empty
    try:
        assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.gbk'))
        assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.sqn'))
        assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.agp'))
        assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.tbl'))
        assert checkFile(os.path.join(tmpdir, 'annotate', 'annotate_results', 'Genome_one.annotations.txt'))
        print('SUCCESS: `funannotate annotate` test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: `funannotate annotate` test failed - check logfiles')
    print("#########################################################\n")
def runCompareTest():
    """Unit test for `funannotate compare` on three genomes, including
    dN/dS estimation with an outgroup; checks the report files exist."""
    print("#########################################################")
    print('Running `funannotate compare` unit testing')
    tmpdir = 'test-compare_'+pid
    os.makedirs(tmpdir)
    input1 = 'Genome_one.gbk'
    input2 = 'Genome_two.gbk'
    input3 = 'Genome_three.gbk'
    # Fetch and unpack the test data only if any piece is missing.
    if not checkFile(input1) or not checkFile(input2) or not checkFile(input3):
        if not os.path.isfile('test-compare.tar.gz'):
            download(download_links.get('compare'), 'test-compare.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-compare.tar.gz'])
    shutil.copyfile(input1, os.path.join(tmpdir, input1))
    shutil.copyfile(input2, os.path.join(tmpdir, input2))
    shutil.copyfile(input3, os.path.join(tmpdir, input3))
    # run compare
    runCMD(['funannotate', 'compare',
            '-i', input1, input2, input3,
            '-o', 'compare', '--cpus', str(args.cpus),
            '--run_dnds', 'estimate', '--outgroup', 'botrytis_cinerea.dikarya'], tmpdir)
    print("#########################################################")
    # check results: all report artifacts must exist and be non-empty
    try:
        assert checkFile(os.path.join(tmpdir, 'compare', 'index.html'))
        assert checkFile(os.path.join(tmpdir, 'compare', 'phylogeny.html'))
        assert checkFile(os.path.join(tmpdir, 'compare.tar.gz'))
        print('SUCCESS: `funannotate compare` test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: `funannotate compare` test failed - check logfiles')
    print("#########################################################\n")
def runRNAseqTest():
    """Exercise the funannotate RNA-seq pipeline: train -> predict -> update.

    Downloads the test dataset if needed, runs all three commands in a
    scratch directory, and checks the final gene count falls inside the
    expected window (1630-1830).
    """
    print("#########################################################")
    print('Running funannotate RNA-seq training/prediction unit testing')
    # Augustus keeps per-species training files; wipe any stale data for
    # this test species so training starts clean.
    try:
        AUGUSTUS = os.environ["AUGUSTUS_CONFIG_PATH"]
    except KeyError:
        # BUG FIX: the original called lib.log.error(), but `lib` is never
        # imported in this script, so the handler itself raised NameError.
        print("ERROR: $AUGUSTUS_CONFIG_PATH environmental variable not found, set to continue.")
        return
    if os.path.isdir(os.path.join(AUGUSTUS, 'species', 'awesome_rna')):
        shutil.rmtree(os.path.join(AUGUSTUS, 'species', 'awesome_rna'))
    tmpdir = 'test-rna_seq_'+pid
    os.makedirs(tmpdir)
    inputFasta = 'test.softmasked.fa'
    protEvidence = 'protein.evidence.fasta'
    illumina = 'rna-seq.illumina.fastq.gz'
    nanopore = 'rna-seq.nanopore.fastq.gz'
    # Fetch and unpack the test data only if any piece is missing.
    if not checkFile(inputFasta) or not checkFile(protEvidence) or not checkFile(illumina) or not checkFile(nanopore):
        if not os.path.isfile('test-rna_seq.tar.gz'):
            download(download_links.get('rna-seq'), 'test-rna_seq.tar.gz')
        subprocess.call(['tar', '-zxf', 'test-rna_seq.tar.gz'])
    for f in [inputFasta, protEvidence, illumina, nanopore]:
        shutil.copyfile(f, os.path.join(tmpdir, f))
    # Step 1: train gene predictors from the RNA-seq evidence.
    runCMD(['funannotate', 'train', '-i', inputFasta,
            '--single', illumina, '--nanopore_mrna', nanopore,
            '-o', 'rna-seq', '--cpus', str(args.cpus), '--jaccard_clip',
            '--species', "Awesome rna"], tmpdir)
    # Step 2: predict genes using the freshly trained data.
    print("#########################################################")
    print('Now running `funannotate predict` using RNA-seq training data')
    runCMD(['funannotate', 'predict', '-i', inputFasta,
            '--protein_evidence', protEvidence,
            '-o', 'rna-seq', '--cpus', str(args.cpus),
            '--species', "Awesome rna"], tmpdir)
    # Step 3: PASA-mediated UTR addition and alternate transcripts.
    print("#########################################################")
    print('Now running `funannotate update` to run PASA-mediated UTR addition and multiple transcripts')
    runCMD(['funannotate', 'update', '-i', 'rna-seq',
            '--cpus', str(args.cpus)], tmpdir)
    print("#########################################################")
    # Check results: the gene count must fall inside the expected window.
    try:
        assert 1630 <= countGFFgenes(os.path.join(tmpdir, 'rna-seq', 'update_results', 'Awesome_rna.gff3')) <= 1830
        print('SUCCESS: funannotate RNA-seq training/prediction test complete.')
        shutil.rmtree(tmpdir)
    except AssertionError:
        print('ERROR: funannotate RNA-seq training/prediction test failed - check logfiles')
    print("#########################################################\n")
# Unique suffix for scratch directories so concurrent runs don't collide.
pid = str(os.getpid())
# Dispatch: run each requested test suite (or all of them) in a fixed order.
if 'clean' in args.tests or 'all' in args.tests:
    runCleanTest()
if 'mask' in args.tests or 'all' in args.tests:
    runMaskTest()
if 'predict' in args.tests or 'all' in args.tests:
    runPredictTest()
if 'busco' in args.tests or 'all' in args.tests:
    runBuscoTest()
if 'rna-seq' in args.tests or 'all' in args.tests:
    runRNAseqTest()
if 'annotate' in args.tests or 'all' in args.tests:
    runAnnotateTest()
if 'compare' in args.tests or 'all' in args.tests:
    runCompareTest()
scraper/spider/spiders/cmich.py | adaschevici/unicrawlers | 0 | 6624461 | <reponame>adaschevici/unicrawlers
# -*- coding: utf-8 -*-
import os
from twisted.internet import reactor, defer
from scrapy.selector import Selector
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.spider import BaseSpider
from scrapy.http import Request
from scrapy import log
from urllib import urlencode
from spider.items import StudentItem
from scrapy.http import FormRequest
from spider.spiders.basic import StudentSpider
class CmichSpider(StudentSpider):
    """Scrapy spider that harvests student names/emails from the CMU
    (Central Michigan) people-search directory."""
    name = 'cmich'
    start_urls = ['https://www.cmich.edu/search/pages/peopleresults.aspx']
    def __init__(self, *args, **kwargs):
        super(CmichSpider, self).__init__(*args, **kwargs)
    def parse(self, response):
        # One search-form submission per phrase; progress is tracked in
        # the spider's persistent state dict.
        phrases = self.get_search_phrases()
        self.state['progress_current'] = 0
        self.state['progress_total'] = len(phrases)
        for phrase in phrases:
            yield self.get_search_request(response, str(phrase))
    def get_search_request(self, response, phrase):
        # Submit the ASP.NET people-search form. NOTE(review): rblFilter='s'
        # presumably restricts results to students -- confirm against the site.
        return FormRequest.from_response(response,
            formdata={
                'ctl00$SPWebPartManager1$g_a27adfaf_8519_4c39_a3c9_495e9b106eae$ctl00$tbPeopleQuery': phrase,
                'ctl00$SPWebPartManager1$g_a27adfaf_8519_4c39_a3c9_495e9b106eae$ctl00$btnPeopleGo': 'Search',
                'ctl00$SPWebPartManager1$g_a27adfaf_8519_4c39_a3c9_495e9b106eae$ctl00$rblFilter': 's'
            },
            dont_click=True,
            callback=self.people
        )
    def people(self, response):
        # Each result row is a <td class="cmuDirResultsInfo"> cell; pull the
        # name and email divs out of it and emit one StudentItem per row.
        sel = Selector(response)
        students = sel.xpath('//td[@class="cmuDirResultsInfo"]')
        self.state['progress_current'] += 1
        for student in students:
            yield StudentItem(
                name=student.xpath('.//div[@class="cmuDirName"]/text()').extract()[0],
                email=student.xpath('.//div[@class="cmuDirContact"]/a/text()').extract()[0]
            )
| # -*- coding: utf-8 -*-
import os
from twisted.internet import reactor, defer
from scrapy.selector import Selector
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.spider import BaseSpider
from scrapy.http import Request
from scrapy import log
from urllib import urlencode
from spider.items import StudentItem
from scrapy.http import FormRequest
from spider.spiders.basic import StudentSpider
class CmichSpider(StudentSpider):
    """Scrapy spider that harvests student names/emails from the CMU
    (Central Michigan) people-search directory."""
    name = 'cmich'
    start_urls = ['https://www.cmich.edu/search/pages/peopleresults.aspx']
    def __init__(self, *args, **kwargs):
        super(CmichSpider, self).__init__(*args, **kwargs)
    def parse(self, response):
        # One search-form submission per phrase; progress is tracked in
        # the spider's persistent state dict.
        phrases = self.get_search_phrases()
        self.state['progress_current'] = 0
        self.state['progress_total'] = len(phrases)
        for phrase in phrases:
            yield self.get_search_request(response, str(phrase))
    def get_search_request(self, response, phrase):
        # Submit the ASP.NET people-search form. NOTE(review): rblFilter='s'
        # presumably restricts results to students -- confirm against the site.
        return FormRequest.from_response(response,
            formdata={
                'ctl00$SPWebPartManager1$g_a27adfaf_8519_4c39_a3c9_495e9b106eae$ctl00$tbPeopleQuery': phrase,
                'ctl00$SPWebPartManager1$g_a27adfaf_8519_4c39_a3c9_495e9b106eae$ctl00$btnPeopleGo': 'Search',
                'ctl00$SPWebPartManager1$g_a27adfaf_8519_4c39_a3c9_495e9b106eae$ctl00$rblFilter': 's'
            },
            dont_click=True,
            callback=self.people
        )
    def people(self, response):
        # Each result row is a <td class="cmuDirResultsInfo"> cell; pull the
        # name and email divs out of it and emit one StudentItem per row.
        sel = Selector(response)
        students = sel.xpath('//td[@class="cmuDirResultsInfo"]')
        self.state['progress_current'] += 1
        for student in students:
            yield StudentItem(
                name=student.xpath('.//div[@class="cmuDirName"]/text()').extract()[0],
                email=student.xpath('.//div[@class="cmuDirContact"]/a/text()').extract()[0]
            )
home/migrations/0005_delete_job.py | AkinWilderman/myPort | 0 | 6624462 | # Generated by Django 2.1.7 on 2019-07-05 23:09
from django.db import migrations
class Migration(migrations.Migration):
    # Runs after the previous 'home' migration so operations apply in order.
    dependencies = [
        ('home', '0004_auto_20190706_0004'),
    ]
    # Deletes the Job model (drops its database table).
    operations = [
        migrations.DeleteModel(
            name='Job',
        ),
    ]
| # Generated by Django 2.1.7 on 2019-07-05 23:09
from django.db import migrations
class Migration(migrations.Migration):
    # Runs after the previous 'home' migration so operations apply in order.
    dependencies = [
        ('home', '0004_auto_20190706_0004'),
    ]
    # Deletes the Job model (drops its database table).
    operations = [
        migrations.DeleteModel(
            name='Job',
        ),
    ]
| en | 0.702617 | # Generated by Django 2.1.7 on 2019-07-05 23:09 | 1.330764 | 1 |
examples/extensions/kmeans.py | NunoEdgarGFlowHub/cvxpy | 0 | 6624463 | """
Copyright 2013 <NAME>
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import cvxpy as cvx
import mixed_integer as mi
print(__doc__)
from time import time
import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn.cluster import KMeans
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
# Deterministic runs: fix the RNG seed before any k-means initialization.
np.random.seed(42)
digits = load_digits()
# Standardize features to zero mean / unit variance before clustering.
data = scale(digits.data)
n_samples, n_features = data.shape
n_digits = len(np.unique(digits.target))
labels = digits.target
# Subsample size used for the (otherwise expensive) silhouette score.
sample_size = 300
print("n_digits: %d, \t n_samples %d, \t n_features %d"
      % (n_digits, n_samples, n_features))
print(79 * '_')
print('% 9s' % 'init'
      ' time inertia homo compl v-meas ARI AMI silhouette')
def bench_k_means(estimator, name, data):
    """Fit *estimator* on *data* and print one table row: wall time,
    inertia, and clustering-quality metrics computed against the
    module-level ground-truth ``labels``."""
    t0 = time()
    estimator.fit(data)
    print('% 9s %.2fs %i %.3f %.3f %.3f %.3f %.3f %.3f'
          % (name, (time() - t0), estimator.inertia_,
             metrics.homogeneity_score(labels, estimator.labels_),
             metrics.completeness_score(labels, estimator.labels_),
             metrics.v_measure_score(labels, estimator.labels_),
             metrics.adjusted_rand_score(labels, estimator.labels_),
             metrics.adjusted_mutual_info_score(labels, estimator.labels_),
             # Silhouette is O(n^2), so score a 300-point subsample only.
             metrics.silhouette_score(data, estimator.labels_,
                                      metric='euclidean',
                                      sample_size=sample_size)))
# Benchmark the three center-initialization strategies on the same data.
bench_k_means(KMeans(init='k-means++', n_clusters=n_digits, n_init=10),
              name="k-means++", data=data)
bench_k_means(KMeans(init='random', n_clusters=n_digits, n_init=10),
              name="random", data=data)
# in this case the seeding of the centers is deterministic, hence we run the
# kmeans algorithm only once with n_init=1
pca = PCA(n_components=n_digits).fit(data)
bench_k_means(KMeans(init=pca.components_, n_clusters=n_digits, n_init=1),
              name="PCA-based",
              data=data)
print(79 * '_')
###############################################################################
# Visualize the results on PCA-reduced data
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init='k-means++', n_clusters=n_digits, n_init=10)
kmeans.fit(reduced_data)
# Step size of the mesh. Decrease to increase the quality of the VQ.
h = .02 # point in the mesh [x_min, m_max]x[y_min, y_max].
# Plot the decision boundary. For that, we will assign a color to each
# mesh point according to the cluster it falls in.
x_min, x_max = reduced_data[:, 0].min() + 1, reduced_data[:, 0].max() - 1
y_min, y_max = reduced_data[:, 1].min() + 1, reduced_data[:, 1].max() - 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure(1)
plt.clf()
plt.imshow(Z, interpolation='nearest',
           extent=(xx.min(), xx.max(), yy.min(), yy.max()),
           cmap=plt.cm.Paired,
           aspect='auto', origin='lower')
plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2)
# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
plt.scatter(centroids[:, 0], centroids[:, 1],
            marker='x', s=169, linewidths=3,
            color='w', zorder=10)
plt.title('K-means clustering on the digits dataset (PCA-reduced data)\n'
          'Centroids are marked with white cross')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
| """
Copyright 2013 <NAME>
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import cvxpy as cvx
import mixed_integer as mi
print(__doc__)
from time import time
import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn.cluster import KMeans
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
# Deterministic runs: fix the RNG seed before any k-means initialization.
np.random.seed(42)
digits = load_digits()
# Standardize features to zero mean / unit variance before clustering.
data = scale(digits.data)
n_samples, n_features = data.shape
n_digits = len(np.unique(digits.target))
labels = digits.target
# Subsample size used for the (otherwise expensive) silhouette score.
sample_size = 300
print("n_digits: %d, \t n_samples %d, \t n_features %d"
      % (n_digits, n_samples, n_features))
print(79 * '_')
print('% 9s' % 'init'
      ' time inertia homo compl v-meas ARI AMI silhouette')
def bench_k_means(estimator, name, data):
    """Fit *estimator* on *data* and print one table row: wall time,
    inertia, and clustering-quality metrics computed against the
    module-level ground-truth ``labels``."""
    t0 = time()
    estimator.fit(data)
    print('% 9s %.2fs %i %.3f %.3f %.3f %.3f %.3f %.3f'
          % (name, (time() - t0), estimator.inertia_,
             metrics.homogeneity_score(labels, estimator.labels_),
             metrics.completeness_score(labels, estimator.labels_),
             metrics.v_measure_score(labels, estimator.labels_),
             metrics.adjusted_rand_score(labels, estimator.labels_),
             metrics.adjusted_mutual_info_score(labels, estimator.labels_),
             # Silhouette is O(n^2), so score a 300-point subsample only.
             metrics.silhouette_score(data, estimator.labels_,
                                      metric='euclidean',
                                      sample_size=sample_size)))
# Benchmark the three center-initialization strategies on the same data.
bench_k_means(KMeans(init='k-means++', n_clusters=n_digits, n_init=10),
              name="k-means++", data=data)
bench_k_means(KMeans(init='random', n_clusters=n_digits, n_init=10),
              name="random", data=data)
# in this case the seeding of the centers is deterministic, hence we run the
# kmeans algorithm only once with n_init=1
pca = PCA(n_components=n_digits).fit(data)
bench_k_means(KMeans(init=pca.components_, n_clusters=n_digits, n_init=1),
              name="PCA-based",
              data=data)
print(79 * '_')
###############################################################################
# Visualize the results on PCA-reduced data
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init='k-means++', n_clusters=n_digits, n_init=10)
kmeans.fit(reduced_data)
# Step size of the mesh. Decrease to increase the quality of the VQ.
h = .02 # point in the mesh [x_min, m_max]x[y_min, y_max].
# Plot the decision boundary. For that, we will assign a color to each
# mesh point according to the cluster it falls in.
x_min, x_max = reduced_data[:, 0].min() + 1, reduced_data[:, 0].max() - 1
y_min, y_max = reduced_data[:, 1].min() + 1, reduced_data[:, 1].max() - 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure(1)
plt.clf()
plt.imshow(Z, interpolation='nearest',
           extent=(xx.min(), xx.max(), yy.min(), yy.max()),
           cmap=plt.cm.Paired,
           aspect='auto', origin='lower')
plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2)
# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
plt.scatter(centroids[:, 0], centroids[:, 1],
            marker='x', s=169, linewidths=3,
            color='w', zorder=10)
plt.title('K-means clustering on the digits dataset (PCA-reduced data)\n'
          'Centroids are marked with white cross')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
| en | 0.822247 | Copyright 2013 <NAME> This file is part of CVXPY. CVXPY is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. CVXPY is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with CVXPY. If not, see <http://www.gnu.org/licenses/>. # in this case the seeding of the centers is deterministic, hence we run the # kmeans algorithm only once with n_init=1 ############################################################################### # Visualize the results on PCA-reduced data # Step size of the mesh. Decrease to increase the quality of the VQ. # point in the mesh [x_min, m_max]x[y_min, y_max]. # Plot the decision boundary. For that, we will assign a color to each # Obtain labels for each point in mesh. Use last trained model. # Put the result into a color plot # Plot the centroids as a white X | 1.964874 | 2 |
ux/parser.py | baudo2048/uxlang | 0 | 6624464 | import sys
import os
import math
# Absolute working folders: .ux sources, generated output, and imported
# script snippets. NOTE(review): hard-coded Windows user paths -- consider
# making these configurable.
inFolder = "C:/Users/u416183/lab/work/node/htmlLang/dist/ux/"
outFolder = "C:/Users/u416183/lab/work/node/htmlLang/dist/"
scriptFolder = "C:/Users/u416183/lab/work/node/htmlLang/dist/script/"
def getTokenType(t):
sentinel = t[0]
if (sentinel == "."):
return "attr"
if (sentinel == "-"):
return "style"
if (sentinel == "'"):
return "text"
if (sentinel == "\\"):
return "include"
return "elem"
def isImport(t):
sentinel = t[0]
if (sentinel== "*"):
return True
else:
return False
def countTabs2(l):
count=0
for c in l:
if(c=="\t"):
count=count+1
return count
def countTabs(l):
count = int((len(l) - len(l.lstrip(' ')))/4)
return count
def leadingTabs(n):
lt = ""
for x in range(0,n):
lt = lt + "\t"
return lt
def parseInclude(fileName):
# PRIMO PASSAGGIO
## Leggo il file la prima volta e sostituisco gli include e salvo in un file intermedio
file1 = open(inFolder + fileName + '.ux', 'r')
fileOut = open(inFolder + fileName + '.intermediate.ux', 'w')
Lines = file1.readlines()
for line in Lines:
currTabNum = countTabs(line)
tokens = line.split()
if (len(tokens)==0):
fileOut.write("\n")
continue
leadToken = tokens[0]
tokenType = getTokenType(leadToken)
if (tokenType == "include"):
fileInclude = open(inFolder + tokens[1], 'r')
LinesInclude = fileInclude.readlines()
for lineInclude in LinesInclude:
fileOut.write(leadingTabs(currTabNum) + lineInclude)
else:
fileOut.write(line)
def bonificaImport(fileName): # Porto in testa gli importo che erano presenti nel file .script importato
file1 = open(inFolder + fileName + '.intermediate.js', 'r')
fileOut = open(outFolder + fileName + '.js', 'w')
Lines = file1.readlines()
importArr = []
codeArr = []
for line in Lines:
tokens = line.split()
if (len(tokens)==0):
continue
if (tokens[0]=='import'):
importArr.append(line)
else:
codeArr.append(line)
for i in importArr:
fileOut.write(i)
for c in codeArr:
fileOut.write(c)
file1.close()
fileOut.close()
os.remove(inFolder + fileName + '.intermediate.js')
def writeAttribute(t, f, c, ns): # t = tokens f = fileOut c = currentVarName ns = namespace
if(t[0][1:3]=="on"):
val = t[1]
for i in range(2, len(t)):
val = val + " " + t[i]
f.append("{}{} = {};".format(c, t[0], val))
f.append("\n")
else:
val = t[1]
for i in range(2, len(t)):
val = val + " " + t[i]
if(ns=="0"):
f.append("{}{} = '{}';".format(c, t[0], val))
else:
f.append("{}.setAttribute('{}','{}');".format(c, t[0][1:], val))
f.append("\n")
def writeText(t, f, c, n): # t = tokens f = fileOut c = currentVarName n = count
# TRASFORMARE QUESTO CASO IN NODE ELEMENT!!!
val = t[0][1:len(t[0])]
for i in range(1, len(t)):
val = val + " " + t[i]
f.append("var textNode_{} = document.createTextNode('{}');".format(str(n), val))
f.append("\n")
f.append("{}.append(textNode_{});".format(c, str(n)))
f.append("\n")
def writeStyle(t, f, c): # t = tokens f = fileOut c = currentVarName
val = t[1]
for i in range(2, len(t)):
val = val + " " + t[i]
f.append("{}.style{} = '{}';".format(c, t[0].replace("-","."), val))
f.append("\n")
def writeImport(t, f, ct): # t = tokens f = fileOut ct = currTabNum
inFolder = "C:/Users/u416183/lab/work7py/htmlLang/htmlLang/app/ux/"
outFolder = "C:/Users/u416183/lab/work7py/htmlLang/htmlLang/dist/"
scriptFolder = "C:/Users/u416183/lab/work7py/htmlLang/htmlLang/app/script/"
fileImport = open(inFolder + t[1], 'r')
LinesImport = fileImport.readlines()
for lineImport in LinesImport:
f.write(leadingTabs(ct) + lineImport)
def parse(fileName):
ns = {
"svg": "1",
"circle": "1",
"rect": "1"
}
importArr = []
codeArr = []
# PRIMO PASSAGGIO: GLI INCLUDE
#parseInclude(fileName)
# SECONDO PASSAGGIO
fileIs = inFolder + fileName + '.ux'
print("file1 is + {}".format(fileIs))
file1 = open(inFolder + fileName + '.ux', 'r')
fileOut = open(inFolder + fileName + '.intermediate.js', 'w')
Lines = file1.readlines()
count = 0
currentVarName = ""
currentNS = "0"
appends = []
currTabNum = 0
prevTabNum = 0
isFirstElement = True
firstElement = ''
for line in Lines:
tokens = line.split()
if (len(tokens)==0):
continue
leadToken = tokens[0]
tokenType = getTokenType(leadToken)
currTabNum = countTabs(line)
if (tokenType=="elem"):
#currentVarName = writeElement(tokens, fileOut) ...
if (len(tokens)==1):
if(isImport(leadToken)):
varName = leadToken[1:] + "_" + str(count)
else:
varName = leadToken + "_" + str(count)
else:
if(isImport(leadToken)):
varName = tokens[1]
else:
varName = tokens[1]
if(isFirstElement):
isFirstElement = False
firstElement = varName
codeArr.append("export default function {}() ".format(fileName))
codeArr.append("{")
currentVarName = varName # non funzia da sistemare
if( (ns.get(tokens[0],"0")) =="0"):
currentNS = "0"
else:
currentNS = "1"
codeArr.append("\n")
if(isImport(leadToken)):
importArr.append("import {}_ from './{}.js';".format(varName, leadToken[1:]))
codeArr.append("var {} = {}();".format(varName, varName + '_'))
else:
if( (ns.get(tokens[0],"0")) =="0"):
codeArr.append("var {} = document.createElement('{}');".format(varName, tokens[0]))
else:
codeArr.append("var {} = document.createElementNS('http://www.w3.org/2000/svg', '{}');".format(varName, tokens[0]))
codeArr.append("\n")
## APPENDS
if(prevTabNum>currTabNum):
#SVUOTO
print("svuoto")
diff = prevTabNum - currTabNum
for x in range(0,diff):
for child in appends[len(appends)-1]:
prevLiv = appends[len(appends)-2]
varEl = prevLiv[len(prevLiv)-1]
codeArr.append("{}.appendChild({});".format(varEl, child))
codeArr.append("\n")
codeArr.append("\n")
del appends[len(appends)-1]
if(currTabNum>len(appends)-1):
appends.append([])
appends[currTabNum].append(varName)
if (tokenType=="attr"):
writeAttribute(tokens, codeArr, currentVarName, currentNS)
if (tokenType=="text"):
writeText(tokens, codeArr, currentVarName, count)
if (tokenType=="style"):
writeStyle(tokens, codeArr, currentVarName)
prevTabNum = currTabNum
count=count+1
#SVUOTO
print("svuoto final - {}".format(appends))
while(len(appends)>1):
for child in appends[len(appends)-1]:
prevLiv = appends[len(appends)-2]
varEl = prevLiv[len(prevLiv)-1]
codeArr.append("{}.appendChild({});".format(varEl, child))
codeArr.append("\n")
codeArr.append("\n")
del appends[len(appends)-1]
## PRIMA DI CHIUDERE LO SCRIPT COPIO comp.script.js se esiste
if(os.path.exists(scriptFolder + fileName + '.script.js')):
fileScript = open(scriptFolder + fileName + '.script.js', 'r')
LinesScript = fileScript.readlines()
for lineScript in LinesScript:
codeArr.append(lineScript)
codeArr.append("\n");
codeArr.append("return {};".format(firstElement))
codeArr.append("}")
## Scrivo su file
for i in importArr:
fileOut.write(i)
fileOut.write('\n');
for l in codeArr:
fileOut.write(l)
## Rimuovo i files intermedi
file1.close()
fileOut.close()
#os.remove(fileName + '.intermediate.ux')
##
parse(sys.argv[1])
bonificaImport(sys.argv[1])
| import sys
import os
import math
inFolder = "C:/Users/u416183/lab/work/node/htmlLang/dist/ux/"
outFolder = "C:/Users/u416183/lab/work/node/htmlLang/dist/"
scriptFolder = "C:/Users/u416183/lab/work/node/htmlLang/dist/script/"
def getTokenType(t):
sentinel = t[0]
if (sentinel == "."):
return "attr"
if (sentinel == "-"):
return "style"
if (sentinel == "'"):
return "text"
if (sentinel == "\\"):
return "include"
return "elem"
def isImport(t):
sentinel = t[0]
if (sentinel== "*"):
return True
else:
return False
def countTabs2(l):
count=0
for c in l:
if(c=="\t"):
count=count+1
return count
def countTabs(l):
count = int((len(l) - len(l.lstrip(' ')))/4)
return count
def leadingTabs(n):
lt = ""
for x in range(0,n):
lt = lt + "\t"
return lt
def parseInclude(fileName):
# PRIMO PASSAGGIO
## Leggo il file la prima volta e sostituisco gli include e salvo in un file intermedio
file1 = open(inFolder + fileName + '.ux', 'r')
fileOut = open(inFolder + fileName + '.intermediate.ux', 'w')
Lines = file1.readlines()
for line in Lines:
currTabNum = countTabs(line)
tokens = line.split()
if (len(tokens)==0):
fileOut.write("\n")
continue
leadToken = tokens[0]
tokenType = getTokenType(leadToken)
if (tokenType == "include"):
fileInclude = open(inFolder + tokens[1], 'r')
LinesInclude = fileInclude.readlines()
for lineInclude in LinesInclude:
fileOut.write(leadingTabs(currTabNum) + lineInclude)
else:
fileOut.write(line)
def bonificaImport(fileName): # Porto in testa gli importo che erano presenti nel file .script importato
file1 = open(inFolder + fileName + '.intermediate.js', 'r')
fileOut = open(outFolder + fileName + '.js', 'w')
Lines = file1.readlines()
importArr = []
codeArr = []
for line in Lines:
tokens = line.split()
if (len(tokens)==0):
continue
if (tokens[0]=='import'):
importArr.append(line)
else:
codeArr.append(line)
for i in importArr:
fileOut.write(i)
for c in codeArr:
fileOut.write(c)
file1.close()
fileOut.close()
os.remove(inFolder + fileName + '.intermediate.js')
def writeAttribute(t, f, c, ns): # t = tokens f = fileOut c = currentVarName ns = namespace
if(t[0][1:3]=="on"):
val = t[1]
for i in range(2, len(t)):
val = val + " " + t[i]
f.append("{}{} = {};".format(c, t[0], val))
f.append("\n")
else:
val = t[1]
for i in range(2, len(t)):
val = val + " " + t[i]
if(ns=="0"):
f.append("{}{} = '{}';".format(c, t[0], val))
else:
f.append("{}.setAttribute('{}','{}');".format(c, t[0][1:], val))
f.append("\n")
def writeText(t, f, c, n): # t = tokens f = fileOut c = currentVarName n = count
# TRASFORMARE QUESTO CASO IN NODE ELEMENT!!!
val = t[0][1:len(t[0])]
for i in range(1, len(t)):
val = val + " " + t[i]
f.append("var textNode_{} = document.createTextNode('{}');".format(str(n), val))
f.append("\n")
f.append("{}.append(textNode_{});".format(c, str(n)))
f.append("\n")
def writeStyle(t, f, c): # t = tokens f = fileOut c = currentVarName
val = t[1]
for i in range(2, len(t)):
val = val + " " + t[i]
f.append("{}.style{} = '{}';".format(c, t[0].replace("-","."), val))
f.append("\n")
def writeImport(t, f, ct): # t = tokens f = fileOut ct = currTabNum
inFolder = "C:/Users/u416183/lab/work7py/htmlLang/htmlLang/app/ux/"
outFolder = "C:/Users/u416183/lab/work7py/htmlLang/htmlLang/dist/"
scriptFolder = "C:/Users/u416183/lab/work7py/htmlLang/htmlLang/app/script/"
fileImport = open(inFolder + t[1], 'r')
LinesImport = fileImport.readlines()
for lineImport in LinesImport:
f.write(leadingTabs(ct) + lineImport)
def parse(fileName):
ns = {
"svg": "1",
"circle": "1",
"rect": "1"
}
importArr = []
codeArr = []
# PRIMO PASSAGGIO: GLI INCLUDE
#parseInclude(fileName)
# SECONDO PASSAGGIO
fileIs = inFolder + fileName + '.ux'
print("file1 is + {}".format(fileIs))
file1 = open(inFolder + fileName + '.ux', 'r')
fileOut = open(inFolder + fileName + '.intermediate.js', 'w')
Lines = file1.readlines()
count = 0
currentVarName = ""
currentNS = "0"
appends = []
currTabNum = 0
prevTabNum = 0
isFirstElement = True
firstElement = ''
for line in Lines:
tokens = line.split()
if (len(tokens)==0):
continue
leadToken = tokens[0]
tokenType = getTokenType(leadToken)
currTabNum = countTabs(line)
if (tokenType=="elem"):
#currentVarName = writeElement(tokens, fileOut) ...
if (len(tokens)==1):
if(isImport(leadToken)):
varName = leadToken[1:] + "_" + str(count)
else:
varName = leadToken + "_" + str(count)
else:
if(isImport(leadToken)):
varName = tokens[1]
else:
varName = tokens[1]
if(isFirstElement):
isFirstElement = False
firstElement = varName
codeArr.append("export default function {}() ".format(fileName))
codeArr.append("{")
currentVarName = varName # non funzia da sistemare
if( (ns.get(tokens[0],"0")) =="0"):
currentNS = "0"
else:
currentNS = "1"
codeArr.append("\n")
if(isImport(leadToken)):
importArr.append("import {}_ from './{}.js';".format(varName, leadToken[1:]))
codeArr.append("var {} = {}();".format(varName, varName + '_'))
else:
if( (ns.get(tokens[0],"0")) =="0"):
codeArr.append("var {} = document.createElement('{}');".format(varName, tokens[0]))
else:
codeArr.append("var {} = document.createElementNS('http://www.w3.org/2000/svg', '{}');".format(varName, tokens[0]))
codeArr.append("\n")
## APPENDS
if(prevTabNum>currTabNum):
#SVUOTO
print("svuoto")
diff = prevTabNum - currTabNum
for x in range(0,diff):
for child in appends[len(appends)-1]:
prevLiv = appends[len(appends)-2]
varEl = prevLiv[len(prevLiv)-1]
codeArr.append("{}.appendChild({});".format(varEl, child))
codeArr.append("\n")
codeArr.append("\n")
del appends[len(appends)-1]
if(currTabNum>len(appends)-1):
appends.append([])
appends[currTabNum].append(varName)
if (tokenType=="attr"):
writeAttribute(tokens, codeArr, currentVarName, currentNS)
if (tokenType=="text"):
writeText(tokens, codeArr, currentVarName, count)
if (tokenType=="style"):
writeStyle(tokens, codeArr, currentVarName)
prevTabNum = currTabNum
count=count+1
#SVUOTO
print("svuoto final - {}".format(appends))
while(len(appends)>1):
for child in appends[len(appends)-1]:
prevLiv = appends[len(appends)-2]
varEl = prevLiv[len(prevLiv)-1]
codeArr.append("{}.appendChild({});".format(varEl, child))
codeArr.append("\n")
codeArr.append("\n")
del appends[len(appends)-1]
## PRIMA DI CHIUDERE LO SCRIPT COPIO comp.script.js se esiste
if(os.path.exists(scriptFolder + fileName + '.script.js')):
fileScript = open(scriptFolder + fileName + '.script.js', 'r')
LinesScript = fileScript.readlines()
for lineScript in LinesScript:
codeArr.append(lineScript)
codeArr.append("\n");
codeArr.append("return {};".format(firstElement))
codeArr.append("}")
## Scrivo su file
for i in importArr:
fileOut.write(i)
fileOut.write('\n');
for l in codeArr:
fileOut.write(l)
## Rimuovo i files intermedi
file1.close()
fileOut.close()
#os.remove(fileName + '.intermediate.ux')
##
parse(sys.argv[1])
bonificaImport(sys.argv[1])
| it | 0.515901 | # PRIMO PASSAGGIO ## Leggo il file la prima volta e sostituisco gli include e salvo in un file intermedio # Porto in testa gli importo che erano presenti nel file .script importato # t = tokens f = fileOut c = currentVarName ns = namespace # t = tokens f = fileOut c = currentVarName n = count # TRASFORMARE QUESTO CASO IN NODE ELEMENT!!! # t = tokens f = fileOut c = currentVarName # t = tokens f = fileOut ct = currTabNum # PRIMO PASSAGGIO: GLI INCLUDE #parseInclude(fileName) # SECONDO PASSAGGIO #currentVarName = writeElement(tokens, fileOut) ... # non funzia da sistemare ## APPENDS #SVUOTO #SVUOTO ## PRIMA DI CHIUDERE LO SCRIPT COPIO comp.script.js se esiste ## Scrivo su file ## Rimuovo i files intermedi #os.remove(fileName + '.intermediate.ux') ## | 2.817545 | 3 |
python/cudf/cudf/core/column/datetime.py | JohnZed/cudf | 0 | 6624465 | # Copyright (c) 2019-2020, NVIDIA CORPORATION.
import datetime as dt
import numpy as np
import pandas as pd
import pyarrow as pa
import cudf._lib as libcudf
from cudf._lib.nvtx import annotate
from cudf.core.buffer import Buffer
from cudf.core.column import column
from cudf.utils import utils
from cudf.utils.dtypes import is_scalar, np_to_pa_dtype
# nanoseconds per time_unit
_numpy_to_pandas_conversion = {
"ns": 1,
"us": 1000,
"ms": 1000000,
"s": 1000000000,
"m": 60000000000,
"h": 3600000000000,
"D": 1000000000 * 86400,
}
class DatetimeColumn(column.ColumnBase):
def __init__(
self, data, dtype, mask=None, size=None, offset=0, null_count=None
):
"""
Parameters
----------
data : Buffer
The datetime values
dtype : np.dtype
The data type
mask : Buffer; optional
The validity mask
"""
dtype = np.dtype(dtype)
if data.size % dtype.itemsize:
raise ValueError("Buffer size must be divisible by element size")
if size is None:
size = data.size // dtype.itemsize
size = size - offset
super().__init__(
data,
size=size,
dtype=dtype,
mask=mask,
offset=offset,
null_count=null_count,
)
assert self.dtype.type is np.datetime64
self._time_unit, _ = np.datetime_data(self.dtype)
def __contains__(self, item):
# Handles improper item types
try:
item = np.datetime64(item, self._time_unit)
except Exception:
return False
return item.astype("int_") in self.as_numerical
@property
def time_unit(self):
return self._time_unit
@property
def year(self):
return self.get_dt_field("year")
@property
def month(self):
return self.get_dt_field("month")
@property
def day(self):
return self.get_dt_field("day")
@property
def hour(self):
return self.get_dt_field("hour")
@property
def minute(self):
return self.get_dt_field("minute")
@property
def second(self):
return self.get_dt_field("second")
@property
def weekday(self):
return self.get_dt_field("weekday")
def get_dt_field(self, field):
return libcudf.datetime.extract_datetime_component(self, field)
def normalize_binop_value(self, other):
if isinstance(other, dt.datetime):
other = np.datetime64(other)
if isinstance(other, pd.Timestamp):
m = _numpy_to_pandas_conversion[self.time_unit]
ary = utils.scalar_broadcast_to(
other.value * m, size=len(self), dtype=self.dtype
)
elif isinstance(other, np.datetime64):
other = other.astype(self.dtype)
ary = utils.scalar_broadcast_to(
other, size=len(self), dtype=self.dtype
)
else:
raise TypeError("cannot broadcast {}".format(type(other)))
return column.build_column(data=Buffer(ary), dtype=self.dtype)
@property
def as_numerical(self):
from cudf.core.column import build_column
return build_column(
data=self.base_data,
dtype=np.int64,
mask=self.base_mask,
offset=self.offset,
size=self.size,
)
def as_datetime_column(self, dtype, **kwargs):
dtype = np.dtype(dtype)
if dtype == self.dtype:
return self
return libcudf.unary.cast(self, dtype=dtype)
def as_numerical_column(self, dtype, **kwargs):
return self.as_numerical.astype(dtype)
def as_string_column(self, dtype, **kwargs):
from cudf.core.column import string
if len(self) > 0:
return string._numeric_to_str_typecast_functions[
np.dtype(self.dtype)
](self, **kwargs)
else:
return column.column_empty(0, dtype="object", masked=False)
def to_pandas(self, index=None):
return pd.Series(
self.to_array(fillna="pandas").astype(self.dtype), index=index
)
def to_arrow(self):
mask = None
if self.nullable:
mask = pa.py_buffer(self.mask_array_view.copy_to_host())
data = pa.py_buffer(self.as_numerical.data_array_view.copy_to_host())
pa_dtype = np_to_pa_dtype(self.dtype)
return pa.Array.from_buffers(
type=pa_dtype,
length=len(self),
buffers=[mask, data],
null_count=self.null_count,
)
def default_na_value(self):
"""Returns the default NA value for this column
"""
dkind = self.dtype.kind
if dkind == "M":
return np.datetime64("nat", self.time_unit)
else:
raise TypeError(
"datetime column of {} has no NaN value".format(self.dtype)
)
def binary_operator(self, op, rhs, reflect=False):
lhs, rhs = self, rhs
if op in ("eq", "ne", "lt", "gt", "le", "ge"):
out_dtype = np.bool
else:
raise TypeError(
f"Series of dtype {self.dtype} cannot perform "
f" the operation {op}"
)
return binop(lhs, rhs, op=op, out_dtype=out_dtype)
def fillna(self, fill_value):
if is_scalar(fill_value):
fill_value = np.datetime64(fill_value, self.time_unit)
else:
fill_value = column.as_column(fill_value, nan_as_null=False)
result = libcudf.replace.replace_nulls(self, fill_value)
result = column.build_column(
result.base_data,
result.dtype,
mask=None,
offset=result.offset,
size=result.size,
)
return result
def min(self, dtype=None):
return libcudf.reduce.reduce("min", self, dtype=dtype)
def max(self, dtype=None):
return libcudf.reduce.reduce("max", self, dtype=dtype)
def find_first_value(self, value, closest=False):
"""
Returns offset of first value that matches
"""
value = pd.to_datetime(value)
value = column.as_column(value).as_numerical[0]
return self.as_numerical.find_first_value(value, closest=closest)
def find_last_value(self, value, closest=False):
"""
Returns offset of last value that matches
"""
value = pd.to_datetime(value)
value = column.as_column(value).as_numerical[0]
return self.as_numerical.find_last_value(value, closest=closest)
@property
def is_unique(self):
return self.as_numerical.is_unique
def can_cast_safely(self, to_dtype):
if np.issubdtype(to_dtype, np.datetime64):
to_res, _ = np.datetime_data(to_dtype)
self_res, _ = np.datetime_data(self.dtype)
max_int = np.iinfo(np.dtype("int64")).max
max_dist = self.max().astype(np.timedelta64, copy=False)
min_dist = self.min().astype(np.timedelta64, copy=False)
self_delta_dtype = np.timedelta64(0, self_res).dtype
if max_dist <= np.timedelta64(max_int, to_res).astype(
self_delta_dtype
) and min_dist <= np.timedelta64(max_int, to_res).astype(
self_delta_dtype
):
return True
else:
return False
elif to_dtype == np.dtype("int64") or to_dtype == np.dtype("O"):
# can safely cast to representation, or string
return True
else:
return False
@annotate("BINARY_OP", color="orange", domain="cudf_python")
def binop(lhs, rhs, op, out_dtype):
out = libcudf.binaryop.binaryop(lhs, rhs, op, out_dtype)
return out
def infer_format(element, **kwargs):
"""
Infers datetime format from a string, also takes cares for `ms` and `ns`
"""
import re
fmt = pd.core.tools.datetimes._guess_datetime_format(element, **kwargs)
if fmt is not None:
return fmt
element_parts = element.split(".")
if len(element_parts) != 2:
raise ValueError("Unable to infer the timestamp format from the data")
# There is possibility that the element is of following format
# '00:00:03.333333 2016-01-01'
second_part = re.split(r"(\D+)", element_parts[1], maxsplit=1)
subsecond_fmt = ".%" + str(len(second_part[0])) + "f"
first_part = pd.core.tools.datetimes._guess_datetime_format(
element_parts[0], **kwargs
)
# For the case where first_part is '00:00:03'
if first_part is None:
tmp = "1970-01-01 " + element_parts[0]
first_part = pd.core.tools.datetimes._guess_datetime_format(
tmp, **kwargs
).split(" ", 1)[1]
if first_part is None:
raise ValueError("Unable to infer the timestamp format from the data")
if len(second_part) > 1:
second_part = pd.core.tools.datetimes._guess_datetime_format(
"".join(second_part[1:]), **kwargs
)
else:
second_part = ""
try:
fmt = first_part + subsecond_fmt + second_part
except Exception:
raise ValueError("Unable to infer the timestamp format from the data")
return fmt
| # Copyright (c) 2019-2020, NVIDIA CORPORATION.
import datetime as dt
import numpy as np
import pandas as pd
import pyarrow as pa
import cudf._lib as libcudf
from cudf._lib.nvtx import annotate
from cudf.core.buffer import Buffer
from cudf.core.column import column
from cudf.utils import utils
from cudf.utils.dtypes import is_scalar, np_to_pa_dtype
# nanoseconds per time_unit
_numpy_to_pandas_conversion = {
"ns": 1,
"us": 1000,
"ms": 1000000,
"s": 1000000000,
"m": 60000000000,
"h": 3600000000000,
"D": 1000000000 * 86400,
}
class DatetimeColumn(column.ColumnBase):
def __init__(
self, data, dtype, mask=None, size=None, offset=0, null_count=None
):
"""
Parameters
----------
data : Buffer
The datetime values
dtype : np.dtype
The data type
mask : Buffer; optional
The validity mask
"""
dtype = np.dtype(dtype)
if data.size % dtype.itemsize:
raise ValueError("Buffer size must be divisible by element size")
if size is None:
size = data.size // dtype.itemsize
size = size - offset
super().__init__(
data,
size=size,
dtype=dtype,
mask=mask,
offset=offset,
null_count=null_count,
)
assert self.dtype.type is np.datetime64
self._time_unit, _ = np.datetime_data(self.dtype)
def __contains__(self, item):
# Handles improper item types
try:
item = np.datetime64(item, self._time_unit)
except Exception:
return False
return item.astype("int_") in self.as_numerical
@property
def time_unit(self):
return self._time_unit
@property
def year(self):
return self.get_dt_field("year")
@property
def month(self):
return self.get_dt_field("month")
@property
def day(self):
return self.get_dt_field("day")
@property
def hour(self):
return self.get_dt_field("hour")
@property
def minute(self):
return self.get_dt_field("minute")
@property
def second(self):
return self.get_dt_field("second")
@property
def weekday(self):
return self.get_dt_field("weekday")
def get_dt_field(self, field):
return libcudf.datetime.extract_datetime_component(self, field)
def normalize_binop_value(self, other):
if isinstance(other, dt.datetime):
other = np.datetime64(other)
if isinstance(other, pd.Timestamp):
m = _numpy_to_pandas_conversion[self.time_unit]
ary = utils.scalar_broadcast_to(
other.value * m, size=len(self), dtype=self.dtype
)
elif isinstance(other, np.datetime64):
other = other.astype(self.dtype)
ary = utils.scalar_broadcast_to(
other, size=len(self), dtype=self.dtype
)
else:
raise TypeError("cannot broadcast {}".format(type(other)))
return column.build_column(data=Buffer(ary), dtype=self.dtype)
@property
def as_numerical(self):
from cudf.core.column import build_column
return build_column(
data=self.base_data,
dtype=np.int64,
mask=self.base_mask,
offset=self.offset,
size=self.size,
)
def as_datetime_column(self, dtype, **kwargs):
dtype = np.dtype(dtype)
if dtype == self.dtype:
return self
return libcudf.unary.cast(self, dtype=dtype)
def as_numerical_column(self, dtype, **kwargs):
return self.as_numerical.astype(dtype)
def as_string_column(self, dtype, **kwargs):
from cudf.core.column import string
if len(self) > 0:
return string._numeric_to_str_typecast_functions[
np.dtype(self.dtype)
](self, **kwargs)
else:
return column.column_empty(0, dtype="object", masked=False)
def to_pandas(self, index=None):
return pd.Series(
self.to_array(fillna="pandas").astype(self.dtype), index=index
)
def to_arrow(self):
mask = None
if self.nullable:
mask = pa.py_buffer(self.mask_array_view.copy_to_host())
data = pa.py_buffer(self.as_numerical.data_array_view.copy_to_host())
pa_dtype = np_to_pa_dtype(self.dtype)
return pa.Array.from_buffers(
type=pa_dtype,
length=len(self),
buffers=[mask, data],
null_count=self.null_count,
)
def default_na_value(self):
"""Returns the default NA value for this column
"""
dkind = self.dtype.kind
if dkind == "M":
return np.datetime64("nat", self.time_unit)
else:
raise TypeError(
"datetime column of {} has no NaN value".format(self.dtype)
)
def binary_operator(self, op, rhs, reflect=False):
lhs, rhs = self, rhs
if op in ("eq", "ne", "lt", "gt", "le", "ge"):
out_dtype = np.bool
else:
raise TypeError(
f"Series of dtype {self.dtype} cannot perform "
f" the operation {op}"
)
return binop(lhs, rhs, op=op, out_dtype=out_dtype)
def fillna(self, fill_value):
if is_scalar(fill_value):
fill_value = np.datetime64(fill_value, self.time_unit)
else:
fill_value = column.as_column(fill_value, nan_as_null=False)
result = libcudf.replace.replace_nulls(self, fill_value)
result = column.build_column(
result.base_data,
result.dtype,
mask=None,
offset=result.offset,
size=result.size,
)
return result
def min(self, dtype=None):
return libcudf.reduce.reduce("min", self, dtype=dtype)
def max(self, dtype=None):
return libcudf.reduce.reduce("max", self, dtype=dtype)
def find_first_value(self, value, closest=False):
"""
Returns offset of first value that matches
"""
value = pd.to_datetime(value)
value = column.as_column(value).as_numerical[0]
return self.as_numerical.find_first_value(value, closest=closest)
def find_last_value(self, value, closest=False):
"""
Returns offset of last value that matches
"""
value = pd.to_datetime(value)
value = column.as_column(value).as_numerical[0]
return self.as_numerical.find_last_value(value, closest=closest)
@property
def is_unique(self):
return self.as_numerical.is_unique
def can_cast_safely(self, to_dtype):
if np.issubdtype(to_dtype, np.datetime64):
to_res, _ = np.datetime_data(to_dtype)
self_res, _ = np.datetime_data(self.dtype)
max_int = np.iinfo(np.dtype("int64")).max
max_dist = self.max().astype(np.timedelta64, copy=False)
min_dist = self.min().astype(np.timedelta64, copy=False)
self_delta_dtype = np.timedelta64(0, self_res).dtype
if max_dist <= np.timedelta64(max_int, to_res).astype(
self_delta_dtype
) and min_dist <= np.timedelta64(max_int, to_res).astype(
self_delta_dtype
):
return True
else:
return False
elif to_dtype == np.dtype("int64") or to_dtype == np.dtype("O"):
# can safely cast to representation, or string
return True
else:
return False
@annotate("BINARY_OP", color="orange", domain="cudf_python")
def binop(lhs, rhs, op, out_dtype):
out = libcudf.binaryop.binaryop(lhs, rhs, op, out_dtype)
return out
def infer_format(element, **kwargs):
"""
Infers datetime format from a string, also takes cares for `ms` and `ns`
"""
import re
fmt = pd.core.tools.datetimes._guess_datetime_format(element, **kwargs)
if fmt is not None:
return fmt
element_parts = element.split(".")
if len(element_parts) != 2:
raise ValueError("Unable to infer the timestamp format from the data")
# There is possibility that the element is of following format
# '00:00:03.333333 2016-01-01'
second_part = re.split(r"(\D+)", element_parts[1], maxsplit=1)
subsecond_fmt = ".%" + str(len(second_part[0])) + "f"
first_part = pd.core.tools.datetimes._guess_datetime_format(
element_parts[0], **kwargs
)
# For the case where first_part is '00:00:03'
if first_part is None:
tmp = "1970-01-01 " + element_parts[0]
first_part = pd.core.tools.datetimes._guess_datetime_format(
tmp, **kwargs
).split(" ", 1)[1]
if first_part is None:
raise ValueError("Unable to infer the timestamp format from the data")
if len(second_part) > 1:
second_part = pd.core.tools.datetimes._guess_datetime_format(
"".join(second_part[1:]), **kwargs
)
else:
second_part = ""
try:
fmt = first_part + subsecond_fmt + second_part
except Exception:
raise ValueError("Unable to infer the timestamp format from the data")
return fmt
| en | 0.626886 | # Copyright (c) 2019-2020, NVIDIA CORPORATION. # nanoseconds per time_unit Parameters ---------- data : Buffer The datetime values dtype : np.dtype The data type mask : Buffer; optional The validity mask # Handles improper item types Returns the default NA value for this column Returns offset of first value that matches Returns offset of last value that matches # can safely cast to representation, or string Infers datetime format from a string, also takes cares for `ms` and `ns` # There is possibility that the element is of following format # '00:00:03.333333 2016-01-01' # For the case where first_part is '00:00:03' | 2.665322 | 3 |
Lib/vanilla/vanillaBase.py | RafalBuchner/vanilla | 0 | 6624466 | <gh_stars>0
import platform
from distutils.version import StrictVersion
from Foundation import NSObject
from AppKit import NSFont, NSRegularControlSize, NSSmallControlSize, NSMiniControlSize, \
NSViewMinXMargin, NSViewMaxXMargin, NSViewMaxYMargin, NSViewMinYMargin, \
NSViewWidthSizable, NSViewHeightSizable, \
NSLayoutConstraint, NSLayoutFormatAlignAllLeft, \
NSLayoutAttributeLeft, NSLayoutAttributeRight, NSLayoutAttributeTop, NSLayoutAttributeBottom, NSLayoutAttributeLeading, NSLayoutAttributeTrailing, \
NSLayoutAttributeWidth, NSLayoutAttributeHeight, NSLayoutAttributeCenterX, NSLayoutAttributeCenterY, NSLayoutAttributeBaseline, \
NSLayoutRelationLessThanOrEqual, NSLayoutRelationEqual, NSLayoutRelationGreaterThanOrEqual
try:
from AppKit import NSLayoutAttributeLastBaseline, NSLayoutAttributeFirstBaseline
except ImportError:
NSLayoutAttributeLastBaseline = 11
NSLayoutAttributeFirstBaseline = 12
from vanilla.nsSubclasses import getNSSubclass
class VanillaError(Exception): pass
class VanillaWarning(Warning): pass
# --------------------
# OS Version Constants
# --------------------
macVersion = platform.mac_ver()[0]
if platform.system() != "Darwin":
macVersion = "0.0"
osVersionCurrent = StrictVersion(macVersion)
osVersion10_16 = StrictVersion("10.16") # macOS11 Big Sur seems to be 10.16
osVersion10_15 = StrictVersion("10.15")
osVersion10_14 = StrictVersion("10.14")
osVersion10_13 = StrictVersion("10.13")
osVersion10_12 = StrictVersion("10.12")
osVersion10_11 = StrictVersion("10.11")
osVersion10_10 = StrictVersion("10.10")
osVersion10_9 = StrictVersion("10.9")
osVersion10_8 = StrictVersion("10.8")
osVersion10_7 = StrictVersion("10.7")
osVersion10_6 = StrictVersion("10.6")
# ---------
# Base View
# ---------
class VanillaBaseObject(object):
    """
    Shared base for all vanilla objects that wrap an NSView.

    Subclasses create their Cocoa view through ``_setupView`` and are
    inserted into the view hierarchy when assigned as attributes of a
    parent vanilla object (see the module-level ``_setAttr``/``_delAttr``).
    """
    # Per-class frame fine-tuning: either a (left, bottom, width, height)
    # 4-tuple, or a dict keyed by size style ("regular"/"small"/"mini").
    frameAdjustments = None
    def __setattr__(self, attr, value):
        # Delegate to the module helper so child vanilla objects get wired
        # into the Cocoa view hierarchy on assignment.
        _setAttr(VanillaBaseObject, self, attr, value)
    def __delattr__(self, attr):
        _delAttr(VanillaBaseObject, self, attr)
    def _setupView(self, classOrName, posSize, callback=None):
        # Instantiate the (possibly dynamically generated) NSView subclass
        # and remember posSize for later frame calculations.
        self._autoLayoutViews = {}
        self._testForDeprecatedAttributes()
        cls = getNSSubclass(classOrName)
        self._nsObject = cls(self)
        self._posSize = posSize
        self._setCallback(callback)
        self._setAutosizingFromPosSize(posSize)
    def _breakCycles(self):
        # Drop the target's reference to the user callback so the
        # Python <-> Objective-C reference cycle can be collected.
        if hasattr(self, "_target"):
            self._target.callback = None
    def _testForDeprecatedAttributes(self):
        # Migrate pre-rename underscore attributes, warning once per object.
        from warnings import warn
        if hasattr(self, "_frameAdjustments"):
            warn(DeprecationWarning("The _frameAdjustments attribute is deprecated. Use the frameAdjustments attribute."))
            self.frameAdjustments = self._frameAdjustments
        if hasattr(self, "_allFrameAdjustments"):
            warn(DeprecationWarning("The _allFrameAdjustments attribute is deprecated. Use the allFrameAdjustments attribute."))
            self.allFrameAdjustments = self._allFrameAdjustments
    def _setCallback(self, callback):
        # Bridge the Python callable into Cocoa's target/action mechanism.
        if callback is not None:
            self._target = VanillaCallbackWrapper(callback)
            self._nsObject.setTarget_(self._target)
            self._nsObject.setAction_("action:")
    def _setAutosizingFromPosSize(self, posSize):
        # Translate the vanilla posSize convention (negative = offset from
        # the far edge, non-positive size = relative to superview) into an
        # NSView autoresizing mask. "auto" layout opts out entirely.
        if posSize == "auto":
            return
        l, t, w, h = posSize
        mask = 0
        if l < 0:
            mask |= NSViewMinXMargin
        if w <= 0 and (w > 0 or l >= 0):
            # relative width anchored at the left edge: track superview width
            mask |= NSViewWidthSizable
        if w > 0 and l >= 0:
            mask |= NSViewMaxXMargin
        if t < 0:
            mask |= NSViewMaxYMargin
        if h <= 0 and (h > 0 or t >= 0):
            mask |= NSViewHeightSizable
        if h > 0 and t >= 0:
            mask |= NSViewMinYMargin
        self._nsObject.setAutoresizingMask_(mask)
    def _setFrame(self, parentFrame, animate=False):
        # Recompute and apply the Cocoa frame from the stored posSize.
        if self._posSize == "auto":
            return
        l, t, w, h = self._posSize
        frame = _calcFrame(parentFrame, ((l, t), (w, h)))
        frame = self._adjustPosSize(frame)
        if animate:
            self._nsObject.animator().setFrame_(frame)
        else:
            self._nsObject.setFrame_(frame)
    def _adjustPosSize(self, frame):
        # Apply the class-level frameAdjustments deltas, selecting the
        # size-style specific entry when the wrapped control has a cell.
        if hasattr(self._nsObject, "cell") and self._nsObject.cell() is not None:
            sizeStyle = _reverseSizeStyleMap[self._nsObject.cell().controlSize()]
        else:
            sizeStyle = None
        adjustments = self.frameAdjustments
        if adjustments:
            if sizeStyle is None:
                aL, aB, aW, aH = adjustments
            else:
                aL, aB, aW, aH = adjustments.get(sizeStyle, (0, 0, 0, 0))
            (fL, fB), (fW, fH) = frame
            fL = fL + aL
            fB = fB + aB
            fW = fW + aW
            fH = fH + aH
            frame = ((fL, fB), (fW, fH))
        return frame
    def _getContentView(self):
        # Subclasses with nested views (e.g. scroll views) override this.
        return self._nsObject
    def enable(self, onOff):
        """
        Enable or disable the object. *onOff* should be a boolean.
        """
        self._nsObject.setEnabled_(onOff)
    def isVisible(self):
        """
        Return a bool indicating if the object is visible or not.
        """
        return not self._nsObject.isHidden()
    def show(self, onOff):
        """
        Show or hide the object.
        **onOff** A boolean value representing if the object should be shown or not.
        """
        self._nsObject.setHidden_(not onOff)
    def getPosSize(self):
        """
        The position and size of the object as a tuple of form *(left, top, width, height)*.
        """
        return self._posSize
    def setPosSize(self, posSize, animate=False):
        """
        Set the position and size of the object.
        **posSize** A tuple of form *(left, top, width, height)*.
        **animate** A boolean flag telling to animate the transition. Off by default.
        """
        self._posSize = posSize
        if posSize == "auto":
            return
        self._setAutosizingFromPosSize(posSize)
        superview = self._nsObject.superview()
        if superview is not None:
            self._setFrame(superview.frame(), animate)
            superview.setNeedsDisplay_(True)
    def addAutoPosSizeRules(self, rules, metrics=None):
        """
        Add auto layout rules for controls/view in this view.
        **rules** must be a list of rule definitions.
        Rule definitions may take two forms:
        * strings that follow the `Visual Format Language`_
        * dictionaries with the following key/value pairs:
        +---------------------------+-------------------------------------------------------------------+
        | key                       | value                                                             |
        +===========================+===================================================================+
        | *"view1"*                 | The vanilla wrapped view for the left side of the rule.           |
        +---------------------------+-------------------------------------------------------------------+
        | *"attribute1"*            | The attribute of the view for the left side of the rule.          |
        |                           | See below for options.                                            |
        +---------------------------+-------------------------------------------------------------------+
        | *"relation"* (optional)   | The relationship between the left side of the rule                |
        |                           | and the right side of the rule. See below for options.            |
        |                           | The default value is `"=="`.                                      |
        +---------------------------+-------------------------------------------------------------------+
        | *"view2"*                 | The vanilla wrapped view for the right side of the rule.          |
        +---------------------------+-------------------------------------------------------------------+
        | *"attribute2"*            | The attribute of the view for the right side of the rule.         |
        |                           | See below for options.                                            |
        +---------------------------+-------------------------------------------------------------------+
        | *"multiplier"* (optional) | The constant multiplied with the attribute on the right side of   |
        |                           | the rule as part of getting the modified attribute.               |
        |                           | The default value is `1`.                                         |
        +---------------------------+-------------------------------------------------------------------+
        | *"constant"* (optional)   | The constant added to the multiplied attribute value on the right |
        |                           | side of the rule to yield the final modified attribute.           |
        |                           | The default value is `0`.                                         |
        +---------------------------+-------------------------------------------------------------------+
        The `attribute1` and `attribute2` options are:
        +-------------------+--------------------------------+
        | value             | AppKit equivalent              |
        +===================+================================+
        | *"left"*          | NSLayoutAttributeLeft          |
        +-------------------+--------------------------------+
        | *"right"*         | NSLayoutAttributeRight         |
        +-------------------+--------------------------------+
        | *"top"*           | NSLayoutAttributeTop           |
        +-------------------+--------------------------------+
        | *"bottom"*        | NSLayoutAttributeBottom        |
        +-------------------+--------------------------------+
        | *"leading"*       | NSLayoutAttributeLeading       |
        +-------------------+--------------------------------+
        | *"trailing"*      | NSLayoutAttributeTrailing      |
        +-------------------+--------------------------------+
        | *"width"*         | NSLayoutAttributeWidth         |
        +-------------------+--------------------------------+
        | *"height"*        | NSLayoutAttributeHeight        |
        +-------------------+--------------------------------+
        | *"centerX"*       | NSLayoutAttributeCenterX       |
        +-------------------+--------------------------------+
        | *"centerY"*       | NSLayoutAttributeCenterY       |
        +-------------------+--------------------------------+
        | *"baseline"*      | NSLayoutAttributeBaseline      |
        +-------------------+--------------------------------+
        | *"lastBaseline"*  | NSLayoutAttributeLastBaseline  |
        +-------------------+--------------------------------+
        | *"firstBaseline"* | NSLayoutAttributeFirstBaseline |
        +-------------------+--------------------------------+
        Refer to the `NSLayoutAttribute`_ documentation for the information about what each of these do.
        The `relation` options are:
        +--------+------------------------------------+
        | value  | AppKit equivalent                  |
        +========+====================================+
        | *"<="* | NSLayoutRelationLessThanOrEqual    |
        +--------+------------------------------------+
        | *"=="* | NSLayoutRelationEqual              |
        +--------+------------------------------------+
        | *">="* | NSLayoutRelationGreaterThanOrEqual |
        +--------+------------------------------------+
        Refer to the `NSLayoutRelation`_ documentation for the information about what each of these do.
        **metrics** may be either *None* or a dict containing
        key value pairs representing metrics keywords used in the
        rules defined with strings.
        .. _Visual Format Language: https://developer.apple.com/library/archive/documentation/UserExperience/Conceptual/AutolayoutPG/VisualFormatLanguage.html#//apple_ref/doc/uid/TP40010853-CH27-SW1
        .. _NSLayoutAttribute: https://developer.apple.com/documentation/uikit/nslayoutattribute?language=objc
        .. _NSLayoutRelation: https://developer.apple.com/documentation/uikit/nslayoutrelation?language=objc
        """
        _addAutoLayoutRules(self, rules, metrics)
    def move(self, x, y):
        """
        Move the object by *x* units and *y* units.
        """
        posSize = self.getPosSize()
        if posSize == "auto":
            return
        l, t, w, h = posSize
        l = l + x
        t = t + y
        self.setPosSize((l, t, w, h))
    def resize(self, width, height):
        """
        Change the size of the object to *width* and *height*.
        """
        posSize = self.getPosSize()
        if posSize == "auto":
            return
        l, t, w, h = posSize
        self.setPosSize((l, t, width, height))
# ------------
# Base Control
# ------------
# Map vanilla size-style names to the AppKit control-size constants, plus
# the inverse mapping for recovering a style name from a cell's size.
_sizeStyleMap = {
    "regular": NSRegularControlSize,
    "small": NSSmallControlSize,
    "mini": NSMiniControlSize
}
_reverseSizeStyleMap = {size: name for name, size in _sizeStyleMap.items()}
class VanillaBaseControl(VanillaBaseObject):
    """Shared behavior for vanilla objects that wrap NSControl subclasses."""
    def _setSizeStyle(self, value):
        # Resolve the symbolic size ("regular"/"small"/"mini") and keep the
        # control's font in step with the resulting cell size.
        controlSize = _sizeStyleMap[value]
        self._nsObject.cell().setControlSize_(controlSize)
        systemSize = NSFont.systemFontSizeForControlSize_(controlSize)
        self._nsObject.setFont_(NSFont.systemFontOfSize_(systemSize))
    def setTitle(self, title):
        """
        Set the control title.
        **title** A string representing the title.
        """
        self._nsObject.setTitle_(title)
    def getTitle(self):
        """
        Get the control title.
        """
        return self._nsObject.title()
    def isEnabled(self):
        """
        Return a bool indicating if the control is enabled or not.
        """
        return self._nsObject.isEnabled()
    def set(self, value):
        # Subclasses must implement value assignment.
        raise NotImplementedError
    def get(self):
        # Subclasses must implement value retrieval.
        raise NotImplementedError
    def bind(self, key, callback):
        # Subclasses must implement key binding.
        raise NotImplementedError
# -------------------
# Sub-View Management
# -------------------
def _recursiveSetFrame(view):
for subview in view.subviews():
if hasattr(subview, "vanillaWrapper"):
obj = subview.vanillaWrapper()
if obj is not None and hasattr(obj, "_posSize"):
obj.setPosSize(obj.getPosSize())
_recursiveSetFrame(subview)
def _setAttr(cls, obj, attr, value):
    # Hook behind VanillaBaseObject.__setattr__: wires vanilla sub-objects
    # into the Cocoa view hierarchy when assigned as attributes.
    if hasattr(value, "getPosSize") and value.getPosSize() == "auto":
        # Auto-layout objects opt out of autoresizing masks and are
        # registered under their attribute name for visual-format lookups.
        view = value._nsObject
        view.setTranslatesAutoresizingMaskIntoConstraints_(False)
        obj._autoLayoutViews[attr] = view
    if isinstance(value, VanillaBaseObject) and hasattr(value, "_posSize"):
        assert not hasattr(obj, attr), "can't replace vanilla attribute"
        view = obj._getContentView()
        frame = view.frame()
        value._setFrame(frame)
        view.addSubview_(value._nsObject)
        # Re-apply posSize down the subtree now that the superview is known.
        _recursiveSetFrame(value._nsObject)
    #elif isinstance(value, NSView) and not attr.startswith("_"):
    #    assert not hasattr(obj, attr), "can't replace vanilla attribute"
    #    view = obj._getContentView()
    #    view.addSubview_(value)
    # Always store the value on the instance through the real __setattr__.
    super(cls, obj).__setattr__(attr, value)
def _delAttr(cls, obj, attr):
    # Hook behind VanillaBaseObject.__delattr__: detaches a vanilla
    # sub-object from the Cocoa view hierarchy before the attribute is
    # removed from the instance.
    if hasattr(obj, "_autoLayoutViews"):
        if attr in obj._autoLayoutViews:
            del obj._autoLayoutViews[attr]
    value = getattr(obj, attr)
    if isinstance(value, VanillaBaseObject):
        value._nsObject.removeFromSuperview()
    #elif isinstance(value, NSView):
    #    value.removeFromSuperview()
    super(cls, obj).__delattr__(attr)
# -------------------
# Auto Layout Support
# -------------------
# Translate the string attribute/relation names accepted in rule dicts
# (see VanillaBaseObject.addAutoPosSizeRules) into their AppKit constants.
_layoutAttributeMap = dict(
    left=NSLayoutAttributeLeft,
    right=NSLayoutAttributeRight,
    top=NSLayoutAttributeTop,
    bottom=NSLayoutAttributeBottom,
    leading=NSLayoutAttributeLeading,
    trailing=NSLayoutAttributeTrailing,
    width=NSLayoutAttributeWidth,
    height=NSLayoutAttributeHeight,
    centerX=NSLayoutAttributeCenterX,
    centerY=NSLayoutAttributeCenterY,
    baseline=NSLayoutAttributeBaseline,
    lastBaseline=NSLayoutAttributeLastBaseline,
    firstBaseline=NSLayoutAttributeFirstBaseline,
)
_layoutRelationMap = {
    "<=" : NSLayoutRelationLessThanOrEqual,
    "==" : NSLayoutRelationEqual,
    ">=" : NSLayoutRelationGreaterThanOrEqual
}
def _addAutoLayoutRules(obj, rules, metrics=None):
    """
    Install auto layout constraints on *obj*'s content view.

    Each entry in *rules* is either a Visual Format Language string or a
    dict describing one constraint (see addAutoPosSizeRules for the keys).
    """
    hostView = obj._getContentView()
    metrics = {} if metrics is None else metrics
    for rule in rules:
        if not isinstance(rule, dict):
            # Visual format string: expand against the registered
            # auto-layout views, keyed by attribute name.
            constraints = NSLayoutConstraint.constraintsWithVisualFormat_options_metrics_views_(
                rule,
                0,
                metrics,
                obj._autoLayoutViews
            )
        else:
            constraints = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_(
                rule["view1"]._getContentView(),
                _layoutAttributeMap[rule["attribute1"]],
                _layoutRelationMap[rule.get("relation", "==")],
                rule["view2"]._getContentView(),
                _layoutAttributeMap[rule["attribute2"]],
                rule.get("multiplier", 1),
                rule.get("constant", 0)
            )
        hostView.addConstraints_(constraints)
# --------------------------
# Frame-Based Layout Support
# --------------------------
def _calcFrame(parentFrame, posSize, absolutePositioning=False):
"""
Convert a vanilla posSize rect to a Cocoa frame.
"""
(pL, pB), (pW, pH) = parentFrame
(l, t), (w, h) = posSize
if not absolutePositioning:
if l < 0:
l = pW + l
if w <= 0:
w = pW + w - l
if t < 0:
t = pH + t
if h <= 0:
h = pH + h - t
b = pH - t - h # flip it upside down
return (l, b), (w, h)
def _flipFrame(parentFrame, objFrame):
"""
Translate a Cocoa frame to vanilla coordinates.
"""
(pL, pB), (pW, pH) = parentFrame
(oL, oB), (oW, oH) = objFrame
oT = pH - oB - oH
return oL, oT, oW, oH
# ----------------
# Callback Support
# ----------------
class VanillaCallbackWrapper(NSObject):
    # Bridges a Python callable into a Cocoa target/action pair: the
    # wrapper becomes a control's target, and the control's action is
    # "action:" (see VanillaBaseObject._setCallback).
    def __new__(cls, callback):
        # Objective-C objects are created through alloc/init, not __new__.
        return cls.alloc().initWithCallback_(callback)
    def initWithCallback_(self, callback):
        self = self.init()
        self.callback = callback
        return self
    def action_(self, sender):
        # Unwrap the NSObject back to its vanilla wrapper before invoking
        # the user callback. callback may be None after _breakCycles.
        if hasattr(sender, "vanillaWrapper"):
            sender = sender.vanillaWrapper()
        if self.callback is not None:
            self.callback(sender)
def _breakCycles(view):
"""
Break cyclic references by deleting _target attributes.
"""
if hasattr(view, "vanillaWrapper"):
obj = view.vanillaWrapper()
if hasattr(obj, "_breakCycles"):
obj._breakCycles()
for view in view.subviews():
_breakCycles(view)
| import platform
from distutils.version import StrictVersion
from Foundation import NSObject
from AppKit import NSFont, NSRegularControlSize, NSSmallControlSize, NSMiniControlSize, \
NSViewMinXMargin, NSViewMaxXMargin, NSViewMaxYMargin, NSViewMinYMargin, \
NSViewWidthSizable, NSViewHeightSizable, \
NSLayoutConstraint, NSLayoutFormatAlignAllLeft, \
NSLayoutAttributeLeft, NSLayoutAttributeRight, NSLayoutAttributeTop, NSLayoutAttributeBottom, NSLayoutAttributeLeading, NSLayoutAttributeTrailing, \
NSLayoutAttributeWidth, NSLayoutAttributeHeight, NSLayoutAttributeCenterX, NSLayoutAttributeCenterY, NSLayoutAttributeBaseline, \
NSLayoutRelationLessThanOrEqual, NSLayoutRelationEqual, NSLayoutRelationGreaterThanOrEqual
try:
from AppKit import NSLayoutAttributeLastBaseline, NSLayoutAttributeFirstBaseline
except ImportError:
NSLayoutAttributeLastBaseline = 11
NSLayoutAttributeFirstBaseline = 12
from vanilla.nsSubclasses import getNSSubclass
class VanillaError(Exception): pass
class VanillaWarning(Warning): pass
# --------------------
# OS Version Constants
# --------------------
macVersion = platform.mac_ver()[0]
if platform.system() != "Darwin":
macVersion = "0.0"
osVersionCurrent = StrictVersion(macVersion)
osVersion10_16 = StrictVersion("10.16") # macOS11 Big Sur seems to be 10.16
osVersion10_15 = StrictVersion("10.15")
osVersion10_14 = StrictVersion("10.14")
osVersion10_13 = StrictVersion("10.13")
osVersion10_12 = StrictVersion("10.12")
osVersion10_11 = StrictVersion("10.11")
osVersion10_10 = StrictVersion("10.10")
osVersion10_9 = StrictVersion("10.9")
osVersion10_8 = StrictVersion("10.8")
osVersion10_7 = StrictVersion("10.7")
osVersion10_6 = StrictVersion("10.6")
# ---------
# Base View
# ---------
class VanillaBaseObject(object):
frameAdjustments = None
def __setattr__(self, attr, value):
_setAttr(VanillaBaseObject, self, attr, value)
def __delattr__(self, attr):
_delAttr(VanillaBaseObject, self, attr)
def _setupView(self, classOrName, posSize, callback=None):
self._autoLayoutViews = {}
self._testForDeprecatedAttributes()
cls = getNSSubclass(classOrName)
self._nsObject = cls(self)
self._posSize = posSize
self._setCallback(callback)
self._setAutosizingFromPosSize(posSize)
def _breakCycles(self):
if hasattr(self, "_target"):
self._target.callback = None
def _testForDeprecatedAttributes(self):
from warnings import warn
if hasattr(self, "_frameAdjustments"):
warn(DeprecationWarning("The _frameAdjustments attribute is deprecated. Use the frameAdjustments attribute."))
self.frameAdjustments = self._frameAdjustments
if hasattr(self, "_allFrameAdjustments"):
warn(DeprecationWarning("The _allFrameAdjustments attribute is deprecated. Use the allFrameAdjustments attribute."))
self.allFrameAdjustments = self._allFrameAdjustments
def _setCallback(self, callback):
if callback is not None:
self._target = VanillaCallbackWrapper(callback)
self._nsObject.setTarget_(self._target)
self._nsObject.setAction_("action:")
def _setAutosizingFromPosSize(self, posSize):
if posSize == "auto":
return
l, t, w, h = posSize
mask = 0
if l < 0:
mask |= NSViewMinXMargin
if w <= 0 and (w > 0 or l >= 0):
mask |= NSViewWidthSizable
if w > 0 and l >= 0:
mask |= NSViewMaxXMargin
if t < 0:
mask |= NSViewMaxYMargin
if h <= 0 and (h > 0 or t >= 0):
mask |= NSViewHeightSizable
if h > 0 and t >= 0:
mask |= NSViewMinYMargin
self._nsObject.setAutoresizingMask_(mask)
def _setFrame(self, parentFrame, animate=False):
if self._posSize == "auto":
return
l, t, w, h = self._posSize
frame = _calcFrame(parentFrame, ((l, t), (w, h)))
frame = self._adjustPosSize(frame)
if animate:
self._nsObject.animator().setFrame_(frame)
else:
self._nsObject.setFrame_(frame)
def _adjustPosSize(self, frame):
if hasattr(self._nsObject, "cell") and self._nsObject.cell() is not None:
sizeStyle = _reverseSizeStyleMap[self._nsObject.cell().controlSize()]
else:
sizeStyle = None
adjustments = self.frameAdjustments
if adjustments:
if sizeStyle is None:
aL, aB, aW, aH = adjustments
else:
aL, aB, aW, aH = adjustments.get(sizeStyle, (0, 0, 0, 0))
(fL, fB), (fW, fH) = frame
fL = fL + aL
fB = fB + aB
fW = fW + aW
fH = fH + aH
frame = ((fL, fB), (fW, fH))
return frame
def _getContentView(self):
return self._nsObject
def enable(self, onOff):
"""
Enable or disable the object. *onOff* should be a boolean.
"""
self._nsObject.setEnabled_(onOff)
def isVisible(self):
"""
Return a bool indicating if the object is visible or not.
"""
return not self._nsObject.isHidden()
def show(self, onOff):
"""
Show or hide the object.
**onOff** A boolean value representing if the object should be shown or not.
"""
self._nsObject.setHidden_(not onOff)
def getPosSize(self):
"""
The position and size of the object as a tuple of form *(left, top, width, height)*.
"""
return self._posSize
def setPosSize(self, posSize, animate=False):
"""
Set the position and size of the object.
**posSize** A tuple of form *(left, top, width, height)*.
**animate** A boolean flag telling to animate the transition. Off by default.
"""
self._posSize = posSize
if posSize == "auto":
return
self._setAutosizingFromPosSize(posSize)
superview = self._nsObject.superview()
if superview is not None:
self._setFrame(superview.frame(), animate)
superview.setNeedsDisplay_(True)
def addAutoPosSizeRules(self, rules, metrics=None):
"""
Add auto layout rules for controls/view in this view.
**rules** must be a list of rule definitions.
Rule definitions may take two forms:
* strings that follow the `Visual Format Language`_
* dictionaries with the following key/value pairs:
+---------------------------+-------------------------------------------------------------------+
| key | value |
+===========================+===================================================================+
| *"view1"* | The vanilla wrapped view for the left side of the rule. |
+---------------------------+-------------------------------------------------------------------+
| *"attribute1"* | The attribute of the view for the left side of the rule. |
| | See below for options. |
+---------------------------+-------------------------------------------------------------------+
| *"relation"* (optional) | The relationship between the left side of the rule |
| | and the right side of the rule. See below for options. |
| | The default value is `"=="`. |
+---------------------------+-------------------------------------------------------------------+
| *"view2"* | The vanilla wrapped view for the right side of the rule. |
+---------------------------+-------------------------------------------------------------------+
| *"attribute2"* | The attribute of the view for the right side of the rule. |
| | See below for options. |
+---------------------------+-------------------------------------------------------------------+
| *"multiplier"* (optional) | The constant multiplied with the attribute on the right side of |
| | the rule as part of getting the modified attribute. |
| | The default value is `1`. |
+---------------------------+-------------------------------------------------------------------+
| *"constant"* (optional) | The constant added to the multiplied attribute value on the right |
| | side of the rule to yield the final modified attribute. |
| | The default value is `0`. |
+---------------------------+-------------------------------------------------------------------+
The `attribute1` and `attribute2` options are:
+-------------------+--------------------------------+
| value | AppKit equivalent |
+===================+================================+
| *"left"* | NSLayoutAttributeLeft |
+-------------------+--------------------------------+
| *"right"* | NSLayoutAttributeRight |
+-------------------+--------------------------------+
| *"top"* | NSLayoutAttributeTop |
+-------------------+--------------------------------+
| *"bottom"* | NSLayoutAttributeBottom |
+-------------------+--------------------------------+
| *"leading"* | NSLayoutAttributeLeading |
+-------------------+--------------------------------+
| *"trailing"* | NSLayoutAttributeTrailing |
+-------------------+--------------------------------+
| *"width"* | NSLayoutAttributeWidth |
+-------------------+--------------------------------+
| *"height"* | NSLayoutAttributeHeight |
+-------------------+--------------------------------+
| *"centerX"* | NSLayoutAttributeCenterX |
+-------------------+--------------------------------+
| *"centerY"* | NSLayoutAttributeCenterY |
+-------------------+--------------------------------+
| *"baseline"* | NSLayoutAttributeBaseline |
+-------------------+--------------------------------+
| *"lastBaseline"* | NSLayoutAttributeLastBaseline |
+-------------------+--------------------------------+
| *"firstBaseline"* | NSLayoutAttributeFirstBaseline |
+-------------------+--------------------------------+
Refer to the `NSLayoutAttribute`_ documentation for the information about what each of these do.
The `relation` options are:
+--------+------------------------------------+
| value | AppKit equivalent |
+========+====================================+
| *"<="* | NSLayoutRelationLessThanOrEqual |
+--------+------------------------------------+
| *"=="* | NSLayoutRelationEqual |
+--------+------------------------------------+
| *">="* | NSLayoutRelationGreaterThanOrEqual |
+--------+------------------------------------+
Refer to the `NSLayoutRelation`_ documentation for the information about what each of these do.
**metrics** may be either *None* or a dict containing
key value pairs representing metrics keywords used in the
rules defined with strings.
.. _Visual Format Language: https://developer.apple.com/library/archive/documentation/UserExperience/Conceptual/AutolayoutPG/VisualFormatLanguage.html#//apple_ref/doc/uid/TP40010853-CH27-SW1
.. _NSLayoutAttribute: https://developer.apple.com/documentation/uikit/nslayoutattribute?language=objc
.. _NSLayoutRelation: https://developer.apple.com/documentation/uikit/nslayoutrelation?language=objc
"""
_addAutoLayoutRules(self, rules, metrics)
def move(self, x, y):
"""
Move the object by *x* units and *y* units.
"""
posSize = self.getPosSize()
if posSize == "auto":
return
l, t, w, h = posSize
l = l + x
t = t + y
self.setPosSize((l, t, w, h))
def resize(self, width, height):
"""
Change the size of the object to *width* and *height*.
"""
posSize = self.getPosSize()
if posSize == "auto":
return
l, t, w, h = posSize
self.setPosSize((l, t, width, height))
# ------------
# Base Control
# ------------
_sizeStyleMap = {
"regular": NSRegularControlSize,
"small": NSSmallControlSize,
"mini": NSMiniControlSize
}
_reverseSizeStyleMap = {
NSRegularControlSize: "regular",
NSSmallControlSize: "small",
NSMiniControlSize: "mini"
}
class VanillaBaseControl(VanillaBaseObject):
def _setSizeStyle(self, value):
value = _sizeStyleMap[value]
self._nsObject.cell().setControlSize_(value)
font = NSFont.systemFontOfSize_(NSFont.systemFontSizeForControlSize_(value))
self._nsObject.setFont_(font)
def setTitle(self, title):
"""
Set the control title.
**title** A string representing the title.
"""
self._nsObject.setTitle_(title)
def getTitle(self):
"""
Get the control title.
"""
return self._nsObject.title()
def isEnabled(self):
"""
Return a bool indicating if the object is enable or not.
"""
return self._nsObject.isEnabled()
def set(self, value):
raise NotImplementedError
def get(self):
raise NotImplementedError
def bind(self, key, callback):
raise NotImplementedError
# -------------------
# Sub-View Management
# -------------------
def _recursiveSetFrame(view):
for subview in view.subviews():
if hasattr(subview, "vanillaWrapper"):
obj = subview.vanillaWrapper()
if obj is not None and hasattr(obj, "_posSize"):
obj.setPosSize(obj.getPosSize())
_recursiveSetFrame(subview)
def _setAttr(cls, obj, attr, value):
if hasattr(value, "getPosSize") and value.getPosSize() == "auto":
view = value._nsObject
view.setTranslatesAutoresizingMaskIntoConstraints_(False)
obj._autoLayoutViews[attr] = view
if isinstance(value, VanillaBaseObject) and hasattr(value, "_posSize"):
assert not hasattr(obj, attr), "can't replace vanilla attribute"
view = obj._getContentView()
frame = view.frame()
value._setFrame(frame)
view.addSubview_(value._nsObject)
_recursiveSetFrame(value._nsObject)
#elif isinstance(value, NSView) and not attr.startswith("_"):
# assert not hasattr(obj, attr), "can't replace vanilla attribute"
# view = obj._getContentView()
# view.addSubview_(value)
super(cls, obj).__setattr__(attr, value)
def _delAttr(cls, obj, attr):
if hasattr(obj, "_autoLayoutViews"):
if attr in obj._autoLayoutViews:
del obj._autoLayoutViews[attr]
value = getattr(obj, attr)
if isinstance(value, VanillaBaseObject):
value._nsObject.removeFromSuperview()
#elif isinstance(value, NSView):
# value.removeFromSuperview()
super(cls, obj).__delattr__(attr)
# -------------------
# Auto Layout Support
# -------------------
_layoutAttributeMap = dict(
left=NSLayoutAttributeLeft,
right=NSLayoutAttributeRight,
top=NSLayoutAttributeTop,
bottom=NSLayoutAttributeBottom,
leading=NSLayoutAttributeLeading,
trailing=NSLayoutAttributeTrailing,
width=NSLayoutAttributeWidth,
height=NSLayoutAttributeHeight,
centerX=NSLayoutAttributeCenterX,
centerY=NSLayoutAttributeCenterY,
baseline=NSLayoutAttributeBaseline,
lastBaseline=NSLayoutAttributeLastBaseline,
firstBaseline=NSLayoutAttributeFirstBaseline,
)
_layoutRelationMap = {
"<=" : NSLayoutRelationLessThanOrEqual,
"==" : NSLayoutRelationEqual,
">=" : NSLayoutRelationGreaterThanOrEqual
}
def _addAutoLayoutRules(obj, rules, metrics=None):
view = obj._getContentView()
if metrics is None:
metrics = {}
for rule in rules:
if isinstance(rule, dict):
view1 = rule["view1"]._getContentView()
attribute1 = _layoutAttributeMap[rule["attribute1"]]
relation = _layoutRelationMap[rule.get("relation", "==")]
view2 = rule["view2"]._getContentView()
attribute2 = _layoutAttributeMap[rule["attribute2"]]
multiplier = rule.get("multiplier", 1)
constant = rule.get("constant", 0)
constraints = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_(
view1,
attribute1,
relation,
view2,
attribute2,
multiplier,
constant
)
else:
constraints = NSLayoutConstraint.constraintsWithVisualFormat_options_metrics_views_(
rule,
0,
metrics,
obj._autoLayoutViews
)
view.addConstraints_(constraints)
# --------------------------
# Frame-Based Layout Support
# --------------------------
def _calcFrame(parentFrame, posSize, absolutePositioning=False):
"""
Convert a vanilla posSize rect to a Cocoa frame.
"""
(pL, pB), (pW, pH) = parentFrame
(l, t), (w, h) = posSize
if not absolutePositioning:
if l < 0:
l = pW + l
if w <= 0:
w = pW + w - l
if t < 0:
t = pH + t
if h <= 0:
h = pH + h - t
b = pH - t - h # flip it upside down
return (l, b), (w, h)
def _flipFrame(parentFrame, objFrame):
"""
Translate a Cocoa frame to vanilla coordinates.
"""
(pL, pB), (pW, pH) = parentFrame
(oL, oB), (oW, oH) = objFrame
oT = pH - oB - oH
return oL, oT, oW, oH
# ----------------
# Callback Support
# ----------------
class VanillaCallbackWrapper(NSObject):
def __new__(cls, callback):
return cls.alloc().initWithCallback_(callback)
def initWithCallback_(self, callback):
self = self.init()
self.callback = callback
return self
def action_(self, sender):
if hasattr(sender, "vanillaWrapper"):
sender = sender.vanillaWrapper()
if self.callback is not None:
self.callback(sender)
def _breakCycles(view):
"""
Break cyclic references by deleting _target attributes.
"""
if hasattr(view, "vanillaWrapper"):
obj = view.vanillaWrapper()
if hasattr(obj, "_breakCycles"):
obj._breakCycles()
for view in view.subviews():
_breakCycles(view) | en | 0.466715 | # -------------------- # OS Version Constants # -------------------- # macOS11 Big Sur seems to be 10.16 # --------- # Base View # --------- Enable or disable the object. *onOff* should be a boolean. Return a bool indicating if the object is visible or not. Show or hide the object. **onOff** A boolean value representing if the object should be shown or not. The position and size of the object as a tuple of form *(left, top, width, height)*. Set the position and size of the object. **posSize** A tuple of form *(left, top, width, height)*. **animate** A boolean flag telling to animate the transition. Off by default. Add auto layout rules for controls/view in this view. **rules** must be a list of rule definitions. Rule definitions may take two forms: * strings that follow the `Visual Format Language`_ * dictionaries with the following key/value pairs: +---------------------------+-------------------------------------------------------------------+ | key | value | +===========================+===================================================================+ | *"view1"* | The vanilla wrapped view for the left side of the rule. | +---------------------------+-------------------------------------------------------------------+ | *"attribute1"* | The attribute of the view for the left side of the rule. | | | See below for options. | +---------------------------+-------------------------------------------------------------------+ | *"relation"* (optional) | The relationship between the left side of the rule | | | and the right side of the rule. See below for options. | | | The default value is `"=="`. | +---------------------------+-------------------------------------------------------------------+ | *"view2"* | The vanilla wrapped view for the right side of the rule. 
| +---------------------------+-------------------------------------------------------------------+ | *"attribute2"* | The attribute of the view for the right side of the rule. | | | See below for options. | +---------------------------+-------------------------------------------------------------------+ | *"multiplier"* (optional) | The constant multiplied with the attribute on the right side of | | | the rule as part of getting the modified attribute. | | | The default value is `1`. | +---------------------------+-------------------------------------------------------------------+ | *"constant"* (optional) | The constant added to the multiplied attribute value on the right | | | side of the rule to yield the final modified attribute. | | | The default value is `0`. | +---------------------------+-------------------------------------------------------------------+ The `attribute1` and `attribute2` options are: +-------------------+--------------------------------+ | value | AppKit equivalent | +===================+================================+ | *"left"* | NSLayoutAttributeLeft | +-------------------+--------------------------------+ | *"right"* | NSLayoutAttributeRight | +-------------------+--------------------------------+ | *"top"* | NSLayoutAttributeTop | +-------------------+--------------------------------+ | *"bottom"* | NSLayoutAttributeBottom | +-------------------+--------------------------------+ | *"leading"* | NSLayoutAttributeLeading | +-------------------+--------------------------------+ | *"trailing"* | NSLayoutAttributeTrailing | +-------------------+--------------------------------+ | *"width"* | NSLayoutAttributeWidth | +-------------------+--------------------------------+ | *"height"* | NSLayoutAttributeHeight | +-------------------+--------------------------------+ | *"centerX"* | NSLayoutAttributeCenterX | +-------------------+--------------------------------+ | *"centerY"* | NSLayoutAttributeCenterY | 
+-------------------+--------------------------------+ | *"baseline"* | NSLayoutAttributeBaseline | +-------------------+--------------------------------+ | *"lastBaseline"* | NSLayoutAttributeLastBaseline | +-------------------+--------------------------------+ | *"firstBaseline"* | NSLayoutAttributeFirstBaseline | +-------------------+--------------------------------+ Refer to the `NSLayoutAttribute`_ documentation for the information about what each of these do. The `relation` options are: +--------+------------------------------------+ | value | AppKit equivalent | +========+====================================+ | *"<="* | NSLayoutRelationLessThanOrEqual | +--------+------------------------------------+ | *"=="* | NSLayoutRelationEqual | +--------+------------------------------------+ | *">="* | NSLayoutRelationGreaterThanOrEqual | +--------+------------------------------------+ Refer to the `NSLayoutRelation`_ documentation for the information about what each of these do. **metrics** may be either *None* or a dict containing key value pairs representing metrics keywords used in the rules defined with strings. .. _Visual Format Language: https://developer.apple.com/library/archive/documentation/UserExperience/Conceptual/AutolayoutPG/VisualFormatLanguage.html#//apple_ref/doc/uid/TP40010853-CH27-SW1 .. _NSLayoutAttribute: https://developer.apple.com/documentation/uikit/nslayoutattribute?language=objc .. _NSLayoutRelation: https://developer.apple.com/documentation/uikit/nslayoutrelation?language=objc Move the object by *x* units and *y* units. Change the size of the object to *width* and *height*. # ------------ # Base Control # ------------ Set the control title. **title** A string representing the title. Get the control title. Return a bool indicating if the object is enable or not. 
# ------------------- # Sub-View Management # ------------------- #elif isinstance(value, NSView) and not attr.startswith("_"): # assert not hasattr(obj, attr), "can't replace vanilla attribute" # view = obj._getContentView() # view.addSubview_(value) #elif isinstance(value, NSView): # value.removeFromSuperview() # ------------------- # Auto Layout Support # ------------------- # -------------------------- # Frame-Based Layout Support # -------------------------- Convert a vanilla posSize rect to a Cocoa frame. # flip it upside down Translate a Cocoa frame to vanilla coordinates. # ---------------- # Callback Support # ---------------- Break cyclic references by deleting _target attributes. | 2.010521 | 2 |
evaluation/models.py | AymenQ/tarteel.io | 1 | 6624467 | from django.db import models
from django.forms import ModelForm
from restapi.models import AnnotatedRecording
class TajweedEvaluation(models.Model):
    """Expert evaluation of a single tajweed-rule occurrence in a recording.

    Records which rule (``category``) was evaluated, how severe a mistake
    would be (``degree``), and whether the reciter applied the rule
    correctly (``result``).
    """

    # Degree choices: jali (clear/major) vs khafi (subtle/minor).
    MAJOR_DEGREE = 'jali'
    MINOR_DEGREE = 'khafi'
    DEGREE_CHOICES = (
        (MAJOR_DEGREE, 'Jali'),
        (MINOR_DEGREE, 'Khafi')
    )
    # Category choices: the tajweed rule being evaluated.
    GHUNNAH = 'ghunnah'
    IDGHAAM_GHUNNAH = 'idghaam_ghunnah'
    # NOTE: double underscore kept as-is for backward compatibility with
    # existing references to this attribute name.
    IDGHAAM__NO_GHUNNAH = 'idghaam_no_ghunnah'
    IDGHAAM_MUTAJAANISAIN = 'idghaam_mutajaanisain'
    IDGHAAM_MUTAQARIBAIN = 'idghaam_mutaqaribain'
    IDGHAAM_SHAFAWI = 'idghaam_shafawi'
    IKHFA = 'ikhfa'
    IKHFA_SHAFAWI = 'ikhfa_shafawi'
    IQLAB = 'iqlab'
    MADD_2 = 'madd_2'
    MADD_246 = 'madd_246'
    MADD_MUTTASIL = 'madd_muttasil'
    MADD_MUNFASIL = 'madd_munfasil'
    MADD_6 = 'madd_6'
    QALQALAH = 'qalqalah'
    HAMZAT_WASL = 'hamzat_wasl'
    LAM_SHAMSIYYAH = 'lam_shamsiyyah'
    SILENT = 'silent'
    # Fix: several display labels had been clobbered to '<NAME>'
    # placeholders; restored from the corresponding rule keys.
    CATEGORY_CHOICES = (
        (GHUNNAH, 'Ghunnah'),
        (IDGHAAM_GHUNNAH, 'Idghaam with Ghunnah'),
        (IDGHAAM__NO_GHUNNAH, 'Idghaam without Ghunnah'),
        (IDGHAAM_MUTAJAANISAIN, 'Idghaam Mutajaanisain'),
        (IDGHAAM_MUTAQARIBAIN, 'Idghaam Mutaqaribain'),
        (IDGHAAM_SHAFAWI, 'Idghaam Shafawi'),
        (IKHFA, 'Ikhfa'),
        (IKHFA_SHAFAWI, 'Ikhfa Shafawi'),
        (IQLAB, 'Iqlab'),
        (MADD_2, 'Regular Madd'),
        (MADD_246, 'Madd al-Aarid/al-Leen'),
        (MADD_MUTTASIL, 'Madd al-Muttasil'),
        (MADD_MUNFASIL, 'Madd al-Munfasil'),
        (MADD_6, 'Madd al-Laazim'),
        (QALQALAH, 'Qalqalah'),
        (HAMZAT_WASL, 'Hamzat al-Wasl'),
        (LAM_SHAMSIYYAH, 'Lam al-Shamsiyyah'),
        (SILENT, 'Silent')
    )
    # Anonymous session identifier of the submitting expert.
    session_id = models.CharField(max_length=32, blank=True)
    # Client platform the evaluation was submitted from (e.g. 'web').
    platform = models.CharField(max_length=32, default='web')
    # Link the rule evaluation with a specific recording.
    associated_recording = models.ForeignKey(AnnotatedRecording,
                                             on_delete=models.CASCADE,
                                             null=True)
    # True when the rule was applied correctly by the reciter.
    result = models.BooleanField(default=False)
    degree = models.CharField(choices=DEGREE_CHOICES, default=MAJOR_DEGREE,
                              max_length=32)
    category = models.CharField(choices=CATEGORY_CHOICES, default=GHUNNAH,
                                max_length=50)
class TajweedEvaluationForm(ModelForm):
    """Expert-facing form exposing only the degree and category fields;
    the remaining TajweedEvaluation fields are filled in by the view."""
    class Meta:
        model = TajweedEvaluation
        fields = ['degree', 'category']
class Evaluation(models.Model):
    """Free-form overall evaluation of a recording, keyed by session."""

    # Anonymous session identifier of the submitting evaluator.
    session_id = models.CharField(max_length=32, blank=True)
    associated_recording = models.ForeignKey(AnnotatedRecording,
                                             on_delete=models.CASCADE,
                                             null=True)
    # Client platform the evaluation was submitted from (e.g. 'web').
    platform = models.CharField(max_length=32, default='web')
    # Fix: the original default was the boolean False, which a CharField
    # would persist as the literal string "False"; default to an empty
    # string instead (requires a trivial migration).
    evaluation = models.CharField(max_length=32, blank=True, default='')
| from django.db import models
from django.forms import ModelForm
from restapi.models import AnnotatedRecording
class TajweedEvaluation(models.Model):
    """A model that contains the information we want to receive from the expert
    regarding the data."""
    # Degree Choices: jali (clear/major) vs khafi (subtle/minor).
    MAJOR_DEGREE = 'jali'
    MINOR_DEGREE = 'khafi'
    DEGREE_CHOICES = (
        (MAJOR_DEGREE, 'Jali'),
        (MINOR_DEGREE, 'Khafi')
    )
    # Category Choices: the tajweed rule being evaluated.
    GHUNNAH = 'ghunnah'
    IDGHAAM_GHUNNAH = 'idghaam_ghunnah'
    # NOTE(review): double underscore in the attribute name looks like a
    # typo, but renaming would break external references — confirm first.
    IDGHAAM__NO_GHUNNAH = 'idghaam_no_ghunnah'
    IDGHAAM_MUTAJAANISAIN = 'idghaam_mutajaanisain'
    IDGHAAM_MUTAQARIBAIN = 'idghaam_mutaqaribain'
    IDGHAAM_SHAFAWI = 'idghaam_shafawi'
    IKHFA = 'ikhfa'
    IKHFA_SHAFAWI = 'ikhfa_shafawi'
    IQLAB = 'iqlab'
    MADD_2 = 'madd_2'
    MADD_246 = 'madd_246'
    MADD_MUTTASIL = 'madd_muttasil'
    MADD_MUNFASIL = 'madd_munfasil'
    MADD_6 = 'madd_6'
    QALQALAH = 'qalqalah'
    HAMZAT_WASL = 'hamzat_wasl'
    LAM_SHAMSIYYAH = 'lam_shamsiyyah'
    SILENT = 'silent'
    # NOTE(review): several '<NAME>' labels below look like corrupted /
    # anonymized placeholders — the human-readable rule names should be
    # restored from the corresponding keys.
    CATEGORY_CHOICES = (
        (GHUNNAH, 'Ghunnah'),
        (IDGHAAM_GHUNNAH, 'Idghaam with Ghunnah'),
        (IDGHAAM__NO_GHUNNAH, 'Idghaam without Ghunnah'),
        (IDGHAAM_MUTAJAANISAIN, 'Idghaam Mutajaanisain'),
        (IDGHAAM_MUTAQARIBAIN, '<NAME>'),
        (IDGHAAM_SHAFAWI, '<NAME>'),
        (IKHFA, 'Ikhfa'),
        (IKHFA_SHAFAWI, '<NAME>'),
        (IQLAB, 'Iqlab'),
        (MADD_2, 'Regular Madd'),
        (MADD_246, 'Madd al-Aarid/al-Leen'),
        (MADD_MUTTASIL, 'Madd al-Muttasil'),
        (MADD_MUNFASIL, 'Madd al-Munfasil'),
        (MADD_6, 'M<NAME>'),
        (QALQALAH, 'Qalqalah'),
        (HAMZAT_WASL, 'Hamzat al-Wasl'),
        (LAM_SHAMSIYYAH, 'Lam al-Shamsiyyah'),
        (SILENT, 'Silent')
    )
    # Anonymous session identifier of the submitting expert.
    session_id = models.CharField(max_length=32, blank=True)
    # Client platform the evaluation was submitted from (e.g. 'web').
    platform = models.CharField(max_length=32, default='web')
    # Link the rule evaluation with a specific recording
    associated_recording = models.ForeignKey(AnnotatedRecording,
                                             on_delete=models.CASCADE,
                                             null=True)
    # True when the rule was applied correctly by the reciter.
    result = models.BooleanField(default=False)
    degree = models.CharField(choices=DEGREE_CHOICES, default=MAJOR_DEGREE,
                              max_length=32)
    category = models.CharField(choices=CATEGORY_CHOICES, default=GHUNNAH,
                                max_length=50)
class TajweedEvaluationForm(ModelForm):
    """Expert-facing form exposing only the degree and category fields;
    the remaining TajweedEvaluation fields are filled in by the view."""
    class Meta:
        model = TajweedEvaluation
        fields = ['degree', 'category']
class Evaluation(models.Model):
    """Free-form overall evaluation of a recording, keyed by session."""
    # Anonymous session identifier of the submitting evaluator.
    session_id = models.CharField(max_length=32, blank=True)
    associated_recording = models.ForeignKey(AnnotatedRecording,
                                             on_delete=models.CASCADE,
                                             null=True)
    # Client platform the evaluation was submitted from (e.g. 'web').
    platform = models.CharField(max_length=32, default='web')
    # NOTE(review): default=False on a CharField is persisted as the string
    # "False" — an empty-string default is almost certainly intended.
    evaluation = models.CharField(max_length=32, default=False)
| en | 0.924843 | A model that contains the information we want to receive from the expert regarding the data. # Degree Choices # Category Choices # Link the rule evaluation with a specific recording | 2.603192 | 3 |
pymc/tests/test_smc.py | 5hv5hvnk/pymc | 0 | 6624468 | # Copyright 2020 The PyMC Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import aesara
import aesara.tensor as at
import numpy as np
import pytest
import scipy.stats as st
from aesara.graph.basic import ancestors
from aesara.tensor.random.op import RandomVariable
from aesara.tensor.random.var import (
RandomGeneratorSharedVariable,
RandomStateSharedVariable,
)
from aesara.tensor.sort import SortOp
from arviz.data.inference_data import InferenceData
import pymc as pm
from pymc.aesaraf import floatX
from pymc.backends.base import MultiTrace
from pymc.smc.smc import IMH
from pymc.tests.helpers import SeededTest, assert_random_state_equal
class TestSMC(SeededTest):
    """Tests for the default SMC kernel"""

    def setup_class(self):
        # Build two shared models: a 4-d two-mode Gaussian mixture
        # (self.SMC_test) and a trivial normal model (self.fast_model).
        super().setup_class()
        self.samples = 1000
        n = 4
        mu1 = np.ones(n) * 0.5
        mu2 = -mu1
        stdev = 0.1
        sigma = np.power(stdev, 2) * np.eye(n)
        isigma = np.linalg.inv(sigma)
        dsigma = np.linalg.det(sigma)
        w1 = stdev
        w2 = 1 - stdev

        def two_gaussians(x):
            """
            Mixture of gaussians likelihood
            """
            log_like1 = (
                -0.5 * n * at.log(2 * np.pi)
                - 0.5 * at.log(dsigma)
                - 0.5 * (x - mu1).T.dot(isigma).dot(x - mu1)
            )
            log_like2 = (
                -0.5 * n * at.log(2 * np.pi)
                - 0.5 * at.log(dsigma)
                - 0.5 * (x - mu2).T.dot(isigma).dot(x - mu2)
            )
            return at.log(w1 * at.exp(log_like1) + w2 * at.exp(log_like2))

        with pm.Model() as self.SMC_test:
            X = pm.Uniform("X", lower=-2, upper=2.0, shape=n)
            llk = pm.Potential("muh", two_gaussians(X))
        self.muref = mu1

        with pm.Model() as self.fast_model:
            x = pm.Normal("x", 0, 1)
            y = pm.Normal("y", x, 1, observed=0)

    def test_sample(self):
        # Posterior |X| should concentrate around the (mirrored) mode mu1.
        initial_rng_state = np.random.get_state()
        with self.SMC_test:
            mtrace = pm.sample_smc(draws=self.samples, return_inferencedata=False)
            # Verify sampling was done with a non-global random generator
            assert_random_state_equal(initial_rng_state, np.random.get_state())
        x = mtrace["X"]
        mu1d = np.abs(x).mean(axis=0)
        np.testing.assert_allclose(self.muref, mu1d, rtol=0.0, atol=0.03)

    def test_discrete_rounding_proposal(self):
        """
        Test that discrete variable values are automatically rounded
        in SMC logp functions
        """
        with pm.Model() as m:
            z = pm.Bernoulli("z", p=0.7)
            like = pm.Potential("like", z * 1.0)

        smc = IMH(model=m)
        smc.initialize_population()
        smc._initialize_kernel()

        # Values round to the nearest of {0, 1}; out-of-support gives -inf.
        assert smc.prior_logp_func(floatX(np.array([-0.51]))) == -np.inf
        assert np.isclose(smc.prior_logp_func(floatX(np.array([-0.49]))), np.log(0.3))
        assert np.isclose(smc.prior_logp_func(floatX(np.array([0.49]))), np.log(0.3))
        assert np.isclose(smc.prior_logp_func(floatX(np.array([0.51]))), np.log(0.7))
        assert smc.prior_logp_func(floatX(np.array([1.51]))) == -np.inf

    def test_unobserved_discrete(self):
        # SMC should recover latent Bernoulli assignments of a 2-component
        # mixture with well-separated means.
        n = 10
        rng = self.get_random_state()
        z_true = np.zeros(n, dtype=int)
        z_true[int(n / 2) :] = 1
        y = st.norm(np.array([-1, 1])[z_true], 0.25).rvs(random_state=rng)

        with pm.Model() as m:
            z = pm.Bernoulli("z", p=0.5, size=n)
            mu = pm.math.switch(z, 1.0, -1.0)
            like = pm.Normal("like", mu=mu, sigma=0.25, observed=y)
            trace = pm.sample_smc(chains=1, return_inferencedata=False)

        assert np.all(np.median(trace["z"], axis=0) == z_true)

    def test_marginal_likelihood(self):
        """
        Verifies that the log marginal likelihood function
        can be correctly computed for a Beta-Bernoulli model.
        """
        data = np.repeat([1, 0], [50, 50])
        marginals = []
        a_prior_0, b_prior_0 = 1.0, 1.0
        a_prior_1, b_prior_1 = 20.0, 20.0

        for alpha, beta in ((a_prior_0, b_prior_0), (a_prior_1, b_prior_1)):
            with pm.Model() as model:
                a = pm.Beta("a", alpha, beta)
                y = pm.Bernoulli("y", a, observed=data)
                trace = pm.sample_smc(2000, chains=2, return_inferencedata=False)
                # log_marignal_likelihood is found in the last value of each chain
                lml = np.mean([chain[-1] for chain in trace.report.log_marginal_likelihood])
                marginals.append(lml)

        # compare to the analytical result
        assert abs(np.exp(marginals[1] - marginals[0]) - 4.0) <= 1

    def test_start(self):
        # Custom per-draw start values must be accepted for every free RV.
        with pm.Model() as model:
            a = pm.Poisson("a", 5)
            b = pm.HalfNormal("b", 10)
            y = pm.Normal("y", a, b, observed=[1, 2, 3, 4])
            start = {
                "a": np.random.poisson(5, size=500),
                "b_log__": np.abs(np.random.normal(0, 10, size=500)),
            }
            trace = pm.sample_smc(500, chains=1, start=start)

    def test_kernel_kwargs(self):
        # Kernel-specific kwargs must be forwarded and recorded in the report
        # for both the IMH and MH kernels.
        with self.fast_model:
            trace = pm.sample_smc(
                draws=10,
                chains=1,
                threshold=0.7,
                correlation_threshold=0.02,
                return_inferencedata=False,
                kernel=pm.smc.IMH,
            )
            assert trace.report.threshold == 0.7
            assert trace.report.n_draws == 10
            assert trace.report.correlation_threshold == 0.02

        with self.fast_model:
            trace = pm.sample_smc(
                draws=10,
                chains=1,
                threshold=0.95,
                correlation_threshold=0.02,
                return_inferencedata=False,
                kernel=pm.smc.MH,
            )
            assert trace.report.threshold == 0.95
            assert trace.report.n_draws == 10
            assert trace.report.correlation_threshold == 0.02

    @pytest.mark.parametrize("chains", (1, 2))
    def test_return_datatype(self, chains):
        # Both InferenceData and MultiTrace return types must be supported.
        draws = 10
        with self.fast_model:
            idata = pm.sample_smc(chains=chains, draws=draws)
            mt = pm.sample_smc(chains=chains, draws=draws, return_inferencedata=False)

        assert isinstance(idata, InferenceData)
        assert "sample_stats" in idata
        assert idata.posterior.dims["chain"] == chains
        assert idata.posterior.dims["draw"] == draws

        assert isinstance(mt, MultiTrace)
        assert mt.nchains == chains
        assert mt["x"].size == chains * draws

    def test_convergence_checks(self):
        # Too few draws should trigger a convergence warning.
        with self.fast_model:
            with pytest.warns(
                UserWarning,
                match="The number of samples is too small",
            ):
                pm.sample_smc(draws=99)

    def test_deprecated_parallel_arg(self):
        with self.fast_model:
            with pytest.warns(
                FutureWarning,
                match="The argument parallel is deprecated",
            ):
                pm.sample_smc(draws=10, chains=1, parallel=False)

    def test_deprecated_abc_args(self):
        # All legacy ABC-era arguments should raise FutureWarning.
        with self.fast_model:
            with pytest.warns(
                FutureWarning,
                match='The kernel string argument "ABC" in sample_smc has been deprecated',
            ):
                pm.sample_smc(draws=10, chains=1, kernel="ABC")

            with pytest.warns(
                FutureWarning,
                match='The kernel string argument "Metropolis" in sample_smc has been deprecated',
            ):
                pm.sample_smc(draws=10, chains=1, kernel="Metropolis")

            with pytest.warns(
                FutureWarning,
                match="save_sim_data has been deprecated",
            ):
                pm.sample_smc(draws=10, chains=1, save_sim_data=True)

            with pytest.warns(
                FutureWarning,
                match="save_log_pseudolikelihood has been deprecated",
            ):
                pm.sample_smc(draws=10, chains=1, save_log_pseudolikelihood=True)
class TestSimulator(SeededTest):
    """
    Tests for pm.Simulator. They are included in this file because Simulator was
    designed primarily to be used with SMC sampling.
    """

    @staticmethod
    def count_rvs(end_node):
        # Number of RandomVariable nodes in the graph upstream of end_node.
        return len(
            [
                node
                for node in ancestors([end_node])
                if node.owner is not None and isinstance(node.owner.op, RandomVariable)
            ]
        )

    @staticmethod
    def normal_sim(rng, a, b, size):
        # Forward simulator: draws from Normal(a, b).
        return rng.normal(a, b, size=size)

    @staticmethod
    def abs_diff(eps, obs_data, sim_data):
        # Custom distance: mean absolute difference scaled by epsilon.
        return np.mean(np.abs((obs_data - sim_data) / eps))

    @staticmethod
    def quantiles(x):
        # Custom summary statistic: quartiles of the data.
        return np.quantile(x, [0.25, 0.5, 0.75])

    def setup_class(self):
        # Shared data plus two models: a plain simulator model and one
        # combined with a Potential that truncates `a` to be positive.
        super().setup_class()
        self.data = np.random.normal(loc=0, scale=1, size=1000)

        with pm.Model() as self.SMABC_test:
            a = pm.Normal("a", mu=0, sigma=1)
            b = pm.HalfNormal("b", sigma=1)
            s = pm.Simulator("s", self.normal_sim, a, b, sum_stat="sort", observed=self.data)
            self.s = s

        with pm.Model() as self.SMABC_potential:
            a = pm.Normal("a", mu=0, sigma=1, initval=0.5)
            b = pm.HalfNormal("b", sigma=1)
            c = pm.Potential("c", pm.math.switch(a > 0, 0, -np.inf))
            s = pm.Simulator("s", self.normal_sim, a, b, observed=self.data)

    def test_one_gaussian(self):
        # Posterior, prior-predictive and posterior-predictive checks for a
        # single-simulator model.
        assert self.count_rvs(self.SMABC_test.logpt()) == 1

        with self.SMABC_test:
            trace = pm.sample_smc(draws=1000, chains=1, return_inferencedata=False)
            pr_p = pm.sample_prior_predictive(1000, return_inferencedata=False)
            po_p = pm.sample_posterior_predictive(
                trace, keep_size=False, return_inferencedata=False
            )

        assert abs(self.data.mean() - trace["a"].mean()) < 0.05
        assert abs(self.data.std() - trace["b"].mean()) < 0.05

        assert pr_p["s"].shape == (1000, 1000)
        assert abs(0 - pr_p["s"].mean()) < 0.15
        assert abs(1.4 - pr_p["s"].std()) < 0.10

        assert po_p["s"].shape == (1000, 1000)
        assert abs(self.data.mean() - po_p["s"].mean()) < 0.10
        assert abs(self.data.std() - po_p["s"].std()) < 0.10

    @pytest.mark.parametrize("floatX", ["float32", "float64"])
    def test_custom_dist_sum_stat(self, floatX):
        # Custom distance and summary-statistic callables must be accepted
        # under both float precisions.
        with aesara.config.change_flags(floatX=floatX):
            with pm.Model() as m:
                a = pm.Normal("a", mu=0, sigma=1)
                b = pm.HalfNormal("b", sigma=1)
                s = pm.Simulator(
                    "s",
                    self.normal_sim,
                    a,
                    b,
                    distance=self.abs_diff,
                    sum_stat=self.quantiles,
                    observed=self.data,
                )

            assert self.count_rvs(m.logpt()) == 1

            with m:
                pm.sample_smc(draws=100)

    @pytest.mark.parametrize("floatX", ["float32", "float64"])
    def test_custom_dist_sum_stat_scalar(self, floatX):
        """
        Test that automatically wrapped functions cope well with scalar inputs
        """
        scalar_data = 5

        with aesara.config.change_flags(floatX=floatX):
            with pm.Model() as m:
                s = pm.Simulator(
                    "s",
                    self.normal_sim,
                    0,
                    1,
                    distance=self.abs_diff,
                    sum_stat=self.quantiles,
                    observed=scalar_data,
                )
            assert self.count_rvs(m.logpt()) == 1

            with pm.Model() as m:
                s = pm.Simulator(
                    "s",
                    self.normal_sim,
                    0,
                    1,
                    distance=self.abs_diff,
                    sum_stat="mean",
                    observed=scalar_data,
                )
            assert self.count_rvs(m.logpt()) == 1

    def test_model_with_potential(self):
        # Simulator must compose with a Potential truncating `a` to >= 0.
        assert self.count_rvs(self.SMABC_potential.logpt()) == 1

        with self.SMABC_potential:
            trace = pm.sample_smc(draws=100, chains=1, return_inferencedata=False)
            assert np.all(trace["a"] >= 0)

    def test_simulator_metropolis_mcmc(self):
        # Simulator models must also be sampleable with plain Metropolis.
        with self.SMABC_test as m:
            step = pm.Metropolis([m.rvs_to_values[m["a"]], m.rvs_to_values[m["b"]]])
            trace = pm.sample(step=step, return_inferencedata=False)

        assert abs(self.data.mean() - trace["a"].mean()) < 0.05
        assert abs(self.data.std() - trace["b"].mean()) < 0.05

    def test_multiple_simulators(self):
        # Two simulators with different distances/sum_stats in one model.
        true_a = 2
        true_b = -2

        data1 = np.random.normal(true_a, 0.1, size=1000)
        data2 = np.random.normal(true_b, 0.1, size=1000)

        with pm.Model() as m:
            a = pm.Normal("a", mu=0, sigma=3)
            b = pm.Normal("b", mu=0, sigma=3)
            sim1 = pm.Simulator(
                "sim1",
                self.normal_sim,
                a,
                0.1,
                distance="gaussian",
                sum_stat="sort",
                observed=data1,
            )
            sim2 = pm.Simulator(
                "sim2",
                self.normal_sim,
                b,
                0.1,
                distance="laplace",
                sum_stat="mean",
                epsilon=0.1,
                observed=data2,
            )

        assert self.count_rvs(m.logpt()) == 2

        # Check that the logps use the correct methods
        a_val = m.rvs_to_values[a]
        sim1_val = m.rvs_to_values[sim1]
        logp_sim1 = pm.joint_logpt(sim1, sim1_val)
        logp_sim1_fn = aesara.function([a_val], logp_sim1)

        b_val = m.rvs_to_values[b]
        sim2_val = m.rvs_to_values[sim2]
        logp_sim2 = pm.joint_logpt(sim2, sim2_val)
        logp_sim2_fn = aesara.function([b_val], logp_sim2)

        # sim1 uses sum_stat="sort", so its compiled graph must contain a
        # SortOp; sim2 uses "mean" and must not.
        assert any(
            node for node in logp_sim1_fn.maker.fgraph.toposort() if isinstance(node.op, SortOp)
        )
        assert not any(
            node for node in logp_sim2_fn.maker.fgraph.toposort() if isinstance(node.op, SortOp)
        )

        with m:
            trace = pm.sample_smc(return_inferencedata=False)

        assert abs(true_a - trace["a"].mean()) < 0.05
        assert abs(true_b - trace["b"].mean()) < 0.05

    def test_nested_simulators(self):
        # A simulator whose parameter is itself a latent simulator.
        true_a = 2
        rng = self.get_random_state()
        data = rng.normal(true_a, 0.1, size=1000)

        with pm.Model() as m:
            sim1 = pm.Simulator(
                "sim1",
                self.normal_sim,
                params=(0, 4),
                distance="gaussian",
                sum_stat="identity",
            )
            sim2 = pm.Simulator(
                "sim2",
                self.normal_sim,
                params=(sim1, 0.1),
                distance="gaussian",
                sum_stat="mean",
                epsilon=0.1,
                observed=data,
            )

        assert self.count_rvs(m.logpt()) == 2

        with m:
            trace = pm.sample_smc(return_inferencedata=False)

        assert np.abs(true_a - trace["sim1"].mean()) < 0.1

    def test_upstream_rngs_not_in_compiled_logp(self):
        # The compiled likelihood must own exactly one shared RNG; RNGs of
        # upstream (prior) variables must not leak into the compiled graph.
        smc = IMH(model=self.SMABC_test)
        smc.initialize_population()
        smc._initialize_kernel()
        likelihood_func = smc.likelihood_logp_func

        # Test graph is stochastic
        inarray = floatX(np.array([0, 0]))
        assert likelihood_func(inarray) != likelihood_func(inarray)

        # Test only one shared RNG is present
        compiled_graph = likelihood_func.maker.fgraph.outputs
        shared_rng_vars = [
            node
            for node in ancestors(compiled_graph)
            if isinstance(node, (RandomStateSharedVariable, RandomGeneratorSharedVariable))
        ]
        assert len(shared_rng_vars) == 1

    def test_simulator_error_msg(self):
        # Invalid distance / sum_stat / parameter combinations must raise
        # with informative messages.
        msg = "The distance metric not_real is not implemented"
        with pytest.raises(ValueError, match=msg):
            with pm.Model() as m:
                sim = pm.Simulator("sim", self.normal_sim, 0, 1, distance="not_real")

        msg = "The summary statistic not_real is not implemented"
        with pytest.raises(ValueError, match=msg):
            with pm.Model() as m:
                sim = pm.Simulator("sim", self.normal_sim, 0, 1, sum_stat="not_real")

        msg = "Cannot pass both unnamed parameters and `params`"
        with pytest.raises(ValueError, match=msg):
            with pm.Model() as m:
                sim = pm.Simulator("sim", self.normal_sim, 0, params=(1))

    @pytest.mark.xfail(reason="KL not refactored")
    def test_automatic_use_of_sort(self):
        with pm.Model() as model:
            s_k = pm.Simulator(
                "s_k",
                None,
                params=None,
                distance="kullback_leibler",
                sum_stat="sort",
                observed=self.data,
            )
        assert s_k.distribution.sum_stat is pm.distributions.simulator.identity

    def test_name_is_string_type(self):
        with self.SMABC_potential:
            assert not self.SMABC_potential.name
            trace = pm.sample_smc(draws=10, chains=1, return_inferencedata=False)
            assert isinstance(trace._straces[0].name, str)

    def test_named_model(self):
        # Named models used to fail with Simulator because the arguments to the
        # random fn used to be passed by name. This is no longer true.
        # https://github.com/pymc-devs/pymc/pull/4365#issuecomment-761221146
        name = "NamedModel"
        with pm.Model(name=name):
            a = pm.Normal("a", mu=0, sigma=1)
            b = pm.HalfNormal("b", sigma=1)
            s = pm.Simulator("s", self.normal_sim, a, b, observed=self.data)

            trace = pm.sample_smc(draws=10, chains=2, return_inferencedata=False)
            assert f"{name}::a" in trace.varnames
            assert f"{name}::b" in trace.varnames
            assert f"{name}::b_log__" in trace.varnames
class TestMHKernel(SeededTest):
    # Tests for the plain Metropolis-Hastings SMC kernel (pm.smc.MH).

    def test_normal_model(self):
        # MH kernel should recover mu and sigma of a simple normal model
        # without mutating the global NumPy RNG state.
        data = st.norm(10, 0.5).rvs(1000, random_state=self.get_random_state())

        initial_rng_state = np.random.get_state()
        with pm.Model() as m:
            mu = pm.Normal("mu", 0, 3)
            sigma = pm.HalfNormal("sigma", 1)
            y = pm.Normal("y", mu, sigma, observed=data)
            idata = pm.sample_smc(draws=2000, kernel=pm.smc.MH)
        assert_random_state_equal(initial_rng_state, np.random.get_state())

        post = idata.posterior.stack(sample=("chain", "draw"))
        assert np.abs(post["mu"].mean() - 10) < 0.1
        assert np.abs(post["sigma"].mean() - 0.5) < 0.05

    def test_proposal_dist_shape(self):
        # NOTE(review): this method appears truncated at the chunk boundary —
        # upstream it continues with assertions after sampling; verify against
        # the full file.
        with pm.Model() as m:
            x = pm.Normal("x", 0, 1)
            y = pm.Normal("y", x, 1, observed=0)
            trace = pm.sample_smc(
                draws=10,
                chains=1,
                kernel=pm.smc.MH,
                return_inferencedata=False,
            )
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import aesara
import aesara.tensor as at
import numpy as np
import pytest
import scipy.stats as st
from aesara.graph.basic import ancestors
from aesara.tensor.random.op import RandomVariable
from aesara.tensor.random.var import (
RandomGeneratorSharedVariable,
RandomStateSharedVariable,
)
from aesara.tensor.sort import SortOp
from arviz.data.inference_data import InferenceData
import pymc as pm
from pymc.aesaraf import floatX
from pymc.backends.base import MultiTrace
from pymc.smc.smc import IMH
from pymc.tests.helpers import SeededTest, assert_random_state_equal
class TestSMC(SeededTest):
    """Tests for the default SMC kernel"""

    def setup_class(self):
        # Build two shared models: a 4-d two-mode Gaussian mixture
        # (self.SMC_test) and a trivial normal model (self.fast_model).
        super().setup_class()
        self.samples = 1000
        n = 4
        mu1 = np.ones(n) * 0.5
        mu2 = -mu1
        stdev = 0.1
        sigma = np.power(stdev, 2) * np.eye(n)
        isigma = np.linalg.inv(sigma)
        dsigma = np.linalg.det(sigma)
        w1 = stdev
        w2 = 1 - stdev

        def two_gaussians(x):
            """
            Mixture of gaussians likelihood
            """
            log_like1 = (
                -0.5 * n * at.log(2 * np.pi)
                - 0.5 * at.log(dsigma)
                - 0.5 * (x - mu1).T.dot(isigma).dot(x - mu1)
            )
            log_like2 = (
                -0.5 * n * at.log(2 * np.pi)
                - 0.5 * at.log(dsigma)
                - 0.5 * (x - mu2).T.dot(isigma).dot(x - mu2)
            )
            return at.log(w1 * at.exp(log_like1) + w2 * at.exp(log_like2))

        with pm.Model() as self.SMC_test:
            X = pm.Uniform("X", lower=-2, upper=2.0, shape=n)
            llk = pm.Potential("muh", two_gaussians(X))
        self.muref = mu1

        with pm.Model() as self.fast_model:
            x = pm.Normal("x", 0, 1)
            y = pm.Normal("y", x, 1, observed=0)

    def test_sample(self):
        # Posterior |X| should concentrate around the (mirrored) mode mu1.
        initial_rng_state = np.random.get_state()
        with self.SMC_test:
            mtrace = pm.sample_smc(draws=self.samples, return_inferencedata=False)
            # Verify sampling was done with a non-global random generator
            assert_random_state_equal(initial_rng_state, np.random.get_state())
        x = mtrace["X"]
        mu1d = np.abs(x).mean(axis=0)
        np.testing.assert_allclose(self.muref, mu1d, rtol=0.0, atol=0.03)

    def test_discrete_rounding_proposal(self):
        """
        Test that discrete variable values are automatically rounded
        in SMC logp functions
        """
        with pm.Model() as m:
            z = pm.Bernoulli("z", p=0.7)
            like = pm.Potential("like", z * 1.0)

        smc = IMH(model=m)
        smc.initialize_population()
        smc._initialize_kernel()

        # Values round to the nearest of {0, 1}; out-of-support gives -inf.
        assert smc.prior_logp_func(floatX(np.array([-0.51]))) == -np.inf
        assert np.isclose(smc.prior_logp_func(floatX(np.array([-0.49]))), np.log(0.3))
        assert np.isclose(smc.prior_logp_func(floatX(np.array([0.49]))), np.log(0.3))
        assert np.isclose(smc.prior_logp_func(floatX(np.array([0.51]))), np.log(0.7))
        assert smc.prior_logp_func(floatX(np.array([1.51]))) == -np.inf

    def test_unobserved_discrete(self):
        # SMC should recover latent Bernoulli assignments of a 2-component
        # mixture with well-separated means.
        n = 10
        rng = self.get_random_state()
        z_true = np.zeros(n, dtype=int)
        z_true[int(n / 2) :] = 1
        y = st.norm(np.array([-1, 1])[z_true], 0.25).rvs(random_state=rng)

        with pm.Model() as m:
            z = pm.Bernoulli("z", p=0.5, size=n)
            mu = pm.math.switch(z, 1.0, -1.0)
            like = pm.Normal("like", mu=mu, sigma=0.25, observed=y)
            trace = pm.sample_smc(chains=1, return_inferencedata=False)

        assert np.all(np.median(trace["z"], axis=0) == z_true)

    def test_marginal_likelihood(self):
        """
        Verifies that the log marginal likelihood function
        can be correctly computed for a Beta-Bernoulli model.
        """
        data = np.repeat([1, 0], [50, 50])
        marginals = []
        a_prior_0, b_prior_0 = 1.0, 1.0
        a_prior_1, b_prior_1 = 20.0, 20.0

        for alpha, beta in ((a_prior_0, b_prior_0), (a_prior_1, b_prior_1)):
            with pm.Model() as model:
                a = pm.Beta("a", alpha, beta)
                y = pm.Bernoulli("y", a, observed=data)
                trace = pm.sample_smc(2000, chains=2, return_inferencedata=False)
                # log_marignal_likelihood is found in the last value of each chain
                lml = np.mean([chain[-1] for chain in trace.report.log_marginal_likelihood])
                marginals.append(lml)

        # compare to the analytical result
        assert abs(np.exp(marginals[1] - marginals[0]) - 4.0) <= 1

    def test_start(self):
        # Custom per-draw start values must be accepted for every free RV.
        with pm.Model() as model:
            a = pm.Poisson("a", 5)
            b = pm.HalfNormal("b", 10)
            y = pm.Normal("y", a, b, observed=[1, 2, 3, 4])
            start = {
                "a": np.random.poisson(5, size=500),
                "b_log__": np.abs(np.random.normal(0, 10, size=500)),
            }
            trace = pm.sample_smc(500, chains=1, start=start)

    def test_kernel_kwargs(self):
        # Kernel-specific kwargs must be forwarded and recorded in the report
        # for both the IMH and MH kernels.
        with self.fast_model:
            trace = pm.sample_smc(
                draws=10,
                chains=1,
                threshold=0.7,
                correlation_threshold=0.02,
                return_inferencedata=False,
                kernel=pm.smc.IMH,
            )
            assert trace.report.threshold == 0.7
            assert trace.report.n_draws == 10
            assert trace.report.correlation_threshold == 0.02

        with self.fast_model:
            trace = pm.sample_smc(
                draws=10,
                chains=1,
                threshold=0.95,
                correlation_threshold=0.02,
                return_inferencedata=False,
                kernel=pm.smc.MH,
            )
            assert trace.report.threshold == 0.95
            assert trace.report.n_draws == 10
            assert trace.report.correlation_threshold == 0.02

    @pytest.mark.parametrize("chains", (1, 2))
    def test_return_datatype(self, chains):
        # Both InferenceData and MultiTrace return types must be supported.
        draws = 10
        with self.fast_model:
            idata = pm.sample_smc(chains=chains, draws=draws)
            mt = pm.sample_smc(chains=chains, draws=draws, return_inferencedata=False)

        assert isinstance(idata, InferenceData)
        assert "sample_stats" in idata
        assert idata.posterior.dims["chain"] == chains
        assert idata.posterior.dims["draw"] == draws

        assert isinstance(mt, MultiTrace)
        assert mt.nchains == chains
        assert mt["x"].size == chains * draws

    def test_convergence_checks(self):
        # Too few draws should trigger a convergence warning.
        with self.fast_model:
            with pytest.warns(
                UserWarning,
                match="The number of samples is too small",
            ):
                pm.sample_smc(draws=99)

    def test_deprecated_parallel_arg(self):
        with self.fast_model:
            with pytest.warns(
                FutureWarning,
                match="The argument parallel is deprecated",
            ):
                pm.sample_smc(draws=10, chains=1, parallel=False)

    def test_deprecated_abc_args(self):
        # All legacy ABC-era arguments should raise FutureWarning.
        with self.fast_model:
            with pytest.warns(
                FutureWarning,
                match='The kernel string argument "ABC" in sample_smc has been deprecated',
            ):
                pm.sample_smc(draws=10, chains=1, kernel="ABC")

            with pytest.warns(
                FutureWarning,
                match='The kernel string argument "Metropolis" in sample_smc has been deprecated',
            ):
                pm.sample_smc(draws=10, chains=1, kernel="Metropolis")

            with pytest.warns(
                FutureWarning,
                match="save_sim_data has been deprecated",
            ):
                pm.sample_smc(draws=10, chains=1, save_sim_data=True)

            with pytest.warns(
                FutureWarning,
                match="save_log_pseudolikelihood has been deprecated",
            ):
                pm.sample_smc(draws=10, chains=1, save_log_pseudolikelihood=True)
class TestSimulator(SeededTest):
"""
Tests for pm.Simulator. They are included in this file because Simulator was
designed primarily to be used with SMC sampling.
"""
@staticmethod
def count_rvs(end_node):
return len(
[
node
for node in ancestors([end_node])
if node.owner is not None and isinstance(node.owner.op, RandomVariable)
]
)
@staticmethod
def normal_sim(rng, a, b, size):
return rng.normal(a, b, size=size)
@staticmethod
def abs_diff(eps, obs_data, sim_data):
return np.mean(np.abs((obs_data - sim_data) / eps))
@staticmethod
def quantiles(x):
return np.quantile(x, [0.25, 0.5, 0.75])
def setup_class(self):
super().setup_class()
self.data = np.random.normal(loc=0, scale=1, size=1000)
with pm.Model() as self.SMABC_test:
a = pm.Normal("a", mu=0, sigma=1)
b = pm.HalfNormal("b", sigma=1)
s = pm.Simulator("s", self.normal_sim, a, b, sum_stat="sort", observed=self.data)
self.s = s
with pm.Model() as self.SMABC_potential:
a = pm.Normal("a", mu=0, sigma=1, initval=0.5)
b = pm.HalfNormal("b", sigma=1)
c = pm.Potential("c", pm.math.switch(a > 0, 0, -np.inf))
s = pm.Simulator("s", self.normal_sim, a, b, observed=self.data)
def test_one_gaussian(self):
assert self.count_rvs(self.SMABC_test.logpt()) == 1
with self.SMABC_test:
trace = pm.sample_smc(draws=1000, chains=1, return_inferencedata=False)
pr_p = pm.sample_prior_predictive(1000, return_inferencedata=False)
po_p = pm.sample_posterior_predictive(
trace, keep_size=False, return_inferencedata=False
)
assert abs(self.data.mean() - trace["a"].mean()) < 0.05
assert abs(self.data.std() - trace["b"].mean()) < 0.05
assert pr_p["s"].shape == (1000, 1000)
assert abs(0 - pr_p["s"].mean()) < 0.15
assert abs(1.4 - pr_p["s"].std()) < 0.10
assert po_p["s"].shape == (1000, 1000)
assert abs(self.data.mean() - po_p["s"].mean()) < 0.10
assert abs(self.data.std() - po_p["s"].std()) < 0.10
@pytest.mark.parametrize("floatX", ["float32", "float64"])
def test_custom_dist_sum_stat(self, floatX):
with aesara.config.change_flags(floatX=floatX):
with pm.Model() as m:
a = pm.Normal("a", mu=0, sigma=1)
b = pm.HalfNormal("b", sigma=1)
s = pm.Simulator(
"s",
self.normal_sim,
a,
b,
distance=self.abs_diff,
sum_stat=self.quantiles,
observed=self.data,
)
assert self.count_rvs(m.logpt()) == 1
with m:
pm.sample_smc(draws=100)
@pytest.mark.parametrize("floatX", ["float32", "float64"])
def test_custom_dist_sum_stat_scalar(self, floatX):
"""
Test that automatically wrapped functions cope well with scalar inputs
"""
scalar_data = 5
with aesara.config.change_flags(floatX=floatX):
with pm.Model() as m:
s = pm.Simulator(
"s",
self.normal_sim,
0,
1,
distance=self.abs_diff,
sum_stat=self.quantiles,
observed=scalar_data,
)
assert self.count_rvs(m.logpt()) == 1
with pm.Model() as m:
s = pm.Simulator(
"s",
self.normal_sim,
0,
1,
distance=self.abs_diff,
sum_stat="mean",
observed=scalar_data,
)
assert self.count_rvs(m.logpt()) == 1
def test_model_with_potential(self):
assert self.count_rvs(self.SMABC_potential.logpt()) == 1
with self.SMABC_potential:
trace = pm.sample_smc(draws=100, chains=1, return_inferencedata=False)
assert np.all(trace["a"] >= 0)
def test_simulator_metropolis_mcmc(self):
with self.SMABC_test as m:
step = pm.Metropolis([m.rvs_to_values[m["a"]], m.rvs_to_values[m["b"]]])
trace = pm.sample(step=step, return_inferencedata=False)
assert abs(self.data.mean() - trace["a"].mean()) < 0.05
assert abs(self.data.std() - trace["b"].mean()) < 0.05
def test_multiple_simulators(self):
true_a = 2
true_b = -2
data1 = np.random.normal(true_a, 0.1, size=1000)
data2 = np.random.normal(true_b, 0.1, size=1000)
with pm.Model() as m:
a = pm.Normal("a", mu=0, sigma=3)
b = pm.Normal("b", mu=0, sigma=3)
sim1 = pm.Simulator(
"sim1",
self.normal_sim,
a,
0.1,
distance="gaussian",
sum_stat="sort",
observed=data1,
)
sim2 = pm.Simulator(
"sim2",
self.normal_sim,
b,
0.1,
distance="laplace",
sum_stat="mean",
epsilon=0.1,
observed=data2,
)
assert self.count_rvs(m.logpt()) == 2
# Check that the logps use the correct methods
a_val = m.rvs_to_values[a]
sim1_val = m.rvs_to_values[sim1]
logp_sim1 = pm.joint_logpt(sim1, sim1_val)
logp_sim1_fn = aesara.function([a_val], logp_sim1)
b_val = m.rvs_to_values[b]
sim2_val = m.rvs_to_values[sim2]
logp_sim2 = pm.joint_logpt(sim2, sim2_val)
logp_sim2_fn = aesara.function([b_val], logp_sim2)
assert any(
node for node in logp_sim1_fn.maker.fgraph.toposort() if isinstance(node.op, SortOp)
)
assert not any(
node for node in logp_sim2_fn.maker.fgraph.toposort() if isinstance(node.op, SortOp)
)
with m:
trace = pm.sample_smc(return_inferencedata=False)
assert abs(true_a - trace["a"].mean()) < 0.05
assert abs(true_b - trace["b"].mean()) < 0.05
def test_nested_simulators(self):
true_a = 2
rng = self.get_random_state()
data = rng.normal(true_a, 0.1, size=1000)
with pm.Model() as m:
sim1 = pm.Simulator(
"sim1",
self.normal_sim,
params=(0, 4),
distance="gaussian",
sum_stat="identity",
)
sim2 = pm.Simulator(
"sim2",
self.normal_sim,
params=(sim1, 0.1),
distance="gaussian",
sum_stat="mean",
epsilon=0.1,
observed=data,
)
assert self.count_rvs(m.logpt()) == 2
with m:
trace = pm.sample_smc(return_inferencedata=False)
assert np.abs(true_a - trace["sim1"].mean()) < 0.1
def test_upstream_rngs_not_in_compiled_logp(self):
smc = IMH(model=self.SMABC_test)
smc.initialize_population()
smc._initialize_kernel()
likelihood_func = smc.likelihood_logp_func
# Test graph is stochastic
inarray = floatX(np.array([0, 0]))
assert likelihood_func(inarray) != likelihood_func(inarray)
# Test only one shared RNG is present
compiled_graph = likelihood_func.maker.fgraph.outputs
shared_rng_vars = [
node
for node in ancestors(compiled_graph)
if isinstance(node, (RandomStateSharedVariable, RandomGeneratorSharedVariable))
]
assert len(shared_rng_vars) == 1
def test_simulator_error_msg(self):
msg = "The distance metric not_real is not implemented"
with pytest.raises(ValueError, match=msg):
with pm.Model() as m:
sim = pm.Simulator("sim", self.normal_sim, 0, 1, distance="not_real")
msg = "The summary statistic not_real is not implemented"
with pytest.raises(ValueError, match=msg):
with pm.Model() as m:
sim = pm.Simulator("sim", self.normal_sim, 0, 1, sum_stat="not_real")
msg = "Cannot pass both unnamed parameters and `params`"
with pytest.raises(ValueError, match=msg):
with pm.Model() as m:
sim = pm.Simulator("sim", self.normal_sim, 0, params=(1))
@pytest.mark.xfail(reason="KL not refactored")
def test_automatic_use_of_sort(self):
with pm.Model() as model:
s_k = pm.Simulator(
"s_k",
None,
params=None,
distance="kullback_leibler",
sum_stat="sort",
observed=self.data,
)
assert s_k.distribution.sum_stat is pm.distributions.simulator.identity
def test_name_is_string_type(self):
with self.SMABC_potential:
assert not self.SMABC_potential.name
trace = pm.sample_smc(draws=10, chains=1, return_inferencedata=False)
assert isinstance(trace._straces[0].name, str)
def test_named_model(self):
# Named models used to fail with Simulator because the arguments to the
# random fn used to be passed by name. This is no longer true.
# https://github.com/pymc-devs/pymc/pull/4365#issuecomment-761221146
name = "NamedModel"
with pm.Model(name=name):
a = pm.Normal("a", mu=0, sigma=1)
b = pm.HalfNormal("b", sigma=1)
s = pm.Simulator("s", self.normal_sim, a, b, observed=self.data)
trace = pm.sample_smc(draws=10, chains=2, return_inferencedata=False)
assert f"{name}::a" in trace.varnames
assert f"{name}::b" in trace.varnames
assert f"{name}::b_log__" in trace.varnames
class TestMHKernel(SeededTest):
def test_normal_model(self):
data = st.norm(10, 0.5).rvs(1000, random_state=self.get_random_state())
initial_rng_state = np.random.get_state()
with pm.Model() as m:
mu = pm.Normal("mu", 0, 3)
sigma = pm.HalfNormal("sigma", 1)
y = pm.Normal("y", mu, sigma, observed=data)
idata = pm.sample_smc(draws=2000, kernel=pm.smc.MH)
assert_random_state_equal(initial_rng_state, np.random.get_state())
post = idata.posterior.stack(sample=("chain", "draw"))
assert np.abs(post["mu"].mean() - 10) < 0.1
assert np.abs(post["sigma"].mean() - 0.5) < 0.05
def test_proposal_dist_shape(self):
with pm.Model() as m:
x = pm.Normal("x", 0, 1)
y = pm.Normal("y", x, 1, observed=0)
trace = pm.sample_smc(
draws=10,
chains=1,
kernel=pm.smc.MH,
return_inferencedata=False,
)
| en | 0.872629 | # Copyright 2020 The PyMC Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Tests for the default SMC kernel Mixture of gaussians likelihood # Verify sampling was done with a non-global random generator Test that discrete variable values are automatically rounded in SMC logp functions Verifies that the log marginal likelihood function can be correctly computed for a Beta-Bernoulli model. # log_marignal_likelihood is found in the last value of each chain # compare to the analytical result Tests for pm.Simulator. They are included in this file because Simulator was designed primarily to be used with SMC sampling. Test that automatically wrapped functions cope well with scalar inputs # Check that the logps use the correct methods # Test graph is stochastic # Test only one shared RNG is present # Named models used to fail with Simulator because the arguments to the # random fn used to be passed by name. This is no longer true. # https://github.com/pymc-devs/pymc/pull/4365#issuecomment-761221146 | 1.908803 | 2 |
hblfit/data.py | Lachimax/hblfit | 0 | 6624469 | import astropy.table as table
import pkg_resources
def get_table(n: int):
stream = pkg_resources.resource_stream(__name__, f'data/table_{n}.csv')
return table.Table.read(stream, format="ascii.csv")
| import astropy.table as table
import pkg_resources
def get_table(n: int):
stream = pkg_resources.resource_stream(__name__, f'data/table_{n}.csv')
return table.Table.read(stream, format="ascii.csv")
| none | 1 | 2.434923 | 2 | |
scrutiny/javacard/toolwrappers/gppro.py | NimRo97/jcpeg | 2 | 6624470 | from abc import ABC
from typing import final
from overrides import overrides, EnforceOverrides
from scrutiny.config import Paths
from scrutiny.interfaces import ToolWrapper
from scrutiny.javacard.modules.atr import Atr
from scrutiny.javacard.modules.cplc import Cplc
from scrutiny.utils import execute_cmd, isfile
from scrutiny.javacard.modules.gppro import GPInfo, GPList
INFO_ARGS = ["-info"]
INFO_FILE = "gp_info.txt"
LIST_ARGS = ["-list"]
LIST_FILE = "gp_list.txt"
class GPPro(ToolWrapper, ABC, EnforceOverrides):
"""
SCRUTINY ToolWrapper for GlobalPlatformPro
"""
GP_BIN = "java -jar " + Paths.GPPRO
@final
def run_gppro(self, args, outfile):
"""
Wrapper for running GlobalPlatformPro
"""
outpath = self.get_outpath(outfile)
cmd_line = self.GP_BIN + " " + " ".join(args) + " > " + outpath
if isfile(outpath) and not self.force_mode:
print("Skipping " + cmd_line + " (results found).")
return 0
print("Running " + cmd_line + ".")
return execute_cmd(cmd_line)
class GPProInfo(GPPro):
"""
SCRUTINY ToolWrapper for GlobalPlatformPro -info
"""
@overrides
def run(self):
return super().run_gppro(INFO_ARGS, INFO_FILE)
@overrides
def parse(self):
filename = self.get_outpath(INFO_FILE)
with open(filename, "r") as f:
lines = f.readlines()
gpcplc = Cplc()
gpinfo = GPInfo()
modules = [gpcplc, gpinfo]
gpinfo_discard = ["Card Data:", "Card Capabilities:",
"More information about your card:",
"/parse?ATR"]
i = 0
while i < len(lines):
line = lines[i].rstrip()
i += 1
if line == "" or any([d in line for d in gpinfo_discard]):
continue
if line.startswith("ATR"):
atr = line.split(":")[1].strip()
modules.insert(0, Atr(atr=atr))
continue
if line.startswith("IIN"):
gpinfo.iin = line.split(":")[1].strip()
continue
if line.startswith("CIN"):
gpinfo.cin = line.split(":")[1].strip()
continue
if line.startswith("CPLC"):
first = line.split(":")[1].strip().split("=")
gpcplc.cplc[first[0]] = first[1]
while i < len(lines) and lines[i][0] == " ":
pair = lines[i].strip().split("=")
gpcplc.cplc[pair[0]] = pair[1]
i += 1
continue
if line.startswith("Support"):
gpinfo.supports.append(line)
continue
if line.startswith("Version"):
gpinfo.versions.append(line)
continue
gpinfo.other.append(line)
return modules
class GPProList(GPPro):
"""
SCRUTINY ToolWrapper for GlobalPlatformPro -list
"""
@overrides
def run(self):
return super().run_gppro(LIST_ARGS, LIST_FILE)
@overrides
def parse(self):
filename = self.get_outpath(LIST_FILE)
with open(filename, "r") as f:
lines = f.readlines()
gplist = GPList()
i = 0
while i < len(lines):
line = lines[i].rstrip()
i += 1
if line.startswith("ISD"):
gplist.isd = line.split(":")[1].strip().split(" ")[0]
continue
if line.startswith("APP"):
gplist.app.append(line.split(":")[1].strip().split(" ")[0])
continue
if line.startswith("PKG"):
gplist.pkg.append(line.split(":")[1].strip().split(" ")[0])
continue
return [gplist]
| from abc import ABC
from typing import final
from overrides import overrides, EnforceOverrides
from scrutiny.config import Paths
from scrutiny.interfaces import ToolWrapper
from scrutiny.javacard.modules.atr import Atr
from scrutiny.javacard.modules.cplc import Cplc
from scrutiny.utils import execute_cmd, isfile
from scrutiny.javacard.modules.gppro import GPInfo, GPList
INFO_ARGS = ["-info"]
INFO_FILE = "gp_info.txt"
LIST_ARGS = ["-list"]
LIST_FILE = "gp_list.txt"
class GPPro(ToolWrapper, ABC, EnforceOverrides):
"""
SCRUTINY ToolWrapper for GlobalPlatformPro
"""
GP_BIN = "java -jar " + Paths.GPPRO
@final
def run_gppro(self, args, outfile):
"""
Wrapper for running GlobalPlatformPro
"""
outpath = self.get_outpath(outfile)
cmd_line = self.GP_BIN + " " + " ".join(args) + " > " + outpath
if isfile(outpath) and not self.force_mode:
print("Skipping " + cmd_line + " (results found).")
return 0
print("Running " + cmd_line + ".")
return execute_cmd(cmd_line)
class GPProInfo(GPPro):
"""
SCRUTINY ToolWrapper for GlobalPlatformPro -info
"""
@overrides
def run(self):
return super().run_gppro(INFO_ARGS, INFO_FILE)
@overrides
def parse(self):
filename = self.get_outpath(INFO_FILE)
with open(filename, "r") as f:
lines = f.readlines()
gpcplc = Cplc()
gpinfo = GPInfo()
modules = [gpcplc, gpinfo]
gpinfo_discard = ["Card Data:", "Card Capabilities:",
"More information about your card:",
"/parse?ATR"]
i = 0
while i < len(lines):
line = lines[i].rstrip()
i += 1
if line == "" or any([d in line for d in gpinfo_discard]):
continue
if line.startswith("ATR"):
atr = line.split(":")[1].strip()
modules.insert(0, Atr(atr=atr))
continue
if line.startswith("IIN"):
gpinfo.iin = line.split(":")[1].strip()
continue
if line.startswith("CIN"):
gpinfo.cin = line.split(":")[1].strip()
continue
if line.startswith("CPLC"):
first = line.split(":")[1].strip().split("=")
gpcplc.cplc[first[0]] = first[1]
while i < len(lines) and lines[i][0] == " ":
pair = lines[i].strip().split("=")
gpcplc.cplc[pair[0]] = pair[1]
i += 1
continue
if line.startswith("Support"):
gpinfo.supports.append(line)
continue
if line.startswith("Version"):
gpinfo.versions.append(line)
continue
gpinfo.other.append(line)
return modules
class GPProList(GPPro):
"""
SCRUTINY ToolWrapper for GlobalPlatformPro -list
"""
@overrides
def run(self):
return super().run_gppro(LIST_ARGS, LIST_FILE)
@overrides
def parse(self):
filename = self.get_outpath(LIST_FILE)
with open(filename, "r") as f:
lines = f.readlines()
gplist = GPList()
i = 0
while i < len(lines):
line = lines[i].rstrip()
i += 1
if line.startswith("ISD"):
gplist.isd = line.split(":")[1].strip().split(" ")[0]
continue
if line.startswith("APP"):
gplist.app.append(line.split(":")[1].strip().split(" ")[0])
continue
if line.startswith("PKG"):
gplist.pkg.append(line.split(":")[1].strip().split(" ")[0])
continue
return [gplist]
| en | 0.487827 | SCRUTINY ToolWrapper for GlobalPlatformPro Wrapper for running GlobalPlatformPro SCRUTINY ToolWrapper for GlobalPlatformPro -info SCRUTINY ToolWrapper for GlobalPlatformPro -list | 2.674896 | 3 |
utils/experiment.py | ReddyLab/cegs-portal | 0 | 6624471 | import json
import os.path
from typing import IO, Any
from cegs_portal.search.models import Experiment, ExperimentDataFile
from .file import FileMetadata
from .misc import get_delimiter
class ExperimentDatafileMetadata:
description: str
cell_line: str
filename: str
ref_genome: str
ref_genome_patch: str
significance_measure: str
def __init__(self, file_metadata: dict[str, str]):
self.cell_line = file_metadata["cell_line"]
self.description = file_metadata["description"]
self.filename = file_metadata["file"]
self.ref_genome = file_metadata["ref_genome"]
self.ref_genome_patch = file_metadata["ref_genome_patch"]
self.significance_measure = file_metadata["significance_measure"]
def db_save(self, experiment: Experiment):
data_file = ExperimentDataFile(
cell_line=self.cell_line,
description=self.description,
experiment=experiment,
filename=self.filename,
ref_genome=self.ref_genome,
ref_genome_patch=self.ref_genome_patch,
significance_measure=self.significance_measure,
)
data_file.save()
return data_file
class ExperimentMetadata:
data_file_metadata: list[ExperimentDatafileMetadata]
description: str
experiment_type: str
name: str
filename: str
other_file_metadata: list[FileMetadata]
def __init__(self, experiment_dict: dict[str, Any], experiment_filename: str):
self.description = experiment_dict["description"]
self.experiment_type = experiment_dict["type"]
self.name = experiment_dict["name"]
self.filename = experiment_filename
self.data_file_metadata = []
self.other_file_metadata = []
for data in experiment_dict["data"]:
self.data_file_metadata.append(ExperimentDatafileMetadata(data))
for file in experiment_dict["other_files"]:
self.other_file_metadata.append(FileMetadata(file, self.filename, self.experiment_type))
def db_save(self):
experiment = Experiment(
name=self.name,
description=self.description,
experiment_type=self.experiment_type,
)
experiment.save()
for metadata in self.data_file_metadata:
metadata.db_save(experiment)
for file in self.other_file_metadata:
other_file = file.db_save()
experiment.other_files.add(other_file)
return experiment
def db_del(self):
experiment = Experiment.objects.get(name=self.name)
experiment.data_files.all().delete()
experiment.other_files.all().delete()
experiment.delete()
def metadata(self):
base_path = os.path.dirname(self.filename)
for metadata in self.data_file_metadata:
delimiter = get_delimiter(metadata.filename)
ceres_file = open(os.path.join(base_path, metadata.filename), "r", newline="")
yield ceres_file, metadata, delimiter
ceres_file.close()
@classmethod
def json_load(cls, file: IO):
experiment_data = json.load(file)
metadata = ExperimentMetadata(experiment_data, file.name)
return metadata
| import json
import os.path
from typing import IO, Any
from cegs_portal.search.models import Experiment, ExperimentDataFile
from .file import FileMetadata
from .misc import get_delimiter
class ExperimentDatafileMetadata:
description: str
cell_line: str
filename: str
ref_genome: str
ref_genome_patch: str
significance_measure: str
def __init__(self, file_metadata: dict[str, str]):
self.cell_line = file_metadata["cell_line"]
self.description = file_metadata["description"]
self.filename = file_metadata["file"]
self.ref_genome = file_metadata["ref_genome"]
self.ref_genome_patch = file_metadata["ref_genome_patch"]
self.significance_measure = file_metadata["significance_measure"]
def db_save(self, experiment: Experiment):
data_file = ExperimentDataFile(
cell_line=self.cell_line,
description=self.description,
experiment=experiment,
filename=self.filename,
ref_genome=self.ref_genome,
ref_genome_patch=self.ref_genome_patch,
significance_measure=self.significance_measure,
)
data_file.save()
return data_file
class ExperimentMetadata:
data_file_metadata: list[ExperimentDatafileMetadata]
description: str
experiment_type: str
name: str
filename: str
other_file_metadata: list[FileMetadata]
def __init__(self, experiment_dict: dict[str, Any], experiment_filename: str):
self.description = experiment_dict["description"]
self.experiment_type = experiment_dict["type"]
self.name = experiment_dict["name"]
self.filename = experiment_filename
self.data_file_metadata = []
self.other_file_metadata = []
for data in experiment_dict["data"]:
self.data_file_metadata.append(ExperimentDatafileMetadata(data))
for file in experiment_dict["other_files"]:
self.other_file_metadata.append(FileMetadata(file, self.filename, self.experiment_type))
def db_save(self):
experiment = Experiment(
name=self.name,
description=self.description,
experiment_type=self.experiment_type,
)
experiment.save()
for metadata in self.data_file_metadata:
metadata.db_save(experiment)
for file in self.other_file_metadata:
other_file = file.db_save()
experiment.other_files.add(other_file)
return experiment
def db_del(self):
experiment = Experiment.objects.get(name=self.name)
experiment.data_files.all().delete()
experiment.other_files.all().delete()
experiment.delete()
def metadata(self):
base_path = os.path.dirname(self.filename)
for metadata in self.data_file_metadata:
delimiter = get_delimiter(metadata.filename)
ceres_file = open(os.path.join(base_path, metadata.filename), "r", newline="")
yield ceres_file, metadata, delimiter
ceres_file.close()
@classmethod
def json_load(cls, file: IO):
experiment_data = json.load(file)
metadata = ExperimentMetadata(experiment_data, file.name)
return metadata
| none | 1 | 2.646439 | 3 | |
tests/clap.py | Vykstorm/mowaysim | 0 | 6624472 | <gh_stars>0
import sys, atexit
from time import sleep
from mowaysim import *
print 'Executing ' + __name__ + ' test...'
if __name__ == '__main__':
atexit.register(exit_mow)
channel = 8
moway.usbinit_moway()
ret = moway.init_moway(channel)
if ret == 0:
print 'Moway RFUSB Connected'
else:
print 'Moway RFUSB not connected. Exit'
exit(-1)
moway.set_time(20)
moway.command_moway(CMD_GREENLEDON,0)
while True:
while moway.get_mic() < 40 :
print moway.get_mic()
sleep(0.1)
print moway.get_mic()
moway.command_moway(CMD_GREENLEDOFF,0)
moway.command_moway(CMD_GO)
moway.wait_mot_end(0)
moway.command_moway(CMD_TURN_AROUND,0)
moway.wait_mot_end(0)
moway.command_moway(CMD_GREENLEDON,0)
sleep(0.5)
| import sys, atexit
from time import sleep
from mowaysim import *
print 'Executing ' + __name__ + ' test...'
if __name__ == '__main__':
atexit.register(exit_mow)
channel = 8
moway.usbinit_moway()
ret = moway.init_moway(channel)
if ret == 0:
print 'Moway RFUSB Connected'
else:
print 'Moway RFUSB not connected. Exit'
exit(-1)
moway.set_time(20)
moway.command_moway(CMD_GREENLEDON,0)
while True:
while moway.get_mic() < 40 :
print moway.get_mic()
sleep(0.1)
print moway.get_mic()
moway.command_moway(CMD_GREENLEDOFF,0)
moway.command_moway(CMD_GO)
moway.wait_mot_end(0)
moway.command_moway(CMD_TURN_AROUND,0)
moway.wait_mot_end(0)
moway.command_moway(CMD_GREENLEDON,0)
sleep(0.5) | none | 1 | 2.363906 | 2 | |
_unittests/ut_dnotebooks/test_LONG_2A_notebook_cffi.py | mohamedelkansouli/Ensae_py | 0 | 6624473 | <filename>_unittests/ut_dnotebooks/test_LONG_2A_notebook_cffi.py
# -*- coding: utf-8 -*-
"""
@brief test log(time=180s)
"""
import sys
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder
from pyquickhelper.pycode import add_missing_development_version
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
class TestSKIPNotebookRunner2a_cffi (unittest.TestCase):
def setUp(self):
add_missing_development_version(["pymyinstall", "pyensae", "pymmails", "jyquickhelper"],
__file__, hide=True)
def test_notebook_runner_2a_cffi(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
from src.ensae_teaching_cs.automation.notebook_test_helper import ls_notebooks, execute_notebooks, clean_function_1a
temp = get_temp_folder(__file__, "temp_notebook2a_")
keepnote = ls_notebooks("2a")
def filter(i, n):
if "cffi" not in n:
return False
return True
execute_notebooks(temp, keepnote, filter, fLOG=fLOG,
clean_function=clean_function_1a,
dump=src.ensae_teaching_cs)
if __name__ == "__main__":
unittest.main()
| <filename>_unittests/ut_dnotebooks/test_LONG_2A_notebook_cffi.py
# -*- coding: utf-8 -*-
"""
@brief test log(time=180s)
"""
import sys
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder
from pyquickhelper.pycode import add_missing_development_version
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
class TestSKIPNotebookRunner2a_cffi (unittest.TestCase):
def setUp(self):
add_missing_development_version(["pymyinstall", "pyensae", "pymmails", "jyquickhelper"],
__file__, hide=True)
def test_notebook_runner_2a_cffi(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
from src.ensae_teaching_cs.automation.notebook_test_helper import ls_notebooks, execute_notebooks, clean_function_1a
temp = get_temp_folder(__file__, "temp_notebook2a_")
keepnote = ls_notebooks("2a")
def filter(i, n):
if "cffi" not in n:
return False
return True
execute_notebooks(temp, keepnote, filter, fLOG=fLOG,
clean_function=clean_function_1a,
dump=src.ensae_teaching_cs)
if __name__ == "__main__":
unittest.main()
| en | 0.800325 | # -*- coding: utf-8 -*- @brief test log(time=180s) | 2.087706 | 2 |
day05b.py | jjhelmus/adventofcode | 5 | 6624474 | from __future__ import print_function
def is_nice(string):
# repeats with exactly one letter between them
if not any([string[i] == string[i+2] for i in range(len(string)-2)]):
return False
# pair appears at least twice
if any([(string.count(string[i:i+2])>=2) for i in range(len(string)-2)]):
return True
return False
test_strings = [
'qjhvhtzxzqqjkmpb',
'xxyxx',
'uurcxstgmygtbstg',
'ieodomkazucvgmuy']
for test_string in test_strings:
print(test_string, ":", is_nice(test_string))
f = open('inputs/input05.txt')
nice_strings = 0
for line in f:
if is_nice(line.strip()):
nice_strings += 1
print("Nice strings:", nice_strings)
f.close()
| from __future__ import print_function
def is_nice(string):
# repeats with exactly one letter between them
if not any([string[i] == string[i+2] for i in range(len(string)-2)]):
return False
# pair appears at least twice
if any([(string.count(string[i:i+2])>=2) for i in range(len(string)-2)]):
return True
return False
test_strings = [
'qjhvhtzxzqqjkmpb',
'xxyxx',
'uurcxstgmygtbstg',
'ieodomkazucvgmuy']
for test_string in test_strings:
print(test_string, ":", is_nice(test_string))
f = open('inputs/input05.txt')
nice_strings = 0
for line in f:
if is_nice(line.strip()):
nice_strings += 1
print("Nice strings:", nice_strings)
f.close()
| en | 0.985873 | # repeats with exactly one letter between them # pair appears at least twice | 3.763869 | 4 |
tests/renderers/httpdomain/test_render_json_schema_description.py | nickswebsite/openapi | 0 | 6624475 | <reponame>nickswebsite/openapi<filename>tests/renderers/httpdomain/test_render_json_schema_description.py
"""OpenAPI spec renderer: render_json_schema_description."""
import textwrap
import pytest
from sphinxcontrib.openapi import renderers
def textify(generator):
return "\n".join(generator)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop_a:
:{typedirective} prop_a: string
:{directive} prop_b:
:{typedirective} prop_b: object
:{directive} prop_b.eggs:
:{typedirective} prop_b.eggs: boolean
:{directive} prop_c:
:{typedirective} prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjsonarr", "reqjsonarrtype", id="req"),
pytest.param("res", "resjsonarr", "resjsonarrtype", id="res"),
],
)
def test_render_json_schema_description_root_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON array in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: array
items:
type: object
properties:
prop:
type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop:
:{typedirective} prop: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_root_unsupported(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is not generated for unsupported type in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
type: {schema_type}
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
"""\
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_any_of_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for anyOf JSON object in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
anyOf:
- type: object
properties:
prop_a:
type: string
prop_b:
type: number
- type: object
properties:
prop_c:
type: object
properties:
eggs:
type: boolean
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop_a:
:{typedirective} prop_a: string
:{directive} prop_b:
:{typedirective} prop_b: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjsonarr", "reqjsonarrtype", id="req"),
pytest.param("res", "resjsonarr", "resjsonarrtype", id="res"),
],
)
def test_render_json_schema_description_root_any_of_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for anyOf JSON array in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
anyOf:
- type: array
items:
type: object
properties:
prop:
type: string
- type: array
items:
type: object
properties:
prop:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop:
:{typedirective} prop: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_root_any_of_unsupported(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is not generated for anyOf unsupported type in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
anyOf:
- type: {schema_type}
- type: object
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
"""\
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_one_of_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for oneOf JSON object in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
oneOf:
- type: object
properties:
prop_a:
type: string
prop_b:
type: number
- type: object
properties:
prop_c:
type: object
properties:
eggs:
type: boolean
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop_a:
:{typedirective} prop_a: string
:{directive} prop_b:
:{typedirective} prop_b: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjsonarr", "reqjsonarrtype", id="req"),
pytest.param("res", "resjsonarr", "resjsonarrtype", id="res"),
],
)
def test_render_json_schema_description_root_one_of_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for oneOf JSON array in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
oneOf:
- type: array
items:
type: object
properties:
prop:
type: string
- type: array
items:
type: object
properties:
prop:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop:
:{typedirective} prop: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_root_one_of_unsupported(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is not generated for oneOf unsupported type in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
oneOf:
- type: {schema_type}
- type: object
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
"""\
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_all_of_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for allOf in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
allOf:
- properties:
name:
properties:
first:
type: string
age:
type: integer
- properties:
name:
properties:
last:
type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} name:
:{typedirective} name: object
:{directive} name.first:
:{typedirective} name.first: string
:{directive} name.last:
:{typedirective} name.last: string
:{directive} age:
:{typedirective} age: integer
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_primitive(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is generated for primitive types."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
type: object
properties:
some_key:
type: "{schema_type}"
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: {schema_type}
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_object_implicit(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for implicit JSON object."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
properties:
prop_a:
type: string
prop_b:
properties:
eggs:
type: boolean
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON array."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: array
items:
type: object
properties:
prop_a:
type: string
prop_b:
type: array
items:
type: number
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root[]:
:{typedirective} root[]: object
:{directive} root[].prop_a:
:{typedirective} root[].prop_a: string
:{directive} root[].prop_b[]:
:{typedirective} root[].prop_b[]: number
:{directive} root[].prop_c:
:{typedirective} root[].prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_array_implicit(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for implicit JSON array."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
items:
type: object
properties:
prop_a:
type: string
prop_b:
items:
type: number
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root[]:
:{typedirective} root[]: object
:{directive} root[].prop_a:
:{typedirective} root[].prop_a: string
:{directive} root[].prop_b[]:
:{typedirective} root[].prop_b[]: number
:{directive} root[].prop_c:
:{typedirective} root[].prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_format(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for formatted types."""
    # A property 'format' is rendered appended to the type as "<type>:<format>".
    schema = oas_fragment(
        """
        type: object
        properties:
          created_at:
            type: string
            format: date-time
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} created_at:
        :{typedirective} created_at: string:date-time
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_deprecated(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated with deprecated marker."""
    # 'deprecated: true' on a property must be reflected in the type directive.
    schema = oas_fragment(
        """
        type: object
        properties:
          created_at:
            type: string
            deprecated: true
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} created_at:
        :{typedirective} created_at: string, deprecated
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_required(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object w/ required marker."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
required: [eggs]
prop_c:
type: number
required: [prop_a]
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string, required
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean, required
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_deprecated_and_required(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object w/ deprecated & required markers."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
deprecated: true
required: [eggs]
prop_c:
type: number
required: [prop_a]
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string, required
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean, deprecated, required
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with description."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a resource creation time
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a resource creation time
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description_commonmark_default(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with CommonMark description by default."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a `resource` creation __time__
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a ``resource`` creation **time**
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description_commonmark(
fakestate, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with CommonMark description."""
testrenderer = renderers.HttpdomainRenderer(fakestate, {"markup": "commonmark"})
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a `resource` creation __time__
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a ``resource`` creation **time**
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description_restructuredtext(
fakestate, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with reStructuredText description."""
testrenderer = renderers.HttpdomainRenderer(
fakestate, {"markup": "restructuredtext"}
)
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a `resource` creation __time__
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a `resource` creation __time__
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_any_of(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for anyOf.

    Only the first anyOf subschema's type ends up in the rendered markup.
    """
    markup = textify(
        testrenderer.render_json_schema_description(
            # NOTE: plain string literal — the original had an f-prefix with
            # no placeholders (flake8 F541), which is a latent injection
            # hazard if braces are ever added to the YAML.
            oas_fragment(
                """
                type: object
                properties:
                  some_key:
                    anyOf:
                      - type: integer
                      - type: string
                """
            ),
            req_or_res,
        )
    )
    assert markup == textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: integer
        """.rstrip()
    )
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_one_of(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for oneOf."""
    # Only the first oneOf alternative's type is rendered.
    schema = oas_fragment(
        """
        type: object
        properties:
          some_key:
            oneOf:
              - type: integer
              - type: string
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: integer
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_all_of(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for allOf."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
person:
allOf:
- properties:
name:
properties:
first:
type: string
age:
type: integer
- properties:
name:
properties:
last:
type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} person:
:{typedirective} person: object
:{directive} person.name:
:{typedirective} person.name: object
:{directive} person.name.first:
:{typedirective} person.name.first: string
:{directive} person.name.last:
:{typedirective} person.name.last: string
:{directive} person.age:
:{typedirective} person.age: integer
""".rstrip()
)
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_all_of_logical_impossible(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for allOf that is logical impossible."""
    # No value can satisfy both subschemas; the renderer keeps the type from
    # the last merged subschema ("string") rather than failing.
    schema = oas_fragment(
        """
        type: object
        properties:
          some_key:
            allOf:
              - type: integer
              - type: string
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: string
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_any_of_shared_type(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for anyOf w/ shared 'type'."""
    # The subschemas only add length constraints, so the shared type wins.
    schema = oas_fragment(
        """
        type: object
        properties:
          some_key:
            type: string
            anyOf:
              - minLength: 3
              - maxLength: 5
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: string
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_one_of_shared_type(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for oneOf w/ shared 'type'."""
    # The alternatives only add length constraints, so the shared type wins.
    schema = oas_fragment(
        """
        type: object
        properties:
          some_key:
            type: string
            oneOf:
              - minLength: 3
              - maxLength: 5
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: string
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_all_of_shared_type(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for allOf w/ shared 'type'.

    The allOf subschemas only add length constraints, so the merged schema
    keeps the shared 'string' type.
    """
    markup = textify(
        testrenderer.render_json_schema_description(
            # FIX: the key was misspelled 'alOf', which YAML silently treats
            # as an unknown property — the test never exercised allOf at all.
            oas_fragment(
                """
                type: object
                properties:
                  some_key:
                    type: string
                    allOf:
                      - minLength: 3
                      - maxLength: 5
                """
            ),
            req_or_res,
        )
    )
    assert markup == textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: string
        """.rstrip()
    )
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_not(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for JSON *not*."""
    # A 'not' subschema yields no type information, only the bare directive.
    schema = oas_fragment(
        """
        type: object
        properties:
          root:
            not:
              type: boolean
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} root:
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_enum(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for JSON enum."""
    # An enum with an explicit type renders as "<type>:enum".
    schema = oas_fragment(
        """
        type: object
        properties:
          root:
            type: string
            enum:
              - foo
              - bar
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} root:
        :{typedirective} root: string:enum
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_enum_wo_type(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for JSON enum wo/ type."""
    # Without an explicit type the rendered type is just "enum".
    schema = oas_fragment(
        """
        type: object
        properties:
          root:
            enum:
              - foo
              - bar
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} root:
        :{typedirective} root: enum
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
@pytest.mark.parametrize(
    ["schema_type"],
    [
        pytest.param("null"),
        pytest.param("boolean"),
        pytest.param("number"),
        pytest.param("string"),
        pytest.param("integer"),
    ],
)
def test_render_json_schema_description_with_references(
    testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
    """JSON schema description resolves $ref pointers against the spec."""
    schema = oas_fragment(
        f"""
        definitions:
          Foo:
            type: "{schema_type}"
        """
    )
    # FIX: plain string literal — the original had an f-prefix with no
    # placeholders (flake8 F541).
    fragment = oas_fragment(
        """
        type: object
        properties:
          some_key:
            $ref: "#/definitions/Foo"
        """
    )
    # The $ref target lives in the overridden spec, not in the fragment itself.
    with testrenderer.override_schema(schema):
        markup = textify(
            testrenderer.render_json_schema_description(fragment, req_or_res)
        )
    assert markup == textwrap.dedent(
        f"""\
        :{directive} some_key:
        :{typedirective} some_key: {schema_type}
        """
    ).rstrip()
| """OpenAPI spec renderer: render_json_schema_description."""
import textwrap
import pytest
from sphinxcontrib.openapi import renderers
def textify(generator):
    """Collapse the lines yielded by a renderer generator into one string."""
    lines = list(generator)
    return "\n".join(lines)
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
def test_render_json_schema_description_root_object(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for JSON object in root."""
    # Top-level properties are rendered without any prefix; nested ones use
    # dotted paths (e.g. "prop_b.eggs").
    schema = oas_fragment(
        """
        type: object
        properties:
          prop_a:
            type: string
          prop_b:
            type: object
            properties:
              eggs:
                type: boolean
          prop_c:
            type: number
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} prop_a:
        :{typedirective} prop_a: string
        :{directive} prop_b:
        :{typedirective} prop_b: object
        :{directive} prop_b.eggs:
        :{typedirective} prop_b.eggs: boolean
        :{directive} prop_c:
        :{typedirective} prop_c: number
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjsonarr", "reqjsonarrtype", id="req"),
        pytest.param("res", "resjsonarr", "resjsonarrtype", id="res"),
    ],
)
def test_render_json_schema_description_root_array(
    testrenderer, oas_fragment, req_or_res, directive, typedirective
):
    """JSON schema description is generated for JSON array in root."""
    # A root array uses the *jsonarr directives and describes the item schema.
    schema = oas_fragment(
        """
        type: array
        items:
          type: object
          properties:
            prop:
              type: string
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    expected = textwrap.dedent(
        f"""\
        :{directive} prop:
        :{typedirective} prop: string
        """.rstrip()
    )
    assert markup == expected
@pytest.mark.parametrize(
    ["req_or_res", "directive", "typedirective"],
    [
        pytest.param("req", "reqjson", "reqjsonobj", id="req"),
        pytest.param("res", "resjson", "resjsonobj", id="res"),
    ],
)
@pytest.mark.parametrize(
    ["schema_type"],
    [
        pytest.param("null"),
        pytest.param("boolean"),
        pytest.param("number"),
        pytest.param("string"),
        pytest.param("integer"),
    ],
)
def test_render_json_schema_description_root_unsupported(
    testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
    """JSON schema description is not generated for unsupported type in root."""
    schema = oas_fragment(
        f"""
        type: {schema_type}
        """
    )
    markup = textify(testrenderer.render_json_schema_description(schema, req_or_res))
    # A primitive at the root cannot be rendered as a field list, so the
    # renderer emits nothing at all.
    assert markup == ""
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_any_of_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for anyOf JSON object in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
anyOf:
- type: object
properties:
prop_a:
type: string
prop_b:
type: number
- type: object
properties:
prop_c:
type: object
properties:
eggs:
type: boolean
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop_a:
:{typedirective} prop_a: string
:{directive} prop_b:
:{typedirective} prop_b: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjsonarr", "reqjsonarrtype", id="req"),
pytest.param("res", "resjsonarr", "resjsonarrtype", id="res"),
],
)
def test_render_json_schema_description_root_any_of_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for anyOf JSON array in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
anyOf:
- type: array
items:
type: object
properties:
prop:
type: string
- type: array
items:
type: object
properties:
prop:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop:
:{typedirective} prop: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_root_any_of_unsupported(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is not generated for anyOf unsupported type in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
anyOf:
- type: {schema_type}
- type: object
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
"""\
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_one_of_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for oneOf JSON object in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
oneOf:
- type: object
properties:
prop_a:
type: string
prop_b:
type: number
- type: object
properties:
prop_c:
type: object
properties:
eggs:
type: boolean
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop_a:
:{typedirective} prop_a: string
:{directive} prop_b:
:{typedirective} prop_b: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjsonarr", "reqjsonarrtype", id="req"),
pytest.param("res", "resjsonarr", "resjsonarrtype", id="res"),
],
)
def test_render_json_schema_description_root_one_of_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for oneOf JSON array in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
oneOf:
- type: array
items:
type: object
properties:
prop:
type: string
- type: array
items:
type: object
properties:
prop:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} prop:
:{typedirective} prop: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_root_one_of_unsupported(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is not generated for oneOf unsupported type in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
oneOf:
- type: {schema_type}
- type: object
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
"""\
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_root_all_of_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for allOf in root."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
allOf:
- properties:
name:
properties:
first:
type: string
age:
type: integer
- properties:
name:
properties:
last:
type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} name:
:{typedirective} name: object
:{directive} name.first:
:{typedirective} name.first: string
:{directive} name.last:
:{typedirective} name.last: string
:{directive} age:
:{typedirective} age: integer
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_primitive(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
"""JSON schema description is generated for primitive types."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
type: object
properties:
some_key:
type: "{schema_type}"
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: {schema_type}
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_object(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_object_implicit(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for implicit JSON object."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
properties:
prop_a:
type: string
prop_b:
properties:
eggs:
type: boolean
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_array(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON array."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: array
items:
type: object
properties:
prop_a:
type: string
prop_b:
type: array
items:
type: number
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root[]:
:{typedirective} root[]: object
:{directive} root[].prop_a:
:{typedirective} root[].prop_a: string
:{directive} root[].prop_b[]:
:{typedirective} root[].prop_b[]: number
:{directive} root[].prop_c:
:{typedirective} root[].prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_array_implicit(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for implicit JSON array."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
items:
type: object
properties:
prop_a:
type: string
prop_b:
items:
type: number
prop_c:
type: number
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root[]:
:{typedirective} root[]: object
:{directive} root[].prop_a:
:{typedirective} root[].prop_a: string
:{directive} root[].prop_b[]:
:{typedirective} root[].prop_b[]: number
:{directive} root[].prop_c:
:{typedirective} root[].prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_format(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for formatted types."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
created_at:
type: string
format: date-time
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
:{typedirective} created_at: string:date-time
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_deprecated(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with deprecated marker."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
created_at:
type: string
deprecated: true
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
:{typedirective} created_at: string, deprecated
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_required(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object w/ required marker."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
required: [eggs]
prop_c:
type: number
required: [prop_a]
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string, required
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean, required
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_deprecated_and_required(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON object w/ deprecated & required markers."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: object
properties:
prop_a:
type: string
prop_b:
type: object
properties:
eggs:
type: boolean
deprecated: true
required: [eggs]
prop_c:
type: number
required: [prop_a]
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: object
:{directive} root.prop_a:
:{typedirective} root.prop_a: string, required
:{directive} root.prop_b:
:{typedirective} root.prop_b: object
:{directive} root.prop_b.eggs:
:{typedirective} root.prop_b.eggs: boolean, deprecated, required
:{directive} root.prop_c:
:{typedirective} root.prop_c: number
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with description."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a resource creation time
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a resource creation time
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description_commonmark_default(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with CommonMark description by default."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a `resource` creation __time__
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a ``resource`` creation **time**
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description_commonmark(
fakestate, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with CommonMark description."""
testrenderer = renderers.HttpdomainRenderer(fakestate, {"markup": "commonmark"})
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a `resource` creation __time__
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a ``resource`` creation **time**
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_description_restructuredtext(
fakestate, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated with reStructuredText description."""
testrenderer = renderers.HttpdomainRenderer(
fakestate, {"markup": "restructuredtext"}
)
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
description: a resource representation
properties:
created_at:
type: string
description: a `resource` creation __time__
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} created_at:
a `resource` creation __time__
:{typedirective} created_at: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_any_of(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for anyOf."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
f"""
type: object
properties:
some_key:
anyOf:
- type: integer
- type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: integer
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_one_of(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for oneOf."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
some_key:
oneOf:
- type: integer
- type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: integer
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_all_of(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for allOf."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
person:
allOf:
- properties:
name:
properties:
first:
type: string
age:
type: integer
- properties:
name:
properties:
last:
type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} person:
:{typedirective} person: object
:{directive} person.name:
:{typedirective} person.name: object
:{directive} person.name.first:
:{typedirective} person.name.first: string
:{directive} person.name.last:
:{typedirective} person.name.last: string
:{directive} person.age:
:{typedirective} person.age: integer
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_all_of_logical_impossible(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for allOf that is logical impossible."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
some_key:
allOf:
- type: integer
- type: string
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_any_of_shared_type(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for anyOf w/ shared 'type'."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
some_key:
type: string
anyOf:
- minLength: 3
- maxLength: 5
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_one_of_shared_type(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for oneOf w/ shared 'type'."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
some_key:
type: string
oneOf:
- minLength: 3
- maxLength: 5
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_all_of_shared_type(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for allOf w/ shared 'type'."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
some_key:
type: string
alOf:
- minLength: 3
- maxLength: 5
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: string
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_not(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON *not*."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
not:
type: boolean
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_enum(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON enum."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
type: string
enum:
- foo
- bar
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: string:enum
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
def test_render_json_schema_description_enum_wo_type(
testrenderer, oas_fragment, req_or_res, directive, typedirective
):
"""JSON schema description is generated for JSON enum wo/ type."""
markup = textify(
testrenderer.render_json_schema_description(
oas_fragment(
"""
type: object
properties:
root:
enum:
- foo
- bar
"""
),
req_or_res,
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} root:
:{typedirective} root: enum
""".rstrip()
)
@pytest.mark.parametrize(
["req_or_res", "directive", "typedirective"],
[
pytest.param("req", "reqjson", "reqjsonobj", id="req"),
pytest.param("res", "resjson", "resjsonobj", id="res"),
],
)
@pytest.mark.parametrize(
["schema_type"],
[
pytest.param("null"),
pytest.param("boolean"),
pytest.param("number"),
pytest.param("string"),
pytest.param("integer"),
],
)
def test_render_json_schema_description_with_references(
testrenderer, oas_fragment, schema_type, req_or_res, directive, typedirective
):
schema = oas_fragment(
f"""
definitions:
Foo:
type: "{schema_type}"
"""
)
fragment = oas_fragment(
f"""
type: object
properties:
some_key:
$ref: "#/definitions/Foo"
"""
)
with testrenderer.override_schema(schema):
markup = textify(
testrenderer.render_json_schema_description(
fragment,
req_or_res
)
)
assert markup == textwrap.dedent(
f"""\
:{directive} some_key:
:{typedirective} some_key: {schema_type}
"""
).rstrip() | en | 0.498391 | OpenAPI spec renderer: render_json_schema_description. JSON schema description is generated for JSON object in root. type: object properties: prop_a: type: string prop_b: type: object properties: eggs: type: boolean prop_c: type: number \ :{directive} prop_a: :{typedirective} prop_a: string :{directive} prop_b: :{typedirective} prop_b: object :{directive} prop_b.eggs: :{typedirective} prop_b.eggs: boolean :{directive} prop_c: :{typedirective} prop_c: number JSON schema description is generated for JSON array in root. type: array items: type: object properties: prop: type: string \ :{directive} prop: :{typedirective} prop: string JSON schema description is not generated for unsupported type in root. type: {schema_type} \ JSON schema description is generated for anyOf JSON object in root. anyOf: - type: object properties: prop_a: type: string prop_b: type: number - type: object properties: prop_c: type: object properties: eggs: type: boolean \ :{directive} prop_a: :{typedirective} prop_a: string :{directive} prop_b: :{typedirective} prop_b: number JSON schema description is generated for anyOf JSON array in root. anyOf: - type: array items: type: object properties: prop: type: string - type: array items: type: object properties: prop: type: number \ :{directive} prop: :{typedirective} prop: string JSON schema description is not generated for anyOf unsupported type in root. anyOf: - type: {schema_type} - type: object \ JSON schema description is generated for oneOf JSON object in root. oneOf: - type: object properties: prop_a: type: string prop_b: type: number - type: object properties: prop_c: type: object properties: eggs: type: boolean \ :{directive} prop_a: :{typedirective} prop_a: string :{directive} prop_b: :{typedirective} prop_b: number JSON schema description is generated for oneOf JSON array in root. 
oneOf: - type: array items: type: object properties: prop: type: string - type: array items: type: object properties: prop: type: number \ :{directive} prop: :{typedirective} prop: string JSON schema description is not generated for oneOf unsupported type in root. oneOf: - type: {schema_type} - type: object \ JSON schema description is generated for allOf in root. allOf: - properties: name: properties: first: type: string age: type: integer - properties: name: properties: last: type: string \ :{directive} name: :{typedirective} name: object :{directive} name.first: :{typedirective} name.first: string :{directive} name.last: :{typedirective} name.last: string :{directive} age: :{typedirective} age: integer JSON schema description is generated for primitive types. type: object properties: some_key: type: "{schema_type}" \ :{directive} some_key: :{typedirective} some_key: {schema_type} JSON schema description is generated for JSON object. type: object properties: root: type: object properties: prop_a: type: string prop_b: type: object properties: eggs: type: boolean prop_c: type: number \ :{directive} root: :{typedirective} root: object :{directive} root.prop_a: :{typedirective} root.prop_a: string :{directive} root.prop_b: :{typedirective} root.prop_b: object :{directive} root.prop_b.eggs: :{typedirective} root.prop_b.eggs: boolean :{directive} root.prop_c: :{typedirective} root.prop_c: number JSON schema description is generated for implicit JSON object. type: object properties: root: properties: prop_a: type: string prop_b: properties: eggs: type: boolean prop_c: type: number \ :{directive} root: :{typedirective} root: object :{directive} root.prop_a: :{typedirective} root.prop_a: string :{directive} root.prop_b: :{typedirective} root.prop_b: object :{directive} root.prop_b.eggs: :{typedirective} root.prop_b.eggs: boolean :{directive} root.prop_c: :{typedirective} root.prop_c: number JSON schema description is generated for JSON array. 
type: object properties: root: type: array items: type: object properties: prop_a: type: string prop_b: type: array items: type: number prop_c: type: number \ :{directive} root[]: :{typedirective} root[]: object :{directive} root[].prop_a: :{typedirective} root[].prop_a: string :{directive} root[].prop_b[]: :{typedirective} root[].prop_b[]: number :{directive} root[].prop_c: :{typedirective} root[].prop_c: number JSON schema description is generated for implicit JSON array. type: object properties: root: items: type: object properties: prop_a: type: string prop_b: items: type: number prop_c: type: number \ :{directive} root[]: :{typedirective} root[]: object :{directive} root[].prop_a: :{typedirective} root[].prop_a: string :{directive} root[].prop_b[]: :{typedirective} root[].prop_b[]: number :{directive} root[].prop_c: :{typedirective} root[].prop_c: number JSON schema description is generated for formatted types. type: object properties: created_at: type: string format: date-time \ :{directive} created_at: :{typedirective} created_at: string:date-time JSON schema description is generated with deprecated marker. type: object properties: created_at: type: string deprecated: true \ :{directive} created_at: :{typedirective} created_at: string, deprecated JSON schema description is generated for JSON object w/ required marker. type: object properties: root: type: object properties: prop_a: type: string prop_b: type: object properties: eggs: type: boolean required: [eggs] prop_c: type: number required: [prop_a] \ :{directive} root: :{typedirective} root: object :{directive} root.prop_a: :{typedirective} root.prop_a: string, required :{directive} root.prop_b: :{typedirective} root.prop_b: object :{directive} root.prop_b.eggs: :{typedirective} root.prop_b.eggs: boolean, required :{directive} root.prop_c: :{typedirective} root.prop_c: number JSON schema description is generated for JSON object w/ deprecated & required markers. 
type: object properties: root: type: object properties: prop_a: type: string prop_b: type: object properties: eggs: type: boolean deprecated: true required: [eggs] prop_c: type: number required: [prop_a] \ :{directive} root: :{typedirective} root: object :{directive} root.prop_a: :{typedirective} root.prop_a: string, required :{directive} root.prop_b: :{typedirective} root.prop_b: object :{directive} root.prop_b.eggs: :{typedirective} root.prop_b.eggs: boolean, deprecated, required :{directive} root.prop_c: :{typedirective} root.prop_c: number JSON schema description is generated with description. type: object description: a resource representation properties: created_at: type: string description: a resource creation time \ :{directive} created_at: a resource creation time :{typedirective} created_at: string JSON schema description is generated with CommonMark description by default. type: object description: a resource representation properties: created_at: type: string description: a `resource` creation __time__ \ :{directive} created_at: a ``resource`` creation **time** :{typedirective} created_at: string JSON schema description is generated with CommonMark description. type: object description: a resource representation properties: created_at: type: string description: a `resource` creation __time__ \ :{directive} created_at: a ``resource`` creation **time** :{typedirective} created_at: string JSON schema description is generated with reStructuredText description. type: object description: a resource representation properties: created_at: type: string description: a `resource` creation __time__ \ :{directive} created_at: a `resource` creation __time__ :{typedirective} created_at: string JSON schema description is generated for anyOf. type: object properties: some_key: anyOf: - type: integer - type: string \ :{directive} some_key: :{typedirective} some_key: integer JSON schema description is generated for oneOf. 
type: object properties: some_key: oneOf: - type: integer - type: string \ :{directive} some_key: :{typedirective} some_key: integer JSON schema description is generated for allOf. type: object properties: person: allOf: - properties: name: properties: first: type: string age: type: integer - properties: name: properties: last: type: string \ :{directive} person: :{typedirective} person: object :{directive} person.name: :{typedirective} person.name: object :{directive} person.name.first: :{typedirective} person.name.first: string :{directive} person.name.last: :{typedirective} person.name.last: string :{directive} person.age: :{typedirective} person.age: integer JSON schema description is generated for allOf that is logical impossible. type: object properties: some_key: allOf: - type: integer - type: string \ :{directive} some_key: :{typedirective} some_key: string JSON schema description is generated for anyOf w/ shared 'type'. type: object properties: some_key: type: string anyOf: - minLength: 3 - maxLength: 5 \ :{directive} some_key: :{typedirective} some_key: string JSON schema description is generated for oneOf w/ shared 'type'. type: object properties: some_key: type: string oneOf: - minLength: 3 - maxLength: 5 \ :{directive} some_key: :{typedirective} some_key: string JSON schema description is generated for allOf w/ shared 'type'. type: object properties: some_key: type: string alOf: - minLength: 3 - maxLength: 5 \ :{directive} some_key: :{typedirective} some_key: string JSON schema description is generated for JSON *not*. type: object properties: root: not: type: boolean \ :{directive} root: JSON schema description is generated for JSON enum. type: object properties: root: type: string enum: - foo - bar \ :{directive} root: :{typedirective} root: string:enum JSON schema description is generated for JSON enum wo/ type. 
type: object properties: root: enum: - foo - bar \ :{directive} root: :{typedirective} root: enum definitions: Foo: type: "{schema_type}" type: object properties: some_key: $ref: "#/definitions/Foo" \ :{directive} some_key: :{typedirective} some_key: {schema_type} | 2.253278 | 2 |
tests/components/test_alert.py | don66/home-assistant | 37 | 6624476 | <gh_stars>10-100
"""The tests for the Alert component."""
# pylint: disable=protected-access
from copy import deepcopy
import unittest
from homeassistant.setup import setup_component
from homeassistant.core import callback
import homeassistant.components.alert as alert
import homeassistant.components.notify as notify
from homeassistant.const import (CONF_ENTITY_ID, STATE_IDLE, CONF_NAME,
CONF_STATE, STATE_ON, STATE_OFF)
from tests.common import get_test_home_assistant
NAME = "alert_test"
DONE_MESSAGE = "alert_gone"
NOTIFIER = 'test'
TEST_CONFIG = \
{alert.DOMAIN: {
NAME: {
CONF_NAME: NAME,
alert.CONF_DONE_MESSAGE: DONE_MESSAGE,
CONF_ENTITY_ID: "sensor.test",
CONF_STATE: STATE_ON,
alert.CONF_REPEAT: 30,
alert.CONF_SKIP_FIRST: False,
alert.CONF_NOTIFIERS: [NOTIFIER]}
}}
TEST_NOACK = [NAME, NAME, DONE_MESSAGE, "sensor.test",
STATE_ON, [30], False, NOTIFIER, False]
ENTITY_ID = alert.ENTITY_ID_FORMAT.format(NAME)
# pylint: disable=invalid-name
class TestAlert(unittest.TestCase):
"""Test the alert module."""
def setUp(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def test_is_on(self):
"""Test is_on method."""
self.hass.states.set(ENTITY_ID, STATE_ON)
self.hass.block_till_done()
self.assertTrue(alert.is_on(self.hass, ENTITY_ID))
self.hass.states.set(ENTITY_ID, STATE_OFF)
self.hass.block_till_done()
self.assertFalse(alert.is_on(self.hass, ENTITY_ID))
def test_setup(self):
"""Test setup method."""
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
self.assertEqual(STATE_IDLE, self.hass.states.get(ENTITY_ID).state)
def test_fire(self):
"""Test the alert firing."""
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)
def test_silence(self):
"""Test silencing the alert."""
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
alert.turn_off(self.hass, ENTITY_ID)
self.hass.block_till_done()
self.assertEqual(STATE_OFF, self.hass.states.get(ENTITY_ID).state)
# alert should not be silenced on next fire
self.hass.states.set("sensor.test", STATE_OFF)
self.hass.block_till_done()
self.assertEqual(STATE_IDLE, self.hass.states.get(ENTITY_ID).state)
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)
def test_reset(self):
"""Test resetting the alert."""
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
alert.turn_off(self.hass, ENTITY_ID)
self.hass.block_till_done()
self.assertEqual(STATE_OFF, self.hass.states.get(ENTITY_ID).state)
alert.turn_on(self.hass, ENTITY_ID)
self.hass.block_till_done()
self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)
def test_toggle(self):
"""Test toggling alert."""
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)
alert.toggle(self.hass, ENTITY_ID)
self.hass.block_till_done()
self.assertEqual(STATE_OFF, self.hass.states.get(ENTITY_ID).state)
alert.toggle(self.hass, ENTITY_ID)
self.hass.block_till_done()
self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)
def test_hidden(self):
"""Test entity hiding."""
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
hidden = self.hass.states.get(ENTITY_ID).attributes.get('hidden')
self.assertTrue(hidden)
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
hidden = self.hass.states.get(ENTITY_ID).attributes.get('hidden')
self.assertFalse(hidden)
alert.turn_off(self.hass, ENTITY_ID)
hidden = self.hass.states.get(ENTITY_ID).attributes.get('hidden')
self.assertFalse(hidden)
def test_notification_no_done_message(self):
"""Test notifications."""
events = []
config = deepcopy(TEST_CONFIG)
del(config[alert.DOMAIN][NAME][alert.CONF_DONE_MESSAGE])
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
self.hass.services.register(
notify.DOMAIN, NOTIFIER, record_event)
assert setup_component(self.hass, alert.DOMAIN, config)
self.assertEqual(0, len(events))
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
self.assertEqual(1, len(events))
self.hass.states.set("sensor.test", STATE_OFF)
self.hass.block_till_done()
self.assertEqual(1, len(events))
def test_notification(self):
"""Test notifications."""
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
self.hass.services.register(
notify.DOMAIN, NOTIFIER, record_event)
assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
self.assertEqual(0, len(events))
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
self.assertEqual(1, len(events))
self.hass.states.set("sensor.test", STATE_OFF)
self.hass.block_till_done()
self.assertEqual(2, len(events))
def test_skipfirst(self):
"""Test skipping first notification."""
config = deepcopy(TEST_CONFIG)
config[alert.DOMAIN][NAME][alert.CONF_SKIP_FIRST] = True
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
self.hass.services.register(
notify.DOMAIN, NOTIFIER, record_event)
assert setup_component(self.hass, alert.DOMAIN, config)
self.assertEqual(0, len(events))
self.hass.states.set("sensor.test", STATE_ON)
self.hass.block_till_done()
self.assertEqual(0, len(events))
def test_noack(self):
"""Test no ack feature."""
entity = alert.Alert(self.hass, *TEST_NOACK)
self.hass.add_job(entity.begin_alerting)
self.hass.block_till_done()
self.assertEqual(True, entity.hidden)
def test_done_message_state_tracker_reset_on_cancel(self):
"""Test that the done message is reset when cancelled."""
entity = alert.Alert(self.hass, *TEST_NOACK)
entity._cancel = lambda *args: None
assert entity._send_done_message is False
entity._send_done_message = True
self.hass.add_job(entity.end_alerting)
self.hass.block_till_done()
assert entity._send_done_message is False
| """The tests for the Alert component."""
# pylint: disable=protected-access
from copy import deepcopy
import unittest
from homeassistant.setup import setup_component
from homeassistant.core import callback
import homeassistant.components.alert as alert
import homeassistant.components.notify as notify
from homeassistant.const import (CONF_ENTITY_ID, STATE_IDLE, CONF_NAME,
CONF_STATE, STATE_ON, STATE_OFF)
from tests.common import get_test_home_assistant
NAME = "alert_test"
DONE_MESSAGE = "alert_gone"
NOTIFIER = 'test'
TEST_CONFIG = \
{alert.DOMAIN: {
NAME: {
CONF_NAME: NAME,
alert.CONF_DONE_MESSAGE: DONE_MESSAGE,
CONF_ENTITY_ID: "sensor.test",
CONF_STATE: STATE_ON,
alert.CONF_REPEAT: 30,
alert.CONF_SKIP_FIRST: False,
alert.CONF_NOTIFIERS: [NOTIFIER]}
}}
TEST_NOACK = [NAME, NAME, DONE_MESSAGE, "sensor.test",
STATE_ON, [30], False, NOTIFIER, False]
ENTITY_ID = alert.ENTITY_ID_FORMAT.format(NAME)
# pylint: disable=invalid-name
class TestAlert(unittest.TestCase):
    """Test the alert module."""

    def setUp(self):
        """Setup things to be run when tests are started."""
        # Fresh test Home Assistant instance per test case.
        self.hass = get_test_home_assistant()

    def tearDown(self):
        """Stop everything that was started."""
        self.hass.stop()

    def test_is_on(self):
        """Test is_on method."""
        # is_on() should simply reflect the alert entity's own state.
        self.hass.states.set(ENTITY_ID, STATE_ON)
        self.hass.block_till_done()
        self.assertTrue(alert.is_on(self.hass, ENTITY_ID))
        self.hass.states.set(ENTITY_ID, STATE_OFF)
        self.hass.block_till_done()
        self.assertFalse(alert.is_on(self.hass, ENTITY_ID))

    def test_setup(self):
        """Test setup method."""
        # A freshly configured alert starts out idle (watched sensor unset).
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        self.assertEqual(STATE_IDLE, self.hass.states.get(ENTITY_ID).state)

    def test_fire(self):
        """Test the alert firing."""
        # Turning the watched sensor on must flip the alert to "on".
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)

    def test_silence(self):
        """Test silencing the alert."""
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        # turn_off acknowledges (silences) the currently firing alert.
        alert.turn_off(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        self.assertEqual(STATE_OFF, self.hass.states.get(ENTITY_ID).state)
        # alert should not be silenced on next fire
        self.hass.states.set("sensor.test", STATE_OFF)
        self.hass.block_till_done()
        self.assertEqual(STATE_IDLE, self.hass.states.get(ENTITY_ID).state)
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)

    def test_reset(self):
        """Test resetting the alert."""
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        alert.turn_off(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        self.assertEqual(STATE_OFF, self.hass.states.get(ENTITY_ID).state)
        # turn_on un-acknowledges: the alert fires again immediately.
        alert.turn_on(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)

    def test_toggle(self):
        """Test toggling alert."""
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)
        # toggle() alternates between acknowledged (off) and firing (on).
        alert.toggle(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        self.assertEqual(STATE_OFF, self.hass.states.get(ENTITY_ID).state)
        alert.toggle(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        self.assertEqual(STATE_ON, self.hass.states.get(ENTITY_ID).state)

    def test_hidden(self):
        """Test entity hiding."""
        # Idle alerts are hidden from the UI; firing or acknowledged
        # alerts are visible.
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        hidden = self.hass.states.get(ENTITY_ID).attributes.get('hidden')
        self.assertTrue(hidden)
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        hidden = self.hass.states.get(ENTITY_ID).attributes.get('hidden')
        self.assertFalse(hidden)
        alert.turn_off(self.hass, ENTITY_ID)
        hidden = self.hass.states.get(ENTITY_ID).attributes.get('hidden')
        self.assertFalse(hidden)

    def test_notification_no_done_message(self):
        """Test notifications."""
        # Without a done message configured, clearing the alert must not
        # send a second notification.
        events = []
        config = deepcopy(TEST_CONFIG)
        del(config[alert.DOMAIN][NAME][alert.CONF_DONE_MESSAGE])

        @callback
        def record_event(event):
            """Add recorded event to set."""
            events.append(event)

        self.hass.services.register(
            notify.DOMAIN, NOTIFIER, record_event)
        assert setup_component(self.hass, alert.DOMAIN, config)
        self.assertEqual(0, len(events))
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        self.assertEqual(1, len(events))
        self.hass.states.set("sensor.test", STATE_OFF)
        self.hass.block_till_done()
        self.assertEqual(1, len(events))

    def test_notification(self):
        """Test notifications."""
        # One notification when the alert fires, one (done message) when
        # it clears.
        events = []

        @callback
        def record_event(event):
            """Add recorded event to set."""
            events.append(event)

        self.hass.services.register(
            notify.DOMAIN, NOTIFIER, record_event)
        assert setup_component(self.hass, alert.DOMAIN, TEST_CONFIG)
        self.assertEqual(0, len(events))
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        self.assertEqual(1, len(events))
        self.hass.states.set("sensor.test", STATE_OFF)
        self.hass.block_till_done()
        self.assertEqual(2, len(events))

    def test_skipfirst(self):
        """Test skipping first notification."""
        # With skip_first enabled the initial firing sends nothing.
        config = deepcopy(TEST_CONFIG)
        config[alert.DOMAIN][NAME][alert.CONF_SKIP_FIRST] = True
        events = []

        @callback
        def record_event(event):
            """Add recorded event to set."""
            events.append(event)

        self.hass.services.register(
            notify.DOMAIN, NOTIFIER, record_event)
        assert setup_component(self.hass, alert.DOMAIN, config)
        self.assertEqual(0, len(events))
        self.hass.states.set("sensor.test", STATE_ON)
        self.hass.block_till_done()
        self.assertEqual(0, len(events))

    def test_noack(self):
        """Test no ack feature."""
        # Construct the Alert object directly (no component setup) and
        # start it alerting; with ack disabled it stays hidden.
        entity = alert.Alert(self.hass, *TEST_NOACK)
        self.hass.add_job(entity.begin_alerting)
        self.hass.block_till_done()
        self.assertEqual(True, entity.hidden)

    def test_done_message_state_tracker_reset_on_cancel(self):
        """Test that the done message is reset when cancelled."""
        entity = alert.Alert(self.hass, *TEST_NOACK)
        # Stub out the scheduled-timer cancellation so end_alerting can run
        # without a real timer having been armed.
        entity._cancel = lambda *args: None
        assert entity._send_done_message is False
        entity._send_done_message = True
        self.hass.add_job(entity.end_alerting)
        self.hass.block_till_done()
        assert entity._send_done_message is False
samples/coco/coco.py | xman0810/Mask_RCNN | 0 | 6624477 | """
Mask R-CNN
Configurations and data loading code for MS COCO.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by <NAME>
------------------------------------------------------------
Usage: import the module (see Jupyter notebooks for examples), or run from
the command line as such:
# Train a new model starting from pre-trained COCO weights
python3 coco.py train --dataset=/path/to/coco/ --model=coco
# Train a new model starting from ImageNet weights. Also auto download COCO dataset
python3 coco.py train --dataset=/path/to/coco/ --model=imagenet --download=True
# Continue training a model that you had trained earlier
python3 coco.py train --dataset=/path/to/coco/ --model=/path/to/weights.h5
# Continue training the last model you trained
python3 coco.py train --dataset=/path/to/coco/ --model=last
# Run COCO evaluatoin on the last model you trained
python3 coco.py evaluate --dataset=/path/to/coco/ --model=last
"""
import os
import sys
import time
import numpy as np
import imgaug # https://github.com/aleju/imgaug (pip3 install imgaug)
# Download and install the Python COCO tools from https://github.com/waleedka/coco
# That's a fork from the original https://github.com/pdollar/coco with a bug
# fix for Python 3.
# I submitted a pull request https://github.com/cocodataset/cocoapi/pull/50
# If the PR is merged then use the original repo.
# Note: Edit PythonAPI/Makefile and replace "python" with "python3".
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
from pycocotools import mask as maskUtils
import zipfile
import urllib.request
import shutil
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn.config import Config
from mrcnn import model as modellib, utils
# Path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
DEFAULT_DATASET_YEAR = "2014"
############################################################
# Configurations
############################################################
class CocoConfig(Config):
    """Configuration for training on MS COCO.
    Derives from the base Config class and overrides values specific
    to the COCO dataset.
    """
    # Give the configuration a recognizable name
    NAME = "coco"

    # We use a GPU with 12GB memory, which can fit two images.
    # Adjust down if you use a smaller GPU.
    # Effective batch size = GPU_COUNT * IMAGES_PER_GPU.
    IMAGES_PER_GPU = 2

    # Uncomment to train on 8 GPUs (default is 1)
    # GPU_COUNT = 8

    # Number of classes (including background)
    NUM_CLASSES = 1 + 80  # COCO has 80 classes
############################################################
# Dataset
############################################################
class CocoDataset(utils.Dataset):
    """Expose MS-COCO images and instance annotations through the generic
    `utils.Dataset` interface used by the Mask R-CNN training code.
    """

    def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None,
                  class_map=None, return_coco=False, auto_download=False):
        """Load a subset of the COCO dataset.
        dataset_dir: The root directory of the COCO dataset.
        subset: What to load (train, val, minival, valminusminival)
        year: What dataset year to load (2014, 2017) as a string, not an integer
        class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
                   different datasets to the same class ID.
        return_coco: If True, returns the COCO object.
        auto_download: Automatically download and unzip MS-COCO images and annotations
        """
        if auto_download is True:
            self.auto_download(dataset_dir, subset, year)

        coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
        # minival / valminusminival are annotation splits over the val images.
        if subset == "minival" or subset == "valminusminival":
            subset = "val"
        image_dir = "{}/{}{}".format(dataset_dir, subset, year)

        # Load all classes or a subset?
        if not class_ids:
            # All classes
            class_ids = sorted(coco.getCatIds())

        # All images or a subset?
        if class_ids:
            image_ids = []
            for id in class_ids:
                image_ids.extend(list(coco.getImgIds(catIds=[id])))
            # Remove duplicates (an image can contain several of the classes)
            image_ids = list(set(image_ids))
        else:
            # All images
            image_ids = list(coco.imgs.keys())

        # Register classes with the base Dataset
        for i in class_ids:
            self.add_class("coco", i, coco.loadCats(i)[0]["name"])

        # Register images; annotations are attached so load_mask() can build
        # masks without going back to the COCO object.
        for i in image_ids:
            self.add_image(
                "coco", image_id=i,
                path=os.path.join(image_dir, coco.imgs[i]['file_name']),
                width=coco.imgs[i]["width"],
                height=coco.imgs[i]["height"],
                annotations=coco.loadAnns(coco.getAnnIds(
                    imgIds=[i], catIds=class_ids, iscrowd=None)))
        if return_coco:
            return coco

    def auto_download(self, dataDir, dataType, dataYear):
        """Download the COCO dataset/annotations if requested.
        dataDir: The root directory of the COCO dataset.
        dataType: What to load (train, val, minival, valminusminival)
        dataYear: What dataset year to load (2014, 2017) as a string, not an integer

        Note:
            For 2014, use "train", "val", "minival", or "valminusminival"
            For 2017, only "train" and "val" annotations are available
        """
        # Setup paths and file names. minival/valminusminival use the val
        # images, only their annotation files differ.
        if dataType == "minival" or dataType == "valminusminival":
            imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
            imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
            imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
        else:
            imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
            imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
            imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(dataType, dataYear)
        # print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)

        # Create main folder if it doesn't exist yet
        if not os.path.exists(dataDir):
            os.makedirs(dataDir)

        # Download images if not available locally
        if not os.path.exists(imgDir):
            os.makedirs(imgDir)
            print("Downloading images to " + imgZipFile + " ...")
            with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
                shutil.copyfileobj(resp, out)
            print("... done downloading.")
            print("Unzipping " + imgZipFile)
            with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
                zip_ref.extractall(dataDir)
            print("... done unzipping")
        print("Will use images in " + imgDir)

        # Setup annotations data paths
        annDir = "{}/annotations".format(dataDir)
        if dataType == "minival":
            annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
            annFile = "{}/instances_minival2014.json".format(annDir)
            annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
            unZipDir = annDir
        elif dataType == "valminusminival":
            annZipFile = "{}/instances_valminusminival2014.json.zip".format(dataDir)
            annFile = "{}/instances_valminusminival2014.json".format(annDir)
            annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
            unZipDir = annDir
        else:
            annZipFile = "{}/annotations_trainval{}.zip".format(dataDir, dataYear)
            annFile = "{}/instances_{}{}.json".format(annDir, dataType, dataYear)
            annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(dataYear)
            unZipDir = dataDir
        # print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)

        # Download annotations if not available locally
        if not os.path.exists(annDir):
            os.makedirs(annDir)
        if not os.path.exists(annFile):
            if not os.path.exists(annZipFile):
                print("Downloading zipped annotations to " + annZipFile + " ...")
                with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
                    shutil.copyfileobj(resp, out)
                print("... done downloading.")
            print("Unzipping " + annZipFile)
            with zipfile.ZipFile(annZipFile, "r") as zip_ref:
                zip_ref.extractall(unZipDir)
            print("... done unzipping")
        print("Will use annotations in " + annFile)

    def load_mask(self, image_id):
        """Load instance masks for the given image.

        Different datasets use different ways to store masks. This
        function converts the different mask format to one format
        in the form of a bitmap [height, width, instances].

        Returns:
        masks: A bool array of shape [height, width, instance count] with
            one mask per instance.
        class_ids: a 1D array of class IDs of the instance masks.
        """
        # If not a COCO image, delegate to parent class.
        image_info = self.image_info[image_id]
        if image_info["source"] != "coco":
            return super(CocoDataset, self).load_mask(image_id)

        instance_masks = []
        class_ids = []
        annotations = self.image_info[image_id]["annotations"]
        # Build mask of shape [height, width, instance_count] and list
        # of class IDs that correspond to each channel of the mask.
        for annotation in annotations:
            class_id = self.map_source_class_id(
                "coco.{}".format(annotation['category_id']))
            if class_id:
                m = self.annToMask(annotation, image_info["height"],
                                   image_info["width"])
                # Some objects are so small that they're less than 1 pixel area
                # and end up rounded out. Skip those objects.
                if m.max() < 1:
                    continue
                # Is it a crowd? If so, use a negative class ID.
                if annotation['iscrowd']:
                    # Use negative class ID for crowds
                    class_id *= -1
                    # For crowd masks, annToMask() sometimes returns a mask
                    # smaller than the given dimensions. If so, replace it with
                    # an all-ones mask of the right size (upstream behavior).
                    if m.shape[0] != image_info["height"] or m.shape[1] != image_info["width"]:
                        m = np.ones([image_info["height"], image_info["width"]], dtype=bool)
                instance_masks.append(m)
                class_ids.append(class_id)

        # Pack instance masks into an array
        if class_ids:
            # FIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin bool is the documented replacement.
            mask = np.stack(instance_masks, axis=2).astype(bool)
            class_ids = np.array(class_ids, dtype=np.int32)
            return mask, class_ids
        else:
            # Call super class to return an empty mask
            return super(CocoDataset, self).load_mask(image_id)

    def image_reference(self, image_id):
        """Return a link to the image in the COCO Website."""
        info = self.image_info[image_id]
        if info["source"] == "coco":
            return "http://cocodataset.org/#explore?id={}".format(info["id"])
        else:
            # NOTE(review): the base-class result is not returned here, so
            # non-COCO images yield None -- matches upstream; confirm intended.
            super(CocoDataset, self).image_reference(image_id)

    # The following two functions are from pycocotools with a few changes.

    def annToRLE(self, ann, height, width):
        """
        Convert annotation which can be polygons, uncompressed RLE to RLE.
        :return: binary mask (numpy 2D array)
        """
        segm = ann['segmentation']
        if isinstance(segm, list):
            # polygon -- a single object might consist of multiple parts
            # we merge all parts into one mask rle code
            rles = maskUtils.frPyObjects(segm, height, width)
            rle = maskUtils.merge(rles)
        elif isinstance(segm['counts'], list):
            # uncompressed RLE
            rle = maskUtils.frPyObjects(segm, height, width)
        else:
            # rle
            rle = ann['segmentation']
        return rle

    def annToMask(self, ann, height, width):
        """
        Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask.
        :return: binary mask (numpy 2D array)
        """
        rle = self.annToRLE(ann, height, width)
        m = maskUtils.decode(rle)
        return m
############################################################
# COCO Evaluation
############################################################
def build_coco_results(dataset, image_ids, rois, class_ids, scores, masks):
    """Arrange detection results to match the COCO result format
    (http://cocodataset.org/#format): one dict per detection with image id,
    category id, [x, y, w, h] bbox, score, and an RLE-encoded mask.
    """
    # No detections at all -> nothing to report.
    if rois is None:
        return []

    results = []
    for image_id in image_ids:
        for det_idx in range(rois.shape[0]):
            # ROIs are stored as [y1, x1, y2, x2]; round to one decimal
            # and convert to COCO's [x, y, width, height] convention.
            y1, x1, y2, x2 = np.around(rois[det_idx], 1)
            results.append({
                "image_id": image_id,
                "category_id": dataset.get_source_class_id(
                    class_ids[det_idx], "coco"),
                "bbox": [x1, y1, x2 - x1, y2 - y1],
                "score": scores[det_idx],
                "segmentation": maskUtils.encode(
                    np.asfortranarray(masks[:, :, det_idx])),
            })
    return results
def evaluate_coco(model, dataset, coco, eval_type="bbox", limit=0, image_ids=None):
    """Runs official COCO evaluation.
    dataset: A Dataset object with validation data
    eval_type: "bbox" or "segm" for bounding box or segmentation evaluation
    limit: if not 0, it's the number of images to use for evaluation
    """
    # Evaluate on the requested ids (or the whole dataset), optionally capped.
    image_ids = image_ids or dataset.image_ids
    if limit:
        image_ids = image_ids[:limit]

    # Map internal dataset ids to the ids the COCO API knows about.
    coco_image_ids = [dataset.image_info[img_id]["id"] for img_id in image_ids]

    t_prediction = 0
    t_start = time.time()

    results = []
    for idx, img_id in enumerate(image_ids):
        image = dataset.load_image(img_id)

        # Run detection, accumulating pure prediction time separately.
        t0 = time.time()
        detection = model.detect([image], verbose=0)[0]
        t_prediction += (time.time() - t0)

        # Convert to COCO format; masks cast to uint8 because the COCO
        # tools error out on bool arrays.
        results.extend(build_coco_results(
            dataset, coco_image_ids[idx:idx + 1],
            detection["rois"], detection["class_ids"],
            detection["scores"],
            detection["masks"].astype(np.uint8)))

    # loadRes attaches additional attributes to the raw result dicts.
    coco_results = coco.loadRes(results)

    # Run the standard COCO evaluation pipeline.
    cocoEval = COCOeval(coco, coco_results, eval_type)
    cocoEval.params.imgIds = coco_image_ids
    cocoEval.evaluate()
    cocoEval.accumulate()
    cocoEval.summarize()

    print("Prediction time: {}. Average {}/image".format(
        t_prediction, t_prediction / len(image_ids)))
    print("Total time: ", time.time() - t_start)
############################################################
# Training
############################################################
if __name__ == '__main__':
    import argparse

    def _str2bool(value):
        # FIX: argparse's `type=bool` returns True for ANY non-empty string,
        # so `--download=False` used to enable downloading. Parse the
        # documented <True|False> values explicitly instead.
        return str(value).lower() in ("true", "1", "yes")

    # Parse command line arguments
    parser = argparse.ArgumentParser(
        description='Train Mask R-CNN on MS COCO.')
    parser.add_argument("command",
                        metavar="<command>",
                        help="'train' or 'evaluate' on MS COCO")
    parser.add_argument('--dataset', required=True,
                        metavar="/path/to/coco/",
                        help='Directory of the MS-COCO dataset')
    parser.add_argument('--year', required=False,
                        default=DEFAULT_DATASET_YEAR,
                        metavar="<year>",
                        help='Year of the MS-COCO dataset (2014 or 2017) (default=2014)')
    parser.add_argument('--model', required=True,
                        metavar="/path/to/weights.h5",
                        help="Path to weights .h5 file or 'coco'")
    parser.add_argument('--logs', required=False,
                        default=DEFAULT_LOGS_DIR,
                        metavar="/path/to/logs/",
                        help='Logs and checkpoints directory (default=logs/)')
    parser.add_argument('--limit', required=False,
                        default=500,
                        metavar="<image count>",
                        help='Images to use for evaluation (default=500)')
    parser.add_argument('--download', required=False,
                        default=False,
                        metavar="<True|False>",
                        help='Automatically download and unzip MS-COCO files (default=False)',
                        type=_str2bool)
    args = parser.parse_args()
    print("Command: ", args.command)
    print("Model: ", args.model)
    print("Dataset: ", args.dataset)
    print("Year: ", args.year)
    print("Logs: ", args.logs)
    print("Auto Download: ", args.download)

    # Configurations
    if args.command == "train":
        config = CocoConfig()
    else:
        class InferenceConfig(CocoConfig):
            # Set batch size to 1 since we'll be running inference on
            # one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
            GPU_COUNT = 1
            IMAGES_PER_GPU = 1
            DETECTION_MIN_CONFIDENCE = 0
        config = InferenceConfig()
    config.display()

    # Create model
    if args.command == "train":
        model = modellib.MaskRCNN(mode="training", config=config,
                                  model_dir=args.logs)
    else:
        model = modellib.MaskRCNN(mode="inference", config=config,
                                  model_dir=args.logs)

    # Select weights file to load
    if args.model.lower() == "coco":
        model_path = COCO_MODEL_PATH
    elif args.model.lower() == "last":
        # Find last trained weights
        model_path = model.find_last()
    elif args.model.lower() == "imagenet":
        # Start from ImageNet trained weights
        model_path = model.get_imagenet_weights()
    else:
        model_path = args.model

    # Load weights
    print("Loading weights ", model_path)
    model.load_weights(model_path, by_name=True)
    # NOTE(review): unconditionally exports the loaded Keras model to ./tmp
    # on every run (train and evaluate) -- looks like conversion/debug
    # residue; confirm it is intentional.
    model.keras_model.save("./tmp")

    # Train or evaluate
    if args.command == "train":
        # Training dataset. Use the training set and 35K from the
        # validation set, as in the Mask RCNN paper.
        dataset_train = CocoDataset()
        dataset_train.load_coco(args.dataset, "train", year=args.year, auto_download=args.download)
        # FIX: `args.year in '2014'` was a substring-membership test, which
        # is also True for "2", "01", "" etc.; equality is what was meant.
        if args.year == '2014':
            dataset_train.load_coco(args.dataset, "valminusminival", year=args.year, auto_download=args.download)
        dataset_train.prepare()

        # Validation dataset
        dataset_val = CocoDataset()
        val_type = "val" if args.year == '2017' else "minival"
        dataset_val.load_coco(args.dataset, val_type, year=args.year, auto_download=args.download)
        dataset_val.prepare()

        # Image Augmentation
        # Right/Left flip 50% of the time
        augmentation = imgaug.augmenters.Fliplr(0.5)

        # *** This training schedule is an example. Update to your needs ***

        # Training - Stage 1
        print("Training network heads")
        model.train(dataset_train, dataset_val,
                    learning_rate=config.LEARNING_RATE,
                    epochs=40,
                    layers='heads',
                    augmentation=augmentation)

        # Training - Stage 2
        # Finetune layers from ResNet stage 4 and up
        print("Fine tune Resnet stage 4 and up")
        model.train(dataset_train, dataset_val,
                    learning_rate=config.LEARNING_RATE,
                    epochs=120,
                    layers='4+',
                    augmentation=augmentation)

        # Training - Stage 3
        # Fine tune all layers
        print("Fine tune all layers")
        model.train(dataset_train, dataset_val,
                    learning_rate=config.LEARNING_RATE / 10,
                    epochs=160,
                    layers='all',
                    augmentation=augmentation)
    elif args.command == "evaluate":
        # Validation dataset
        dataset_val = CocoDataset()
        val_type = "val" if args.year == '2017' else "minival"
        coco = dataset_val.load_coco(args.dataset, val_type, year=args.year, return_coco=True, auto_download=args.download)
        dataset_val.prepare()
        print("Running COCO evaluation on {} images.".format(args.limit))
        evaluate_coco(model, dataset_val, coco, "bbox", limit=int(args.limit))
    else:
        print("'{}' is not recognized. "
              "Use 'train' or 'evaluate'".format(args.command))
Mask R-CNN
Configurations and data loading code for MS COCO.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by <NAME>
------------------------------------------------------------
Usage: import the module (see Jupyter notebooks for examples), or run from
the command line as such:
# Train a new model starting from pre-trained COCO weights
python3 coco.py train --dataset=/path/to/coco/ --model=coco
# Train a new model starting from ImageNet weights. Also auto download COCO dataset
python3 coco.py train --dataset=/path/to/coco/ --model=imagenet --download=True
# Continue training a model that you had trained earlier
python3 coco.py train --dataset=/path/to/coco/ --model=/path/to/weights.h5
# Continue training the last model you trained
python3 coco.py train --dataset=/path/to/coco/ --model=last
# Run COCO evaluatoin on the last model you trained
python3 coco.py evaluate --dataset=/path/to/coco/ --model=last
"""
import os
import sys
import time
import numpy as np
import imgaug # https://github.com/aleju/imgaug (pip3 install imgaug)
# Download and install the Python COCO tools from https://github.com/waleedka/coco
# That's a fork from the original https://github.com/pdollar/coco with a bug
# fix for Python 3.
# I submitted a pull request https://github.com/cocodataset/cocoapi/pull/50
# If the PR is merged then use the original repo.
# Note: Edit PythonAPI/Makefile and replace "python" with "python3".
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
from pycocotools import mask as maskUtils
import zipfile
import urllib.request
import shutil
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn.config import Config
from mrcnn import model as modellib, utils
# Path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
# Directory to save logs and model checkpoints, if not provided
# through the command line argument --logs
DEFAULT_LOGS_DIR = os.path.join(ROOT_DIR, "logs")
DEFAULT_DATASET_YEAR = "2014"
############################################################
# Configurations
############################################################
class CocoConfig(Config):
    """Configuration for training on MS COCO.
    Derives from the base Config class and overrides values specific
    to the COCO dataset.
    """
    # Give the configuration a recognizable name
    NAME = "coco"

    # We use a GPU with 12GB memory, which can fit two images.
    # Adjust down if you use a smaller GPU.
    # (Effective batch size = GPU_COUNT * IMAGES_PER_GPU.)
    IMAGES_PER_GPU = 2

    # Uncomment to train on 8 GPUs (default is 1)
    # GPU_COUNT = 8

    # Number of classes (including background)
    NUM_CLASSES = 1 + 80  # COCO has 80 classes
############################################################
# Dataset
############################################################
class CocoDataset(utils.Dataset):
    """Expose MS-COCO images and instance annotations through the generic
    `utils.Dataset` interface used by the Mask R-CNN training code.
    """

    def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None,
                  class_map=None, return_coco=False, auto_download=False):
        """Load a subset of the COCO dataset.
        dataset_dir: The root directory of the COCO dataset.
        subset: What to load (train, val, minival, valminusminival)
        year: What dataset year to load (2014, 2017) as a string, not an integer
        class_ids: If provided, only loads images that have the given classes.
        class_map: TODO: Not implemented yet. Supports mapping classes from
                   different datasets to the same class ID.
        return_coco: If True, returns the COCO object.
        auto_download: Automatically download and unzip MS-COCO images and annotations
        """
        if auto_download is True:
            self.auto_download(dataset_dir, subset, year)

        coco = COCO("{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
        # minival / valminusminival are annotation splits over the val images.
        if subset == "minival" or subset == "valminusminival":
            subset = "val"
        image_dir = "{}/{}{}".format(dataset_dir, subset, year)

        # Load all classes or a subset?
        if not class_ids:
            # All classes
            class_ids = sorted(coco.getCatIds())

        # All images or a subset?
        if class_ids:
            image_ids = []
            for id in class_ids:
                image_ids.extend(list(coco.getImgIds(catIds=[id])))
            # Remove duplicates (an image can contain several of the classes)
            image_ids = list(set(image_ids))
        else:
            # All images
            image_ids = list(coco.imgs.keys())

        # Register classes with the base Dataset
        for i in class_ids:
            self.add_class("coco", i, coco.loadCats(i)[0]["name"])

        # Register images; annotations are attached so load_mask() can build
        # masks without going back to the COCO object.
        for i in image_ids:
            self.add_image(
                "coco", image_id=i,
                path=os.path.join(image_dir, coco.imgs[i]['file_name']),
                width=coco.imgs[i]["width"],
                height=coco.imgs[i]["height"],
                annotations=coco.loadAnns(coco.getAnnIds(
                    imgIds=[i], catIds=class_ids, iscrowd=None)))
        if return_coco:
            return coco

    def auto_download(self, dataDir, dataType, dataYear):
        """Download the COCO dataset/annotations if requested.
        dataDir: The root directory of the COCO dataset.
        dataType: What to load (train, val, minival, valminusminival)
        dataYear: What dataset year to load (2014, 2017) as a string, not an integer

        Note:
            For 2014, use "train", "val", "minival", or "valminusminival"
            For 2017, only "train" and "val" annotations are available
        """
        # Setup paths and file names. minival/valminusminival use the val
        # images, only their annotation files differ.
        if dataType == "minival" or dataType == "valminusminival":
            imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
            imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
            imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format("val", dataYear)
        else:
            imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
            imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
            imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(dataType, dataYear)
        # print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)

        # Create main folder if it doesn't exist yet
        if not os.path.exists(dataDir):
            os.makedirs(dataDir)

        # Download images if not available locally
        if not os.path.exists(imgDir):
            os.makedirs(imgDir)
            print("Downloading images to " + imgZipFile + " ...")
            with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
                shutil.copyfileobj(resp, out)
            print("... done downloading.")
            print("Unzipping " + imgZipFile)
            with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
                zip_ref.extractall(dataDir)
            print("... done unzipping")
        print("Will use images in " + imgDir)

        # Setup annotations data paths
        annDir = "{}/annotations".format(dataDir)
        if dataType == "minival":
            annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
            annFile = "{}/instances_minival2014.json".format(annDir)
            annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
            unZipDir = annDir
        elif dataType == "valminusminival":
            annZipFile = "{}/instances_valminusminival2014.json.zip".format(dataDir)
            annFile = "{}/instances_valminusminival2014.json".format(annDir)
            annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
            unZipDir = annDir
        else:
            annZipFile = "{}/annotations_trainval{}.zip".format(dataDir, dataYear)
            annFile = "{}/instances_{}{}.json".format(annDir, dataType, dataYear)
            annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(dataYear)
            unZipDir = dataDir
        # print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)

        # Download annotations if not available locally
        if not os.path.exists(annDir):
            os.makedirs(annDir)
        if not os.path.exists(annFile):
            if not os.path.exists(annZipFile):
                print("Downloading zipped annotations to " + annZipFile + " ...")
                with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
                    shutil.copyfileobj(resp, out)
                print("... done downloading.")
            print("Unzipping " + annZipFile)
            with zipfile.ZipFile(annZipFile, "r") as zip_ref:
                zip_ref.extractall(unZipDir)
            print("... done unzipping")
        print("Will use annotations in " + annFile)

    def load_mask(self, image_id):
        """Load instance masks for the given image.

        Different datasets use different ways to store masks. This
        function converts the different mask format to one format
        in the form of a bitmap [height, width, instances].

        Returns:
        masks: A bool array of shape [height, width, instance count] with
            one mask per instance.
        class_ids: a 1D array of class IDs of the instance masks.
        """
        # If not a COCO image, delegate to parent class.
        image_info = self.image_info[image_id]
        if image_info["source"] != "coco":
            return super(CocoDataset, self).load_mask(image_id)

        instance_masks = []
        class_ids = []
        annotations = self.image_info[image_id]["annotations"]
        # Build mask of shape [height, width, instance_count] and list
        # of class IDs that correspond to each channel of the mask.
        for annotation in annotations:
            class_id = self.map_source_class_id(
                "coco.{}".format(annotation['category_id']))
            if class_id:
                m = self.annToMask(annotation, image_info["height"],
                                   image_info["width"])
                # Some objects are so small that they're less than 1 pixel area
                # and end up rounded out. Skip those objects.
                if m.max() < 1:
                    continue
                # Is it a crowd? If so, use a negative class ID.
                if annotation['iscrowd']:
                    # Use negative class ID for crowds
                    class_id *= -1
                    # For crowd masks, annToMask() sometimes returns a mask
                    # smaller than the given dimensions. If so, replace it with
                    # an all-ones mask of the right size (upstream behavior).
                    if m.shape[0] != image_info["height"] or m.shape[1] != image_info["width"]:
                        m = np.ones([image_info["height"], image_info["width"]], dtype=bool)
                instance_masks.append(m)
                class_ids.append(class_id)

        # Pack instance masks into an array
        if class_ids:
            # FIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin bool is the documented replacement.
            mask = np.stack(instance_masks, axis=2).astype(bool)
            class_ids = np.array(class_ids, dtype=np.int32)
            return mask, class_ids
        else:
            # Call super class to return an empty mask
            return super(CocoDataset, self).load_mask(image_id)

    def image_reference(self, image_id):
        """Return a link to the image in the COCO Website."""
        info = self.image_info[image_id]
        if info["source"] == "coco":
            return "http://cocodataset.org/#explore?id={}".format(info["id"])
        else:
            # NOTE(review): the base-class result is not returned here, so
            # non-COCO images yield None -- matches upstream; confirm intended.
            super(CocoDataset, self).image_reference(image_id)

    # The following two functions are from pycocotools with a few changes.

    def annToRLE(self, ann, height, width):
        """
        Convert annotation which can be polygons, uncompressed RLE to RLE.
        :return: binary mask (numpy 2D array)
        """
        segm = ann['segmentation']
        if isinstance(segm, list):
            # polygon -- a single object might consist of multiple parts
            # we merge all parts into one mask rle code
            rles = maskUtils.frPyObjects(segm, height, width)
            rle = maskUtils.merge(rles)
        elif isinstance(segm['counts'], list):
            # uncompressed RLE
            rle = maskUtils.frPyObjects(segm, height, width)
        else:
            # rle
            rle = ann['segmentation']
        return rle

    def annToMask(self, ann, height, width):
        """
        Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask.
        :return: binary mask (numpy 2D array)
        """
        rle = self.annToRLE(ann, height, width)
        m = maskUtils.decode(rle)
        return m
############################################################
# COCO Evaluation
############################################################
def build_coco_results(dataset, image_ids, rois, class_ids, scores, masks):
"""Arrange resutls to match COCO specs in http://cocodataset.org/#format
"""
# If no results, return an empty list
if rois is None:
return []
results = []
for image_id in image_ids:
# Loop through detections
for i in range(rois.shape[0]):
class_id = class_ids[i]
score = scores[i]
bbox = np.around(rois[i], 1)
mask = masks[:, :, i]
result = {
"image_id": image_id,
"category_id": dataset.get_source_class_id(class_id, "coco"),
"bbox": [bbox[1], bbox[0], bbox[3] - bbox[1], bbox[2] - bbox[0]],
"score": score,
"segmentation": maskUtils.encode(np.asfortranarray(mask))
}
results.append(result)
return results
def evaluate_coco(model, dataset, coco, eval_type="bbox", limit=0, image_ids=None):
"""Runs official COCO evaluation.
dataset: A Dataset object with valiadtion data
eval_type: "bbox" or "segm" for bounding box or segmentation evaluation
limit: if not 0, it's the number of images to use for evaluation
"""
# Pick COCO images from the dataset
image_ids = image_ids or dataset.image_ids
# Limit to a subset
if limit:
image_ids = image_ids[:limit]
# Get corresponding COCO image IDs.
coco_image_ids = [dataset.image_info[id]["id"] for id in image_ids]
t_prediction = 0
t_start = time.time()
results = []
for i, image_id in enumerate(image_ids):
# Load image
image = dataset.load_image(image_id)
# Run detection
t = time.time()
r = model.detect([image], verbose=0)[0]
t_prediction += (time.time() - t)
# Convert results to COCO format
# Cast masks to uint8 because COCO tools errors out on bool
image_results = build_coco_results(dataset, coco_image_ids[i:i + 1],
r["rois"], r["class_ids"],
r["scores"],
r["masks"].astype(np.uint8))
results.extend(image_results)
# Load results. This modifies results with additional attributes.
coco_results = coco.loadRes(results)
# Evaluate
cocoEval = COCOeval(coco, coco_results, eval_type)
cocoEval.params.imgIds = coco_image_ids
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
print("Prediction time: {}. Average {}/image".format(
t_prediction, t_prediction / len(image_ids)))
print("Total time: ", time.time() - t_start)
############################################################
# Training
############################################################
if __name__ == '__main__':
import argparse
# Parse command line arguments
parser = argparse.ArgumentParser(
description='Train Mask R-CNN on MS COCO.')
parser.add_argument("command",
metavar="<command>",
help="'train' or 'evaluate' on MS COCO")
parser.add_argument('--dataset', required=True,
metavar="/path/to/coco/",
help='Directory of the MS-COCO dataset')
parser.add_argument('--year', required=False,
default=DEFAULT_DATASET_YEAR,
metavar="<year>",
help='Year of the MS-COCO dataset (2014 or 2017) (default=2014)')
parser.add_argument('--model', required=True,
metavar="/path/to/weights.h5",
help="Path to weights .h5 file or 'coco'")
parser.add_argument('--logs', required=False,
default=DEFAULT_LOGS_DIR,
metavar="/path/to/logs/",
help='Logs and checkpoints directory (default=logs/)')
parser.add_argument('--limit', required=False,
default=500,
metavar="<image count>",
help='Images to use for evaluation (default=500)')
parser.add_argument('--download', required=False,
default=False,
metavar="<True|False>",
help='Automatically download and unzip MS-COCO files (default=False)',
type=bool)
args = parser.parse_args()
print("Command: ", args.command)
print("Model: ", args.model)
print("Dataset: ", args.dataset)
print("Year: ", args.year)
print("Logs: ", args.logs)
print("Auto Download: ", args.download)
# Configurations
if args.command == "train":
config = CocoConfig()
else:
class InferenceConfig(CocoConfig):
# Set batch size to 1 since we'll be running inference on
# one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
GPU_COUNT = 1
IMAGES_PER_GPU = 1
DETECTION_MIN_CONFIDENCE = 0
config = InferenceConfig()
config.display()
# Create model
if args.command == "train":
model = modellib.MaskRCNN(mode="training", config=config,
model_dir=args.logs)
else:
model = modellib.MaskRCNN(mode="inference", config=config,
model_dir=args.logs)
# Select weights file to load
if args.model.lower() == "coco":
model_path = COCO_MODEL_PATH
elif args.model.lower() == "last":
# Find last trained weights
model_path = model.find_last()
elif args.model.lower() == "imagenet":
# Start from ImageNet trained weights
model_path = model.get_imagenet_weights()
else:
model_path = args.model
# Load weights
print("Loading weights ", model_path)
model.load_weights(model_path, by_name=True)
model.keras_model.save("./tmp")
# Train or evaluate
if args.command == "train":
# Training dataset. Use the training set and 35K from the
# validation set, as as in the Mask RCNN paper.
dataset_train = CocoDataset()
dataset_train.load_coco(args.dataset, "train", year=args.year, auto_download=args.download)
if args.year in '2014':
dataset_train.load_coco(args.dataset, "valminusminival", year=args.year, auto_download=args.download)
dataset_train.prepare()
# Validation dataset
dataset_val = CocoDataset()
val_type = "val" if args.year in '2017' else "minival"
dataset_val.load_coco(args.dataset, val_type, year=args.year, auto_download=args.download)
dataset_val.prepare()
# Image Augmentation
# Right/Left flip 50% of the time
augmentation = imgaug.augmenters.Fliplr(0.5)
# *** This training schedule is an example. Update to your needs ***
# Training - Stage 1
print("Training network heads")
model.train(dataset_train, dataset_val,
learning_rate=config.LEARNING_RATE,
epochs=40,
layers='heads',
augmentation=augmentation)
# Training - Stage 2
# Finetune layers from ResNet stage 4 and up
print("Fine tune Resnet stage 4 and up")
model.train(dataset_train, dataset_val,
learning_rate=config.LEARNING_RATE,
epochs=120,
layers='4+',
augmentation=augmentation)
# Training - Stage 3
# Fine tune all layers
print("Fine tune all layers")
model.train(dataset_train, dataset_val,
learning_rate=config.LEARNING_RATE / 10,
epochs=160,
layers='all',
augmentation=augmentation)
elif args.command == "evaluate":
# Validation dataset
dataset_val = CocoDataset()
val_type = "val" if args.year in '2017' else "minival"
coco = dataset_val.load_coco(args.dataset, val_type, year=args.year, return_coco=True, auto_download=args.download)
dataset_val.prepare()
print("Running COCO evaluation on {} images.".format(args.limit))
evaluate_coco(model, dataset_val, coco, "bbox", limit=int(args.limit))
else:
print("'{}' is not recognized. "
"Use 'train' or 'evaluate'".format(args.command))
| en | 0.659127 | Mask R-CNN Configurations and data loading code for MS COCO. Copyright (c) 2017 Matterport, Inc. Licensed under the MIT License (see LICENSE for details) Written by <NAME> ------------------------------------------------------------ Usage: import the module (see Jupyter notebooks for examples), or run from the command line as such: # Train a new model starting from pre-trained COCO weights python3 coco.py train --dataset=/path/to/coco/ --model=coco # Train a new model starting from ImageNet weights. Also auto download COCO dataset python3 coco.py train --dataset=/path/to/coco/ --model=imagenet --download=True # Continue training a model that you had trained earlier python3 coco.py train --dataset=/path/to/coco/ --model=/path/to/weights.h5 # Continue training the last model you trained python3 coco.py train --dataset=/path/to/coco/ --model=last # Run COCO evaluatoin on the last model you trained python3 coco.py evaluate --dataset=/path/to/coco/ --model=last # https://github.com/aleju/imgaug (pip3 install imgaug) # Download and install the Python COCO tools from https://github.com/waleedka/coco # That's a fork from the original https://github.com/pdollar/coco with a bug # fix for Python 3. # I submitted a pull request https://github.com/cocodataset/cocoapi/pull/50 # If the PR is merged then use the original repo. # Note: Edit PythonAPI/Makefile and replace "python" with "python3". # Root directory of the project # Import Mask RCNN # To find local version of the library # Path to trained weights file # Directory to save logs and model checkpoints, if not provided # through the command line argument --logs ############################################################ # Configurations ############################################################ Configuration for training on MS COCO. Derives from the base Config class and overrides values specific to the COCO dataset. 
# Give the configuration a recognizable name # We use a GPU with 12GB memory, which can fit two images. # Adjust down if you use a smaller GPU. # Uncomment to train on 8 GPUs (default is 1) # GPU_COUNT = 8 # Number of classes (including background) # COCO has 80 classes ############################################################ # Dataset ############################################################ Load a subset of the COCO dataset. dataset_dir: The root directory of the COCO dataset. subset: What to load (train, val, minival, valminusminival) year: What dataset year to load (2014, 2017) as a string, not an integer class_ids: If provided, only loads images that have the given classes. class_map: TODO: Not implemented yet. Supports maping classes from different datasets to the same class ID. return_coco: If True, returns the COCO object. auto_download: Automatically download and unzip MS-COCO images and annotations # Load all classes or a subset? # All classes # All images or a subset? # Remove duplicates # All images # Add classes # Add images Download the COCO dataset/annotations if requested. dataDir: The root directory of the COCO dataset. dataType: What to load (train, val, minival, valminusminival) dataYear: What dataset year to load (2014, 2017) as a string, not an integer Note: For 2014, use "train", "val", "minival", or "valminusminival" For 2017, only "train" and "val" annotations are available # Setup paths and file names # print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL) # Create main folder if it doesn't exist yet # Download images if not available locally # Setup annotations data paths # print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL) # Download annotations if not available locally Load instance masks for the given image. Different datasets use different ways to store masks. 
This function converts the different mask format to one format in the form of a bitmap [height, width, instances]. Returns: masks: A bool array of shape [height, width, instance count] with one mask per instance. class_ids: a 1D array of class IDs of the instance masks. # If not a COCO image, delegate to parent class. # Build mask of shape [height, width, instance_count] and list # of class IDs that correspond to each channel of the mask. # Some objects are so small that they're less than 1 pixel area # and end up rounded out. Skip those objects. # Is it a crowd? If so, use a negative class ID. # Use negative class ID for crowds # For crowd masks, annToMask() sometimes returns a mask # smaller than the given dimensions. If so, resize it. # Pack instance masks into an array # Call super class to return an empty mask Return a link to the image in the COCO Website. #explore?id={}".format(info["id"]) # The following two functions are from pycocotools with a few changes. Convert annotation which can be polygons, uncompressed RLE to RLE. :return: binary mask (numpy 2D array) # polygon -- a single object might consist of multiple parts # we merge all parts into one mask rle code # uncompressed RLE # rle Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask. :return: binary mask (numpy 2D array) ############################################################ # COCO Evaluation ############################################################ Arrange resutls to match COCO specs in http://cocodataset.org/#format # If no results, return an empty list # Loop through detections Runs official COCO evaluation. dataset: A Dataset object with valiadtion data eval_type: "bbox" or "segm" for bounding box or segmentation evaluation limit: if not 0, it's the number of images to use for evaluation # Pick COCO images from the dataset # Limit to a subset # Get corresponding COCO image IDs. 
# Load image # Run detection # Convert results to COCO format # Cast masks to uint8 because COCO tools errors out on bool # Load results. This modifies results with additional attributes. # Evaluate ############################################################ # Training ############################################################ # Parse command line arguments # Configurations # Set batch size to 1 since we'll be running inference on # one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU # Create model # Select weights file to load # Find last trained weights # Start from ImageNet trained weights # Load weights # Train or evaluate # Training dataset. Use the training set and 35K from the # validation set, as as in the Mask RCNN paper. # Validation dataset # Image Augmentation # Right/Left flip 50% of the time # *** This training schedule is an example. Update to your needs *** # Training - Stage 1 # Training - Stage 2 # Finetune layers from ResNet stage 4 and up # Training - Stage 3 # Fine tune all layers # Validation dataset | 2.262744 | 2 |
src/simple_tools/p4a_term.py | DVSR1966/par4all | 51 | 6624478 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Set ANSI Terminal Color and Attributes.
Originally found on http://code.activestate.com/recipes/574451.
'''
import os
# Set to True to disable module (coloring etc.).
disabled = False
esc = '%s[' % chr(27)
reset = '%s0m' % esc
format = '1;%dm'
fgoffset, bgoffset = 30, 40
attrs = { 'none': 0, 'bold': 1, 'faint': 2, 'italic': 3, 'underline': 4, 'blink': 5, 'fast': 6, 'reverse': 7, 'concealed': 8 }
colors = { 'grey': 0, 'red': 1, 'green': 2, 'yellow': 3, 'blue': 4, 'magenta': 5, 'cyan': 6, 'white': 7 }
def escape(arg = '', sep = ' ', end = '\n', if_tty_fd = -1):
'''
"arg" is a string or None
If "arg" is None : the terminal is reset to his default values.
If "arg" is a string it must contain "sep" separated values.
If args are found in globals "attrs" or "colors", or start with "@"
they are interpreted as ANSI commands else they are output as text.
colors, if any, must be first (foreground first then background)
you can not specify a background color alone ;
if you specify only one color, it will be the foreground one.
@* commands handle the screen and the cursor :
@x;y : go to xy
@ : go to 1;1
@@ : clear screen and go to 1;1
Examples:
escape('red') : set red as the foreground color
escape('red blue') : red on blue
escape('red blink') : blinking red
escape() : restore terminal default values
escape('reverse') : swap default colors
escape('cyan blue reverse') : blue on cyan <=> escape('blue cyan')
escape('red reverse') : a way to set up the background only
escape('red reverse blink') : you can specify any combinaison of
attributes in any order with or without colors
escape('blink Python') : output a blinking 'Python'
escape('@@ hello') : clear the screen and print 'hello' at 1;1
'''
global disabled, esc, reset, format, fgoffset, bgoffset, attrs, colors
# If we are disabled or if destination stream fd is not a TTY,
# return an empty string.
if disabled or (if_tty_fd != -1 and not os.isatty(if_tty_fd)):
return ""
cmd, txt = [reset], []
if arg:
arglist = arg.split(sep)
for offset in (fgoffset, bgoffset):
if arglist and arglist[0] in colors:
cmd.append(format % (colors[arglist.pop(0)] + offset))
for a in arglist:
c = None
if a in attrs:
c = format % attrs[a]
if c and c not in cmd:
cmd.append(c)
else:
if a.startswith('@'):
a = a[1:]
if a == '@':
cmd.append('2J')
cmd.append('H')
else:
cmd.append('%sH' % a)
else:
txt.append(a)
if txt and end:
txt[-1] += end
return esc.join(cmd) + sep.join(txt)
if __name__ == "__main__":
print(__doc__)
print("This module is not directly executable")
# Some Emacs stuff:
### Local Variables:
### mode: python
### mode: flyspell
### ispell-local-dictionary: "american"
### tab-width: 4
### End:
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Set ANSI Terminal Color and Attributes.
Originally found on http://code.activestate.com/recipes/574451.
'''
import os
# Set to True to disable module (coloring etc.).
disabled = False
esc = '%s[' % chr(27)
reset = '%s0m' % esc
format = '1;%dm'
fgoffset, bgoffset = 30, 40
attrs = { 'none': 0, 'bold': 1, 'faint': 2, 'italic': 3, 'underline': 4, 'blink': 5, 'fast': 6, 'reverse': 7, 'concealed': 8 }
colors = { 'grey': 0, 'red': 1, 'green': 2, 'yellow': 3, 'blue': 4, 'magenta': 5, 'cyan': 6, 'white': 7 }
def escape(arg = '', sep = ' ', end = '\n', if_tty_fd = -1):
'''
"arg" is a string or None
If "arg" is None : the terminal is reset to his default values.
If "arg" is a string it must contain "sep" separated values.
If args are found in globals "attrs" or "colors", or start with "@"
they are interpreted as ANSI commands else they are output as text.
colors, if any, must be first (foreground first then background)
you can not specify a background color alone ;
if you specify only one color, it will be the foreground one.
@* commands handle the screen and the cursor :
@x;y : go to xy
@ : go to 1;1
@@ : clear screen and go to 1;1
Examples:
escape('red') : set red as the foreground color
escape('red blue') : red on blue
escape('red blink') : blinking red
escape() : restore terminal default values
escape('reverse') : swap default colors
escape('cyan blue reverse') : blue on cyan <=> escape('blue cyan')
escape('red reverse') : a way to set up the background only
escape('red reverse blink') : you can specify any combinaison of
attributes in any order with or without colors
escape('blink Python') : output a blinking 'Python'
escape('@@ hello') : clear the screen and print 'hello' at 1;1
'''
global disabled, esc, reset, format, fgoffset, bgoffset, attrs, colors
# If we are disabled or if destination stream fd is not a TTY,
# return an empty string.
if disabled or (if_tty_fd != -1 and not os.isatty(if_tty_fd)):
return ""
cmd, txt = [reset], []
if arg:
arglist = arg.split(sep)
for offset in (fgoffset, bgoffset):
if arglist and arglist[0] in colors:
cmd.append(format % (colors[arglist.pop(0)] + offset))
for a in arglist:
c = None
if a in attrs:
c = format % attrs[a]
if c and c not in cmd:
cmd.append(c)
else:
if a.startswith('@'):
a = a[1:]
if a == '@':
cmd.append('2J')
cmd.append('H')
else:
cmd.append('%sH' % a)
else:
txt.append(a)
if txt and end:
txt[-1] += end
return esc.join(cmd) + sep.join(txt)
if __name__ == "__main__":
print(__doc__)
print("This module is not directly executable")
# Some Emacs stuff:
### Local Variables:
### mode: python
### mode: flyspell
### ispell-local-dictionary: "american"
### tab-width: 4
### End:
| en | 0.589847 | #!/usr/bin/env python # -*- coding: utf-8 -*- Set ANSI Terminal Color and Attributes. Originally found on http://code.activestate.com/recipes/574451. # Set to True to disable module (coloring etc.). "arg" is a string or None If "arg" is None : the terminal is reset to his default values. If "arg" is a string it must contain "sep" separated values. If args are found in globals "attrs" or "colors", or start with "@" they are interpreted as ANSI commands else they are output as text. colors, if any, must be first (foreground first then background) you can not specify a background color alone ; if you specify only one color, it will be the foreground one. @* commands handle the screen and the cursor : @x;y : go to xy @ : go to 1;1 @@ : clear screen and go to 1;1 Examples: escape('red') : set red as the foreground color escape('red blue') : red on blue escape('red blink') : blinking red escape() : restore terminal default values escape('reverse') : swap default colors escape('cyan blue reverse') : blue on cyan <=> escape('blue cyan') escape('red reverse') : a way to set up the background only escape('red reverse blink') : you can specify any combinaison of attributes in any order with or without colors escape('blink Python') : output a blinking 'Python' escape('@@ hello') : clear the screen and print 'hello' at 1;1 # If we are disabled or if destination stream fd is not a TTY, # return an empty string. # Some Emacs stuff: ### Local Variables: ### mode: python ### mode: flyspell ### ispell-local-dictionary: "american" ### tab-width: 4 ### End: | 3.804882 | 4 |
docker/dstat/plugins/dstat_fan.py | hzy9819/GreenPlum_WooKongDB | 34 | 6624479 | ### Author: <NAME> <<EMAIL>>
class dstat_plugin(dstat):
"""
Fan speed in RPM (rotations per minute) as reported by ACPI.
"""
def __init__(self):
self.name = 'fan'
self.type = 'd'
self.width = 4
self.scale = 500
self.open('/proc/acpi/ibm/fan')
def vars(self):
ret = None
for l in self.splitlines():
if l[0] == 'speed:':
ret = ('speed',)
return ret
def check(self):
if not os.path.exists('/proc/acpi/ibm/fan'):
raise Exception('Needs kernel IBM-ACPI support')
def extract(self):
if os.path.exists('/proc/acpi/ibm/fan'):
for l in self.splitlines():
if l[0] == 'speed:':
self.val['speed'] = int(l[1])
# vim:ts=4:sw=4:et
| ### Author: <NAME> <<EMAIL>>
class dstat_plugin(dstat):
"""
Fan speed in RPM (rotations per minute) as reported by ACPI.
"""
def __init__(self):
self.name = 'fan'
self.type = 'd'
self.width = 4
self.scale = 500
self.open('/proc/acpi/ibm/fan')
def vars(self):
ret = None
for l in self.splitlines():
if l[0] == 'speed:':
ret = ('speed',)
return ret
def check(self):
if not os.path.exists('/proc/acpi/ibm/fan'):
raise Exception('Needs kernel IBM-ACPI support')
def extract(self):
if os.path.exists('/proc/acpi/ibm/fan'):
for l in self.splitlines():
if l[0] == 'speed:':
self.val['speed'] = int(l[1])
# vim:ts=4:sw=4:et
| en | 0.694383 | ### Author: <NAME> <<EMAIL>> Fan speed in RPM (rotations per minute) as reported by ACPI. # vim:ts=4:sw=4:et | 2.578612 | 3 |
mods-config/python/utils/config_compose.py | enckse/freeradius | 9 | 6624480 | <filename>mods-config/python/utils/config_compose.py
#!/usr/bin/python
"""composes the config from user definitions."""
import json
import argparse
import os
import users
import users.__config__
import wrapper
import importlib
import csv
# file indicators
IND_DELIM = "_"
USER_INDICATOR = "user" + IND_DELIM
VLAN_INDICATOR = "vlan" + IND_DELIM
class ConfigMeta(object):
"""configuration meta information."""
def __init__(self):
"""init the instance."""
self.passwords = []
self.macs = []
self.bypasses = []
self.vlans = []
self.all_vlans = []
self.user_name = []
self.vlan_users = []
self.attrs = []
self.vlan_initiate = []
def password(self, password):
"""password group validation(s)."""
if password in self.passwords:
print("password duplicated")
exit(-1)
self.passwords.append(password)
def bypassed(self, macs):
"""bypass management."""
for mac in macs:
if mac in self.bypasses:
print("already bypassed")
exit(-1)
self.bypasses.append(mac)
def user_macs(self, macs):
"""user+mac combos."""
self.macs = self.macs + macs
self.macs = list(set(self.macs))
def attributes(self, attrs):
"""set attributes."""
self.attrs = self.attrs + attrs
self.attrs = list(set(self.attrs))
def verify(self):
"""verify meta data."""
for mac in self.macs:
if mac in self.bypasses:
print("mac is globally bypassed: " + mac)
exit(-1)
for mac in self.bypasses:
if mac in self.macs:
print("mac is user assigned: " + mac)
exit(-1)
used_vlans = set(self.vlans + self.vlan_initiate)
if len(used_vlans) != len(set(self.all_vlans)):
print("unused vlans detected")
exit(-1)
for ref in used_vlans:
if ref not in self.all_vlans:
print("reference to unknown vlan: " + ref)
exit(-1)
def vlan_user(self, vlan, user):
"""indicate a vlan was used."""
self.vlans.append(vlan)
self.vlan_users.append(vlan + "." + user)
self.user_name.append(user)
def vlan_to_vlan(self, vlan_to):
"""VLAN to VLAN mappings."""
self.vlan_initiate.append(vlan_to)
def _create_obj(macs, password, attrs, port_bypassed, wildcards):
"""create a user definition."""
return {wrapper.freepydius.MAC_KEY: macs,
wrapper.freepydius.PASS_KEY: password,
wrapper.freepydius.ATTR_KEY: attrs,
wrapper.freepydius.PORT_BYPASS_KEY: port_bypassed,
wrapper.freepydius.WILDCARD_KEY: wildcards}
def _get_mod(name):
"""import the module dynamically."""
return importlib.import_module("users." + name)
def _load_objs(name, typed):
mod = _get_mod(name)
for key in dir(mod):
obj = getattr(mod, key)
if not isinstance(obj, typed):
continue
yield obj
def _get_by_indicator(indicator):
"""get by a file type indicator."""
return [x for x in sorted(users.__all__) if x.startswith(indicator)]
def _common_call(common, method, entity):
"""make a common mod call."""
obj = entity
if common is not None and method in dir(common):
call = getattr(common, method)
if call is not None:
obj = call(obj)
return obj
def check_object(obj):
"""Check an object."""
return obj.check(wrapper)
def _process(output, audit):
"""process the composition of users."""
common_mod = None
try:
common_mod = _get_mod("common")
print("loaded common definitions...")
except Exception as e:
print("defaults only...")
user_objs = {}
vlans = None
bypass_objs = {}
meta = ConfigMeta()
for v_name in _get_by_indicator(VLAN_INDICATOR):
print("loading vlan..." + v_name)
for obj in _load_objs(v_name, users.__config__.VLAN):
if vlans is None:
vlans = {}
if not check_object(obj):
exit(-1)
num_str = str(obj.num)
for vk in vlans.keys():
if num_str == vlans[vk]:
print("vlan number defined multiple times...")
exit(-1)
vlans[obj.name] = num_str
if obj.initiate is not None and len(obj.initiate) > 0:
for init_to in obj.initiate:
meta.vlan_to_vlan(init_to)
if vlans is None:
raise Exception("missing required config settings...")
meta.all_vlans = vlans.keys()
vlans_with_users = {}
user_macs = {}
for f_name in _get_by_indicator(USER_INDICATOR):
print("composing..." + f_name)
for obj in _load_objs(f_name, users.__config__.Assignment):
obj = _common_call(common_mod, 'ready', obj)
key = f_name.replace(USER_INDICATOR, "")
if not key.isalnum():
print("does not meet naming requirements...")
exit(-1)
vlan = obj.vlan
if vlan not in vlans:
raise Exception("no vlan defined for " + key)
vlans_with_users[vlan] = vlans[vlan]
meta.vlan_user(vlan, key)
fqdn = vlan + "." + key
if not check_object(obj):
print("did not pass check...")
exit(-1)
if obj.disabled:
print("account is disabled or has expired...")
continue
macs = sorted(obj.macs)
password = <PASSWORD>
bypass = sorted(obj.bypass)
port_bypassed = sorted(obj.port_bypass)
wildcards = sorted(obj.wildcard)
attrs = []
if obj.attrs:
attrs = sorted(obj.attrs)
meta.attributes(attrs)
# meta checks
meta.user_macs(macs)
if not obj.inherits:
meta.password(password)
meta.bypassed(bypass)
if fqdn in user_objs:
raise Exception(fqdn + " previously defined")
# use config definitions here
if not obj.no_login:
user_objs[fqdn] = _create_obj(macs,
password,
attrs,
port_bypassed,
wildcards)
if bypass is not None and len(bypass) > 0:
for mac_bypass in bypass:
if mac_bypass in bypass_objs:
raise Exception(mac_bypass + " previously defined")
bypass_objs[mac_bypass] = vlan
user_all = []
for l in [obj.macs, obj.port_bypass, obj.bypass]:
user_all += list(l)
if key not in user_macs:
user_macs[key] = []
user_macs[key].append((vlan, sorted(set(user_all))))
meta.verify()
full = {}
full[wrapper.freepydius.USER_KEY] = user_objs
full[wrapper.freepydius.VLAN_KEY] = vlans_with_users
full[wrapper.freepydius.BYPASS_KEY] = bypass_objs
with open(output, 'w') as f:
f.write(json.dumps(full, sort_keys=True,
indent=4, separators=[",", ": "]))
with open(audit, 'w') as f:
csv_writer = csv.writer(f, lineterminator=os.linesep)
for u in user_macs:
for obj in user_macs[u]:
vlan = obj[0]
macs = obj[1]
for m in macs:
csv_writer.writerow([u, vlan, m])
def main():
"""main entry."""
success = False
try:
parser = argparse.ArgumentParser()
parser.add_argument("--output", type=str, required=True)
parser.add_argument("--audit", type=str, required=True)
args = parser.parse_args()
_process(args.output, args.audit)
success = True
except Exception as e:
print('unable to compose')
print(str(e))
if success:
print("success")
exit(0)
else:
print("failure")
exit(1)
if __name__ == "__main__":
main()
| <filename>mods-config/python/utils/config_compose.py
#!/usr/bin/python
"""composes the config from user definitions."""
import json
import argparse
import os
import users
import users.__config__
import wrapper
import importlib
import csv
# file indicators
IND_DELIM = "_"
USER_INDICATOR = "user" + IND_DELIM
VLAN_INDICATOR = "vlan" + IND_DELIM
class ConfigMeta(object):
"""configuration meta information."""
def __init__(self):
"""init the instance."""
self.passwords = []
self.macs = []
self.bypasses = []
self.vlans = []
self.all_vlans = []
self.user_name = []
self.vlan_users = []
self.attrs = []
self.vlan_initiate = []
def password(self, password):
"""password group validation(s)."""
if password in self.passwords:
print("password duplicated")
exit(-1)
self.passwords.append(password)
def bypassed(self, macs):
"""bypass management."""
for mac in macs:
if mac in self.bypasses:
print("already bypassed")
exit(-1)
self.bypasses.append(mac)
def user_macs(self, macs):
"""user+mac combos."""
self.macs = self.macs + macs
self.macs = list(set(self.macs))
def attributes(self, attrs):
"""set attributes."""
self.attrs = self.attrs + attrs
self.attrs = list(set(self.attrs))
def verify(self):
"""verify meta data."""
for mac in self.macs:
if mac in self.bypasses:
print("mac is globally bypassed: " + mac)
exit(-1)
for mac in self.bypasses:
if mac in self.macs:
print("mac is user assigned: " + mac)
exit(-1)
used_vlans = set(self.vlans + self.vlan_initiate)
if len(used_vlans) != len(set(self.all_vlans)):
print("unused vlans detected")
exit(-1)
for ref in used_vlans:
if ref not in self.all_vlans:
print("reference to unknown vlan: " + ref)
exit(-1)
def vlan_user(self, vlan, user):
"""indicate a vlan was used."""
self.vlans.append(vlan)
self.vlan_users.append(vlan + "." + user)
self.user_name.append(user)
def vlan_to_vlan(self, vlan_to):
"""VLAN to VLAN mappings."""
self.vlan_initiate.append(vlan_to)
def _create_obj(macs, password, attrs, port_bypassed, wildcards):
"""create a user definition."""
return {wrapper.freepydius.MAC_KEY: macs,
wrapper.freepydius.PASS_KEY: password,
wrapper.freepydius.ATTR_KEY: attrs,
wrapper.freepydius.PORT_BYPASS_KEY: port_bypassed,
wrapper.freepydius.WILDCARD_KEY: wildcards}
def _get_mod(name):
"""import the module dynamically."""
return importlib.import_module("users." + name)
def _load_objs(name, typed):
mod = _get_mod(name)
for key in dir(mod):
obj = getattr(mod, key)
if not isinstance(obj, typed):
continue
yield obj
def _get_by_indicator(indicator):
"""get by a file type indicator."""
return [x for x in sorted(users.__all__) if x.startswith(indicator)]
def _common_call(common, method, entity):
"""make a common mod call."""
obj = entity
if common is not None and method in dir(common):
call = getattr(common, method)
if call is not None:
obj = call(obj)
return obj
def check_object(obj):
"""Check an object."""
return obj.check(wrapper)
def _process(output, audit):
    """process the composition of users.

    Loads VLAN and user definitions from the ``users`` package, validates
    them through ConfigMeta, and writes two artifacts:
      - *output*: the freepydius JSON configuration (users/vlans/bypasses)
      - *audit*:  a CSV audit trail of (user, vlan, mac) rows
    Exits the process with -1 on the first validation failure.
    """
    common_mod = None
    try:
        common_mod = _get_mod("common")
        print("loaded common definitions...")
    except Exception:
        # the common module is optional; continue with defaults
        print("defaults only...")
    user_objs = {}
    vlans = None
    bypass_objs = {}
    meta = ConfigMeta()
    # pass 1: load VLAN definitions and enforce unique VLAN numbers
    for v_name in _get_by_indicator(VLAN_INDICATOR):
        print("loading vlan..." + v_name)
        for obj in _load_objs(v_name, users.__config__.VLAN):
            if vlans is None:
                vlans = {}
            if not check_object(obj):
                exit(-1)
            num_str = str(obj.num)
            for vk in vlans.keys():
                if num_str == vlans[vk]:
                    print("vlan number defined multiple times...")
                    exit(-1)
            vlans[obj.name] = num_str
            if obj.initiate is not None and len(obj.initiate) > 0:
                for init_to in obj.initiate:
                    meta.vlan_to_vlan(init_to)
    if vlans is None:
        raise Exception("missing required config settings...")
    meta.all_vlans = vlans.keys()
    vlans_with_users = {}
    user_macs = {}
    # pass 2: load user assignments and build the radius user objects
    for f_name in _get_by_indicator(USER_INDICATOR):
        print("composing..." + f_name)
        for obj in _load_objs(f_name, users.__config__.Assignment):
            obj = _common_call(common_mod, 'ready', obj)
            key = f_name.replace(USER_INDICATOR, "")
            if not key.isalnum():
                print("does not meet naming requirements...")
                exit(-1)
            vlan = obj.vlan
            if vlan not in vlans:
                raise Exception("no vlan defined for " + key)
            vlans_with_users[vlan] = vlans[vlan]
            meta.vlan_user(vlan, key)
            fqdn = vlan + "." + key
            if not check_object(obj):
                print("did not pass check...")
                exit(-1)
            if obj.disabled:
                print("account is disabled or has expired...")
                continue
            macs = sorted(obj.macs)
            # FIX: this assignment was redacted ("<PASSWORD>") in the
            # published source, which is a syntax error; obj.password is
            # the value consumed by _create_obj() and meta.password()
            # below -- confirm against the upstream repository.
            password = obj.password
            bypass = sorted(obj.bypass)
            port_bypassed = sorted(obj.port_bypass)
            wildcards = sorted(obj.wildcard)
            attrs = []
            if obj.attrs:
                attrs = sorted(obj.attrs)
                meta.attributes(attrs)
            # meta checks
            meta.user_macs(macs)
            if not obj.inherits:
                meta.password(password)
            meta.bypassed(bypass)
            if fqdn in user_objs:
                raise Exception(fqdn + " previously defined")
            # use config definitions here
            if not obj.no_login:
                user_objs[fqdn] = _create_obj(macs,
                                              password,
                                              attrs,
                                              port_bypassed,
                                              wildcards)
            if bypass is not None and len(bypass) > 0:
                for mac_bypass in bypass:
                    if mac_bypass in bypass_objs:
                        raise Exception(mac_bypass + " previously defined")
                    bypass_objs[mac_bypass] = vlan
            # every MAC the user touches (normal, port-bypass, bypass)
            # goes into the audit trail
            user_all = []
            for mac_list in [obj.macs, obj.port_bypass, obj.bypass]:
                user_all += list(mac_list)
            if key not in user_macs:
                user_macs[key] = []
            user_macs[key].append((vlan, sorted(set(user_all))))
    meta.verify()
    full = {}
    full[wrapper.freepydius.USER_KEY] = user_objs
    full[wrapper.freepydius.VLAN_KEY] = vlans_with_users
    full[wrapper.freepydius.BYPASS_KEY] = bypass_objs
    with open(output, 'w') as f:
        f.write(json.dumps(full, sort_keys=True,
                           indent=4, separators=[",", ": "]))
    # audit CSV: one (user, vlan, mac) row per known MAC of each user
    with open(audit, 'w') as f:
        csv_writer = csv.writer(f, lineterminator=os.linesep)
        for u in user_macs:
            for obj in user_macs[u]:
                vlan = obj[0]
                macs = obj[1]
                for m in macs:
                    csv_writer.writerow([u, vlan, m])
def main():
    """Command-line entry point: parse arguments and run the composer."""
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("--output", type=str, required=True)
        parser.add_argument("--audit", type=str, required=True)
        opts = parser.parse_args()
        _process(opts.output, opts.audit)
    except Exception as exc:
        print('unable to compose')
        print(str(exc))
        print("failure")
        exit(1)
    print("success")
    exit(0)
# allow running the composer directly as a script
if __name__ == "__main__":
    main()
| en | 0.732484 | #!/usr/bin/python composes the config from user definitions. # file indicators configuration meta information. init the instance. password group validation(s). bypass management. user+mac combos. set attributes. verify meta data. indicate a vlan was used. VLAN to VLAN mappings. create a user definition. import the module dynamically. get by a file type indicator. make a common mod call. Check an object. process the composition of users. # meta checks # use config definitions here main entry. | 2.715481 | 3 |
ch08/attention_seq2seq.py | hyesungKomet/deep-learning-from-scratch-2 | 775 | 6624481 | <reponame>hyesungKomet/deep-learning-from-scratch-2
# coding: utf-8
import sys
sys.path.append('..')
from common.time_layers import *
from ch07.seq2seq import Encoder, Seq2seq
from ch08.attention_layer import TimeAttention
class AttentionEncoder(Encoder):
    """Encoder that returns the LSTM output for every time step.

    The full sequence is consumed as ``enc_hs`` by the attention decoder.
    """

    def forward(self, xs):
        embedded = self.embed.forward(xs)
        return self.lstm.forward(embedded)

    def backward(self, dhs):
        grad = self.lstm.backward(dhs)
        return self.embed.backward(grad)
class AttentionDecoder:
    """Decoder with an attention layer over the encoder's hidden states."""

    def __init__(self, vocab_size, wordvec_size, hidden_size):
        V, D, H = vocab_size, wordvec_size, hidden_size
        rn = np.random.randn
        # random init scaled by 1/sqrt(fan_in) for LSTM/affine weights
        embed_W = (rn(V, D) / 100).astype('f')
        lstm_Wx = (rn(D, 4 * H) / np.sqrt(D)).astype('f')
        lstm_Wh = (rn(H, 4 * H) / np.sqrt(H)).astype('f')
        lstm_b = np.zeros(4 * H).astype('f')
        # the affine layer consumes [context; hidden] -> 2*H inputs
        affine_W = (rn(2*H, V) / np.sqrt(2*H)).astype('f')
        affine_b = np.zeros(V).astype('f')

        self.embed = TimeEmbedding(embed_W)
        self.lstm = TimeLSTM(lstm_Wx, lstm_Wh, lstm_b, stateful=True)
        self.attention = TimeAttention()
        self.affine = TimeAffine(affine_W, affine_b)
        layers = [self.embed, self.lstm, self.attention, self.affine]

        # collect all learnable parameters/gradients in layer order
        self.params, self.grads = [], []
        for layer in layers:
            self.params += layer.params
            self.grads += layer.grads

    def forward(self, xs, enc_hs):
        # seed the LSTM with the encoder's final hidden state
        h = enc_hs[:,-1]
        self.lstm.set_state(h)

        out = self.embed.forward(xs)
        dec_hs = self.lstm.forward(out)
        c = self.attention.forward(enc_hs, dec_hs)
        # concatenate context vectors with decoder states on the feature axis
        out = np.concatenate((c, dec_hs), axis=2)
        score = self.affine.forward(out)

        return score

    def backward(self, dscore):
        dout = self.affine.backward(dscore)
        N, T, H2 = dout.shape
        H = H2 // 2

        # split the gradient back into context / hidden-state halves
        dc, ddec_hs0 = dout[:,:,:H], dout[:,:,H:]
        denc_hs, ddec_hs1 = self.attention.backward(dc)
        # decoder states fed both the affine and the attention layers,
        # so their gradients accumulate
        ddec_hs = ddec_hs0 + ddec_hs1
        dout = self.lstm.backward(ddec_hs)
        dh = self.lstm.dh
        # gradient of the initial LSTM state flows to the encoder's last step
        denc_hs[:, -1] += dh
        self.embed.backward(dout)

        return denc_hs

    def generate(self, enc_hs, start_id, sample_size):
        """Greedy decoding: repeatedly feed back the argmax token."""
        sampled = []
        sample_id = start_id
        h = enc_hs[:, -1]
        self.lstm.set_state(h)

        for _ in range(sample_size):
            x = np.array([sample_id]).reshape((1, 1))

            out = self.embed.forward(x)
            dec_hs = self.lstm.forward(out)
            c = self.attention.forward(enc_hs, dec_hs)
            out = np.concatenate((c, dec_hs), axis=2)
            score = self.affine.forward(out)

            sample_id = np.argmax(score.flatten())
            sampled.append(sample_id)

        return sampled
class AttentionSeq2seq(Seq2seq):
    """Seq2seq wired with the attention-enabled encoder/decoder pair."""

    def __init__(self, vocab_size, wordvec_size, hidden_size):
        sizes = (vocab_size, wordvec_size, hidden_size)
        self.encoder = AttentionEncoder(*sizes)
        self.decoder = AttentionDecoder(*sizes)
        self.softmax = TimeSoftmaxWithLoss()
        self.params = self.encoder.params + self.decoder.params
        self.grads = self.encoder.grads + self.decoder.grads
| # coding: utf-8
import sys
sys.path.append('..')
from common.time_layers import *
from ch07.seq2seq import Encoder, Seq2seq
from ch08.attention_layer import TimeAttention
class AttentionEncoder(Encoder):
    """Encoder that returns the LSTM output for every time step.

    The full sequence is consumed as ``enc_hs`` by the attention decoder.
    """

    def forward(self, xs):
        embedded = self.embed.forward(xs)
        return self.lstm.forward(embedded)

    def backward(self, dhs):
        grad = self.lstm.backward(dhs)
        return self.embed.backward(grad)
class AttentionDecoder:
    """Decoder with an attention layer over the encoder's hidden states."""

    def __init__(self, vocab_size, wordvec_size, hidden_size):
        V, D, H = vocab_size, wordvec_size, hidden_size
        rn = np.random.randn
        # random init scaled by 1/sqrt(fan_in) for LSTM/affine weights
        embed_W = (rn(V, D) / 100).astype('f')
        lstm_Wx = (rn(D, 4 * H) / np.sqrt(D)).astype('f')
        lstm_Wh = (rn(H, 4 * H) / np.sqrt(H)).astype('f')
        lstm_b = np.zeros(4 * H).astype('f')
        # the affine layer consumes [context; hidden] -> 2*H inputs
        affine_W = (rn(2*H, V) / np.sqrt(2*H)).astype('f')
        affine_b = np.zeros(V).astype('f')

        self.embed = TimeEmbedding(embed_W)
        self.lstm = TimeLSTM(lstm_Wx, lstm_Wh, lstm_b, stateful=True)
        self.attention = TimeAttention()
        self.affine = TimeAffine(affine_W, affine_b)
        layers = [self.embed, self.lstm, self.attention, self.affine]

        # collect all learnable parameters/gradients in layer order
        self.params, self.grads = [], []
        for layer in layers:
            self.params += layer.params
            self.grads += layer.grads

    def forward(self, xs, enc_hs):
        # seed the LSTM with the encoder's final hidden state
        h = enc_hs[:,-1]
        self.lstm.set_state(h)

        out = self.embed.forward(xs)
        dec_hs = self.lstm.forward(out)
        c = self.attention.forward(enc_hs, dec_hs)
        # concatenate context vectors with decoder states on the feature axis
        out = np.concatenate((c, dec_hs), axis=2)
        score = self.affine.forward(out)

        return score

    def backward(self, dscore):
        dout = self.affine.backward(dscore)
        N, T, H2 = dout.shape
        H = H2 // 2

        # split the gradient back into context / hidden-state halves
        dc, ddec_hs0 = dout[:,:,:H], dout[:,:,H:]
        denc_hs, ddec_hs1 = self.attention.backward(dc)
        # decoder states fed both the affine and the attention layers,
        # so their gradients accumulate
        ddec_hs = ddec_hs0 + ddec_hs1
        dout = self.lstm.backward(ddec_hs)
        dh = self.lstm.dh
        # gradient of the initial LSTM state flows to the encoder's last step
        denc_hs[:, -1] += dh
        self.embed.backward(dout)

        return denc_hs

    def generate(self, enc_hs, start_id, sample_size):
        """Greedy decoding: repeatedly feed back the argmax token."""
        sampled = []
        sample_id = start_id
        h = enc_hs[:, -1]
        self.lstm.set_state(h)

        for _ in range(sample_size):
            x = np.array([sample_id]).reshape((1, 1))

            out = self.embed.forward(x)
            dec_hs = self.lstm.forward(out)
            c = self.attention.forward(enc_hs, dec_hs)
            out = np.concatenate((c, dec_hs), axis=2)
            score = self.affine.forward(out)

            sample_id = np.argmax(score.flatten())
            sampled.append(sample_id)

        return sampled
class AttentionSeq2seq(Seq2seq):
    """Seq2seq wired with the attention-enabled encoder/decoder pair."""

    def __init__(self, vocab_size, wordvec_size, hidden_size):
        sizes = (vocab_size, wordvec_size, hidden_size)
        self.encoder = AttentionEncoder(*sizes)
        self.decoder = AttentionDecoder(*sizes)
        self.softmax = TimeSoftmaxWithLoss()
        self.params = self.encoder.params + self.decoder.params
        self.grads = self.encoder.grads + self.decoder.grads
datasets/fruits.py | YorkSu/hat | 1 | 6624482 | # pylint: disable=attribute-defined-outside-init
from hat.datasets.Dataset import Dataset
from hat.datasets.utils import DSBuilder
class fruits(Dataset):
    """
    Fruits dataset.

    Image-classification dataset of 100x100 RGB fruit images, loaded
    from ``datasets/fruits`` through DSBuilder.
    """
    def args(self):
        # NOTE(review): 'classfication' typo preserved -- the string may be
        # matched elsewhere in the framework; confirm before correcting.
        self._MISSION_LIST = ['classfication']
        self.SHUFFLE = True
        # split sizes -- presumably derived from the on-disk data; confirm
        self.NUM_TRAIN = 58266
        self.NUM_VAL = 19548
        self.NUM_TEST = 0
        self.NUM_CLASSES = 114
        # (height, width, channels)
        self.INPUT_SHAPE = (100, 100, 3)
        self.DATA_DIR = 'datasets/fruits'
        self.dsb = DSBuilder(
            self.DATA_DIR,
            self.INPUT_SHAPE[0:2],
            shuffle=self.SHUFFLE)
        self.CLASSES_DICT = self.dsb.get_classes_dict()
        # splits: (train_x, train_y), (val_x, val_y), test_x
        (self.train_x, self.train_y), (self.val_x, self.val_y), self.test_x = self.dsb.get_all('ignore')
# test part: manual smoke test when run directly -- inspect the class
# mapping and display one training sample
if __name__ == "__main__":
    from pprint import pprint
    data = fruits()
    pprint(data.CLASSES_DICT)
    print(data.train_x.shape)
    from PIL import Image
    Image.fromarray(data.train_x[5]).show()
| # pylint: disable=attribute-defined-outside-init
from hat.datasets.Dataset import Dataset
from hat.datasets.utils import DSBuilder
class fruits(Dataset):
    """
    Fruits dataset.

    Image-classification dataset of 100x100 RGB fruit images, loaded
    from ``datasets/fruits`` through DSBuilder.
    """
    def args(self):
        # NOTE(review): 'classfication' typo preserved -- the string may be
        # matched elsewhere in the framework; confirm before correcting.
        self._MISSION_LIST = ['classfication']
        self.SHUFFLE = True
        # split sizes -- presumably derived from the on-disk data; confirm
        self.NUM_TRAIN = 58266
        self.NUM_VAL = 19548
        self.NUM_TEST = 0
        self.NUM_CLASSES = 114
        # (height, width, channels)
        self.INPUT_SHAPE = (100, 100, 3)
        self.DATA_DIR = 'datasets/fruits'
        self.dsb = DSBuilder(
            self.DATA_DIR,
            self.INPUT_SHAPE[0:2],
            shuffle=self.SHUFFLE)
        self.CLASSES_DICT = self.dsb.get_classes_dict()
        # splits: (train_x, train_y), (val_x, val_y), test_x
        (self.train_x, self.train_y), (self.val_x, self.val_y), self.test_x = self.dsb.get_all('ignore')
# test part: manual smoke test when run directly -- inspect the class
# mapping and display one training sample
if __name__ == "__main__":
    from pprint import pprint
    data = fruits()
    pprint(data.CLASSES_DICT)
    print(data.train_x.shape)
    from PIL import Image
    Image.fromarray(data.train_x[5]).show()
| en | 0.270329 | # pylint: disable=attribute-defined-outside-init Fruits 数据集 # test part | 2.533804 | 3 |
tests/filters/test_uri.py | pabarros/asgard-api | 3 | 6624483 | <gh_stars>1-10
import unittest
from copy import copy
from hollowman.filters.uri import AddURIFilter
from hollowman.marathonapp import AsgardApp
from tests.utils import with_json_fixture
class AddURIFilterTest(unittest.TestCase):
    """Tests for AddURIFilter.

    The filter must guarantee that the docker auth URI
    (file:///etc/docker.tar.bz2) is present exactly once in an app's
    ``uris`` list, on both app update and app creation.
    """

    @with_json_fixture("../fixtures/single_full_app.json")
    def setUp(self, single_full_app_fixture):
        # URI the filter is expected to inject
        self.docker_auth_uri = "file:///etc/docker.tar.bz2"
        # unrelated URIs that must be preserved untouched
        self.base_uris = ["http://google.com", "file://etc/file.txt"]
        self.single_full_app_fixture = single_full_app_fixture
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        self.original_app = AsgardApp.from_json(self.single_full_app_fixture)
        self.filter = AddURIFilter()

    def test_update_app_do_not_add_uri_if_exist(self):
        """The auth URI is not duplicated when the app already has it."""
        self.single_full_app_fixture["uris"] = copy(self.base_uris) + [
            self.docker_auth_uri
        ]
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )

    def test_update_app_do_not_add_uri_if_exist_with_spaces(self):
        """
        We do not need to strip the original values because Marathon
        already does that for us.
        """
        self.single_full_app_fixture["uris"] = copy(self.base_uris) + [
            "   " + self.docker_auth_uri
        ]
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + ["   " + self.docker_auth_uri],
            filtered_app.uris,
        )

    def test_update_app_add_uri_with_other_existing_uris(self):
        """
        Even if the app already has other URIs, ours must still be added.
        """
        self.single_full_app_fixture["uris"] = copy(self.base_uris)
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )

    def test_update_app_add_uri_if_not_exist(self):
        """The auth URI is injected on update when the app has no URIs."""
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(1, len(filtered_app.uris))
        self.assertEqual([self.docker_auth_uri], filtered_app.uris)

    def test_create_app_add_uri_if_not_exist(self):
        """The auth URI is injected on creation when the app has no URIs."""
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(None, self.request_app, AsgardApp())
        self.assertEqual(1, len(filtered_app.uris))
        self.assertEqual([self.docker_auth_uri], filtered_app.uris)

    def test_create_app_do_not_add_uri_if_exist(self):
        """The auth URI is not duplicated on creation when already present."""
        self.single_full_app_fixture["uris"] = copy(self.base_uris) + [
            self.docker_auth_uri
        ]
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(None, self.request_app, AsgardApp())
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )

    def test_create_app_add_uri_with_other_existing_uris(self):
        """
        Even if the app already has other URIs, ours must still be added.
        """
        self.single_full_app_fixture["uris"] = copy(self.base_uris)
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(None, self.request_app, AsgardApp())
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )
| import unittest
from copy import copy
from hollowman.filters.uri import AddURIFilter
from hollowman.marathonapp import AsgardApp
from tests.utils import with_json_fixture
class AddURIFilterTest(unittest.TestCase):
    """Tests for AddURIFilter.

    The filter must guarantee that the docker auth URI
    (file:///etc/docker.tar.bz2) is present exactly once in an app's
    ``uris`` list, on both app update and app creation.
    """

    @with_json_fixture("../fixtures/single_full_app.json")
    def setUp(self, single_full_app_fixture):
        # URI the filter is expected to inject
        self.docker_auth_uri = "file:///etc/docker.tar.bz2"
        # unrelated URIs that must be preserved untouched
        self.base_uris = ["http://google.com", "file://etc/file.txt"]
        self.single_full_app_fixture = single_full_app_fixture
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        self.original_app = AsgardApp.from_json(self.single_full_app_fixture)
        self.filter = AddURIFilter()

    def test_update_app_do_not_add_uri_if_exist(self):
        """The auth URI is not duplicated when the app already has it."""
        self.single_full_app_fixture["uris"] = copy(self.base_uris) + [
            self.docker_auth_uri
        ]
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )

    def test_update_app_do_not_add_uri_if_exist_with_spaces(self):
        """
        We do not need to strip the original values because Marathon
        already does that for us.
        """
        self.single_full_app_fixture["uris"] = copy(self.base_uris) + [
            "   " + self.docker_auth_uri
        ]
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + ["   " + self.docker_auth_uri],
            filtered_app.uris,
        )

    def test_update_app_add_uri_with_other_existing_uris(self):
        """
        Even if the app already has other URIs, ours must still be added.
        """
        self.single_full_app_fixture["uris"] = copy(self.base_uris)
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )

    def test_update_app_add_uri_if_not_exist(self):
        """The auth URI is injected on update when the app has no URIs."""
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(
            None, self.request_app, self.original_app
        )
        self.assertEqual(1, len(filtered_app.uris))
        self.assertEqual([self.docker_auth_uri], filtered_app.uris)

    def test_create_app_add_uri_if_not_exist(self):
        """The auth URI is injected on creation when the app has no URIs."""
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(None, self.request_app, AsgardApp())
        self.assertEqual(1, len(filtered_app.uris))
        self.assertEqual([self.docker_auth_uri], filtered_app.uris)

    def test_create_app_do_not_add_uri_if_exist(self):
        """The auth URI is not duplicated on creation when already present."""
        self.single_full_app_fixture["uris"] = copy(self.base_uris) + [
            self.docker_auth_uri
        ]
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(None, self.request_app, AsgardApp())
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )

    def test_create_app_add_uri_with_other_existing_uris(self):
        """
        Even if the app already has other URIs, ours must still be added.
        """
        self.single_full_app_fixture["uris"] = copy(self.base_uris)
        self.request_app = AsgardApp.from_json(self.single_full_app_fixture)
        filtered_app = self.filter.write(None, self.request_app, AsgardApp())
        self.assertEqual(3, len(filtered_app.uris))
        self.assertEqual(
            self.base_uris + [self.docker_auth_uri], filtered_app.uris
        )
tester/test_handlers/test_static_handler.py | bukun/TorCMS | 243 | 6624484 | # -*- coding:utf-8 -*-
'''
Test
'''
from torcms.handlers.static_handler import StaticHandler
def test_zl():
    '''
    Sanity-check that the static-handler route table is non-empty.
    '''
    urls = [("/label/(.*)", StaticHandler, {})]
    assert urls
| # -*- coding:utf-8 -*-
'''
Test
'''
from torcms.handlers.static_handler import StaticHandler
def test_zl():
    '''
    Sanity-check that the static-handler route table is non-empty.
    '''
    urls = [("/label/(.*)", StaticHandler, {})]
    assert urls
| en | 0.640147 | # -*- coding:utf-8 -*- Test Test | 1.834207 | 2 |
flowws/internal.py | gitter-badger/flowws | 0 | 6624485 | <reponame>gitter-badger/flowws<filename>flowws/internal.py
import importlib
import logging
logger = logging.getLogger(__name__)
class FailedImport:
    """Placeholder for a module whose import raised.

    Attribute access keeps returning the placeholder itself, so dotted
    lookups succeed lazily; actually calling it raises the original
    import exception.
    """

    def __init__(self, exception):
        # the exception captured at import time
        self.exception = exception

    def __getattr__(self, *args, **kwargs):
        # defer the failure: any attribute is the placeholder again
        return self

    def __call__(self, *args, **kwargs):
        raise self.exception
def try_to_import(pkg, name, current_pkg=None):
    """Fetch *name* from module *pkg*, or return an error-producing fake.

    Convenience for libraries that expose modules with optional
    prerequisites: instead of failing at import time, a FailedImport
    stand-in is returned that only raises once it is actually used.

    :param pkg: package name to import (may be relative)
    :param name: attribute to pull from the imported module
    :param current_pkg: anchor package when *pkg* is relative
    :returns: the attribute, or a FailedImport wrapping the ImportError
    """
    try:
        module = importlib.import_module(pkg, current_pkg)
        attr = getattr(module, name)
    except ImportError as err:
        attr = FailedImport(err)
    return attr
| import importlib
import logging
logger = logging.getLogger(__name__)
class FailedImport:
    """Placeholder for a module whose import raised.

    Attribute access keeps returning the placeholder itself, so dotted
    lookups succeed lazily; actually calling it raises the original
    import exception.
    """

    def __init__(self, exception):
        # the exception captured at import time
        self.exception = exception

    def __getattr__(self, *args, **kwargs):
        # defer the failure: any attribute is the placeholder again
        return self

    def __call__(self, *args, **kwargs):
        raise self.exception
def try_to_import(pkg, name, current_pkg=None):
    """Fetch *name* from module *pkg*, or return an error-producing fake.

    Convenience for libraries that expose modules with optional
    prerequisites: instead of failing at import time, a FailedImport
    stand-in is returned that only raises once it is actually used.

    :param pkg: package name to import (may be relative)
    :param name: attribute to pull from the imported module
    :param current_pkg: anchor package when *pkg* is relative
    :returns: the attribute, or a FailedImport wrapping the ImportError
    """
    try:
        module = importlib.import_module(pkg, current_pkg)
        attr = getattr(module, name)
    except ImportError as err:
        attr = FailedImport(err)
    return attr
intro-python/parsing-json/nested_data.py | sandhjos/dne-dna-code | 0 | 6624486 | #!/usr/bin/env python
"""Working with nested data hands-on exercise / coding challenge."""
import json
import os
# Get the absolute path for the directory where this file is located "here"
from pprint import pprint
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "interfaces.json")) as file:
# TODO: Parse the contents of the JSON file into a variable
outputStr = file.read(0)
# TODO: Loop through the interfaces in the JSON data and print out each
outputData = json.loads(outputStr)
# interface's name, ip, and netmask.
pprint(outputData)
#!/usr/bin/env python
"""Working with nested data hands-on exercise / coding challenge."""
import json
import os

from pprint import pprint

# Get the absolute path for the directory where this file is located "here"
here = os.path.abspath(os.path.dirname(__file__))

with open(os.path.join(here, "interfaces.json")) as file:
    # Parse the contents of the JSON file into a variable.
    # BUG FIX: file.read(0) read zero bytes, leaving an empty string that
    # json.loads() rejects; read() with no argument consumes the file.
    outputStr = file.read()

outputData = json.loads(outputStr)

# TODO: Loop through the interfaces in the JSON data and print out each
# interface's name, ip, and netmask.
pprint(outputData)
| en | 0.822053 | #!/usr/bin/env python Working with nested data hands-on exercise / coding challenge. # Get the absolute path for the directory where this file is located "here" # TODO: Parse the contents of the JSON file into a variable # TODO: Loop through the interfaces in the JSON data and print out each # interface's name, ip, and netmask. | 3.367939 | 3 |
ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py | fangxingli/mambari | 0 | 6624487 | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import os
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
from resource_management.core.exceptions import Fail
from resource_management.core import shell
import resource_management.libraries.functions
origin_exists = os.path.exists
@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
@patch.object(os.path, "exists", new=MagicMock(
side_effect=lambda *args: origin_exists(args[0])
if args[0][-2:] == "j2" else True))
class TestNodeManager(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
STACK_VERSION = "2.0.6"
def test_configure_default(self):
    """CONFIGURE on an unsecured cluster lays down only config resources."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="configure",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    self.assertNoMoreResources()
def test_start_default(self):
    """START on an unsecured cluster: configure, clear a stale pid file,
    launch the daemon, then poll the pid check until it succeeds."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="start",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()

    # NOTE(review): unused local kept for reference to the raw pid check
    pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1'

    # the stale pid file is deleted unless the process is actually alive
    self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
    )
    self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              user = 'yarn',
    )
    # startup is verified by retrying the pid check up to 5 times
    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              tries = 5,
                              try_sleep = 1,
    )
    self.assertNoMoreResources()
def test_stop_default(self):
    """STOP on an unsecured cluster issues yarn-daemon.sh stop as yarn."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="stop",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop nodemanager',
                              user='yarn')
    self.assertNoMoreResources()
def test_configure_secured(self):
    """CONFIGURE on a kerberized cluster lays down only config resources."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="configure",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    self.assertNoMoreResources()
def test_start_secured(self):
    """START on a kerberized cluster: configure, clear a stale pid file,
    launch the daemon, then poll the pid check until it succeeds."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="start",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()

    # NOTE(review): unused local kept for reference to the raw pid check
    pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid` >/dev/null 2>&1'

    # the stale pid file is deleted unless the process is actually alive
    self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
    )
    self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              user = 'yarn',
    )
    # startup is verified by retrying the pid check up to 5 times
    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              tries = 5,
                              try_sleep = 1,
    )
    self.assertNoMoreResources()
def test_stop_secured(self):
    """STOP on a kerberized cluster issues yarn-daemon.sh stop as yarn."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="stop",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop nodemanager',
                              user='yarn')
    self.assertNoMoreResources()
def assert_configure_default(self):
    """Verify the exact, ordered resource sequence that `configure` emits
    for the default (non-kerberized) configuration.

    Every assertResourceCalled below must match both the resource and its
    keyword arguments; order is significant.
    """
    # --- yarn local/log directories (primary and the secondary *1 dirs) ---
    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        ignore_failures = True,
        mode = 0775,
        cd_access='a'
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/log1',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        ignore_failures = True,
        mode = 0775,
        cd_access='a'
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0775,
        create_parents = True,
        ignore_failures = True,
        cd_access='a',
        recursive_mode_flags = {'d': 'a+rwx', 'f': 'a+rw'},
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/local1',
        owner = 'yarn',
        create_parents = True,
        group = 'hadoop',
        ignore_failures = True,
        mode = 0775,
        cd_access='a',
        recursive_mode_flags = {'d': 'a+rwx', 'f': 'a+rw'}
    )
    # --- pid and log directories for yarn and mapred daemons ---
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
        owner = 'yarn',
        create_parents = True,
        ignore_failures = True,
        cd_access = 'a',
    )
    # --- XML configuration files written into /etc/hadoop/conf ---
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['mapred-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['yarn-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['capacity-scheduler'],
        configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    # --- misc config files, limits, env scripts ---
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
        owner = 'yarn',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
        content = Template('yarn.conf.j2'),
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
        content = Template('mapreduce.conf.j2'),
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
        owner = 'yarn',
        group = 'hadoop',
        mode = 0755,
    )
    # In the unsecured case container-executor is only setgid (02050),
    # not setuid+setgid as in the secured variant.
    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
        group = 'hadoop',
        mode = 02050,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
        content = Template('container-executor.cfg.j2'),
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('Directory', '/cgroups_test/cpu',
        group = 'hadoop',
        create_parents = True,
        mode = 0755,
        cd_access="a"
    )
    # mapred-env.sh / taskcontroller.cfg are owned by hdfs when unsecured.
    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
        mode = 0755,
        owner = 'hdfs',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
        content = Template('taskcontroller.cfg.j2'),
        owner = 'hdfs',
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        owner = 'mapred',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['mapred-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['capacity-scheduler'],
        configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
        owner = 'mapred',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
        owner = 'mapred',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
        owner = 'mapred',
        group = 'hadoop',
    )
def assert_configure_secured(self):
    """Verify the exact, ordered resource sequence that `configure` emits
    for the kerberized (secured) configuration.

    The secured flow first wipes the yarn local/log dirs and drops a marker
    file (first start after toggling security), then recreates them and
    writes configs with root-owned, setuid executors.
    """
    # --- first-start-after-security-toggle cleanup ---
    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
        action = ['delete']
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
        action = ['delete']
    )
    self.assertResourceCalled('Directory', '/var/lib/hadoop-yarn',)
    self.assertResourceCalled('File', '/var/lib/hadoop-yarn/nm_security_enabled',
        content= 'Marker file to track first start after enabling/disabling security. During first start yarn local, log dirs are removed and recreated'
    )
    # --- recreate yarn local/log dirs ---
    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        ignore_failures = True,
        mode = 0775,
        cd_access='a',
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        ignore_failures = True,
        mode = 0775,
        cd_access='a',
        recursive_mode_flags = {'d': 'a+rwx', 'f': 'a+rw'},
    )
    # --- pid and log directories for yarn and mapred daemons ---
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
        owner = 'yarn',
        create_parents = True,
        ignore_failures = True,
        cd_access = 'a',
    )
    # --- XML configuration files written into /etc/hadoop/conf ---
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['mapred-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['yarn-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['capacity-scheduler'],
        configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    # --- misc config files, limits, env scripts ---
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
        owner = 'yarn',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
        content = Template('yarn.conf.j2'),
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
        content = Template('mapreduce.conf.j2'),
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
        owner = 'yarn',
        group = 'hadoop',
        mode = 0755,
    )
    # Secured container-executor is setuid+setgid (06050), unlike the
    # 02050 used in the default configuration.
    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
        group = 'hadoop',
        mode = 06050,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
        content = Template('container-executor.cfg.j2'),
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('Directory', '/cgroups_test/cpu',
        group = 'hadoop',
        create_parents = True,
        mode = 0755,
        cd_access="a"
    )
    # mapred-env.sh / task-controller / taskcontroller.cfg are root-owned
    # in the secured flow (hdfs-owned in the default flow).
    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
        mode = 0755,
        owner = 'root',
    )
    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
        owner = 'root',
        group = 'hadoop',
        mode = 06050,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
        content = Template('taskcontroller.cfg.j2'),
        owner = 'root',
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        owner = 'mapred',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['mapred-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['capacity-scheduler'],
        configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
        owner = 'mapred',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
        owner = 'mapred',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
        owner = 'mapred',
        group = 'hadoop',
    )
@patch("socket.gethostbyname")
@patch('time.sleep')
@patch.object(resource_management.libraries.functions, "get_hdp_version", new = MagicMock(return_value='2.3.0.0-1234'))
def test_post_upgrade_restart(self, time_mock, socket_gethostbyname_mock):
process_output = """
c6401.ambari.apache.org:45454 RUNNING c6401.ambari.apache.org:8042 0
"""
mocks_dict = {}
socket_gethostbyname_mock.return_value = "test_host"
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname = "Nodemanager",
command = "post_upgrade_restart",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, process_output)],
mocks_dict = mocks_dict
)
self.assertTrue(mocks_dict['call'].called)
self.assertEqual(mocks_dict['call'].call_count,1)
self.assertEquals(
"yarn node -list -states=RUNNING",
mocks_dict['call'].call_args_list[0][0][0])
self.assertEquals( {'user': u'yarn'}, mocks_dict['call'].call_args_list[0][1])
@patch('time.sleep')
def test_post_upgrade_restart_nodemanager_not_ready(self, time_mock):
    """post_upgrade_restart must raise Fail when this NodeManager never
    appears in the RUNNING node list (output names a different host)."""
    # Listing contains only c9999..., not the host under test.
    process_output = """
    c9999.ambari.apache.org:45454 RUNNING c9999.ambari.apache.org:8042 0
"""
    mocks_dict = {}
    try:
      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                         classname="Nodemanager",
                         command = "post_upgrade_restart",
                         config_file="default.json",
                         hdp_stack_version = self.STACK_VERSION,
                         target = RMFTestCase.TARGET_COMMON_SERVICES,
                         call_mocks = [(0, process_output)],
                         mocks_dict = mocks_dict,
      )
      self.fail('Missing NodeManager should have caused a failure')
    except Fail,fail:  # Python 2 except syntax; Fail is the expected outcome
      # The readiness check retries 12 times before giving up.
      self.assertTrue(mocks_dict['call'].called)
      self.assertEqual(mocks_dict['call'].call_count,12)
@patch('time.sleep')
def test_post_upgrade_restart_nodemanager_not_ready(self, time_mock):
process_output = """
c6401.ambari.apache.org:45454 RUNNING c6401.ambari.apache.org:8042 0
"""
mocks_dict = {}
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command = "post_upgrade_restart",
config_file="default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(999, process_output)],
mocks_dict = mocks_dict,
)
self.fail('Invalid return code should cause a failure')
except Fail,fail:
self.assertTrue(mocks_dict['call'].called)
self.assertEqual(mocks_dict['call'].call_count,1)
@patch("resource_management.libraries.functions.security_commons.build_expectations")
@patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
@patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
@patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
@patch("resource_management.libraries.script.Script.put_structured_out")
def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
# Test that function works when is called with correct parameters
security_params = {
'yarn-site': {
'yarn.nodemanager.keytab': 'path/to/nodemanager/keytab',
'yarn.nodemanager.principal': 'nodemanager_principal',
'yarn.nodemanager.webapp.spnego-keytab-file': 'path/to/nodemanager/webapp/keytab',
'yarn.nodemanager.webapp.spnego-principal': 'nodemanager_webapp_principal'
}
}
result_issues = []
props_value_check = {"yarn.timeline-service.http-authentication.type": "kerberos",
"yarn.acl.enable": "true"}
props_empty_check = ["yarn.nodemanager.principal",
"yarn.nodemanager.keytab",
"yarn.nodemanager.webapp.spnego-principal",
"yarn.nodemanager.webapp.spnego-keytab-file"]
props_read_check = ["yarn.nodemanager.keytab",
"yarn.nodemanager.webapp.spnego-keytab-file"]
get_params_mock.return_value = security_params
validate_security_config_mock.return_value = result_issues
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
build_exp_mock.assert_called_with('yarn-site', props_value_check, props_empty_check, props_read_check)
put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
self.assertTrue(cached_kinit_executor_mock.call_count, 2)
cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
self.config_dict['configurations']['yarn-env']['yarn_user'],
security_params['yarn-site']['yarn.nodemanager.webapp.spnego-keytab-file'],
security_params['yarn-site']['yarn.nodemanager.webapp.spnego-principal'],
self.config_dict['hostname'],
'/tmp')
# Testing that the exception throw by cached_executor is caught
cached_kinit_executor_mock.reset_mock()
cached_kinit_executor_mock.side_effect = Exception("Invalid command")
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
except:
self.assertTrue(True)
# Testing with a security_params which doesn't contains yarn-site
empty_security_params = {}
cached_kinit_executor_mock.reset_mock()
get_params_mock.reset_mock()
put_structured_out_mock.reset_mock()
get_params_mock.return_value = empty_security_params
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
# Testing with not empty result_issues
result_issues_with_params = {
'yarn-site': "Something bad happened"
}
validate_security_config_mock.reset_mock()
get_params_mock.reset_mock()
validate_security_config_mock.return_value = result_issues_with_params
get_params_mock.return_value = security_params
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
# Testing with security_enable = false
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
@patch.object(resource_management.libraries.functions, "get_hdp_version", new = MagicMock(return_value='2.3.0.0-1234'))
def test_pre_upgrade_restart_23(self):
    """pre_upgrade_restart on HDP 2.3 must run hdp-select for the
    nodemanager component and set/create the conf-select conf dirs."""
    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
    with open(config_file, "r") as f:
      json_content = json.load(f)
    # Inject the target stack version into the command parameters.
    version = '2.3.0.0-1234'
    json_content['commandParams']['version'] = version
    mocks_dict = {}
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname = "Nodemanager",
                       command = "pre_upgrade_restart",
                       config_dict = json_content,
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES,
                       call_mocks = [(0, None, ''), (0, None)],
                       mocks_dict = mocks_dict)
    # hdp-select must point the component at the new version (via sudo).
    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-nodemanager', version), sudo=True)
    self.assertNoMoreResources()
    # conf-select: one checked_call (set-conf-dir) and one call (create-conf-dir).
    self.assertEquals(1, mocks_dict['call'].call_count)
    self.assertEquals(1, mocks_dict['checked_call'].call_count)
    self.assertEquals(
      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
       mocks_dict['checked_call'].call_args_list[0][0][0])
    self.assertEquals(
      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
       mocks_dict['call'].call_args_list[0][0][0])
| #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
import os
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
from resource_management.core.exceptions import Fail
from resource_management.core import shell
import resource_management.libraries.functions
origin_exists = os.path.exists
@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
@patch.object(os.path, "exists", new=MagicMock(
side_effect=lambda *args: origin_exists(args[0])
if args[0][-2:] == "j2" else True))
class TestNodeManager(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
STACK_VERSION = "2.0.6"
def test_configure_default(self):
    """`configure` with default.json must emit the full default resource set."""
    nodemanager_script = self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py"
    self.executeScript(nodemanager_script,
                       classname = "Nodemanager",
                       command = "configure",
                       config_file = "default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    # Delegate the (long) per-resource verification to the shared helper.
    self.assert_configure_default()
    self.assertNoMoreResources()
def test_start_default(self):
    """`start` with default.json must configure everything, delete a stale
    pid file, launch the daemon, and then wait for the pid to appear.

    Fix: removed the unused local ``pid_check_cmd`` — it was assigned but
    never referenced (the not_if guards use their own inline command).
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="start",
                       config_file="default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_default()
    # A pid file whose process is gone is stale: delete it before starting.
    self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              )
    # Start the daemon (skipped if it is already running).
    self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              user = 'yarn',
                              )
    # Poll (5 tries, 1s apart) until the pid file exists and the process is up.
    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              tries = 5,
                              try_sleep = 1,
                              )
    self.assertNoMoreResources()
def test_stop_default(self):
    """`stop` with default.json runs only the yarn-daemon stop command."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname = "Nodemanager",
                       command = "stop",
                       config_file = "default.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    expected_cmd = ('export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && '
                    '/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop nodemanager')
    self.assertResourceCalled('Execute', expected_cmd, user = 'yarn')
    self.assertNoMoreResources()
def test_configure_secured(self):
    """`configure` with secured.json must emit the full secured resource set."""
    nodemanager_script = self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py"
    self.executeScript(nodemanager_script,
                       classname = "Nodemanager",
                       command = "configure",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    # Delegate the (long) per-resource verification to the shared helper.
    self.assert_configure_secured()
    self.assertNoMoreResources()
def test_start_secured(self):
    """`start` with secured.json must run the secured configure flow,
    delete a stale pid file, launch the daemon, and wait for the pid.

    Fix: removed the unused local ``pid_check_cmd`` — it was assigned but
    never referenced (the not_if guards use their own inline command).
    """
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname="Nodemanager",
                       command="start",
                       config_file="secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES
    )
    self.assert_configure_secured()
    # A pid file whose process is gone is stale: delete it before starting.
    self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid',
                              action = ['delete'],
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              )
    # Start the daemon (skipped if it is already running).
    self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start nodemanager',
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              user = 'yarn',
                              )
    # Poll (5 tries, 1s apart) until the pid file exists and the process is up.
    self.assertResourceCalled('Execute', "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              not_if = "ambari-sudo.sh su yarn -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-nodemanager.pid`'",
                              tries = 5,
                              try_sleep = 1,
                              )
    self.assertNoMoreResources()
def test_stop_secured(self):
    """`stop` with secured.json runs only the yarn-daemon stop command."""
    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                       classname = "Nodemanager",
                       command = "stop",
                       config_file = "secured.json",
                       hdp_stack_version = self.STACK_VERSION,
                       target = RMFTestCase.TARGET_COMMON_SERVICES)
    daemon_stop = ('export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && '
                   '/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf stop nodemanager')
    self.assertResourceCalled('Execute', daemon_stop, user = 'yarn')
    self.assertNoMoreResources()
def assert_configure_default(self):
    """Verify the exact, ordered resource sequence that `configure` emits
    for the default (non-kerberized) configuration.

    Every assertResourceCalled below must match both the resource and its
    keyword arguments; order is significant.
    """
    # --- yarn local/log directories (primary and the secondary *1 dirs) ---
    self.assertResourceCalled('Directory', '/hadoop/yarn/log',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        ignore_failures = True,
        mode = 0775,
        cd_access='a'
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/log1',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        ignore_failures = True,
        mode = 0775,
        cd_access='a'
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/local',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0775,
        create_parents = True,
        ignore_failures = True,
        cd_access='a',
        recursive_mode_flags = {'d': 'a+rwx', 'f': 'a+rw'},
    )
    self.assertResourceCalled('Directory', '/hadoop/yarn/local1',
        owner = 'yarn',
        create_parents = True,
        group = 'hadoop',
        ignore_failures = True,
        mode = 0775,
        cd_access='a',
        recursive_mode_flags = {'d': 'a+rwx', 'f': 'a+rw'}
    )
    # --- pid and log directories for yarn and mapred daemons ---
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
        owner = 'yarn',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
        owner = 'mapred',
        group = 'hadoop',
        create_parents = True,
        cd_access = 'a',
    )
    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
        owner = 'yarn',
        create_parents = True,
        ignore_failures = True,
        cd_access = 'a',
    )
    # --- XML configuration files written into /etc/hadoop/conf ---
    self.assertResourceCalled('XmlConfig', 'core-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['core-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
    )
    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
        owner = 'hdfs',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['hdfs-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['mapred-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['yarn-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
        owner = 'yarn',
        group = 'hadoop',
        mode = 0644,
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['capacity-scheduler'],
        configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    # --- misc config files, limits, env scripts ---
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
        owner = 'yarn',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
        content = Template('yarn.conf.j2'),
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
        content = Template('mapreduce.conf.j2'),
        mode = 0644,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
        owner = 'yarn',
        group = 'hadoop',
        mode = 0755,
    )
    # In the unsecured case container-executor is only setgid (02050),
    # not setuid+setgid as in the secured variant.
    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
        group = 'hadoop',
        mode = 02050,
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
        content = Template('container-executor.cfg.j2'),
        group = 'hadoop',
        mode = 0644,
    )
    self.assertResourceCalled('Directory', '/cgroups_test/cpu',
        group = 'hadoop',
        create_parents = True,
        mode = 0755,
        cd_access="a"
    )
    # mapred-env.sh / taskcontroller.cfg are owned by hdfs when unsecured.
    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
        content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
        mode = 0755,
        owner = 'hdfs',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
        content = Template('taskcontroller.cfg.j2'),
        owner = 'hdfs',
    )
    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
        owner = 'mapred',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['mapred-site'],
        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
    )
    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
        owner = 'hdfs',
        group = 'hadoop',
        conf_dir = '/etc/hadoop/conf',
        configurations = self.getConfig()['configurations']['capacity-scheduler'],
        configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
        owner = 'mapred',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
        owner = 'mapred',
        group = 'hadoop',
    )
    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
        owner = 'mapred',
        group = 'hadoop',
    )
    def assert_configure_secured(self):
        """Assert the exact resource sequence of a *secured* (Kerberos)
        NodeManager configure.

        On the first start after toggling security the yarn local/log dirs
        are deleted and recreated, a marker file records the toggle, the
        Hadoop XML configs are rewritten, and the setuid binaries
        (container-executor, task-controller) get mode 06050 instead of the
        unsecured 02050. Order matters: assertResourceCalled consumes
        resources in declaration order.
        """
        # Local/log dirs are wiped on the first secured start ...
        self.assertResourceCalled('Directory', '/hadoop/yarn/local',
            action = ['delete']
        )
        self.assertResourceCalled('Directory', '/hadoop/yarn/log',
            action = ['delete']
        )
        self.assertResourceCalled('Directory', '/var/lib/hadoop-yarn',)
        # ... and a marker file records that the toggle has been handled.
        self.assertResourceCalled('File', '/var/lib/hadoop-yarn/nm_security_enabled',
            content= 'Marker file to track first start after enabling/disabling security. During first start yarn local, log dirs are removed and recreated'
        )
        # Recreate the yarn local/log dirs with group-writable modes.
        self.assertResourceCalled('Directory', '/hadoop/yarn/log',
            owner = 'yarn',
            group = 'hadoop',
            create_parents = True,
            ignore_failures = True,
            mode = 0775,
            cd_access='a',
        )
        self.assertResourceCalled('Directory', '/hadoop/yarn/local',
            owner = 'yarn',
            group = 'hadoop',
            create_parents = True,
            ignore_failures = True,
            mode = 0775,
            cd_access='a',
            recursive_mode_flags = {'d': 'a+rwx', 'f': 'a+rw'},
        )
        # Runtime pid/log directories for YARN and MapReduce daemons.
        self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
            owner = 'yarn',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
            owner = 'yarn',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
            owner = 'yarn',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
            owner = 'mapred',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
            owner = 'mapred',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
            owner = 'mapred',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
            owner = 'mapred',
            group = 'hadoop',
            create_parents = True,
            cd_access = 'a',
        )
        self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
            owner = 'yarn',
            create_parents = True,
            ignore_failures = True,
            cd_access = 'a',
        )
        # Hadoop XML configuration files.
        self.assertResourceCalled('XmlConfig', 'core-site.xml',
            owner = 'hdfs',
            group = 'hadoop',
            mode = 0644,
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['core-site'],
            configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
        )
        self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
            owner = 'hdfs',
            group = 'hadoop',
            mode = 0644,
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['hdfs-site'],
            configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
        )
        self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
            owner = 'yarn',
            group = 'hadoop',
            mode = 0644,
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['mapred-site'],
            configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
        )
        self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
            owner = 'yarn',
            group = 'hadoop',
            mode = 0644,
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['yarn-site'],
            configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
        )
        self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
            owner = 'yarn',
            group = 'hadoop',
            mode = 0644,
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['capacity-scheduler'],
            configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
            owner = 'yarn',
            group = 'hadoop',
        )
        # ulimit overrides for the yarn/mapreduce users.
        self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
            content = Template('yarn.conf.j2'),
            mode = 0644,
        )
        self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
            content = Template('mapreduce.conf.j2'),
            mode = 0644,
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
            content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
            owner = 'yarn',
            group = 'hadoop',
            mode = 0755,
        )
        # Secured mode: container-executor is setuid+setgid (06050), unlike
        # the unsecured 02050 variant asserted in assert_configure_default.
        self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
            group = 'hadoop',
            mode = 06050,
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
            content = Template('container-executor.cfg.j2'),
            group = 'hadoop',
            mode = 0644,
        )
        self.assertResourceCalled('Directory', '/cgroups_test/cpu',
            group = 'hadoop',
            create_parents = True,
            mode = 0755,
            cd_access="a"
        )
        # mapred-env.sh and the task-controller are root-owned when secured.
        self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
            content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
            mode = 0755,
            owner = 'root',
        )
        self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
            owner = 'root',
            group = 'hadoop',
            mode = 06050,
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
            content = Template('taskcontroller.cfg.j2'),
            owner = 'root',
            group = 'hadoop',
            mode = 0644,
        )
        # MapReduce-side configs and example SSL files.
        self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
            owner = 'mapred',
            group = 'hadoop',
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['mapred-site'],
            configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
        )
        self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
            owner = 'hdfs',
            group = 'hadoop',
            conf_dir = '/etc/hadoop/conf',
            configurations = self.getConfig()['configurations']['capacity-scheduler'],
            configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
            owner = 'mapred',
            group = 'hadoop',
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
            owner = 'mapred',
            group = 'hadoop',
        )
        self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
            owner = 'mapred',
            group = 'hadoop',
        )
@patch("socket.gethostbyname")
@patch('time.sleep')
@patch.object(resource_management.libraries.functions, "get_hdp_version", new = MagicMock(return_value='2.3.0.0-1234'))
def test_post_upgrade_restart(self, time_mock, socket_gethostbyname_mock):
process_output = """
c6401.ambari.apache.org:45454 RUNNING c6401.ambari.apache.org:8042 0
"""
mocks_dict = {}
socket_gethostbyname_mock.return_value = "test_host"
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname = "Nodemanager",
command = "post_upgrade_restart",
config_file = "default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, process_output)],
mocks_dict = mocks_dict
)
self.assertTrue(mocks_dict['call'].called)
self.assertEqual(mocks_dict['call'].call_count,1)
self.assertEquals(
"yarn node -list -states=RUNNING",
mocks_dict['call'].call_args_list[0][0][0])
self.assertEquals( {'user': u'yarn'}, mocks_dict['call'].call_args_list[0][1])
@patch('time.sleep')
def test_post_upgrade_restart_nodemanager_not_ready(self, time_mock):
process_output = """
c9999.ambari.apache.org:45454 RUNNING c9999.ambari.apache.org:8042 0
"""
mocks_dict = {}
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command = "post_upgrade_restart",
config_file="default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0, process_output)],
mocks_dict = mocks_dict,
)
self.fail('Missing NodeManager should have caused a failure')
except Fail,fail:
self.assertTrue(mocks_dict['call'].called)
self.assertEqual(mocks_dict['call'].call_count,12)
    @patch('time.sleep')
    def test_post_upgrade_restart_nodemanager_not_ready(self, time_mock):
        """A non-zero exit code (999) from `yarn node -list` must fail the
        post-upgrade readiness check immediately, without retrying, even
        though the output itself lists this host's NodeManager.
        """
        process_output = """
c6401.ambari.apache.org:45454 RUNNING c6401.ambari.apache.org:8042 0
"""
        mocks_dict = {}
        try:
            self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
                classname="Nodemanager",
                command = "post_upgrade_restart",
                config_file="default.json",
                hdp_stack_version = self.STACK_VERSION,
                target = RMFTestCase.TARGET_COMMON_SERVICES,
                call_mocks = [(999, process_output)],
                mocks_dict = mocks_dict,
            )
            self.fail('Invalid return code should cause a failure')
        except Fail,fail:
            # Fails on the very first yarn CLI invocation -- no retries.
            self.assertTrue(mocks_dict['call'].called)
            self.assertEqual(mocks_dict['call'].call_count,1)
@patch("resource_management.libraries.functions.security_commons.build_expectations")
@patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
@patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
@patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
@patch("resource_management.libraries.script.Script.put_structured_out")
def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
# Test that function works when is called with correct parameters
security_params = {
'yarn-site': {
'yarn.nodemanager.keytab': 'path/to/nodemanager/keytab',
'yarn.nodemanager.principal': 'nodemanager_principal',
'yarn.nodemanager.webapp.spnego-keytab-file': 'path/to/nodemanager/webapp/keytab',
'yarn.nodemanager.webapp.spnego-principal': 'nodemanager_webapp_principal'
}
}
result_issues = []
props_value_check = {"yarn.timeline-service.http-authentication.type": "kerberos",
"yarn.acl.enable": "true"}
props_empty_check = ["yarn.nodemanager.principal",
"yarn.nodemanager.keytab",
"yarn.nodemanager.webapp.spnego-principal",
"yarn.nodemanager.webapp.spnego-keytab-file"]
props_read_check = ["yarn.nodemanager.keytab",
"yarn.nodemanager.webapp.spnego-keytab-file"]
get_params_mock.return_value = security_params
validate_security_config_mock.return_value = result_issues
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
build_exp_mock.assert_called_with('yarn-site', props_value_check, props_empty_check, props_read_check)
put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
self.assertTrue(cached_kinit_executor_mock.call_count, 2)
cached_kinit_executor_mock.assert_called_with('/usr/bin/kinit',
self.config_dict['configurations']['yarn-env']['yarn_user'],
security_params['yarn-site']['yarn.nodemanager.webapp.spnego-keytab-file'],
security_params['yarn-site']['yarn.nodemanager.webapp.spnego-principal'],
self.config_dict['hostname'],
'/tmp')
# Testing that the exception throw by cached_executor is caught
cached_kinit_executor_mock.reset_mock()
cached_kinit_executor_mock.side_effect = Exception("Invalid command")
try:
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
except:
self.assertTrue(True)
# Testing with a security_params which doesn't contains yarn-site
empty_security_params = {}
cached_kinit_executor_mock.reset_mock()
get_params_mock.reset_mock()
put_structured_out_mock.reset_mock()
get_params_mock.return_value = empty_security_params
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
# Testing with not empty result_issues
result_issues_with_params = {
'yarn-site': "Something bad happened"
}
validate_security_config_mock.reset_mock()
get_params_mock.reset_mock()
validate_security_config_mock.return_value = result_issues_with_params
get_params_mock.return_value = security_params
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
# Testing with security_enable = false
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
classname="Nodemanager",
command="security_status",
config_file="default.json",
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
    @patch.object(resource_management.libraries.functions, "get_hdp_version", new = MagicMock(return_value='2.3.0.0-1234'))
    def test_pre_upgrade_restart_23(self):
        """Pre-upgrade restart to an HDP 2.3 stack: the NodeManager package
        is repointed with `hdp-select` and the hadoop conf dir is switched
        with `conf-select` (one create-conf-dir call, one set-conf-dir call).
        """
        config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
        with open(config_file, "r") as f:
            json_content = json.load(f)
        # Inject the target stack version into the command parameters.
        version = '2.3.0.0-1234'
        json_content['commandParams']['version'] = version
        mocks_dict = {}
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/nodemanager.py",
            classname = "Nodemanager",
            command = "pre_upgrade_restart",
            config_dict = json_content,
            hdp_stack_version = self.STACK_VERSION,
            target = RMFTestCase.TARGET_COMMON_SERVICES,
            call_mocks = [(0, None, ''), (0, None)],
            mocks_dict = mocks_dict)
        self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-nodemanager', version), sudo=True)
        self.assertNoMoreResources()
        self.assertEquals(1, mocks_dict['call'].call_count)
        self.assertEquals(1, mocks_dict['checked_call'].call_count)
        # conf-select must be driven with the new stack version.
        self.assertEquals(
            ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
            mocks_dict['checked_call'].call_args_list[0][0][0])
        self.assertEquals(
            ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
            mocks_dict['call'].call_args_list[0][0][0])
| en | 0.815619 | #!/usr/bin/env python Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. c6401.ambari.apache.org:45454 RUNNING c6401.ambari.apache.org:8042 0 c9999.ambari.apache.org:45454 RUNNING c9999.ambari.apache.org:8042 0 c6401.ambari.apache.org:45454 RUNNING c6401.ambari.apache.org:8042 0 # Test that function works when is called with correct parameters # Testing that the exception throw by cached_executor is caught # Testing with a security_params which doesn't contains yarn-site # Testing with not empty result_issues # Testing with security_enable = false | 1.78063 | 2 |
Data Science With Python/07-cleaning-data-in-python/4-cleaning-data-for-analysis/06-custom-functions-to-clean-data.py | aimanahmedmoin1997/DataCamp | 5 | 6624488 | <filename>Data Science With Python/07-cleaning-data-in-python/4-cleaning-data-for-analysis/06-custom-functions-to-clean-data.py
'''
Custom functions to clean data
You'll now practice writing functions to clean data.
The tips dataset has been pre-loaded into a DataFrame called tips. It has a 'sex' column that contains the values 'Male' or 'Female'. Your job is to write a function that will recode 'Male' to 1, 'Female' to 0, and return np.nan for all entries of 'sex' that are neither 'Male' nor 'Female'.
Recoding variables like this is a common data cleaning task. Functions provide a mechanism for you to abstract away complex bits of code as well as reuse code. This makes your code more readable and less error prone.
As Dan showed you in the videos, you can use the .apply() method to apply a function across entire rows or columns of DataFrames. However, note that each column of a DataFrame is a pandas Series. Functions can also be applied across Series. Here, you will apply your function over the 'sex' column.
INSTRUCTIONS
100XP
INSTRUCTIONS
100XP
-Define a function named recode_sex() that has one parameter: sex_value.
-If sex_value equals 'Male', return 1.
-Else, if sex_value equals 'Female', return 0.
-If sex_value does not equal 'Male' or 'Female', return np.nan. NumPy has been pre-imported for you.
-Apply your recode_sex() function over tips.sex using the .apply() method to create a new column: 'sex_recode'. Note that when passing in a function inside the .apply() method, you don't need to specify the parentheses after the function name.
-Hit 'Submit Answer' and take note of the new 'sex_recode' column in the tips DataFrame!
'''
import re

import numpy as np
import pandas as pd
tips = pd.read_csv('../_datasets/tips.csv')
# Recoding helper used with Series.apply below.
def recode_sex(sex_value):
    """Recode a sex label: 'Male' -> 1, 'Female' -> 0, otherwise np.nan."""
    if sex_value == 'Male':
        return 1
    if sex_value == 'Female':
        return 0
    # Anything unrecognized becomes NaN so pandas treats it as missing.
    return np.nan
# Build 'sex_recode' by applying recode_sex element-wise to the 'sex' column
tips['sex_recode'] = tips.sex.apply(recode_sex)
# Show the first five rows to verify the new column
print(tips.head())
| <filename>Data Science With Python/07-cleaning-data-in-python/4-cleaning-data-for-analysis/06-custom-functions-to-clean-data.py
'''
Custom functions to clean data
You'll now practice writing functions to clean data.
The tips dataset has been pre-loaded into a DataFrame called tips. It has a 'sex' column that contains the values 'Male' or 'Female'. Your job is to write a function that will recode 'Male' to 1, 'Female' to 0, and return np.nan for all entries of 'sex' that are neither 'Male' nor 'Female'.
Recoding variables like this is a common data cleaning task. Functions provide a mechanism for you to abstract away complex bits of code as well as reuse code. This makes your code more readable and less error prone.
As Dan showed you in the videos, you can use the .apply() method to apply a function across entire rows or columns of DataFrames. However, note that each column of a DataFrame is a pandas Series. Functions can also be applied across Series. Here, you will apply your function over the 'sex' column.
INSTRUCTIONS
100XP
INSTRUCTIONS
100XP
-Define a function named recode_sex() that has one parameter: sex_value.
-If sex_value equals 'Male', return 1.
-Else, if sex_value equals 'Female', return 0.
-If sex_value does not equal 'Male' or 'Female', return np.nan. NumPy has been pre-imported for you.
-Apply your recode_sex() function over tips.sex using the .apply() method to create a new column: 'sex_recode'. Note that when passing in a function inside the .apply() method, you don't need to specify the parentheses after the function name.
-Hit 'Submit Answer' and take note of the new 'sex_recode' column in the tips DataFrame!
'''
import pandas as pd
import re
tips = pd.read_csv('../_datasets/tips.csv')
# Define recode_sex()
def recode_sex(sex_value):
# Return 1 if sex_value is 'Male'
if sex_value == 'Male':
return 1
# Return 0 if sex_value is 'Female'
elif sex_value == 'Female':
return 0
# Return np.nan
else:
return np.nan
# Apply the function to the sex column
tips['sex_recode'] = tips.sex.apply(recode_sex)
# Print the first five rows of tips
print(tips.head())
| en | 0.77672 | Custom functions to clean data You'll now practice writing functions to clean data. The tips dataset has been pre-loaded into a DataFrame called tips. It has a 'sex' column that contains the values 'Male' or 'Female'. Your job is to write a function that will recode 'Male' to 1, 'Female' to 0, and return np.nan for all entries of 'sex' that are neither 'Male' nor 'Female'. Recoding variables like this is a common data cleaning task. Functions provide a mechanism for you to abstract away complex bits of code as well as reuse code. This makes your code more readable and less error prone. As Dan showed you in the videos, you can use the .apply() method to apply a function across entire rows or columns of DataFrames. However, note that each column of a DataFrame is a pandas Series. Functions can also be applied across Series. Here, you will apply your function over the 'sex' column. INSTRUCTIONS 100XP INSTRUCTIONS 100XP -Define a function named recode_sex() that has one parameter: sex_value. -If sex_value equals 'Male', return 1. -Else, if sex_value equals 'Female', return 0. -If sex_value does not equal 'Male' or 'Female', return np.nan. NumPy has been pre-imported for you. -Apply your recode_sex() function over tips.sex using the .apply() method to create a new column: 'sex_recode'. Note that when passing in a function inside the .apply() method, you don't need to specify the parentheses after the function name. -Hit 'Submit Answer' and take note of the new 'sex_recode' column in the tips DataFrame! # Define recode_sex() # Return 1 if sex_value is 'Male' # Return 0 if sex_value is 'Female' # Return np.nan # Apply the function to the sex column # Print the first five rows of tips | 4.261292 | 4 |
LCOF/21-30/22/22.py | xuychen/Leetcode | 0 | 6624489 | <reponame>xuychen/Leetcode
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def getKthFromEnd(self, head, k):
        """
        Return the k-th node from the end (1-indexed) by collecting all
        nodes and indexing from the back.
        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        nodes = []
        cur = head
        while cur:
            nodes.append(cur)
            cur = cur.next
        if k == 0:
            return None
        return nodes[-k]

    def getKthFromEnd2(self, head, k):
        """
        Two-pointer variant: advance `fast` k nodes ahead, then walk both
        pointers until `fast` falls off the end; `slow` is the answer.
        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        fast = head
        slow = head
        for _ in range(k):
            fast = fast.next
        while fast is not None:
            fast = fast.next
            slow = slow.next
return slow | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def getKthFromEnd(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
stack = []
node = head
while node:
stack.append(node)
node = node.next
result = None
for _ in range(k):
result = stack.pop()
return result
def getKthFromEnd2(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
fast = slow = head
for _ in range(k):
fast = fast.next
while fast:
fast = fast.next
slow = slow.next
return slow | en | 0.41379 | # Definition for singly-linked list. # class ListNode(object): # def __init__(self, x): # self.val = x # self.next = None :type head: ListNode :type k: int :rtype: ListNode :type head: ListNode :type k: int :rtype: ListNode | 3.724325 | 4 |
examples/FlexibleSwingingPendulumCase/flexible_swinging_pendulum.py | L5vD5/PyElastica | 0 | 6624490 | """ Flexible swinging pendulum test-case
isort:skip_file
"""
# FIXME without appending sys.path make it more generic
import sys
sys.path.append("../../") # isort:skip
# from collections import defaultdict
# import numpy as np
from matplotlib import pyplot as plt
from elastica import *
class SwingingFlexiblePendulumSimulator(
    BaseSystemCollection, Constraints, Forcing, CallBacks
):
    """Simulator assembled from elastica mixins: system container plus
    boundary-condition, external-forcing and diagnostic-callback support."""
    pass
# Options
PLOT_FIGURE = False
PLOT_VIDEO = False
SAVE_FIGURE = False
SAVE_RESULTS = True
# For 10 elements, the prefac is 0.0007
pendulum_sim = SwingingFlexiblePendulumSimulator()
# Shorter run when dumping results, longer when visualizing.
final_time = 1.0 if SAVE_RESULTS else 5.0
# setting up test params
# Coarse discretization for the results run, finer for plotting runs.
n_elem = 10 if SAVE_RESULTS else 50
# Rod starts at the origin, lying along +z; +x is the normal direction.
start = np.zeros((3,))
direction = np.array([0.0, 0.0, 1.0])
normal = np.array([1.0, 0.0, 0.0])
# Geometry and material parameters (SI units assumed -- TODO confirm).
base_length = 1.0
base_radius = 0.005
base_area = np.pi * base_radius ** 2
density = 1100.0
# nu is elastica's dissipation coefficient (distinct from Poisson's ratio
# below); zero here -- undamped pendulum.
nu = 0.0
youngs_modulus = 5e6
# For shear modulus of 1e4, nu is 99!
poisson_ratio = 0.5
pendulum_rod = CosseratRod.straight_rod(
    n_elem,
    start,
    direction,
    normal,
    base_length,
    base_radius,
    density,
    nu,
    youngs_modulus,
    poisson_ratio,
)
pendulum_sim.append(pendulum_rod)
# Hinge boundary condition: node 0 is pinned in space but free to rotate.
class HingeBC(ConstraintBase):
    """
    Pin the first node of the rod at its initial position (x[0] fixed)
    while leaving the directors (orientation) unconstrained -- a hinge.
    """
    def __init__(self, fixed_position, fixed_directors):
        # fixed_directors is passed through but never enforced below:
        # a hinge constrains translation only (assumes ConstraintBase
        # stores fixed_position as self.fixed_position -- per elastica API).
        ConstraintBase.__init__(self, fixed_position, fixed_directors)
    def constrain_values(self, rod, time):
        # Re-impose the pinned position of node 0 every step.
        rod.position_collection[..., 0] = self.fixed_position
    def constrain_rates(self, rod, time):
        # Zero out the translational velocity of node 0.
        rod.velocity_collection[..., 0] = 0.0
# Pin the rod's first node (index 0) with the hinge BC defined above.
pendulum_sim.constrain(pendulum_rod).using(
    HingeBC, constrained_position_idx=(0,), constrained_director_idx=(0,)
)
# Add gravitational forces
# Gravity acts along -x, perpendicular to the rod's initial +z axis,
# so the rod swings in the x-z plane.
gravitational_acc = -9.80665
pendulum_sim.add_forcing_to(pendulum_rod).using(
    GravityForces, acc_gravity=np.array([gravitational_acc, 0.0, 0.0])
)
# Add call backs
class PendulumCallBack(CallBackBaseClass):
    """
    Diagnostic callback: every `step_skip` steps append the time, node
    positions, director frames and (for t > 0) internal stress/couple
    into the shared `callback_params` dict of lists.
    """
    def __init__(self, step_skip: int, callback_params: dict):
        CallBackBaseClass.__init__(self)
        self.every = step_skip
        self.callback_params = callback_params
    def make_callback(self, system, time, current_step: int):
        if current_step % self.every == 0:
            self.callback_params["time"].append(time)
            # Copy arrays so later in-place rod updates don't alias history.
            self.callback_params["position"].append(system.position_collection.copy())
            self.callback_params["directors"].append(system.director_collection.copy())
            if time > 0.0:
                # Skip t == 0, before internal loads are first computed
                # (presumably undefined at the initial state -- TODO confirm).
                self.callback_params["internal_stress"].append(
                    system.internal_stress.copy()
                )
                self.callback_params["internal_couple"].append(
                    system.internal_couple.copy()
                )
        return
dl = base_length / n_elem
# Stable timestep scales with element length; the SAVE_RESULTS run uses
# the smaller 0.0007 prefactor noted at the top of the options block.
dt = (0.0007 if SAVE_RESULTS else 0.002) * dl
total_steps = int(final_time / dt)
print("Total steps", total_steps)
recorded_history = defaultdict(list)
# Snapshot cadence: every 60 steps for video, else ~10 (figure) or
# ~200 (results) snapshots over the whole run.
step_skip = (
    60
    if PLOT_VIDEO
    else (int(total_steps / 10) if PLOT_FIGURE else int(total_steps / 200))
)
pendulum_sim.collect_diagnostics(pendulum_rod).using(
    PendulumCallBack, step_skip=step_skip, callback_params=recorded_history
)
pendulum_sim.finalize()
timestepper = PositionVerlet()
# timestepper = PEFRL()
integrate(timestepper, pendulum_sim, final_time, total_steps)
if PLOT_VIDEO:
    def plot_video(
        plot_params: dict,
        video_name="video.mp4",
        margin=0.2,
        fps=60,
        step=1,
        *args,
        **kwargs
    ):  # (time step, x/y/z, node)
        """Render the recorded rod shapes (z vs x) to an mp4 via ffmpeg,
        tracing the tip trajectory with a dashed black line."""
        import matplotlib.animation as manimation
        plt.rcParams.update({"font.size": 22})
        # Should give a (n_time, 3, n_elem) array
        positions = np.array(plot_params["position"])
        print("plot video")
        FFMpegWriter = manimation.writers["ffmpeg"]
        metadata = dict(
            title="Movie Test", artist="Matplotlib", comment="Movie support!"
        )
        writer = FFMpegWriter(fps=fps, metadata=metadata)
        dpi = 300
        fig = plt.figure(figsize=(10, 8), frameon=True, dpi=dpi)
        ax = fig.add_subplot(111)
        ax.set_aspect("equal", adjustable="box")
        # plt.axis("square")
        i = 0
        # Rod shape at the current frame, plus the tip's path so far.
        (rod_line,) = ax.plot(positions[i, 2], positions[i, 0], lw=3.0)
        (tip_line,) = ax.plot(positions[:i, 2, -1], positions[:i, 0, -1], "k--")
        ax.set_aspect("equal", adjustable="box")
        ax.set_xlim([-1.0 - margin, 1.0 + margin])
        ax.set_ylim([-1.0 - margin, 0.0 + margin])
        with writer.saving(fig, video_name, dpi):
            with plt.style.context("seaborn-white"):
                for i in range(0, positions.shape[0], int(step)):
                    rod_line.set_xdata(positions[i, 2])
                    rod_line.set_ydata(positions[i, 0])
                    tip_line.set_xdata(positions[:i, 2, -1])
                    tip_line.set_ydata(positions[:i, 0, -1])
                    writer.grab_frame()
    plot_video(recorded_history, "swinging_flexible_pendulum.mp4")
if PLOT_FIGURE:
    fig = plt.figure(figsize=(10, 8), frameon=True, dpi=150)
    ax = fig.add_subplot(111)
    ax.set_aspect("equal", adjustable="box")
    # Should give a (n_time, 3, n_elem) array
    positions = np.array(recorded_history["position"])
    # Overlay one rod shape (z vs x) per recorded snapshot.
    for i in range(positions.shape[0]):
        ax.plot(positions[i, 2], positions[i, 0], lw=2.0)
    fig.show()
    plt.show()
if SAVE_RESULTS:
    import pickle as pickle
    # Serialize the full diagnostic history for offline post-processing.
    filename = "flexible_swinging_pendulum.dat"
    with open(filename, "wb") as file:
        pickle.dump(recorded_history, file)
| """ Flexible swinging pendulum test-case
isort:skip_file
"""
# FIXME without appending sys.path make it more generic
import sys
sys.path.append("../../") # isort:skip
# from collections import defaultdict
# import numpy as np
from matplotlib import pyplot as plt
from elastica import *
class SwingingFlexiblePendulumSimulator(
BaseSystemCollection, Constraints, Forcing, CallBacks
):
pass
# Options
PLOT_FIGURE = False
PLOT_VIDEO = False
SAVE_FIGURE = False
SAVE_RESULTS = True
# For 10 elements, the prefac is 0.0007
pendulum_sim = SwingingFlexiblePendulumSimulator()
final_time = 1.0 if SAVE_RESULTS else 5.0
# setting up test params
n_elem = 10 if SAVE_RESULTS else 50
start = np.zeros((3,))
direction = np.array([0.0, 0.0, 1.0])
normal = np.array([1.0, 0.0, 0.0])
base_length = 1.0
base_radius = 0.005
base_area = np.pi * base_radius ** 2
density = 1100.0
nu = 0.0
youngs_modulus = 5e6
# For shear modulus of 1e4, nu is 99!
poisson_ratio = 0.5
pendulum_rod = CosseratRod.straight_rod(
n_elem,
start,
direction,
normal,
base_length,
base_radius,
density,
nu,
youngs_modulus,
poisson_ratio,
)
pendulum_sim.append(pendulum_rod)
# Bad name : whats a FreeRod anyway?
class HingeBC(ConstraintBase):
"""
the end of the rod fixed x[0]
"""
def __init__(self, fixed_position, fixed_directors):
ConstraintBase.__init__(self, fixed_position, fixed_directors)
def constrain_values(self, rod, time):
rod.position_collection[..., 0] = self.fixed_position
def constrain_rates(self, rod, time):
rod.velocity_collection[..., 0] = 0.0
pendulum_sim.constrain(pendulum_rod).using(
HingeBC, constrained_position_idx=(0,), constrained_director_idx=(0,)
)
# Add gravitational forces
gravitational_acc = -9.80665
pendulum_sim.add_forcing_to(pendulum_rod).using(
GravityForces, acc_gravity=np.array([gravitational_acc, 0.0, 0.0])
)
# Add call backs
class PendulumCallBack(CallBackBaseClass):
"""
Call back function for continuum snake
"""
def __init__(self, step_skip: int, callback_params: dict):
CallBackBaseClass.__init__(self)
self.every = step_skip
self.callback_params = callback_params
def make_callback(self, system, time, current_step: int):
if current_step % self.every == 0:
self.callback_params["time"].append(time)
self.callback_params["position"].append(system.position_collection.copy())
self.callback_params["directors"].append(system.director_collection.copy())
if time > 0.0:
self.callback_params["internal_stress"].append(
system.internal_stress.copy()
)
self.callback_params["internal_couple"].append(
system.internal_couple.copy()
)
return
dl = base_length / n_elem
dt = (0.0007 if SAVE_RESULTS else 0.002) * dl
total_steps = int(final_time / dt)
print("Total steps", total_steps)
recorded_history = defaultdict(list)
step_skip = (
60
if PLOT_VIDEO
else (int(total_steps / 10) if PLOT_FIGURE else int(total_steps / 200))
)
pendulum_sim.collect_diagnostics(pendulum_rod).using(
PendulumCallBack, step_skip=step_skip, callback_params=recorded_history
)
pendulum_sim.finalize()
timestepper = PositionVerlet()
# timestepper = PEFRL()
integrate(timestepper, pendulum_sim, final_time, total_steps)
if PLOT_VIDEO:
def plot_video(
plot_params: dict,
video_name="video.mp4",
margin=0.2,
fps=60,
step=1,
*args,
**kwargs
): # (time step, x/y/z, node)
import matplotlib.animation as manimation
plt.rcParams.update({"font.size": 22})
# Should give a (n_time, 3, n_elem) array
positions = np.array(plot_params["position"])
print("plot video")
FFMpegWriter = manimation.writers["ffmpeg"]
metadata = dict(
title="Movie Test", artist="Matplotlib", comment="Movie support!"
)
writer = FFMpegWriter(fps=fps, metadata=metadata)
dpi = 300
fig = plt.figure(figsize=(10, 8), frameon=True, dpi=dpi)
ax = fig.add_subplot(111)
ax.set_aspect("equal", adjustable="box")
# plt.axis("square")
i = 0
(rod_line,) = ax.plot(positions[i, 2], positions[i, 0], lw=3.0)
(tip_line,) = ax.plot(positions[:i, 2, -1], positions[:i, 0, -1], "k--")
ax.set_aspect("equal", adjustable="box")
ax.set_xlim([-1.0 - margin, 1.0 + margin])
ax.set_ylim([-1.0 - margin, 0.0 + margin])
with writer.saving(fig, video_name, dpi):
with plt.style.context("seaborn-white"):
for i in range(0, positions.shape[0], int(step)):
rod_line.set_xdata(positions[i, 2])
rod_line.set_ydata(positions[i, 0])
tip_line.set_xdata(positions[:i, 2, -1])
tip_line.set_ydata(positions[:i, 0, -1])
writer.grab_frame()
plot_video(recorded_history, "swinging_flexible_pendulum.mp4")
if PLOT_FIGURE:
fig = plt.figure(figsize=(10, 8), frameon=True, dpi=150)
ax = fig.add_subplot(111)
ax.set_aspect("equal", adjustable="box")
# Should give a (n_time, 3, n_elem) array
positions = np.array(recorded_history["position"])
for i in range(positions.shape[0]):
ax.plot(positions[i, 2], positions[i, 0], lw=2.0)
fig.show()
plt.show()
if SAVE_RESULTS:
import pickle as pickle
filename = "flexible_swinging_pendulum.dat"
with open(filename, "wb") as file:
pickle.dump(recorded_history, file)
| en | 0.567295 | Flexible swinging pendulum test-case isort:skip_file # FIXME without appending sys.path make it more generic # isort:skip # from collections import defaultdict # import numpy as np # Options # For 10 elements, the prefac is 0.0007 # setting up test params # For shear modulus of 1e4, nu is 99! # Bad name : whats a FreeRod anyway? the end of the rod fixed x[0] # Add gravitational forces # Add call backs Call back function for continuum snake # timestepper = PEFRL() # (time step, x/y/z, node) # Should give a (n_time, 3, n_elem) array # plt.axis("square") # Should give a (n_time, 3, n_elem) array | 2.568344 | 3 |
Leetcode/0354. Russian Doll Envelopes/0354.py | Next-Gen-UI/Code-Dynamics | 0 | 6624491 | <filename>Leetcode/0354. Russian Doll Envelopes/0354.py
class Solution:
def maxEnvelopes(self, envelopes: List[List[int]]) -> int:
envelopes.sort(key=lambda x: (x[0], -x[1]))
# same as 300. Longest Increasing Subsequence
ans = 0
dp = [0] * len(envelopes)
for _, h in envelopes:
l = 0
r = ans
while l < r:
m = (l + r) // 2
if dp[m] >= h:
r = m
else:
l = m + 1
dp[l] = h
if l == ans:
ans += 1
return ans
| <filename>Leetcode/0354. Russian Doll Envelopes/0354.py
class Solution:
def maxEnvelopes(self, envelopes: List[List[int]]) -> int:
envelopes.sort(key=lambda x: (x[0], -x[1]))
# same as 300. Longest Increasing Subsequence
ans = 0
dp = [0] * len(envelopes)
for _, h in envelopes:
l = 0
r = ans
while l < r:
m = (l + r) // 2
if dp[m] >= h:
r = m
else:
l = m + 1
dp[l] = h
if l == ans:
ans += 1
return ans
| en | 0.958281 | # same as 300. Longest Increasing Subsequence | 3.284009 | 3 |
maskrcnn_benchmark/utils/metric_logger.py | chenzhutian/auto-infog-timeline | 10 | 6624492 | <filename>maskrcnn_benchmark/utils/metric_logger.py<gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from collections import defaultdict
from collections import deque
import torch
class SmoothedValue(object):
"""Track a series of values and provide access to smoothed values over a
window or the global series average.
"""
def __init__(self, window_size=20):
self.deque = deque(maxlen=window_size)
self.series = []
self.total = 0.0
self.count = 0
def update(self, value):
self.deque.append(value)
self.series.append(value)
self.count += 1
self.total += value
@property
def median(self):
d = torch.tensor(list(self.deque))
return d.median().item()
@property
def avg(self):
d = torch.tensor(list(self.deque))
return d.mean().item()
@property
def global_avg(self):
return self.total / self.count
class MetricLogger(object):
def __init__(self, delimiter="\t"):
self.meters = defaultdict(SmoothedValue)
self.delimiter = delimiter
def update(self, **kwargs):
for k, v in kwargs.items():
if isinstance(v, torch.Tensor):
v = v.item()
assert isinstance(v, (float, int))
self.meters[k].update(v)
def __getattr__(self, attr):
if attr in self.meters:
return self.meters[attr]
if attr in self.__dict__:
return self.__dict__[attr]
raise AttributeError("'{}' object has no attribute '{}'".format(
type(self).__name__, attr))
def __str__(self):
loss_str = []
for name, meter in self.meters.items():
loss_str.append(
"{}: {:.4f} ({:.4f})".format(name, meter.median, meter.global_avg)
)
return self.delimiter.join(loss_str)
| <filename>maskrcnn_benchmark/utils/metric_logger.py<gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from collections import defaultdict
from collections import deque
import torch
class SmoothedValue(object):
"""Track a series of values and provide access to smoothed values over a
window or the global series average.
"""
def __init__(self, window_size=20):
self.deque = deque(maxlen=window_size)
self.series = []
self.total = 0.0
self.count = 0
def update(self, value):
self.deque.append(value)
self.series.append(value)
self.count += 1
self.total += value
@property
def median(self):
d = torch.tensor(list(self.deque))
return d.median().item()
@property
def avg(self):
d = torch.tensor(list(self.deque))
return d.mean().item()
@property
def global_avg(self):
return self.total / self.count
class MetricLogger(object):
def __init__(self, delimiter="\t"):
self.meters = defaultdict(SmoothedValue)
self.delimiter = delimiter
def update(self, **kwargs):
for k, v in kwargs.items():
if isinstance(v, torch.Tensor):
v = v.item()
assert isinstance(v, (float, int))
self.meters[k].update(v)
def __getattr__(self, attr):
if attr in self.meters:
return self.meters[attr]
if attr in self.__dict__:
return self.__dict__[attr]
raise AttributeError("'{}' object has no attribute '{}'".format(
type(self).__name__, attr))
def __str__(self):
loss_str = []
for name, meter in self.meters.items():
loss_str.append(
"{}: {:.4f} ({:.4f})".format(name, meter.median, meter.global_avg)
)
return self.delimiter.join(loss_str)
| en | 0.865088 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. Track a series of values and provide access to smoothed values over a window or the global series average. | 2.215059 | 2 |
UI/ControlRigUI.py | jazzboysc/SERiggingTools | 4 | 6624493 | <filename>UI/ControlRigUI.py
import maya.OpenMayaMPx as OpenMayaMPx
import maya.OpenMaya as OpenMaya
import maya.OpenMayaAnim as OpenMayaAnim
import maya.mel
import sys
import maya.cmds as cmds
import maya.OpenMayaUI as mui
from PySide2 import QtCore, QtGui, QtWidgets , QtUiTools
import shiboken2
import os
import time
import functools
import cPickle
from ..Character import SECharacter
from ..Base import SERigNaming
from ..Utils import SERigObjectTypeHelper as RigObjectHelper
from ..Rig import SERigBipedLimbComponent
import UIConfig
#"E:/Users/admin/Documents/GitHub/SERiggingTools/UI/LoadRiggingUI.ui"
uiRootFile = os.path.dirname(UIConfig.__file__)
uifile_path = uiRootFile + "/Control2Rig.ui"
def openControlRigWindow():
''' todo: stop open more than one window'''
global ui
ui = loadUI(uifile_path)
ui.show()
def loadUI(uifile_path):
#QtCore.QResource.addSearchPath("E:/Users/admin/Documents/GitHub/SERiggingTools/UI")
uifile = QtCore.QFile(uifile_path)
print(uifile)
uifile.open(QtCore.QFile.ReadOnly)
#QtCore.QResource.registerResource("E:/Users/admin/Documents/GitHub/SERiggingTools/UI/UIResource.qrc")
uiWindow = QtUiTools.QUiLoader().load(uifile)
uifile.close()
uiWindow.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
setControlToButtonMap(uiWindow)
setSelectorCallBack(uiWindow)
setButtonCallback(uiWindow)
setMutiSelectedButtonCallback(uiWindow)
setResetToModelBasePose(uiWindow)
setIKFKShow(uiWindow)
uiWindow.BodyBG.setPixmap(QtGui.QPixmap((uiRootFile +"/ControllUIBG.png")))
uiWindow.RHandBG.setPixmap(QtGui.QPixmap((uiRootFile +"/ControllUIRHandBG.png")))
uiWindow.LHandBG.setPixmap(QtGui.QPixmap((uiRootFile +"/ControllUILHandBG.png")))
uiWindow.FootBG.setPixmap(QtGui.QPixmap((uiRootFile +"/Foot.png")))
refreshCharacterInSelector(uiWindow)
return uiWindow
def setSelectorCallBack(uiWindow):
def selecterChangeCallback(index):
name = uiWindow.characterSelector.currentText()
getAllControllRigByName(name)
uiWindow.characterSelector.currentIndexChanged.connect(selecterChangeCallback)
def refreshCharacterInSelector(uiWindow):
CharacterArray = getCurrentHaveCharacter()
uiWindow.characterSelector.addItem("None")
for x in range(len(CharacterArray)):
uiWindow.characterSelector.addItem(CharacterArray[x])
index = uiWindow.characterSelector.currentText()
def getCurrentHaveCharacter():
cha = RigObjectHelper.listRigCharacters()
return cha
def getAllControllRigByName(charName):
if charName != "None":
characterControls = RigObjectHelper.listRigCharacterControls(charName)
return characterControls
return None
def setControlToButtonMap(uiWindow):
'''{(u'RS_Center', u'RT_Global', 1): u'Global_01_Ctrl', (u'RS_Center', u'RT_Global', 0): u'Main_Ctrl', (u'RS_Center', u'RT_Global', 2): u'Global_02_Ctrl'}'''
# global MainControllToButton
# MainControllToButton = {(u'RS_Center', u'RT_Global', 1): uiWindow.Global_01_Ctrl, (u'RS_Center', u'RT_Global', 0): uiWindow.Main_Ctrl, (u'RS_Center', u'RT_Global', 2): uiWindow.Global_02_Ctrl}
# global ButtonToMainControll
# ButtonToMainControll = {uiWindow.Global_01_Ctrl:(u'RS_Center', u'RT_Global', 1),
# uiWindow.Main_Ctrl:(u'RS_Center', u'RT_Global', 0),
# uiWindow.Global_02_Ctrl:(u'RS_Center', u'RT_Global', 2)}
global ControlToButton
ControlToButton = {
(u'RS_Center', u'RT_SpineFK', 0): uiWindow.FK_C_Spine_0_Ctrl,
(u'RS_Right', u'RT_WristFK', 0): uiWindow.FK_R_Arm2_Ctrl,
(u'RS_Center', u'RT_SpineFK', 1): uiWindow.FK_C_Spine_1_Ctrl,
(u'RS_Right', u'RT_Clavicle', 0): uiWindow.R_Arm_Clav_Rotation_Ctrl,
(u'RS_Center', u'RT_NeckFK', 0): uiWindow.FK_C_Neck_0_Ctrl,
(u'RS_Left', u'RT_ShoulderFK', 0): uiWindow.FK_L_Arm0_Ctrl,
(u'RS_Left', u'RT_LegFK', 1): uiWindow.FK_L_Leg1_Ctrl,
(u'RS_Left', u'RT_LegFK', 0): uiWindow.FK_L_Leg0_Ctrl,
(u'RS_Left', u'RT_LegFK', 3): uiWindow.FK_L_Leg3_Ctrl,
(u'RS_Left', u'RT_LegFK', 2): uiWindow.FK_L_Leg2_Ctrl,
(u'RS_Right', u'RT_LegFK', 3): uiWindow.FK_R_Leg3_Ctrl,
(u'RS_Right', u'RT_LegFK', 2): uiWindow.FK_R_Leg2_Ctrl,
(u'RS_Right', u'RT_LegFK', 1): uiWindow.FK_R_Leg1_Ctrl,
(u'RS_Right', u'RT_LegFK', 0): uiWindow.FK_R_Leg0_Ctrl,
(u'RS_Right', u'RT_ShoulderFK', 0): uiWindow.FK_R_Arm0_Ctrl,
(u'RS_Center', u'RT_SpineUpperBody', 0): uiWindow.C_SpineUpperBody_Ctrl,
(u'RS_Center', u'RT_HeadFK', 0): uiWindow.FK_C_Head_Ctrl,
(u'RS_Center', u'RT_NeckFK', 1): uiWindow.FK_C_Neck_1_Ctrl,
(u'RS_Left', u'RT_ElbowFK', 0): uiWindow.FK_L_Arm1_Ctrl,
(u'RS_Left', u'RT_Clavicle', 0): uiWindow.L_Arm_Clav_Rotation_Ctrl,
(u'RS_Left', u'RT_WristFK', 0): uiWindow.FK_L_Arm2_Ctrl,
(u'RS_Right', u'RT_ElbowFK', 0): uiWindow.FK_R_Arm1_Ctrl,
(u'RS_Left', u'RT_MiddleFK', 2): uiWindow.FK_L_Middle_02_Ctrl,
(u'RS_Right', u'RT_PinkyFK', 2): uiWindow.FK_R_Pinky_02_Ctrl,
(u'RS_Right', u'RT_ThumbFK', 2): uiWindow.FK_R_Thumb_02_Ctrl,
(u'RS_Left', u'RT_PinkyFK', 2): uiWindow.FK_L_Pinky_02_Ctrl,
(u'RS_Left', u'RT_MiddleFK', 1): uiWindow.FK_L_Middle_01_Ctrl,
(u'RS_Right', u'RT_PinkyFK', 3): uiWindow.FK_R_Pinky_03_Ctrl,
(u'RS_Left', u'RT_PinkyFK', 3): uiWindow.FK_L_Pinky_03_Ctrl,
(u'RS_Left', u'RT_MiddleFK', 0): uiWindow.FK_L_Middle_00_Ctrl,
(u'RS_Left', u'RT_ThumbFK', 2): uiWindow.FK_L_Thumb_02_Ctrl,
(u'RS_Left', u'RT_PinkyFK', 1): uiWindow.FK_L_Pinky_01_Ctrl,
(u'RS_Right', u'RT_PinkyFK', 1): uiWindow.FK_R_Pinky_01_Ctrl,
(u'RS_Left', u'RT_ThumbFK', 1): uiWindow.FK_L_Thumb_01_Ctrl,
(u'RS_Left', u'RT_IndexFK', 3): uiWindow.FK_L_Index_03_Ctrl,
(u'RS_Left', u'RT_ThumbFK', 0): uiWindow.FK_L_Thumb_00_Ctrl,
(u'RS_Left', u'RT_RingFK', 0): uiWindow.FK_L_Ring_00_Ctrl,
(u'RS_Left', u'RT_IndexFK', 2): uiWindow.FK_L_Index_02_Ctrl,
(u'RS_Right', u'RT_MiddleFK', 0): uiWindow.FK_R_Middle_00_Ctrl,
(u'RS_Left', u'RT_RingFK', 1): uiWindow.FK_L_Ring_01_Ctrl,
(u'RS_Left', u'RT_IndexFK', 1): uiWindow.FK_L_Index_01_Ctrl,
(u'RS_Left', u'RT_RingFK', 3): uiWindow.FK_L_Ring_03_Ctrl,
(u'RS_Right', u'RT_MiddleFK', 3): uiWindow.FK_R_Middle_03_Ctrl,
(u'RS_Left', u'RT_IndexFK', 0): uiWindow.FK_L_Index_00_Ctrl,
(u'RS_Right', u'RT_MiddleFK', 2): uiWindow.FK_R_Middle_02_Ctrl,
(u'RS_Right', u'RT_ThumbFK', 0): uiWindow.FK_R_Thumb_00_Ctrl,
(u'RS_Left', u'RT_PinkyFK', 0): uiWindow.FK_L_Pinky_00_Ctrl,
(u'RS_Right', u'RT_IndexFK', 3): uiWindow.FK_R_Index_03_Ctrl,
(u'RS_Left', u'RT_RingFK', 2): uiWindow.FK_L_Ring_02_Ctrl,
(u'RS_Right', u'RT_ThumbFK', 1): uiWindow.FK_R_Thumb_01_Ctrl,
(u'RS_Right', u'RT_MiddleFK', 1): uiWindow.FK_R_Middle_01_Ctrl,
(u'RS_Right', u'RT_PinkyFK', 0): uiWindow.FK_R_Pinky_00_Ctrl,
(u'RS_Right', u'RT_RingFK', 0): uiWindow.FK_R_Ring_00_Ctrl,
(u'RS_Right', u'RT_RingFK', 2): uiWindow.FK_R_Ring_02_Ctrl,
(u'RS_Right', u'RT_RingFK', 1): uiWindow.FK_R_Ring_01_Ctrl,
(u'RS_Right', u'RT_IndexFK', 2): uiWindow.FK_R_Index_02_Ctrl,
(u'RS_Right', u'RT_RingFK', 3): uiWindow.FK_R_Ring_03_Ctrl,
(u'RS_Left', u'RT_MiddleFK', 3): uiWindow.FK_L_Middle_03_Ctrl,
(u'RS_Right', u'RT_IndexFK', 0): uiWindow.FK_R_Index_00_Ctrl,
(u'RS_Right', u'RT_IndexFK', 1): uiWindow.FK_R_Index_01_Ctrl,
(u'RS_Right', u'RT_FootToeSwive', 0): uiWindow.R_Leg_ToeSwive_Ctrl,
(u'RS_Center', u'RT_SpinePelvis', 0): uiWindow.C_SpinePelvis_Ctrl,
(u'RS_Right', u'RT_FootRotation', 0): uiWindow.R_Leg_Rotation_Ctrl,
(u'RS_Right', u'RT_FootBaseSwive', 0): uiWindow.R_Leg_FootBaseSwive_Ctrl,
(u'RS_Left', u'RT_FootBaseSwive', 0): uiWindow.L_Leg_FootBaseSwive_Ctrl,
(u'RS_Right', u'RT_FootIKMain', 0): uiWindow.R_Leg_IK_Main_Ctrl,
(u'RS_Left', u'RT_LegPV', 0): uiWindow.L_Leg_PV_Ctrl,
(u'RS_Left', u'RT_FootRotation', 0): uiWindow.L_Leg_Rotation_Ctrl,
(u'RS_Center', u'RT_SpineChest', 0): uiWindow.C_SpineChest_Ctrl,
(u'RS_Left', u'RT_ArmIKMain', 0): uiWindow.L_Arm_IK_Main_Ctrl,
(u'RS_Left', u'RT_FootToeSwive', 0): uiWindow.L_Leg_ToeSwive_Ctrl,
(u'RS_Right', u'RT_LegPV', 0): uiWindow.R_Leg_PV_Ctrl,
(u'RS_Right', u'RT_ArmPV', 0): uiWindow.R_Arm_PV_Ctrl,
(u'RS_Right', u'RT_AnkleIKRotation', 0): uiWindow.R_Leg_IK_Rotation_Ctrl,
(u'RS_Left', u'RT_AnkleIKRotation', 0): uiWindow.L_Leg_IK_Rotation_Ctrl,
(u'RS_Right', u'RT_ArmIKMain', 0): uiWindow.R_Arm_IK_Main_Ctrl,
(u'RS_Left', u'RT_ArmPV', 0): uiWindow.L_Arm_PV_Ctrl,
(u'RS_Left', u'RT_FootIKMain', 0): uiWindow.L_Leg_IK_Main_Ctrl,
}
global ButtonToControl
ButtonToControl = {
uiWindow.FK_C_Neck_0_Ctrl:(u'RS_Center', u'RT_NeckFK', 0),
uiWindow.FK_C_Neck_1_Ctrl:(u'RS_Center', u'RT_NeckFK', 1),
uiWindow.FK_C_Head_Ctrl:(u'RS_Center', u'RT_HeadFK', 0),
uiWindow.FK_C_Spine_1_Ctrl:(u'RS_Center', u'RT_SpineFK', 1),
uiWindow.FK_C_Spine_0_Ctrl:(u'RS_Center', u'RT_SpineFK', 0),
uiWindow.C_SpineUpperBody_Ctrl:(u'RS_Center', u'RT_SpineUpperBody', 0),
uiWindow.FK_L_Leg0_Ctrl:(u'RS_Left', u'RT_LegFK', 0),
uiWindow.FK_L_Leg1_Ctrl :(u'RS_Left', u'RT_LegFK', 1),
uiWindow.FK_L_Leg2_Ctrl :(u'RS_Left', u'RT_LegFK', 2),
uiWindow.FK_L_Leg3_Ctrl:(u'RS_Left', u'RT_LegFK', 3),
uiWindow.FK_R_Leg0_Ctrl:(u'RS_Right', u'RT_LegFK', 0),
uiWindow.FK_R_Leg1_Ctrl:(u'RS_Right', u'RT_LegFK', 1),
uiWindow.FK_R_Leg2_Ctrl:(u'RS_Right', u'RT_LegFK', 2),
uiWindow.FK_R_Leg3_Ctrl:(u'RS_Right', u'RT_LegFK', 3),
uiWindow.FK_R_Arm1_Ctrl:(u'RS_Right', u'RT_ElbowFK', 0),
uiWindow.FK_L_Arm2_Ctrl:(u'RS_Left', u'RT_WristFK', 0),
uiWindow.FK_L_Arm1_Ctrl:(u'RS_Left', u'RT_ElbowFK', 0),
uiWindow.FK_R_Arm2_Ctrl:(u'RS_Right', u'RT_WristFK', 0),
uiWindow.FK_R_Arm0_Ctrl:(u'RS_Right', u'RT_ShoulderFK', 0),
uiWindow.FK_L_Arm0_Ctrl:(u'RS_Left', u'RT_ShoulderFK', 0),
uiWindow.L_Arm_Clav_Rotation_Ctrl:(u'RS_Left', u'RT_Clavicle', 0),
uiWindow.R_Arm_Clav_Rotation_Ctrl:(u'RS_Right', u'RT_Clavicle', 0),
uiWindow.FK_L_Thumb_00_Ctrl:(u'RS_Left', u'RT_ThumbFK', 0),
uiWindow.FK_L_Thumb_01_Ctrl:(u'RS_Left', u'RT_ThumbFK', 1),
uiWindow.FK_L_Thumb_02_Ctrl:(u'RS_Left', u'RT_ThumbFK', 2),
uiWindow.FK_L_Index_03_Ctrl:(u'RS_Left', u'RT_IndexFK', 3),
uiWindow.FK_L_Index_02_Ctrl:(u'RS_Left', u'RT_IndexFK', 2),
uiWindow.FK_L_Index_01_Ctrl:(u'RS_Left', u'RT_IndexFK', 1),
uiWindow.FK_L_Index_00_Ctrl:(u'RS_Left', u'RT_IndexFK', 0),
uiWindow.FK_L_Middle_02_Ctrl:(u'RS_Left', u'RT_MiddleFK', 2),
uiWindow.FK_L_Middle_01_Ctrl:(u'RS_Left', u'RT_MiddleFK', 1),
uiWindow.FK_L_Middle_00_Ctrl:(u'RS_Left', u'RT_MiddleFK', 0),
uiWindow.FK_L_Middle_03_Ctrl:(u'RS_Left', u'RT_MiddleFK', 3),
uiWindow.FK_L_Ring_00_Ctrl:(u'RS_Left', u'RT_RingFK', 0),
uiWindow.FK_L_Ring_01_Ctrl:(u'RS_Left', u'RT_RingFK', 1),
uiWindow.FK_L_Ring_03_Ctrl:(u'RS_Left', u'RT_RingFK', 3),
uiWindow.FK_L_Ring_02_Ctrl:(u'RS_Left', u'RT_RingFK', 2),
uiWindow.FK_R_Thumb_02_Ctrl:(u'RS_Right', u'RT_ThumbFK', 2),
uiWindow.FK_R_Thumb_00_Ctrl:(u'RS_Right', u'RT_ThumbFK', 0),
uiWindow.FK_R_Thumb_01_Ctrl:(u'RS_Right', u'RT_ThumbFK', 1),
uiWindow.FK_R_Index_03_Ctrl:(u'RS_Right', u'RT_IndexFK', 3),
uiWindow.FK_R_Index_02_Ctrl:(u'RS_Right', u'RT_IndexFK', 2),
uiWindow.FK_R_Index_00_Ctrl:(u'RS_Right', u'RT_IndexFK', 0),
uiWindow.FK_R_Index_01_Ctrl:(u'RS_Right', u'RT_IndexFK', 1),
uiWindow.FK_R_Middle_00_Ctrl:(u'RS_Right', u'RT_MiddleFK', 0),
uiWindow.FK_R_Middle_03_Ctrl:(u'RS_Right', u'RT_MiddleFK', 3),
uiWindow.FK_R_Middle_02_Ctrl:(u'RS_Right', u'RT_MiddleFK', 2),
uiWindow.FK_R_Middle_01_Ctrl:(u'RS_Right', u'RT_MiddleFK', 1),
uiWindow.FK_R_Ring_00_Ctrl:(u'RS_Right', u'RT_RingFK', 0),
uiWindow.FK_R_Ring_02_Ctrl:(u'RS_Right', u'RT_RingFK', 2),
uiWindow.FK_R_Ring_01_Ctrl:(u'RS_Right', u'RT_RingFK', 1),
uiWindow.FK_R_Ring_03_Ctrl:(u'RS_Right', u'RT_RingFK', 3),
uiWindow.FK_R_Pinky_02_Ctrl:(u'RS_Right', u'RT_PinkyFK', 2),
uiWindow.FK_R_Pinky_03_Ctrl:(u'RS_Right', u'RT_PinkyFK', 3),
uiWindow.FK_R_Pinky_01_Ctrl:(u'RS_Right', u'RT_PinkyFK', 1),
uiWindow.FK_R_Pinky_00_Ctrl:(u'RS_Right', u'RT_PinkyFK', 0),
uiWindow.FK_L_Pinky_02_Ctrl:(u'RS_Left', u'RT_PinkyFK', 2),
uiWindow.FK_L_Pinky_03_Ctrl:(u'RS_Left', u'RT_PinkyFK', 3),
uiWindow.FK_L_Pinky_01_Ctrl:(u'RS_Left', u'RT_PinkyFK', 1),
uiWindow.FK_L_Pinky_00_Ctrl:(u'RS_Left', u'RT_PinkyFK', 0),
uiWindow.L_Leg_PV_Ctrl:(u'RS_Left', u'RT_LegPV', 0),
uiWindow.R_Leg_PV_Ctrl:(u'RS_Right', u'RT_LegPV', 0),
uiWindow.R_Arm_PV_Ctrl:(u'RS_Right', u'RT_ArmPV', 0),
uiWindow.L_Arm_PV_Ctrl:(u'RS_Left', u'RT_ArmPV', 0),
uiWindow.L_Leg_IK_Main_Ctrl:(u'RS_Left', u'RT_FootIKMain', 0),
uiWindow.L_Arm_IK_Main_Ctrl:(u'RS_Left', u'RT_ArmIKMain', 0),
uiWindow.R_Leg_IK_Main_Ctrl:(u'RS_Right', u'RT_FootIKMain', 0),
uiWindow.R_Arm_IK_Main_Ctrl:(u'RS_Right', u'RT_ArmIKMain', 0),
uiWindow.R_Leg_Rotation_Ctrl:(u'RS_Right', u'RT_FootRotation', 0),
uiWindow.L_Leg_Rotation_Ctrl:(u'RS_Left', u'RT_FootRotation', 0),
uiWindow.R_Leg_ToeSwive_Ctrl:(u'RS_Right', u'RT_FootToeSwive', 0),
uiWindow.L_Leg_ToeSwive_Ctrl:(u'RS_Left', u'RT_FootToeSwive', 0),
uiWindow.R_Leg_FootBaseSwive_Ctrl:(u'RS_Right', u'RT_FootBaseSwive', 0),
uiWindow.L_Leg_FootBaseSwive_Ctrl:(u'RS_Left', u'RT_FootBaseSwive', 0),
uiWindow.R_Leg_IK_Rotation_Ctrl:(u'RS_Right', u'RT_AnkleIKRotation', 0),
uiWindow.L_Leg_IK_Rotation_Ctrl:(u'RS_Left', u'RT_AnkleIKRotation', 0),
uiWindow.C_SpinePelvis_Ctrl:(u'RS_Center', u'RT_SpinePelvis', 0),
uiWindow.C_SpineChest_Ctrl:(u'RS_Center', u'RT_SpineChest', 0),
}
global MultiVerticalButtonToControl
MultiVerticalButtonToControl = {
uiWindow.FK_L_Pinky_Ctrl:[(u'RS_Left', u'RT_PinkyFK', 0),(u'RS_Left', u'RT_PinkyFK', 1),(u'RS_Left', u'RT_PinkyFK', 2),(u'RS_Left', u'RT_PinkyFK', 3)],
uiWindow.FK_R_Pinky_Ctrl:[(u'RS_Right', u'RT_PinkyFK', 0),(u'RS_Right', u'RT_PinkyFK', 1),(u'RS_Right', u'RT_PinkyFK', 2),(u'RS_Right', u'RT_PinkyFK', 3)],
uiWindow.FK_R_Ring_Ctrl:[(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_RingFK', 1),(u'RS_Right', u'RT_RingFK', 2),(u'RS_Right', u'RT_RingFK', 3)],
uiWindow.FK_L_Ring_Ctrl:[(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_RingFK', 1),(u'RS_Left', u'RT_RingFK', 2),(u'RS_Left', u'RT_RingFK', 3)],
uiWindow.FK_R_Index_Ctrl:[(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_IndexFK', 1),(u'RS_Right', u'RT_IndexFK', 2),(u'RS_Right', u'RT_IndexFK', 3)],
uiWindow.FK_L_Index_Ctrl:[(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_IndexFK', 1),(u'RS_Left', u'RT_IndexFK', 2),(u'RS_Left', u'RT_IndexFK', 3)],
uiWindow.FK_R_Middle_Ctrl:[(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_MiddleFK', 1),(u'RS_Right', u'RT_MiddleFK', 2),(u'RS_Right', u'RT_MiddleFK', 3)],
uiWindow.FK_L_Middle_Ctrl:[(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_MiddleFK', 1),(u'RS_Left', u'RT_MiddleFK', 2),(u'RS_Left', u'RT_MiddleFK', 3)],
uiWindow.FK_L_Finger_00_Ctrl:[(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_PinkyFK', 0)],
uiWindow.FK_L_Finger_01_Ctrl:[(u'RS_Left', u'RT_IndexFK', 1),(u'RS_Left', u'RT_MiddleFK', 1),(u'RS_Left', u'RT_RingFK', 1),(u'RS_Left', u'RT_PinkyFK', 1)],
uiWindow.FK_L_Finger_02_Ctrl:[(u'RS_Left', u'RT_IndexFK', 2),(u'RS_Left', u'RT_MiddleFK', 2),(u'RS_Left', u'RT_RingFK', 2),(u'RS_Left', u'RT_PinkyFK', 2)],
uiWindow.FK_L_Finger_03_Ctrl:[(u'RS_Left', u'RT_IndexFK', 3),(u'RS_Left', u'RT_MiddleFK', 3),(u'RS_Left', u'RT_RingFK', 3),(u'RS_Left', u'RT_PinkyFK', 3)],
uiWindow.FK_R_Finger_00_Ctrl:[(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_PinkyFK', 0)],
uiWindow.FK_R_Finger_01_Ctrl:[(u'RS_Right', u'RT_IndexFK', 1),(u'RS_Right', u'RT_MiddleFK', 1),(u'RS_Right', u'RT_RingFK', 1),(u'RS_Right', u'RT_PinkyFK', 1)],
uiWindow.FK_R_Finger_02_Ctrl:[(u'RS_Right', u'RT_IndexFK', 2),(u'RS_Right', u'RT_MiddleFK', 2),(u'RS_Right', u'RT_RingFK', 2),(u'RS_Right', u'RT_PinkyFK', 2)],
uiWindow.FK_R_Finger_03_Ctrl:[(u'RS_Right', u'RT_IndexFK', 3),(u'RS_Right', u'RT_MiddleFK', 3),(u'RS_Right', u'RT_RingFK', 3),(u'RS_Right', u'RT_PinkyFK', 3)],
uiWindow.FK_R_FourFinger_Ctrl:[#(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_PinkyFK', 0),
(u'RS_Right', u'RT_IndexFK', 1),(u'RS_Right', u'RT_MiddleFK', 1),(u'RS_Right', u'RT_RingFK', 1),(u'RS_Right', u'RT_PinkyFK', 1),
(u'RS_Right', u'RT_IndexFK', 2),(u'RS_Right', u'RT_MiddleFK', 2),(u'RS_Right', u'RT_RingFK', 2),(u'RS_Right', u'RT_PinkyFK', 2),
(u'RS_Right', u'RT_IndexFK', 3),(u'RS_Right', u'RT_MiddleFK', 3),(u'RS_Right', u'RT_RingFK', 3),(u'RS_Right', u'RT_PinkyFK', 3)],
uiWindow.FK_L_FourFinger_Ctrl:[#(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_PinkyFK', 0),
(u'RS_Left', u'RT_IndexFK', 1),(u'RS_Left', u'RT_MiddleFK', 1),(u'RS_Left', u'RT_RingFK', 1),(u'RS_Left', u'RT_PinkyFK', 1),
(u'RS_Left', u'RT_IndexFK', 2),(u'RS_Left', u'RT_MiddleFK', 2),(u'RS_Left', u'RT_RingFK', 2),(u'RS_Left', u'RT_PinkyFK', 2),
(u'RS_Left', u'RT_IndexFK', 3),(u'RS_Left', u'RT_MiddleFK', 3),(u'RS_Left', u'RT_RingFK', 3),(u'RS_Left', u'RT_PinkyFK', 3)],
}
def getCurrentSelecterName(uiWindow):
name = uiWindow.characterSelector.currentText()
if name == "None":
#cmds.confirmDialog(title = "Wrong Character", icon = "critical", message = "Please select a vaild Character name in comboBox" )
return name
return name
# def selectControl(CurrWidget):
# data = ButtonToControl[CurrWidget]
# name = getCurrentSelecterName(uiWindow)
# currentRig = RigObjectHelper.getRigControlObject(name, data[0], data[1], data[2])
# cmds.select(currentRig)
# uiWindow.FK_C_Neck_0_Ctrl.clicked.connect(lambda *arg:selectControl(uiWindow.FK_C_Neck_0_Ctrl))
# uiWindow.FK_C_Neck_1_Ctrl.clicked.connect(lambda *arg:selectControl(uiWindow.FK_C_Neck_1_Ctrl))
# for key in ButtonToControl:
# def function(*arg):
# print arg
# print key
# keyy = key
# lambda *arg:selectControl(keyy)
# # selectControl(key)
# # key.clicked.connect(lambda *arg:selectControl(key))
# key.clicked.connect(function)
# print key
# for key, value in ButtonToControl.items():
# print (key, ' value : ', value)
# valuee = value[2]
# aaa = callbackobj(key)
# print aaa.key
# def function(*arg):
# # print key , valuee
# print aaa.key
# lambda *arg:selectControl(aaa.key)
# key.clicked.connect(lambda *arg:selectControl(aaa.key))
def setMutiSelectedButtonCallback(uiWindow):
for key in MultiVerticalButtonToControl:
callback = multiCallbackobj(key , uiWindow)
key.clicked.connect(functools.partial(callback.functor, callback))
def setButtonCallback(uiWindow):
for key in ButtonToControl:
callback = callbackobj(key , uiWindow)
key.clicked.connect(functools.partial(callback.functor, callback))
def setResetToModelBasePose(uiWindow):
print "setResetToModelBasePose"
def resetToBasePoseCallback():
print resetToBasePoseCallback
name = uiWindow.characterSelector.currentText()
controllrigs = getAllControllRigByName(name)
for key , rig in controllrigs[0].items():
RigObjectHelper.setOneRigRotAndTrans(rig ,0,0,0,0,0,0)
uiWindow.ResetPoseButton.clicked.connect(resetToBasePoseCallback)
def setIKFKShow(uiWindow):
def modifyValue(SliderValue):
return SliderValue / 10.0
def setLeftLegIKFK(SliderValue):
RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sLeftLegIKFKSwitch)
def setLeftArmIKFK(SliderValue):
RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sLeftArmIKFKSwitch)
def setRightLegIKFK(SliderValue):
RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sRightLegIKFKSwitch)
def setRightArmIKFK(SliderValue):
RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sRightArmIKFKSwitch)
uiWindow.IKFKLLeg.valueChanged.connect(setLeftLegIKFK)
uiWindow.IKFKLHand.valueChanged.connect(setLeftArmIKFK)
uiWindow.IKFKRLeg.valueChanged.connect(setRightLegIKFK)
uiWindow.IKFKRHand.valueChanged.connect(setRightArmIKFK)
uiWindow.IKToFKRHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('R_Arm_RigComponentsGrp'))
uiWindow.FKToIKRHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('R_Arm_RigComponentsGrp'))
uiWindow.IKToFKLHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('L_Arm_RigComponentsGrp'))
uiWindow.FKToIKLHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('L_Arm_RigComponentsGrp'))
uiWindow.IKToFKRLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('R_Leg_RigComponentsGrp'))
uiWindow.FKToIKRLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('R_Leg_RigComponentsGrp'))
uiWindow.IKToFKLLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('L_Leg_RigComponentsGrp'))
uiWindow.FKToIKLLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('L_Leg_RigComponentsGrp'))
class callbackobj():
def __init__(self, key ,uiWindow):
self.key = key
self.uiWindow = uiWindow
def functor(self , *arg):
print self.key
self.selectControl()
def selectControl(self):
data = ButtonToControl[self.key]
name = getCurrentSelecterName(self.uiWindow)
currentRig = RigObjectHelper.getRigControlObject(name, data[0], data[1], data[2])
print currentRig
cmds.select(currentRig)
class multiCallbackobj():
def __init__(self, key , uiWindow):
self.key = key
self.uiWindow = uiWindow
def functor(self , *arg):
# print self.key
self.selectControl()
def selectControl(self):
dataArray = MultiVerticalButtonToControl[self.key]
name = getCurrentSelecterName(self.uiWindow)
cmds.select( clear=True )
for data in dataArray:
currentRig = RigObjectHelper.getRigControlObject(name, data[0], data[1], data[2])
cmds.select(currentRig , add=True)
| <filename>UI/ControlRigUI.py
import maya.OpenMayaMPx as OpenMayaMPx
import maya.OpenMaya as OpenMaya
import maya.OpenMayaAnim as OpenMayaAnim
import maya.mel
import sys
import maya.cmds as cmds
import maya.OpenMayaUI as mui
from PySide2 import QtCore, QtGui, QtWidgets , QtUiTools
import shiboken2
import os
import time
import functools
import cPickle
from ..Character import SECharacter
from ..Base import SERigNaming
from ..Utils import SERigObjectTypeHelper as RigObjectHelper
from ..Rig import SERigBipedLimbComponent
import UIConfig
#"E:/Users/admin/Documents/GitHub/SERiggingTools/UI/LoadRiggingUI.ui"
uiRootFile = os.path.dirname(UIConfig.__file__)
uifile_path = uiRootFile + "/Control2Rig.ui"
def openControlRigWindow():
''' todo: stop open more than one window'''
global ui
ui = loadUI(uifile_path)
ui.show()
def loadUI(uifile_path):
#QtCore.QResource.addSearchPath("E:/Users/admin/Documents/GitHub/SERiggingTools/UI")
uifile = QtCore.QFile(uifile_path)
print(uifile)
uifile.open(QtCore.QFile.ReadOnly)
#QtCore.QResource.registerResource("E:/Users/admin/Documents/GitHub/SERiggingTools/UI/UIResource.qrc")
uiWindow = QtUiTools.QUiLoader().load(uifile)
uifile.close()
uiWindow.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
setControlToButtonMap(uiWindow)
setSelectorCallBack(uiWindow)
setButtonCallback(uiWindow)
setMutiSelectedButtonCallback(uiWindow)
setResetToModelBasePose(uiWindow)
setIKFKShow(uiWindow)
uiWindow.BodyBG.setPixmap(QtGui.QPixmap((uiRootFile +"/ControllUIBG.png")))
uiWindow.RHandBG.setPixmap(QtGui.QPixmap((uiRootFile +"/ControllUIRHandBG.png")))
uiWindow.LHandBG.setPixmap(QtGui.QPixmap((uiRootFile +"/ControllUILHandBG.png")))
uiWindow.FootBG.setPixmap(QtGui.QPixmap((uiRootFile +"/Foot.png")))
refreshCharacterInSelector(uiWindow)
return uiWindow
def setSelectorCallBack(uiWindow):
def selecterChangeCallback(index):
name = uiWindow.characterSelector.currentText()
getAllControllRigByName(name)
uiWindow.characterSelector.currentIndexChanged.connect(selecterChangeCallback)
def refreshCharacterInSelector(uiWindow):
CharacterArray = getCurrentHaveCharacter()
uiWindow.characterSelector.addItem("None")
for x in range(len(CharacterArray)):
uiWindow.characterSelector.addItem(CharacterArray[x])
index = uiWindow.characterSelector.currentText()
def getCurrentHaveCharacter():
cha = RigObjectHelper.listRigCharacters()
return cha
def getAllControllRigByName(charName):
if charName != "None":
characterControls = RigObjectHelper.listRigCharacterControls(charName)
return characterControls
return None
def setControlToButtonMap(uiWindow):
    """Build the global lookup tables tying rig controls to picker buttons.

    ``ControlToButton`` maps a (side, type, index) control key to its picker
    button.  ``ButtonToControl`` is its exact inverse and is now derived from
    it, so the two tables can never drift apart (the original maintained both
    by hand; the 78 hand-written inverse entries matched exactly).
    ``MultiVerticalButtonToControl`` maps one "multi" button to the list of
    control keys it selects at once.

    :param uiWindow: the loaded picker window whose button widgets are used.
    """
    global ControlToButton
    ControlToButton = {
        (u'RS_Center', u'RT_SpineFK', 0): uiWindow.FK_C_Spine_0_Ctrl,
        (u'RS_Right', u'RT_WristFK', 0): uiWindow.FK_R_Arm2_Ctrl,
        (u'RS_Center', u'RT_SpineFK', 1): uiWindow.FK_C_Spine_1_Ctrl,
        (u'RS_Right', u'RT_Clavicle', 0): uiWindow.R_Arm_Clav_Rotation_Ctrl,
        (u'RS_Center', u'RT_NeckFK', 0): uiWindow.FK_C_Neck_0_Ctrl,
        (u'RS_Left', u'RT_ShoulderFK', 0): uiWindow.FK_L_Arm0_Ctrl,
        (u'RS_Left', u'RT_LegFK', 1): uiWindow.FK_L_Leg1_Ctrl,
        (u'RS_Left', u'RT_LegFK', 0): uiWindow.FK_L_Leg0_Ctrl,
        (u'RS_Left', u'RT_LegFK', 3): uiWindow.FK_L_Leg3_Ctrl,
        (u'RS_Left', u'RT_LegFK', 2): uiWindow.FK_L_Leg2_Ctrl,
        (u'RS_Right', u'RT_LegFK', 3): uiWindow.FK_R_Leg3_Ctrl,
        (u'RS_Right', u'RT_LegFK', 2): uiWindow.FK_R_Leg2_Ctrl,
        (u'RS_Right', u'RT_LegFK', 1): uiWindow.FK_R_Leg1_Ctrl,
        (u'RS_Right', u'RT_LegFK', 0): uiWindow.FK_R_Leg0_Ctrl,
        (u'RS_Right', u'RT_ShoulderFK', 0): uiWindow.FK_R_Arm0_Ctrl,
        (u'RS_Center', u'RT_SpineUpperBody', 0): uiWindow.C_SpineUpperBody_Ctrl,
        (u'RS_Center', u'RT_HeadFK', 0): uiWindow.FK_C_Head_Ctrl,
        (u'RS_Center', u'RT_NeckFK', 1): uiWindow.FK_C_Neck_1_Ctrl,
        (u'RS_Left', u'RT_ElbowFK', 0): uiWindow.FK_L_Arm1_Ctrl,
        (u'RS_Left', u'RT_Clavicle', 0): uiWindow.L_Arm_Clav_Rotation_Ctrl,
        (u'RS_Left', u'RT_WristFK', 0): uiWindow.FK_L_Arm2_Ctrl,
        (u'RS_Right', u'RT_ElbowFK', 0): uiWindow.FK_R_Arm1_Ctrl,
        (u'RS_Left', u'RT_MiddleFK', 2): uiWindow.FK_L_Middle_02_Ctrl,
        (u'RS_Right', u'RT_PinkyFK', 2): uiWindow.FK_R_Pinky_02_Ctrl,
        (u'RS_Right', u'RT_ThumbFK', 2): uiWindow.FK_R_Thumb_02_Ctrl,
        (u'RS_Left', u'RT_PinkyFK', 2): uiWindow.FK_L_Pinky_02_Ctrl,
        (u'RS_Left', u'RT_MiddleFK', 1): uiWindow.FK_L_Middle_01_Ctrl,
        (u'RS_Right', u'RT_PinkyFK', 3): uiWindow.FK_R_Pinky_03_Ctrl,
        (u'RS_Left', u'RT_PinkyFK', 3): uiWindow.FK_L_Pinky_03_Ctrl,
        (u'RS_Left', u'RT_MiddleFK', 0): uiWindow.FK_L_Middle_00_Ctrl,
        (u'RS_Left', u'RT_ThumbFK', 2): uiWindow.FK_L_Thumb_02_Ctrl,
        (u'RS_Left', u'RT_PinkyFK', 1): uiWindow.FK_L_Pinky_01_Ctrl,
        (u'RS_Right', u'RT_PinkyFK', 1): uiWindow.FK_R_Pinky_01_Ctrl,
        (u'RS_Left', u'RT_ThumbFK', 1): uiWindow.FK_L_Thumb_01_Ctrl,
        (u'RS_Left', u'RT_IndexFK', 3): uiWindow.FK_L_Index_03_Ctrl,
        (u'RS_Left', u'RT_ThumbFK', 0): uiWindow.FK_L_Thumb_00_Ctrl,
        (u'RS_Left', u'RT_RingFK', 0): uiWindow.FK_L_Ring_00_Ctrl,
        (u'RS_Left', u'RT_IndexFK', 2): uiWindow.FK_L_Index_02_Ctrl,
        (u'RS_Right', u'RT_MiddleFK', 0): uiWindow.FK_R_Middle_00_Ctrl,
        (u'RS_Left', u'RT_RingFK', 1): uiWindow.FK_L_Ring_01_Ctrl,
        (u'RS_Left', u'RT_IndexFK', 1): uiWindow.FK_L_Index_01_Ctrl,
        (u'RS_Left', u'RT_RingFK', 3): uiWindow.FK_L_Ring_03_Ctrl,
        (u'RS_Right', u'RT_MiddleFK', 3): uiWindow.FK_R_Middle_03_Ctrl,
        (u'RS_Left', u'RT_IndexFK', 0): uiWindow.FK_L_Index_00_Ctrl,
        (u'RS_Right', u'RT_MiddleFK', 2): uiWindow.FK_R_Middle_02_Ctrl,
        (u'RS_Right', u'RT_ThumbFK', 0): uiWindow.FK_R_Thumb_00_Ctrl,
        (u'RS_Left', u'RT_PinkyFK', 0): uiWindow.FK_L_Pinky_00_Ctrl,
        (u'RS_Right', u'RT_IndexFK', 3): uiWindow.FK_R_Index_03_Ctrl,
        (u'RS_Left', u'RT_RingFK', 2): uiWindow.FK_L_Ring_02_Ctrl,
        (u'RS_Right', u'RT_ThumbFK', 1): uiWindow.FK_R_Thumb_01_Ctrl,
        (u'RS_Right', u'RT_MiddleFK', 1): uiWindow.FK_R_Middle_01_Ctrl,
        (u'RS_Right', u'RT_PinkyFK', 0): uiWindow.FK_R_Pinky_00_Ctrl,
        (u'RS_Right', u'RT_RingFK', 0): uiWindow.FK_R_Ring_00_Ctrl,
        (u'RS_Right', u'RT_RingFK', 2): uiWindow.FK_R_Ring_02_Ctrl,
        (u'RS_Right', u'RT_RingFK', 1): uiWindow.FK_R_Ring_01_Ctrl,
        (u'RS_Right', u'RT_IndexFK', 2): uiWindow.FK_R_Index_02_Ctrl,
        (u'RS_Right', u'RT_RingFK', 3): uiWindow.FK_R_Ring_03_Ctrl,
        (u'RS_Left', u'RT_MiddleFK', 3): uiWindow.FK_L_Middle_03_Ctrl,
        (u'RS_Right', u'RT_IndexFK', 0): uiWindow.FK_R_Index_00_Ctrl,
        (u'RS_Right', u'RT_IndexFK', 1): uiWindow.FK_R_Index_01_Ctrl,
        (u'RS_Right', u'RT_FootToeSwive', 0): uiWindow.R_Leg_ToeSwive_Ctrl,
        (u'RS_Center', u'RT_SpinePelvis', 0): uiWindow.C_SpinePelvis_Ctrl,
        (u'RS_Right', u'RT_FootRotation', 0): uiWindow.R_Leg_Rotation_Ctrl,
        (u'RS_Right', u'RT_FootBaseSwive', 0): uiWindow.R_Leg_FootBaseSwive_Ctrl,
        (u'RS_Left', u'RT_FootBaseSwive', 0): uiWindow.L_Leg_FootBaseSwive_Ctrl,
        (u'RS_Right', u'RT_FootIKMain', 0): uiWindow.R_Leg_IK_Main_Ctrl,
        (u'RS_Left', u'RT_LegPV', 0): uiWindow.L_Leg_PV_Ctrl,
        (u'RS_Left', u'RT_FootRotation', 0): uiWindow.L_Leg_Rotation_Ctrl,
        (u'RS_Center', u'RT_SpineChest', 0): uiWindow.C_SpineChest_Ctrl,
        (u'RS_Left', u'RT_ArmIKMain', 0): uiWindow.L_Arm_IK_Main_Ctrl,
        (u'RS_Left', u'RT_FootToeSwive', 0): uiWindow.L_Leg_ToeSwive_Ctrl,
        (u'RS_Right', u'RT_LegPV', 0): uiWindow.R_Leg_PV_Ctrl,
        (u'RS_Right', u'RT_ArmPV', 0): uiWindow.R_Arm_PV_Ctrl,
        (u'RS_Right', u'RT_AnkleIKRotation', 0): uiWindow.R_Leg_IK_Rotation_Ctrl,
        (u'RS_Left', u'RT_AnkleIKRotation', 0): uiWindow.L_Leg_IK_Rotation_Ctrl,
        (u'RS_Right', u'RT_ArmIKMain', 0): uiWindow.R_Arm_IK_Main_Ctrl,
        (u'RS_Left', u'RT_ArmPV', 0): uiWindow.L_Arm_PV_Ctrl,
        (u'RS_Left', u'RT_FootIKMain', 0): uiWindow.L_Leg_IK_Main_Ctrl,
    }
    global ButtonToControl
    # Exact inverse lookup (button -> control key), derived so it can never
    # fall out of sync with ControlToButton.
    ButtonToControl = {button: controlKey
                       for controlKey, button in ControlToButton.items()}
    global MultiVerticalButtonToControl
    MultiVerticalButtonToControl = {
        uiWindow.FK_L_Pinky_Ctrl:[(u'RS_Left', u'RT_PinkyFK', 0),(u'RS_Left', u'RT_PinkyFK', 1),(u'RS_Left', u'RT_PinkyFK', 2),(u'RS_Left', u'RT_PinkyFK', 3)],
        uiWindow.FK_R_Pinky_Ctrl:[(u'RS_Right', u'RT_PinkyFK', 0),(u'RS_Right', u'RT_PinkyFK', 1),(u'RS_Right', u'RT_PinkyFK', 2),(u'RS_Right', u'RT_PinkyFK', 3)],
        uiWindow.FK_R_Ring_Ctrl:[(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_RingFK', 1),(u'RS_Right', u'RT_RingFK', 2),(u'RS_Right', u'RT_RingFK', 3)],
        uiWindow.FK_L_Ring_Ctrl:[(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_RingFK', 1),(u'RS_Left', u'RT_RingFK', 2),(u'RS_Left', u'RT_RingFK', 3)],
        uiWindow.FK_R_Index_Ctrl:[(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_IndexFK', 1),(u'RS_Right', u'RT_IndexFK', 2),(u'RS_Right', u'RT_IndexFK', 3)],
        uiWindow.FK_L_Index_Ctrl:[(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_IndexFK', 1),(u'RS_Left', u'RT_IndexFK', 2),(u'RS_Left', u'RT_IndexFK', 3)],
        uiWindow.FK_R_Middle_Ctrl:[(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_MiddleFK', 1),(u'RS_Right', u'RT_MiddleFK', 2),(u'RS_Right', u'RT_MiddleFK', 3)],
        uiWindow.FK_L_Middle_Ctrl:[(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_MiddleFK', 1),(u'RS_Left', u'RT_MiddleFK', 2),(u'RS_Left', u'RT_MiddleFK', 3)],
        uiWindow.FK_L_Finger_00_Ctrl:[(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_PinkyFK', 0)],
        uiWindow.FK_L_Finger_01_Ctrl:[(u'RS_Left', u'RT_IndexFK', 1),(u'RS_Left', u'RT_MiddleFK', 1),(u'RS_Left', u'RT_RingFK', 1),(u'RS_Left', u'RT_PinkyFK', 1)],
        uiWindow.FK_L_Finger_02_Ctrl:[(u'RS_Left', u'RT_IndexFK', 2),(u'RS_Left', u'RT_MiddleFK', 2),(u'RS_Left', u'RT_RingFK', 2),(u'RS_Left', u'RT_PinkyFK', 2)],
        uiWindow.FK_L_Finger_03_Ctrl:[(u'RS_Left', u'RT_IndexFK', 3),(u'RS_Left', u'RT_MiddleFK', 3),(u'RS_Left', u'RT_RingFK', 3),(u'RS_Left', u'RT_PinkyFK', 3)],
        uiWindow.FK_R_Finger_00_Ctrl:[(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_PinkyFK', 0)],
        uiWindow.FK_R_Finger_01_Ctrl:[(u'RS_Right', u'RT_IndexFK', 1),(u'RS_Right', u'RT_MiddleFK', 1),(u'RS_Right', u'RT_RingFK', 1),(u'RS_Right', u'RT_PinkyFK', 1)],
        uiWindow.FK_R_Finger_02_Ctrl:[(u'RS_Right', u'RT_IndexFK', 2),(u'RS_Right', u'RT_MiddleFK', 2),(u'RS_Right', u'RT_RingFK', 2),(u'RS_Right', u'RT_PinkyFK', 2)],
        uiWindow.FK_R_Finger_03_Ctrl:[(u'RS_Right', u'RT_IndexFK', 3),(u'RS_Right', u'RT_MiddleFK', 3),(u'RS_Right', u'RT_RingFK', 3),(u'RS_Right', u'RT_PinkyFK', 3)],
        # The *_FourFinger buttons deliberately exclude the knuckle-0 joints
        # (kept commented out in the original table).
        uiWindow.FK_R_FourFinger_Ctrl:[#(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_PinkyFK', 0),
            (u'RS_Right', u'RT_IndexFK', 1),(u'RS_Right', u'RT_MiddleFK', 1),(u'RS_Right', u'RT_RingFK', 1),(u'RS_Right', u'RT_PinkyFK', 1),
            (u'RS_Right', u'RT_IndexFK', 2),(u'RS_Right', u'RT_MiddleFK', 2),(u'RS_Right', u'RT_RingFK', 2),(u'RS_Right', u'RT_PinkyFK', 2),
            (u'RS_Right', u'RT_IndexFK', 3),(u'RS_Right', u'RT_MiddleFK', 3),(u'RS_Right', u'RT_RingFK', 3),(u'RS_Right', u'RT_PinkyFK', 3)],
        uiWindow.FK_L_FourFinger_Ctrl:[#(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_PinkyFK', 0),
            (u'RS_Left', u'RT_IndexFK', 1),(u'RS_Left', u'RT_MiddleFK', 1),(u'RS_Left', u'RT_RingFK', 1),(u'RS_Left', u'RT_PinkyFK', 1),
            (u'RS_Left', u'RT_IndexFK', 2),(u'RS_Left', u'RT_MiddleFK', 2),(u'RS_Left', u'RT_RingFK', 2),(u'RS_Left', u'RT_PinkyFK', 2),
            (u'RS_Left', u'RT_IndexFK', 3),(u'RS_Left', u'RT_MiddleFK', 3),(u'RS_Left', u'RT_RingFK', 3),(u'RS_Left', u'RT_PinkyFK', 3)],
    }
def getCurrentSelecterName(uiWindow):
    """Return the character name currently shown in the selector combo box.

    The "None" placeholder is returned as-is; callers treat it as "no
    character selected".  (The original branched on "None" but both paths
    returned the same value because its warning dialog was commented out,
    so the dead branch is removed here.)
    """
    return uiWindow.characterSelector.currentText()
# def selectControl(CurrWidget):
# data = ButtonToControl[CurrWidget]
# name = getCurrentSelecterName(uiWindow)
# currentRig = RigObjectHelper.getRigControlObject(name, data[0], data[1], data[2])
# cmds.select(currentRig)
# uiWindow.FK_C_Neck_0_Ctrl.clicked.connect(lambda *arg:selectControl(uiWindow.FK_C_Neck_0_Ctrl))
# uiWindow.FK_C_Neck_1_Ctrl.clicked.connect(lambda *arg:selectControl(uiWindow.FK_C_Neck_1_Ctrl))
# for key in ButtonToControl:
# def function(*arg):
# print arg
# print key
# keyy = key
# lambda *arg:selectControl(keyy)
# # selectControl(key)
# # key.clicked.connect(lambda *arg:selectControl(key))
# key.clicked.connect(function)
# print key
# for key, value in ButtonToControl.items():
# print (key, ' value : ', value)
# valuee = value[2]
# aaa = callbackobj(key)
# print aaa.key
# def function(*arg):
# # print key , valuee
# print aaa.key
# lambda *arg:selectControl(aaa.key)
# key.clicked.connect(lambda *arg:selectControl(aaa.key))
def setMutiSelectedButtonCallback(uiWindow):
    """Wire every multi-select button to a multiCallbackobj handler."""
    for button in MultiVerticalButtonToControl:
        handler = multiCallbackobj(button, uiWindow)
        # The partial also keeps a reference to the handler object alive
        # for the lifetime of the Qt connection.
        button.clicked.connect(functools.partial(handler.functor, handler))
def setButtonCallback(uiWindow):
    """Wire every single-control picker button to a callbackobj handler."""
    for button in ButtonToControl:
        handler = callbackobj(button, uiWindow)
        # The partial also keeps a reference to the handler object alive
        # for the lifetime of the Qt connection.
        button.clicked.connect(functools.partial(handler.functor, handler))
def setResetToModelBasePose(uiWindow):
    """Wire the Reset Pose button: zero rotation/translation on every
    control of the currently selected character.
    """
    print "setResetToModelBasePose"
    def resetToBasePoseCallback():
        print resetToBasePoseCallback
        name = uiWindow.characterSelector.currentText()
        controllrigs = getAllControllRigByName(name)
        # NOTE(review): assumes controllrigs[0] is a dict of controls -
        # confirm against RigObjectHelper.listRigCharacterControls.  Also
        # note this raises when the selector is on "None" (controllrigs is
        # None in that case).
        for key , rig in controllrigs[0].items():
            RigObjectHelper.setOneRigRotAndTrans(rig ,0,0,0,0,0,0)
    uiWindow.ResetPoseButton.clicked.connect(resetToBasePoseCallback)
def setIKFKShow(uiWindow):
    """Wire the four IK/FK blend sliders and the eight IK<->FK sync buttons.

    Slider integer values are divided by 10 before being pushed onto the
    character's IK/FK switch attribute.
    """
    def modifyValue(SliderValue):
        # Qt sliders deliver ints; scale down by 10 for the switch attribute.
        return SliderValue / 10.0
    def setLeftLegIKFK(SliderValue):
        RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sLeftLegIKFKSwitch)
    def setLeftArmIKFK(SliderValue):
        RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sLeftArmIKFKSwitch)
    def setRightLegIKFK(SliderValue):
        RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sRightLegIKFKSwitch)
    def setRightArmIKFK(SliderValue):
        RigObjectHelper.hideCharacterIKFKByName(uiWindow.characterSelector.currentText() , modifyValue(SliderValue) , SERigNaming.sRightArmIKFKSwitch)
    # One slider per limb drives the matching IK/FK switch.
    uiWindow.IKFKLLeg.valueChanged.connect(setLeftLegIKFK)
    uiWindow.IKFKLHand.valueChanged.connect(setLeftArmIKFK)
    uiWindow.IKFKRLeg.valueChanged.connect(setRightLegIKFK)
    uiWindow.IKFKRHand.valueChanged.connect(setRightArmIKFK)
    # Per-limb pose sync buttons (IK->FK and FK->IK) on the rig components.
    uiWindow.IKToFKRHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('R_Arm_RigComponentsGrp'))
    uiWindow.FKToIKRHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('R_Arm_RigComponentsGrp'))
    uiWindow.IKToFKLHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('L_Arm_RigComponentsGrp'))
    uiWindow.FKToIKLHandBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('L_Arm_RigComponentsGrp'))
    uiWindow.IKToFKRLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('R_Leg_RigComponentsGrp'))
    uiWindow.FKToIKRLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('R_Leg_RigComponentsGrp'))
    uiWindow.IKToFKLLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncIKToFK('L_Leg_RigComponentsGrp'))
    uiWindow.FKToIKLLegBtn.clicked.connect(lambda *arg:SERigBipedLimbComponent.RigHumanLimb.syncFKToIK('L_Leg_RigComponentsGrp'))
class callbackobj():
    """Click handler for a single picker button.

    Resolves the button's (side, type, index) key through the global
    ButtonToControl table and selects the matching rig control of the
    currently selected character.
    """
    def __init__(self, key ,uiWindow):
        # key: the button widget this handler is bound to.
        self.key = key
        # uiWindow: picker window, used to read the character selector.
        self.uiWindow = uiWindow
    def functor(self , *arg):
        # *arg swallows the "checked" state Qt passes to clicked handlers.
        print self.key
        self.selectControl()
    def selectControl(self):
        data = ButtonToControl[self.key]
        name = getCurrentSelecterName(self.uiWindow)
        currentRig = RigObjectHelper.getRigControlObject(name, data[0], data[1], data[2])
        print currentRig
        cmds.select(currentRig)
class multiCallbackobj():
    """Click handler for a multi-select button: selects every rig control
    mapped to that button in MultiVerticalButtonToControl.
    """

    def __init__(self, key, uiWindow):
        # key: the button widget this handler is bound to.
        self.key = key
        # uiWindow: picker window, used to read the character selector.
        self.uiWindow = uiWindow

    def functor(self, *arg):
        # *arg swallows the "checked" state Qt passes to clicked handlers.
        self.selectControl()

    def selectControl(self):
        characterName = getCurrentSelecterName(self.uiWindow)
        cmds.select(clear=True)
        for side, ctrlType, index in MultiVerticalButtonToControl[self.key]:
            control = RigObjectHelper.getRigControlObject(
                characterName, side, ctrlType, index)
            cmds.select(control, add=True)
| en | 0.30625 | #"E:/Users/admin/Documents/GitHub/SERiggingTools/UI/LoadRiggingUI.ui" todo: stop open more than one window #QtCore.QResource.addSearchPath("E:/Users/admin/Documents/GitHub/SERiggingTools/UI") #QtCore.QResource.registerResource("E:/Users/admin/Documents/GitHub/SERiggingTools/UI/UIResource.qrc") {(u'RS_Center', u'RT_Global', 1): u'Global_01_Ctrl', (u'RS_Center', u'RT_Global', 0): u'Main_Ctrl', (u'RS_Center', u'RT_Global', 2): u'Global_02_Ctrl'} # global MainControllToButton # MainControllToButton = {(u'RS_Center', u'RT_Global', 1): uiWindow.Global_01_Ctrl, (u'RS_Center', u'RT_Global', 0): uiWindow.Main_Ctrl, (u'RS_Center', u'RT_Global', 2): uiWindow.Global_02_Ctrl} # global ButtonToMainControll # ButtonToMainControll = {uiWindow.Global_01_Ctrl:(u'RS_Center', u'RT_Global', 1), # uiWindow.Main_Ctrl:(u'RS_Center', u'RT_Global', 0), # uiWindow.Global_02_Ctrl:(u'RS_Center', u'RT_Global', 2)} #(u'RS_Right', u'RT_IndexFK', 0),(u'RS_Right', u'RT_MiddleFK', 0),(u'RS_Right', u'RT_RingFK', 0),(u'RS_Right', u'RT_PinkyFK', 0), #(u'RS_Left', u'RT_IndexFK', 0),(u'RS_Left', u'RT_MiddleFK', 0),(u'RS_Left', u'RT_RingFK', 0),(u'RS_Left', u'RT_PinkyFK', 0), #cmds.confirmDialog(title = "Wrong Character", icon = "critical", message = "Please select a vaild Character name in comboBox" ) # def selectControl(CurrWidget): # data = ButtonToControl[CurrWidget] # name = getCurrentSelecterName(uiWindow) # currentRig = RigObjectHelper.getRigControlObject(name, data[0], data[1], data[2]) # cmds.select(currentRig) # uiWindow.FK_C_Neck_0_Ctrl.clicked.connect(lambda *arg:selectControl(uiWindow.FK_C_Neck_0_Ctrl)) # uiWindow.FK_C_Neck_1_Ctrl.clicked.connect(lambda *arg:selectControl(uiWindow.FK_C_Neck_1_Ctrl)) # for key in ButtonToControl: # def function(*arg): # print arg # print key # keyy = key # lambda *arg:selectControl(keyy) # # selectControl(key) # # key.clicked.connect(lambda *arg:selectControl(key)) # key.clicked.connect(function) # print key # for key, value in 
ButtonToControl.items(): # print (key, ' value : ', value) # valuee = value[2] # aaa = callbackobj(key) # print aaa.key # def function(*arg): # # print key , valuee # print aaa.key # lambda *arg:selectControl(aaa.key) # key.clicked.connect(lambda *arg:selectControl(aaa.key)) # print self.key | 2.040735 | 2 |
karbor/common/notification.py | thisismsreddy/karbor | 0 | 6624494 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The notification module."""
import abc
import copy
import traceback
from karbor import exception
from karbor.i18n import _
from karbor import rpc
from oslo_config import cfg
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class EndNotification(object):
    """Context manager that emits an "end" notification on normal exit and
    an "error" notification (with the formatted traceback) on exception.

    Extra keyword arguments are merged into the notification payload.
    Exceptions are reported but never suppressed.
    """

    @property
    def _notifier(self):
        """Returns the notification for Karbor API."""
        return (self.context.notification)

    def __init__(self, context, **kwargs):
        self.context = context
        self.context.notification.payload.update(kwargs)

    def __enter__(self):
        return self.context.notification

    def __exit__(self, etype, value, tb):
        # Returning None lets any exception propagate to the caller.
        if etype:
            message = str(value)
            # Renamed from "exception" so the local no longer shadows the
            # imported karbor.exception module.
            exc_info = traceback.format_exception(etype, value, tb)
            self._notifier.notify_exc_info(message, exc_info)
        else:
            self._notifier.notify_end()
class StartNotification(EndNotification):
    """EndNotification variant that also emits a "start" notification when
    the context is entered.
    """

    def __enter__(self):
        notification = self.context.notification
        notification.notify_start()
        return super(StartNotification, self).__enter__()
class KaborAPINotification(object):
    """The traits of karbor.* notifications.

    Base class for all Karbor API notifications.  Subclasses declare an
    event type plus the required/optional payload traits for the start,
    end and error phases; _notify() validates the payload and emits the
    message through the oslo.messaging notifier as
    "karbor.<event_type>.<qualifier>".
    """

    event_type_format = 'karbor.%s.%s'
    # Optional class-level hook invoked after every emitted notification.
    notify_callback = None

    @classmethod
    def register_notify_callback(cls, callback):
        """Callback when a notification is sent out."""
        cls.notify_callback = callback

    # NOTE(review): the class uses @abc.abstractmethod but does not use the
    # ABCMeta metaclass, so abstractness is not actually enforced here.
    @abc.abstractmethod
    def event_type(self):
        'Returns the event type (like "create" for karbor.create.start)'
        pass

    @abc.abstractmethod
    def required_start_traits(self):
        'Returns list of required traits for start notification'
        pass

    def optional_start_traits(self):
        'Returns list of optional traits for start notification'
        return []

    def required_end_traits(self):
        'Returns list of required traits for end notification'
        return []

    def optional_end_traits(self):
        'Returns list of optional traits for end notification'
        return []

    def required_error_traits(self):
        'Returns list of required traits for error notification'
        return ['message', 'exception']

    def optional_error_traits(self):
        'Returns list of optional traits for error notification'
        return ['id']

    def required_base_traits(self):
        # Traits every notification must carry regardless of phase.
        return ['tenant_id', 'client_ip', 'request_id']

    @property
    def request_id(self):
        return self.payload['request_id']

    def __init__(self, context, **kwargs):
        """Build the notification payload.

        Accepts either a ``request`` keyword (from which the base traits
        are derived) or an explicit ``request_id``; raises InvalidInput
        when neither is supplied.  Remaining kwargs join the payload.
        """
        self.context = context
        self.needs_end_notification = True
        self.payload = {}
        if 'request' in kwargs:
            request = kwargs.pop('request')
            self.payload.update({
                'request_id': context.request_id,
                'client_ip': request.remote_addr,
                'tenant_id': context.tenant,
            })
        elif 'request_id' not in kwargs:
            raise exception.InvalidInput(
                reason="Notification must include 'request' property")
        self.payload.update(kwargs)

    def serialize(self, context):
        return self.payload

    def validate(self, required_traits):
        # Raise InvalidInput unless every required trait is in the payload.
        required_keys = set(required_traits)
        provided_keys = set(self.payload.keys())
        if not required_keys.issubset(provided_keys):
            msg = (_("The following required keys not defined for"
                     " notification %(name)s: %(keys)s")
                   % {'name': self.__class__.__name__,
                      'keys': list(required_keys - provided_keys)})
            raise exception.InvalidInput(reason=msg)

    def _notify(self, event_qualifier, required_traits, optional_traits,
                **kwargs):
        self.payload.update(kwargs)
        self.validate(self.required_base_traits() + required_traits)
        available_values = self.serialize(self.context)
        payload = {k: available_values[k]
                   for k in self.required_base_traits() + required_traits}
        for k in optional_traits:
            if k in available_values:
                payload[k] = available_values[k]
        qualified_event_type = (KaborAPINotification.event_type_format
                                % (self.event_type(), event_qualifier))
        LOG.debug('Sending event: %(event_type)s, %(payload)s',
                  {'event_type': qualified_event_type, 'payload': payload})
        # Work on a copy so the non-serializable notification attribute can
        # be stripped before handing the context to the notifier.
        context = copy.copy(self.context)
        del context.notification
        notifier = rpc.get_notifier()
        # NOTE(review): the filtered `payload` built above is only logged;
        # the full self.payload is what is actually emitted.
        notifier.info(context, qualified_event_type, self.payload)
        if self.notify_callback:
            self.notify_callback(event_qualifier)

    def notify_start(self, **kwargs):
        self._notify('start', self.required_start_traits(),
                     self.optional_start_traits(), **kwargs)

    def notify_end(self, **kwargs):
        # End notifications can be disabled via needs_end_notification.
        if self.needs_end_notification:
            self._notify('end', self.required_end_traits(),
                         self.optional_end_traits(), **kwargs)

    def notify_exc_info(self, message, exception):
        self.payload.update({
            'message': message,
            'exception': exception
        })
        self._notify('error', self.required_error_traits(),
                     self.optional_error_traits())
class KarborPlanCreate(KaborAPINotification):
    """Notification for plan creation ("karbor.plan_create.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'plan_create'

    def required_start_traits(self):
        return ['name']

    def optional_start_traits(self):
        return ['parameters']

    def required_end_traits(self):
        return ['name']
class KarborPlanDelete(KaborAPINotification):
    """Notification for plan deletion ("karbor.plan_delete.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'plan_delete'

    def required_start_traits(self):
        return ['id']
class KarborPlanUpdate(KaborAPINotification):
    """Notification for plan update ("karbor.plan_update.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'plan_update'

    def required_start_traits(self):
        return ['id']
class KarborTriggerDelete(KaborAPINotification):
    """Notification for trigger deletion ("karbor.trigger_delete.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'trigger_delete'

    def required_start_traits(self):
        return ['id']
class KarborTriggerCreate(KaborAPINotification):
    """Notification for trigger creation ("karbor.trigger_create.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'trigger_create'

    def required_start_traits(self):
        return ['name']

    def optional_start_traits(self):
        return ['parameters']

    def required_end_traits(self):
        return ['name']
class KarborTriggerUpdate(KaborAPINotification):
    """Notification for trigger update ("karbor.trigger_update.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'trigger_update'

    def required_start_traits(self):
        return ['id']
class KarborRestoreDelete(KaborAPINotification):
    """Notification for restore deletion ("karbor.restore_delete.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'restore_delete'

    def required_start_traits(self):
        return ['id']
class KarborRestoreCreate(KaborAPINotification):
    """Notification for restore creation ("karbor.restore_create.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'restore_create'

    def required_start_traits(self):
        return ['parameters']

    def required_end_traits(self):
        return ['parameters']
class KarborCheckpointCreate(KaborAPINotification):
    """Notification for checkpoint creation ("karbor.checkpoint_create.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'checkpoint_create'

    def required_start_traits(self):
        return ['checkpoint_properties']

    def required_end_traits(self):
        return ['checkpoint_properties']
class KarborCheckpointDelete(KaborAPINotification):
    """Notification for checkpoint deletion ("karbor.checkpoint_delete.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'checkpoint_delete'

    def required_start_traits(self):
        return ['checkpoint_id']

    def required_end_traits(self):
        return ['checkpoint_id']
class KarborCheckpointUpdate(KaborAPINotification):
    """Notification for checkpoint update ("karbor.checkpoint_update.*")."""

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'checkpoint_update'

    def required_start_traits(self):
        return ['checkpoint_id']
class KarborScheduledOpsCreate(KaborAPINotification):
    """Notification for scheduled-operation creation
    ("karbor.scheduled_operation_create.*").
    """

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'scheduled_operation_create'

    def required_start_traits(self):
        return ['operation_obj']

    def required_end_traits(self):
        return ['operation_obj']
class KarborScheduledOpsDelete(KaborAPINotification):
    """Notification for scheduled-operation deletion
    ("karbor.scheduled_operation_delete.*").
    """

    # NOTE: @abc.abstractmethod was dropped from these concrete overrides;
    # it would forbid instantiation under a real ABCMeta metaclass.
    def event_type(self):
        return 'scheduled_operation_delete'

    def required_start_traits(self):
        return ['id']

    def required_end_traits(self):
        return ['id']
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The notification module."""
import abc
import copy
import traceback
from karbor import exception
from karbor.i18n import _
from karbor import rpc
from oslo_config import cfg
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class EndNotification(object):
    """Context manager that emits an "end" notification on normal exit and
    an "error" notification (with the formatted traceback) on exception.

    Extra keyword arguments are merged into the notification payload.
    Exceptions are reported but never suppressed.
    """

    @property
    def _notifier(self):
        """Returns the notification for Karbor API."""
        return (self.context.notification)

    def __init__(self, context, **kwargs):
        self.context = context
        self.context.notification.payload.update(kwargs)

    def __enter__(self):
        return self.context.notification

    def __exit__(self, etype, value, tb):
        # Returning None lets any exception propagate to the caller.
        if etype:
            message = str(value)
            # Renamed from "exception" so the local no longer shadows the
            # imported karbor.exception module.
            exc_info = traceback.format_exception(etype, value, tb)
            self._notifier.notify_exc_info(message, exc_info)
        else:
            self._notifier.notify_end()
class StartNotification(EndNotification):
    """EndNotification variant that also emits a "start" notification when
    the context is entered.
    """

    def __enter__(self):
        notification = self.context.notification
        notification.notify_start()
        return super(StartNotification, self).__enter__()
class KaborAPINotification(object):
    """The traits of karbor.* notifications.

    Base class for all Karbor API notifications.  Subclasses declare an
    event type plus the required/optional payload traits for the start,
    end and error phases; _notify() validates the payload and emits the
    message through the oslo.messaging notifier as
    "karbor.<event_type>.<qualifier>".
    """

    event_type_format = 'karbor.%s.%s'
    # Optional class-level hook invoked after every emitted notification.
    notify_callback = None

    @classmethod
    def register_notify_callback(cls, callback):
        """Callback when a notification is sent out."""
        cls.notify_callback = callback

    # NOTE(review): the class uses @abc.abstractmethod but does not use the
    # ABCMeta metaclass, so abstractness is not actually enforced here.
    @abc.abstractmethod
    def event_type(self):
        'Returns the event type (like "create" for karbor.create.start)'
        pass

    @abc.abstractmethod
    def required_start_traits(self):
        'Returns list of required traits for start notification'
        pass

    def optional_start_traits(self):
        'Returns list of optional traits for start notification'
        return []

    def required_end_traits(self):
        'Returns list of required traits for end notification'
        return []

    def optional_end_traits(self):
        'Returns list of optional traits for end notification'
        return []

    def required_error_traits(self):
        'Returns list of required traits for error notification'
        return ['message', 'exception']

    def optional_error_traits(self):
        'Returns list of optional traits for error notification'
        return ['id']

    def required_base_traits(self):
        # Traits every notification must carry regardless of phase.
        return ['tenant_id', 'client_ip', 'request_id']

    @property
    def request_id(self):
        return self.payload['request_id']

    def __init__(self, context, **kwargs):
        """Build the notification payload.

        Accepts either a ``request`` keyword (from which the base traits
        are derived) or an explicit ``request_id``; raises InvalidInput
        when neither is supplied.  Remaining kwargs join the payload.
        """
        self.context = context
        self.needs_end_notification = True
        self.payload = {}
        if 'request' in kwargs:
            request = kwargs.pop('request')
            self.payload.update({
                'request_id': context.request_id,
                'client_ip': request.remote_addr,
                'tenant_id': context.tenant,
            })
        elif 'request_id' not in kwargs:
            raise exception.InvalidInput(
                reason="Notification must include 'request' property")
        self.payload.update(kwargs)

    def serialize(self, context):
        return self.payload

    def validate(self, required_traits):
        # Raise InvalidInput unless every required trait is in the payload.
        required_keys = set(required_traits)
        provided_keys = set(self.payload.keys())
        if not required_keys.issubset(provided_keys):
            msg = (_("The following required keys not defined for"
                     " notification %(name)s: %(keys)s")
                   % {'name': self.__class__.__name__,
                      'keys': list(required_keys - provided_keys)})
            raise exception.InvalidInput(reason=msg)

    def _notify(self, event_qualifier, required_traits, optional_traits,
                **kwargs):
        self.payload.update(kwargs)
        self.validate(self.required_base_traits() + required_traits)
        available_values = self.serialize(self.context)
        payload = {k: available_values[k]
                   for k in self.required_base_traits() + required_traits}
        for k in optional_traits:
            if k in available_values:
                payload[k] = available_values[k]
        qualified_event_type = (KaborAPINotification.event_type_format
                                % (self.event_type(), event_qualifier))
        LOG.debug('Sending event: %(event_type)s, %(payload)s',
                  {'event_type': qualified_event_type, 'payload': payload})
        # Work on a copy so the non-serializable notification attribute can
        # be stripped before handing the context to the notifier.
        context = copy.copy(self.context)
        del context.notification
        notifier = rpc.get_notifier()
        # NOTE(review): the filtered `payload` built above is only logged;
        # the full self.payload is what is actually emitted.
        notifier.info(context, qualified_event_type, self.payload)
        if self.notify_callback:
            self.notify_callback(event_qualifier)

    def notify_start(self, **kwargs):
        self._notify('start', self.required_start_traits(),
                     self.optional_start_traits(), **kwargs)

    def notify_end(self, **kwargs):
        # End notifications can be disabled via needs_end_notification.
        if self.needs_end_notification:
            self._notify('end', self.required_end_traits(),
                         self.optional_end_traits(), **kwargs)

    def notify_exc_info(self, message, exception):
        self.payload.update({
            'message': message,
            'exception': exception
        })
        self._notify('error', self.required_error_traits(),
                     self.optional_error_traits())
class KarborPlanCreate(KaborAPINotification):
    """Trait definitions for karbor.plan_create.* notifications."""
    # NOTE(review): the @abc.abstractmethod decorators below sit on concrete
    # overrides and the hierarchy does not use ABCMeta, so they have no
    # runtime effect -- presumably a copy/paste artifact; confirm upstream.
    @abc.abstractmethod
    def event_type(self):
        return 'plan_create'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['name']

    def optional_start_traits(self):
        return ['parameters']

    def required_end_traits(self):
        return ['name']


class KarborPlanDelete(KaborAPINotification):
    """Trait definitions for karbor.plan_delete.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'plan_delete'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['id']


class KarborPlanUpdate(KaborAPINotification):
    """Trait definitions for karbor.plan_update.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'plan_update'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['id']
class KarborTriggerDelete(KaborAPINotification):
    """Trait definitions for karbor.trigger_delete.* notifications."""
    # NOTE(review): @abc.abstractmethod on concrete overrides is inert here
    # (no ABCMeta in the hierarchy); kept byte-identical, see base class.
    @abc.abstractmethod
    def event_type(self):
        return 'trigger_delete'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['id']


class KarborTriggerCreate(KaborAPINotification):
    """Trait definitions for karbor.trigger_create.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'trigger_create'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['name']

    def optional_start_traits(self):
        return ['parameters']

    def required_end_traits(self):
        return ['name']


class KarborTriggerUpdate(KaborAPINotification):
    """Trait definitions for karbor.trigger_update.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'trigger_update'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['id']
class KarborRestoreDelete(KaborAPINotification):
    """Trait definitions for karbor.restore_delete.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'restore_delete'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['id']


class KarborRestoreCreate(KaborAPINotification):
    """Trait definitions for karbor.restore_create.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'restore_create'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['parameters']

    def required_end_traits(self):
        return ['parameters']
class KarborCheckpointCreate(KaborAPINotification):
    """Trait definitions for karbor.checkpoint_create.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'checkpoint_create'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['checkpoint_properties']

    def required_end_traits(self):
        return ['checkpoint_properties']


class KarborCheckpointDelete(KaborAPINotification):
    """Trait definitions for karbor.checkpoint_delete.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'checkpoint_delete'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['checkpoint_id']

    def required_end_traits(self):
        return ['checkpoint_id']


class KarborCheckpointUpdate(KaborAPINotification):
    """Trait definitions for karbor.checkpoint_update.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'checkpoint_update'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['checkpoint_id']
class KarborScheduledOpsCreate(KaborAPINotification):
    """Trait definitions for karbor.scheduled_operation_create.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'scheduled_operation_create'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['operation_obj']

    def required_end_traits(self):
        return ['operation_obj']


class KarborScheduledOpsDelete(KaborAPINotification):
    """Trait definitions for karbor.scheduled_operation_delete.* notifications."""
    @abc.abstractmethod
    def event_type(self):
        return 'scheduled_operation_delete'

    @abc.abstractmethod
    def required_start_traits(self):
        return ['id']

    def required_end_traits(self):
        return ['id']
| en | 0.836366 | # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. The notification module. Returns the notification for Karbor API. The traits of karbor.* notifications. Callback when a notification is sent out. | 1.860254 | 2 |
LoggerK.py | FabioChiodini/LoggerK | 0 | 6624495 | import logging
import logging
import logstash
import os
import sys
import socket
import time

# The logstash endpoint must be configured via environment; fail fast otherwise.
if 'LOG_HOST' not in os.environ:
    raise Exception("LOG_HOST NOT DEFINED")
host = os.environ['LOG_HOST']

test_logger = logging.getLogger('python-logstash-logger')
test_logger.setLevel(logging.INFO)
test_logger.addHandler(logstash.LogstashHandler(host, 5000, version=1))
# test_logger.addHandler(logstash.TCPLogstashHandler(host, 5000, version=1))

test_logger.error('python-logstash: test logstash error message.')
test_logger.info('python-logstash: test logstash info message.')
test_logger.warning('python-logstash: test logstash warning message.')

# add extra fields to the logstash message
# BUG FIX: both entries below used the key 'test_string', so the Python
# version was silently overwritten by the host name; use distinct keys.
extra = {
    'test_string': 'python version: ' + repr(sys.version_info),
    'test_host': 'Host ' + repr(socket.gethostname()),
    'test_boolean': True,
    'test_dict': {'a': 1, 'b': 'c'},
    'test_float': 1.23,
    'test_integer': 123,
    'test_list': [1, 2, '3'],
}
test_logger.info('python-logstash: test extra fields', extra=extra)

# Emit a heartbeat message once a minute, forever.
j = 0
while True:
    # BUG FIX: the original sent the literal string ' $j ' -- Python does not
    # interpolate shell-style variables; format the counter explicitly.
    extra = {
        'test_string': 'Iteration %d' % j
    }
    test_logger.info('python-logstash: test extra fields', extra=extra)
    j += 1
    time.sleep(60)
| import logging
import logstash
import os
import sys
import socket
import time
if 'LOG_HOST' not in os.environ:
raise(Exception("LOG_HOST NOT DEFINED"))
host = os.environ['LOG_HOST']
test_logger = logging.getLogger('python-logstash-logger')
test_logger.setLevel(logging.INFO)
test_logger.addHandler(logstash.LogstashHandler(host, 5000, version=1))
# test_logger.addHandler(logstash.TCPLogstashHandler(host, 5000, version=1))
test_logger.error('python-logstash: test logstash error message.')
test_logger.info('python-logstash: test logstash info message.')
test_logger.warning('python-logstash: test logstash warning message.')
# add extra field to logstash message
extra = {
'test_string': 'python version: ' + repr(sys.version_info),
'test_string': 'Host ' + repr(socket.gethostname()),
'test_boolean': True,
'test_dict': {'a': 1, 'b': 'c'},
'test_float': 1.23,
'test_integer': 123,
'test_list': [1, 2, '3'],
}
test_logger.info('python-logstash: test extra fields', extra=extra)
j=0
while True:
# Code executed here
extra = {
'test_string': 'Iteration ' + ' $j '
}
test_logger.info('python-logstash: test extra fields', extra=extra)
j += 1
#print j
time.sleep(60)
| en | 0.33552 | # test_logger.addHandler(logstash.TCPLogstashHandler(host, 5000, version=1)) # add extra field to logstash message # Code executed here #print j | 2.346286 | 2 |
src/dash/services.py | zenly/visibility | 4 | 6624496 | '''
Copyright 2013 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import re
import logging
import inspect
import sys
import datetime
from django.db import models
logger = logging.getLogger(__name__)
def updateObject(obj, request):
    '''Updates the given object with data from the request

    Every non-None attribute found on the request-built twin of ``obj`` is
    copied onto ``obj``.  The object is NOT saved.

    Args:
        obj: model instance to update in place
        request: HTTP request carrying <ClassName>.<field>=<value> pairs
    Returns:
        the updated (unsaved) object
    '''
    module = sys.modules[obj.__module__]
    # Get all objects from the request - overkill, but we have this code, so lets use it
    logger.debug('Working with module: ' + str(module))
    objects = requestToObjects(module, request)
    # Get the new build object, populated with our new build data
    buildAdditional = findObject(objects, obj.__class__)
    # .items() instead of .iteritems() -- works on both Python 2 and 3.
    for name, value in vars(buildAdditional).items():
        if value is None:
            continue
        logger.debug('Adding element from request; Name: ' + str(name) + ' Value: ' + str(value))
        if (name == 'start' or name == 'end') and value.lower() == 'now':
            # BUG FIX: the computed timestamp was previously assigned to a
            # local ``q`` that was never used, so the literal string 'now'
            # ended up stored on the object.  Replace the value instead.
            value = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            logger.debug('Setting DateTimeField to ' + str(value))
        setattr(obj, name, value)
    return obj
def requestToObjects(module,request):
    '''
    Creates objects from the given request.
    For example ?Mouse.name=tiny&Mouse.size=2&Cat.name=fred&Cat.claws=sharp
    Creates these objects:
    Mouse(name=tiny,size=2)
    Cat(name=fred,claws=sharp)
    Sub classes - that's up to you to assemble as we do not know the hierarchy you want
    Does not save the objects
    Args:
        module:
            module in which the models are such as dash.models
        request:
            HTTP request
    Returns:
        list of objects
    '''
    # NOTE(review): request.REQUEST and dict.iteritems() imply old Django on
    # Python 2; this function will not run unchanged on Python 3.
    objects = list()
    for p,q in request.REQUEST.iteritems():
        # Parameter names look like "ClassName.field" (class part may be dotted).
        words = re.split('\.',p)
        logger.debug(words)
        field = words.pop()
        className = '.'.join(words)
        logger.debug('Found in request: Class Name: ' + className + ' Field: '+ field + ' Value: ' + q)
        klass = getattr(module,className)
        '''Check to see if we have an object of that type and create if needed'''
        # Only one instance per class is kept: later fields for the same class
        # are set on the already-created object.
        myObj = False
        for object in objects:
            if isinstance(object,klass):
                myObj = object
                break
        if (myObj == False):
            '''Need to be careful about creating objects - some are lists of unique items'''
            myObj = klass()
            logger.debug('Created object of type '+myObj.__class__.__name__)
            objects.append(myObj)
        # Special-case "now" for start/end fields: substitute the current
        # timestamp in the format the DateTimeField expects.
        if (field == 'start' or field == 'end') and q.lower() == 'now':
            q = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            logger.debug('Setting DateTimeField to '+str(q))
        '''Set the field value to the object'''
        setattr(myObj,field,q)
    return objects
def findObject(objects, klass):
    '''Find an object of the given class in the list of objects or create a new one'''
    # Return the first existing instance of klass, if any.
    for candidate in objects:
        if isinstance(candidate, klass):
            logger.debug('Found an existing object of type ' + candidate.__class__.__name__)
            return candidate
    # Nothing matched: fall back to a freshly constructed instance.
    created = klass()
    logger.debug('Created new object of type ' + created.__class__.__name__)
    return created
| '''
Copyright 2013 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import re
import logging
import inspect
import sys
import datetime
from django.db import models
logger = logging.getLogger(__name__)
def updateObject(obj,request):
'''Updates the given object with data from the request'''
module = sys.modules[obj.__module__]
'''Get all objects from the request - overkill, but we have this code, so lets use it'''
logger.debug('Working with module: '+str(module))
objects = requestToObjects(module,request)
'''Get the new build object, populated with our new build data'''
buildAdditional = findObject(objects,obj.__class__)
for name,value in vars(buildAdditional).iteritems():
if value == None: continue
logger.debug('Adding element from request; Name: '+str(name) + ' Value: '+str(value))
if (name == 'start' or name == 'end') and value.lower() == 'now':
q = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
logger.debug('Setting DateTimeField to '+str(q))
setattr(obj,name,value)
return obj
def requestToObjects(module,request):
'''
Creates objects from the given request.
For example ?Mouse.name=tiny&Mouse.size=2&Cat.name=fred&Cat.claws=sharp
Creates these objects:
Mouse(name=tiny,size=2)
Cat(name=fred,claws=sharp)
Sub classes - that's up to you to assemble as we do not know the hierarchy you want
Does not save the objects
Args:
module:
module in which the models are such as dash.models
request:
HTTP request
Returns:
list of objects
'''
objects = list()
for p,q in request.REQUEST.iteritems():
words = re.split('\.',p)
logger.debug(words)
field = words.pop()
className = '.'.join(words)
logger.debug('Found in request: Class Name: ' + className + ' Field: '+ field + ' Value: ' + q)
klass = getattr(module,className)
'''Check to see if we have an object of that type and create if needed'''
myObj = False
for object in objects:
if isinstance(object,klass):
myObj = object
break
if (myObj == False):
'''Need to be careful about creating objects - some are lists of unique items'''
myObj = klass()
logger.debug('Created object of type '+myObj.__class__.__name__)
objects.append(myObj)
if (field == 'start' or field == 'end') and q.lower() == 'now':
q = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
logger.debug('Setting DateTimeField to '+str(q))
'''Set the field value to the object'''
setattr(myObj,field,q)
return objects
def findObject(objects,klass):
'''Find an object of the given class in the list of objects or create a new one'''
for object in objects:
if (isinstance(object,klass)):
logger.debug('Found an existing object of type '+object.__class__.__name__)
return object
newObj = klass()
logger.debug('Created new object of type '+newObj.__class__.__name__)
return newObj
| en | 0.837625 | Copyright 2013 <NAME> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Updates the given object with data from the request Get all objects from the request - overkill, but we have this code, so lets use it Get the new build object, populated with our new build data Creates objects from the given request. For example ?Mouse.name=tiny&Mouse.size=2&Cat.name=fred&Cat.claws=sharp Creates these objects: Mouse(name=tiny,size=2) Cat(name=fred,claws=sharp) Sub classes - that's up to you to assemble as we do not know the hierarchy you want Does not save the objects Args: module: module in which the models are such as dash.models request: HTTP request Returns: list of objects Check to see if we have an object of that type and create if needed Need to be careful about creating objects - some are lists of unique items Set the field value to the object Find an object of the given class in the list of objects or create a new one | 2.221939 | 2 |
interlib/run.py | D-Programming-Man/Pseudo | 0 | 6624497 | <reponame>D-Programming-Man/Pseudo
from interlib.utility import print_line
help_manual = " Note: \n" \
" You must define a function first with the \"Define\" keyword to use this keyword \n" \
" The syntax is very similar to the \"Define\" keyword \n" \
" Make sure this statement doesn't end with a colon (:) \n" \
" \n" \
" Syntax: \n" \
" Run function <function name> [with parameter[s] (number/string/list/table) (<variable>/<number>/<string>)[, (number/string/list/table) (<variable>/<number>/<string>][, ...]]: \n" \
" \n" \
" Examples: \n" \
" Run function hello_world \n" \
" Run function display_number with parameter number x \n" \
" Run function add_display with parameters number op1, number op2 \n" \
" Run function color_name with parameter string name \n" \
" Run function print_this_text with parameter string \"Text\" \n" \
" - Note that you can only pass strings that contains no spaces inbetween words \n" \
'''
Handler that allows running of functions
Requires:
. line_numb = The line number we are looking at in the Pseudo code file
. line_list = The line we took from the Pseudo code file, but in list format
. all_variables = The dictionary that contains all of the variables for that Psudo code file
. indent = The indentation to correctly format the line of python code
. py_file = The output python code file we are writing to
Returns:
. A boolean. This is used in the interpreter.py file to make sure that the parsing of the code executes correctly.
Otherwise the parsing stops and ends it prematurely.
'''
def handler(interpret_state):
    """Translate a Pseudo ``Run function ...`` statement into a Python call line.

    Reads the tokenized statement from ``interpret_state`` and, on success,
    appends the generated call (e.g. ``"    foo(a, b)\\n"``) to
    ``interpret_state["py_lines"]``.  Returns True on success, False after
    printing an error for an unknown function, a malformed statement, or an
    unrecognized parameter data type.
    """
    line_numb = interpret_state["line_numb"]
    line_list = interpret_state["line_list"]
    all_variables = interpret_state["all_variables"]
    total_indent = interpret_state["pseudo_indent"] + interpret_state["indent"]
    py_lines = interpret_state["py_lines"]

    pos = 2
    pad = total_indent * " "
    func_name = line_list[pos]

    # The function must have been declared with "Define" beforehand.
    if func_name not in all_variables:
        print("Error on line " + str(line_numb) + ". Must run an existing function.")
        print_line(line_numb, line_list)
        return False

    args = []
    if len(line_list) >= 4:
        pos += 1
        if line_list[pos] != "with":
            print("Error on line " + str(line_numb) + ". Improper function running format.")
            print_line(line_numb, line_list)
            return False
        pos += 1
        if line_list[pos] not in ("parameter", "parameters"):
            print("Error on line " + str(line_numb) + ". Improper function running format.")
            print_line(line_numb, line_list)
            return False
        pos += 1
        recognized = {"number": "int", "string": "str", "list": "list", "table": "dict"}
        # Parameters come in "<type> <value>" pairs until the end of the line.
        while pos < len(line_list):
            if line_list[pos] not in recognized:
                print("Error on line " + str(line_numb) + ". Improper data type.")
                print_line(line_numb, line_list)
                return False
            pos += 1
            token = line_list[pos]
            # Keep a trailing space after commas so the joined call reads "f(a, b)".
            if token[-1] == ",":
                args.append(token + " ")
            else:
                args.append(token)
            pos += 1

    py_lines.append(pad + func_name + "(" + "".join(args) + ")\n")
    return True
| from interlib.utility import print_line
help_manual = " Note: \n" \
" You must define a function first with the \"Define\" keyword to use this keyword \n" \
" The syntax is very similar to the \"Define\" keyword \n" \
" Make sure this statement doesn't end with a colon (:) \n" \
" \n" \
" Syntax: \n" \
" Run function <function name> [with parameter[s] (number/string/list/table) (<variable>/<number>/<string>)[, (number/string/list/table) (<variable>/<number>/<string>][, ...]]: \n" \
" \n" \
" Examples: \n" \
" Run function hello_world \n" \
" Run function display_number with parameter number x \n" \
" Run function add_display with parameters number op1, number op2 \n" \
" Run function color_name with parameter string name \n" \
" Run function print_this_text with parameter string \"Text\" \n" \
" - Note that you can only pass strings that contains no spaces inbetween words \n" \
'''
Handler that allows running of functions
Requires:
. line_numb = The line number we are looking at in the Pseudo code file
. line_list = The line we took from the Pseudo code file, but in list format
. all_variables = The dictionary that contains all of the variables for that Psudo code file
. indent = The indentation to correctly format the line of python code
. py_file = The output python code file we are writing to
Returns:
. A boolean. This is used in the interpreter.py file to make sure that the parsing of the code executes correctly.
Otherwise the parsing stops and ends it prematurely.
'''
def handler(interpret_state):
line_numb = interpret_state["line_numb"]
line_list = interpret_state["line_list"]
all_variables = interpret_state["all_variables"]
indent = interpret_state["pseudo_indent"] + interpret_state["indent"]
py_lines = interpret_state["py_lines"]
word_pos = 2
indent_space = indent * " "
func_name = line_list[word_pos]
if func_name not in all_variables:
print("Error on line " + str(line_numb) + ". Must run an existing function.")
print_line(line_numb, line_list)
return False
param_names = []
if len(line_list) >= 4:
word_pos += 1
if line_list[word_pos] == "with":
word_pos += 1
else:
print("Error on line " + str(line_numb) + ". Improper function running format.")
print_line(line_numb, line_list)
return False
if line_list[word_pos] == "parameter" or line_list[word_pos] == "parameters":
word_pos += 1
else:
print("Error on line " + str(line_numb) + ". Improper function running format.")
print_line(line_numb, line_list)
return False
data_types = {"number": "int", "string": "str", "list": "list", "table": "dict"}
while len(line_list) > word_pos:
if line_list[word_pos] in data_types:
word_pos += 1
else:
print("Error on line " + str(line_numb) + ". Improper data type.")
print_line(line_numb, line_list)
return False
if line_list[word_pos][-1] == ",":
param_names.append(line_list[word_pos] + " ")
else:
param_names.append(line_list[word_pos])
word_pos += 1
py_line = indent_space + func_name + "("
for name in param_names:
py_line += name
py_line += ")\n"
py_lines.append(py_line)
return True | en | 0.872802 | Handler that allows running of functions Requires: . line_numb = The line number we are looking at in the Pseudo code file . line_list = The line we took from the Pseudo code file, but in list format . all_variables = The dictionary that contains all of the variables for that Psudo code file . indent = The indentation to correctly format the line of python code . py_file = The output python code file we are writing to Returns: . A boolean. This is used in the interpreter.py file to make sure that the parsing of the code executes correctly. Otherwise the parsing stops and ends it prematurely. | 4.159113 | 4 |
src/pretix/base/services/quotas.py | bhaettasch/pretix | 0 | 6624498 | import sys
from collections import Counter, defaultdict
from datetime import timedelta
from itertools import zip_longest
from django.conf import settings
from django.db import OperationalError, models
from django.db.models import (
Case, Count, F, Func, Max, OuterRef, Q, Subquery, Sum, Value, When,
)
from django.dispatch import receiver
from django.utils.timezone import now
from django_scopes import scopes_disabled
from pretix.base.models import (
CartPosition, Checkin, Event, LogEntry, Order, OrderPosition, Quota,
Voucher, WaitingListEntry,
)
from pretix.celery_app import app
from ...helpers.periodic import minimum_interval
from ..signals import periodic_task, quota_availability
class QuotaAvailability:
"""
This special object allows so compute the availability of multiple quotas, even across events, and inspect their
results. The maximum number of SQL queries is constant and not dependent on the number of quotas.
Usage example::
qa = QuotaAvailability()
qa.queue(quota1, quota2, …)
qa.compute()
print(qa.results)
Properties you can access after computation.
* results (dict mapping quotas to availability tuples)
* count_paid_orders (dict mapping quotas to ints)
* count_paid_orders (dict mapping quotas to ints)
* count_pending_orders (dict mapping quotas to ints)
* count_vouchers (dict mapping quotas to ints)
* count_waitinglist (dict mapping quotas to ints)
* count_cart (dict mapping quotas to ints)
"""
    def __init__(self, count_waitinglist=True, ignore_closed=False, full_results=False, early_out=True):
        """
        Initialize a new quota availability calculator

        :param count_waitinglist: If ``True`` (default), the waiting list is considered. If ``False``, it is ignored.
        :param ignore_closed: Quotas have a ``closed`` state that always makes the quota return as sold out. If you set
                              ``ignore_closed`` to ``True``, we will ignore this completely. Default is ``False``.
        :param full_results: Usually, the computation is as efficient as possible, i.e. if after counting the sold
                             orders we already see that the quota is sold out, we're not going to count the carts,
                             since it does not matter. This also means that you will not be able to get that number from
                             ``.count_cart``. If you want all parts to be calculated (i.e. because you want to show
                             statistics to the user), pass ``full_results`` and we'll skip that optimization.
        :param early_out: Usually, if a quota is ``closed`` or if its ``size`` is ``None`` (i.e. unlimited), we will
                          not need database access to determine the availability and return it right away. If you set
                          this to ``False``, however, we will *still* count the number of orders, which is required to
                          keep the database-level quota cache up to date so backend overviews render quickly. If you
                          do not care about keeping the cache up to date, you can set this to ``False`` for further
                          performance improvements.
        """
        # Quotas registered via queue(), consumed (and cleared) by compute().
        self._queue = []
        self._count_waitinglist = count_waitinglist
        self._ignore_closed = ignore_closed
        self._full_results = full_results
        # Reverse maps filled during _compute(): item/variation id -> affected quotas.
        self._item_to_quotas = defaultdict(list)
        self._var_to_quotas = defaultdict(list)
        self._early_out = early_out
        # Cache of quota primary key -> quota instance.
        self._quota_objects = {}
        # Final availability per quota: {quota: (availability_state, number_left)}.
        self.results = {}
        # Per-quota counters, populated during computation (see class docstring).
        self.count_paid_orders = defaultdict(int)
        self.count_pending_orders = defaultdict(int)
        self.count_exited_orders = defaultdict(int)
        self.count_vouchers = defaultdict(int)
        self.count_waitinglist = defaultdict(int)
        self.count_cart = defaultdict(int)
        # Quota -> configured size (None means unlimited).
        self.sizes = {}
def queue(self, *quota):
self._queue += quota
    def compute(self, now_dt=None):
        """Run the availability computation for all queued quotas.

        Fills ``self.results`` and the ``count_*`` dictionaries, lets plugins
        override individual results via the ``quota_availability`` signal,
        closes sold-out quotas configured with ``close_when_sold_out``, and
        refreshes the database-level availability cache.
        """
        now_dt = now_dt or now()
        quotas = list(set(self._queue))          # deduplicated working set
        quotas_original = list(self._queue)      # keep duplicates for signal dispatch
        self._queue.clear()
        if not quotas:
            return
        self._compute(quotas, now_dt)
        # Allow plugins to replace the computed result per quota.
        for q in quotas_original:
            for recv, resp in quota_availability.send(sender=q.event, quota=q, result=self.results[q],
                                                      count_waitinglist=self.count_waitinglist):
                self.results[q] = resp
        self._close(quotas)
        try:
            self._write_cache(quotas, now_dt)
        except OperationalError as e:
            # Ignore deadlocks when multiple threads try to write to the cache
            if 'deadlock' not in str(e).lower():
                raise e
    def _write_cache(self, quotas, now_dt):
        """Persist computed availability into the quota rows' cache columns.

        Only rewrites a quota's cache when the waiting list was counted and
        the cache is cold, the availability got *worse*, or the paid-order
        count was never cached.
        """
        # We used to also delete item_quota_cache:* from the event cache here, but as the cache
        # gets more complex, this does not seem worth it. The cache is only present for up to
        # 5 seconds to prevent high peaks, and a 5-second delay in availability is usually
        # tolerable
        update = []
        for q in quotas:
            rewrite_cache = self._count_waitinglist and (
                not q.cache_is_hot(now_dt) or self.results[q][0] > q.cached_availability_state
                or q.cached_availability_paid_orders is None
            )
            if rewrite_cache:
                q.cached_availability_state = self.results[q][0]
                q.cached_availability_number = self.results[q][1]
                q.cached_availability_time = now_dt
                if q in self.count_paid_orders:
                    q.cached_availability_paid_orders = self.count_paid_orders[q]
                update.append(q)
        if update:
            # Always write through the default (primary) database connection.
            Quota.objects.using('default').bulk_update(update, [
                'cached_availability_state', 'cached_availability_number', 'cached_availability_time',
                'cached_availability_paid_orders'
            ], batch_size=50)
    def _close(self, quotas):
        """Close quotas that just sold out and are configured to auto-close."""
        for q in quotas:
            # AVAILABILITY_ORDERED or worse means no units are left.
            if self.results[q][0] <= Quota.AVAILABILITY_ORDERED and q.close_when_sold_out and not q.closed:
                q.closed = True
                q.save(update_fields=['closed'])
                q.log_action('pretix.event.quota.closed')
    def _compute(self, quotas, now_dt):
        """Core computation: count all usages and derive availability.

        Processes the cheap/decisive sources first (orders, then vouchers,
        carts, waiting list) and, unless ``full_results`` is set, drops
        quotas from the working set as soon as their result is known.
        """
        # Quotas we want to look at now
        self.sizes.update({q: q.size for q in quotas})
        # Some helpful caches
        self._quota_objects.update({q.pk: q for q in quotas})
        # Compute result for closed or unlimited
        self._compute_early_outs(quotas)
        if self._early_out:
            if not self._full_results:
                quotas = [q for q in quotas if q not in self.results]
                if not quotas:
                    return
        # sys.maxsize stands in for "unlimited" so the arithmetic below works.
        size_left = Counter({q: (sys.maxsize if s is None else s) for q, s in self.sizes.items()})
        for q in quotas:
            self.count_paid_orders[q] = 0
            self.count_pending_orders[q] = 0
            self.count_cart[q] = 0
            self.count_vouchers[q] = 0
            self.count_waitinglist[q] = 0
        # Fetch which quotas belong to which items and variations
        q_items = Quota.items.through.objects.filter(
            quota_id__in=[q.pk for q in quotas]
        ).values('quota_id', 'item_id')
        for m in q_items:
            self._item_to_quotas[m['item_id']].append(self._quota_objects[m['quota_id']])
        q_vars = Quota.variations.through.objects.filter(
            quota_id__in=[q.pk for q in quotas]
        ).values('quota_id', 'itemvariation_id')
        for m in q_vars:
            self._var_to_quotas[m['itemvariation_id']].append(self._quota_objects[m['quota_id']])
        self._compute_orders(quotas, q_items, q_vars, size_left)
        if not self._full_results:
            quotas = [q for q in quotas if q not in self.results]
            if not quotas:
                return
        self._compute_vouchers(quotas, q_items, q_vars, size_left, now_dt)
        if not self._full_results:
            quotas = [q for q in quotas if q not in self.results]
            if not quotas:
                return
        self._compute_carts(quotas, q_items, q_vars, size_left, now_dt)
        if self._count_waitinglist:
            if not self._full_results:
                quotas = [q for q in quotas if q not in self.results]
                if not quotas:
                    return
            self._compute_waitinglist(quotas, q_items, q_vars, size_left)
        # Anything still undecided has capacity left.
        for q in quotas:
            if q not in self.results:
                if size_left[q] > 0:
                    self.results[q] = Quota.AVAILABILITY_OK, size_left[q]
                else:
                    raise ValueError("inconclusive quota")
    def _compute_orders(self, quotas, q_items, q_vars, size_left):
        """Subtract paid and pending order positions from ``size_left``.

        If any quota releases capacity after check-out ("release_after_exit"),
        each position is annotated with whether its latest scan on a relevant
        check-in list was an exit; exited positions are counted separately and
        do not consume quota.
        """
        events = {q.event_id for q in quotas}
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        op_lookup = OrderPosition.objects.filter(
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING],
            order__event_id__in=events,
        ).filter(seq).filter(
            Q(
                Q(variation_id__isnull=True) &
                Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
            ) | Q(
                variation_id__in={i['itemvariation_id'] for i in q_vars if self._quota_objects[i['quota_id']] in quotas})
        ).order_by()
        if any(q.release_after_exit for q in quotas):
            # Latest entry/exit scan per position, only on lists where
            # re-entry after exit is forbidden.
            op_lookup = op_lookup.annotate(
                last_entry=Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        list__allow_entry_after_exit=False,
                        type=Checkin.TYPE_ENTRY,
                    ).order_by().values('position_id').annotate(
                        m=Max('datetime')
                    ).values('m')
                ),
                last_exit=Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        list__allow_entry_after_exit=False,
                        type=Checkin.TYPE_EXIT,
                    ).order_by().values('position_id').annotate(
                        m=Max('datetime')
                    ).values('m')
                ),
            ).annotate(
                is_exited=Case(
                    When(
                        Q(last_entry__isnull=False) & Q(last_exit__isnull=False) & Q(last_exit__gt=F('last_entry')),
                        then=Value(1, output_field=models.IntegerField()),
                    ),
                    default=Value(0, output_field=models.IntegerField()),
                    output_field=models.IntegerField(),
                ),
            )
        else:
            op_lookup = op_lookup.annotate(
                is_exited=Value(0, output_field=models.IntegerField())
            )
        op_lookup = op_lookup.values('order__status', 'item_id', 'subevent_id', 'variation_id', 'is_exited').annotate(c=Count('*'))
        for line in sorted(op_lookup, key=lambda li: (int(li['is_exited']), li['order__status']), reverse=True):  # p before n, exited before non-exited
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            else:
                qs = self._item_to_quotas[line['item_id']]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    if line['order__status'] == Order.STATUS_PAID:
                        self.count_paid_orders[q] += line['c']
                        q.cached_availability_paid_orders = self.count_paid_orders[q]
                    elif line['order__status'] == Order.STATUS_PENDING:
                        self.count_pending_orders[q] += line['c']
                    if q.release_after_exit and line['is_exited']:
                        # Exited positions free up their quota slot.
                        self.count_exited_orders[q] += line['c']
                    else:
                        size_left[q] -= line['c']
                        if size_left[q] <= 0 and q not in self.results:
                            # Paid positions make it GONE, pending only ORDERED.
                            if line['order__status'] == Order.STATUS_PAID:
                                self.results[q] = Quota.AVAILABILITY_GONE, 0
                            else:
                                self.results[q] = Quota.AVAILABILITY_ORDERED, 0
    def _compute_vouchers(self, quotas, q_items, q_vars, size_left, now_dt):
        """
        Subtract blocking, unexpired vouchers from ``size_left``.

        Aggregates the remaining redemptions (``max_usages - redeemed``) of all
        blocking vouchers attached to the queued quotas' items, variations, or
        the quotas themselves, and marks a quota as ORDERED once its capacity
        is used up.
        """
        events = {q.event_id for q in quotas}
        # SQLite has no GREATEST(); its variadic MAX() is the equivalent.
        if 'sqlite3' in settings.DATABASES['default']['ENGINE']:
            func = 'MAX'
        else:  # NOQA
            func = 'GREATEST'
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        v_lookup = Voucher.objects.filter(
            Q(event_id__in=events) &
            seq &
            Q(block_quota=True) &
            Q(Q(valid_until__isnull=True) | Q(valid_until__gte=now_dt)) &
            Q(
                Q(
                    Q(variation_id__isnull=True) &
                    Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
                ) | Q(
                    variation_id__in={i['itemvariation_id'] for i in q_vars if
                                      self._quota_objects[i['quota_id']] in quotas}
                ) | Q(
                    quota_id__in=[q.pk for q in quotas]
                )
            )
        ).order_by().values('subevent_id', 'item_id', 'quota_id', 'variation_id').annotate(
            # GREATEST/MAX(..., 0) clamps at zero so over-redeemed vouchers
            # never add capacity back.
            free=Sum(Func(F('max_usages') - F('redeemed'), 0, function=func))
        )
        for line in v_lookup:
            # Map the aggregated row back to the affected quotas: a variation
            # link wins over an item link, which wins over a direct quota link.
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            elif line['item_id']:
                qs = self._item_to_quotas[line['item_id']]
            else:
                qs = [self._quota_objects[line['quota_id']]]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    size_left[q] -= line['free']
                    self.count_vouchers[q] += line['free']
                    if q not in self.results and size_left[q] <= 0:
                        self.results[q] = Quota.AVAILABILITY_ORDERED, 0
    def _compute_carts(self, quotas, q_items, q_vars, size_left, now_dt):
        """
        Subtract unexpired cart positions from ``size_left``.

        Cart positions backed by a still-valid blocking voucher are excluded
        here (presumably because the voucher pass already accounts for that
        capacity). A quota exhausted by carts is marked RESERVED, since the
        capacity may free up again when the carts expire.
        """
        events = {q.event_id for q in quotas}
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        cart_lookup = CartPosition.objects.filter(
            Q(event_id__in=events) &
            seq &
            Q(expires__gte=now_dt) &
            Q(
                Q(voucher__isnull=True)
                | Q(voucher__block_quota=False)
                | Q(voucher__valid_until__lt=now_dt)
            ) &
            Q(
                Q(
                    Q(variation_id__isnull=True) &
                    Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
                ) | Q(
                    variation_id__in={i['itemvariation_id'] for i in q_vars if self._quota_objects[i['quota_id']] in quotas}
                )
            )
        ).order_by().values('item_id', 'subevent_id', 'variation_id').annotate(c=Count('*'))
        for line in cart_lookup:
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            else:
                qs = self._item_to_quotas[line['item_id']]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    size_left[q] -= line['c']
                    self.count_cart[q] += line['c']
                    if q not in self.results and size_left[q] <= 0:
                        self.results[q] = Quota.AVAILABILITY_RESERVED, 0
    def _compute_waitinglist(self, quotas, q_items, q_vars, size_left):
        """
        Subtract waiting list entries without an assigned voucher from
        ``size_left``; entries that already received a voucher are covered by
        the voucher pass. Marks exhausted quotas as ORDERED.
        """
        events = {q.event_id for q in quotas}
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        w_lookup = WaitingListEntry.objects.filter(
            Q(event_id__in=events) &
            Q(voucher__isnull=True) &
            seq &
            Q(
                Q(
                    Q(variation_id__isnull=True) &
                    Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
                ) | Q(variation_id__in={i['itemvariation_id'] for i in q_vars if
                                        self._quota_objects[i['quota_id']] in quotas})
            )
        ).order_by().values('item_id', 'subevent_id', 'variation_id').annotate(c=Count('*'))
        for line in w_lookup:
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            else:
                qs = self._item_to_quotas[line['item_id']]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    size_left[q] -= line['c']
                    self.count_waitinglist[q] += line['c']
                    if q not in self.results and size_left[q] <= 0:
                        self.results[q] = Quota.AVAILABILITY_ORDERED, 0
def _compute_early_outs(self, quotas):
for q in quotas:
if q.closed and not self._ignore_closed:
self.results[q] = Quota.AVAILABILITY_ORDERED, 0
elif q.size is None:
self.results[q] = Quota.AVAILABILITY_OK, None
elif q.size == 0:
self.results[q] = Quota.AVAILABILITY_GONE, 0
@receiver(signal=periodic_task)
@minimum_interval(minutes_after_success=60)
def build_all_quota_caches(sender, **kwargs):
    """Periodic-task hook: trigger the quota cache refresh at most once per hour."""
    # .apply() executes the Celery task synchronously in this process.
    refresh_quota_caches.apply()
def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks or blocks.

    grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
    """
    # The same iterator repeated n times yields consecutive n-tuples.
    chunks = [iter(iterable)] * n
    return zip_longest(*chunks, fillvalue=fillvalue)
@app.task
@scopes_disabled()
def refresh_quota_caches():
    """
    Recompute stale quota caches for recently active events.

    "Active" means the event has log entries within the last 7 days. A quota is
    refreshed when its cache is missing, older than the event's last activity,
    or older than two hours; quotas belonging to long-past subevents (ended or
    started more than 14 days ago) are skipped.
    """
    # Active events
    active = LogEntry.objects.using(settings.DATABASE_REPLICA).filter(
        datetime__gt=now() - timedelta(days=7)
    ).order_by().values('event').annotate(
        last_activity=Max('datetime')
    )
    for a in active:
        try:
            e = Event.objects.using(settings.DATABASE_REPLICA).get(pk=a['event'])
        except Event.DoesNotExist:
            continue
        quotas = e.quotas.filter(
            Q(cached_availability_time__isnull=True) |
            Q(cached_availability_time__lt=a['last_activity']) |
            Q(cached_availability_time__lt=now() - timedelta(hours=2))
        ).filter(
            Q(subevent__isnull=True) |
            Q(subevent__date_to__isnull=False, subevent__date_to__gte=now() - timedelta(days=14)) |
            Q(subevent__date_from__gte=now() - timedelta(days=14))
        )
        # Batches of 100; early_out=False forces counting even for closed or
        # unlimited quotas so the database-level cache gets refreshed.
        for qs in grouper(quotas, 100, None):
            qa = QuotaAvailability(early_out=False)
            qa.queue(*[q for q in qs if q is not None])
            qa.compute()
| import sys
from collections import Counter, defaultdict
from datetime import timedelta
from itertools import zip_longest
from django.conf import settings
from django.db import OperationalError, models
from django.db.models import (
Case, Count, F, Func, Max, OuterRef, Q, Subquery, Sum, Value, When,
)
from django.dispatch import receiver
from django.utils.timezone import now
from django_scopes import scopes_disabled
from pretix.base.models import (
CartPosition, Checkin, Event, LogEntry, Order, OrderPosition, Quota,
Voucher, WaitingListEntry,
)
from pretix.celery_app import app
from ...helpers.periodic import minimum_interval
from ..signals import periodic_task, quota_availability
class QuotaAvailability:
    """
    This special object allows us to compute the availability of multiple
    quotas, even across events, and inspect the results. The maximum number of
    SQL queries is constant and not dependent on the number of quotas.

    Usage example::

        qa = QuotaAvailability()
        qa.queue(quota1, quota2, …)
        qa.compute()
        print(qa.results)

    Properties you can access after computation:

    * results (dict mapping quotas to availability tuples)
    * count_paid_orders (dict mapping quotas to ints)
    * count_pending_orders (dict mapping quotas to ints)
    * count_exited_orders (dict mapping quotas to ints)
    * count_vouchers (dict mapping quotas to ints)
    * count_waitinglist (dict mapping quotas to ints)
    * count_cart (dict mapping quotas to ints)
    """
    def __init__(self, count_waitinglist=True, ignore_closed=False, full_results=False, early_out=True):
        """
        Initialize a new quota availability calculator.

        :param count_waitinglist: If ``True`` (default), the waiting list is
            considered. If ``False``, it is ignored.
        :param ignore_closed: Quotas have a ``closed`` state that always makes
            the quota return as sold out. If you set ``ignore_closed`` to
            ``True``, we will ignore this completely. Default is ``False``.
        :param full_results: Usually, the computation is as efficient as
            possible, i.e. if after counting the sold orders we already see
            that the quota is sold out, we're not going to count the carts,
            since it does not matter. This also means that you will not be able
            to get that number from ``.count_cart``. If you want all parts to
            be calculated (i.e. because you want to show statistics to the
            user), pass ``full_results`` and we'll skip that optimization.
        :param early_out: Usually, if a quota is ``closed`` or its ``size`` is
            ``None`` (i.e. unlimited), we do not need database access to
            determine the availability and return it right away. If you set
            this to ``False``, however, we will *still* count the number of
            orders, which is required to keep the database-level quota cache up
            to date (this is what the periodic refresh task does).
        """
        self._queue = []
        self._count_waitinglist = count_waitinglist
        self._ignore_closed = ignore_closed
        self._full_results = full_results
        # pk-indexed lookup tables filled lazily in _compute().
        self._item_to_quotas = defaultdict(list)
        self._var_to_quotas = defaultdict(list)
        self._early_out = early_out
        self._quota_objects = {}
        self.results = {}
        # Per-quota tallies filled in by the individual counting passes.
        self.count_paid_orders = defaultdict(int)
        self.count_pending_orders = defaultdict(int)
        self.count_exited_orders = defaultdict(int)
        self.count_vouchers = defaultdict(int)
        self.count_waitinglist = defaultdict(int)
        self.count_cart = defaultdict(int)
        self.sizes = {}
    def queue(self, *quota):
        """Add quotas to be processed by the next :meth:`compute` call."""
        self._queue += quota
    def compute(self, now_dt=None):
        """
        Compute availability for all queued quotas into ``self.results``.

        Also lets plugins override individual results through the
        ``quota_availability`` signal, closes quotas configured to close when
        sold out, and persists the results to the database cache.
        """
        now_dt = now_dt or now()
        # Deduplicate for computation, but fire the signal once per queued quota.
        quotas = list(set(self._queue))
        quotas_original = list(self._queue)
        self._queue.clear()
        if not quotas:
            return
        self._compute(quotas, now_dt)
        for q in quotas_original:
            for recv, resp in quota_availability.send(sender=q.event, quota=q, result=self.results[q],
                                                      count_waitinglist=self.count_waitinglist):
                self.results[q] = resp
        self._close(quotas)
        try:
            self._write_cache(quotas, now_dt)
        except OperationalError as e:
            # Ignore deadlocks when multiple threads try to write to the cache
            if 'deadlock' not in str(e).lower():
                raise e
    def _write_cache(self, quotas, now_dt):
        """
        Persist computed availability to the per-quota cache columns.

        Only rewrites rows whose cache is cold, whose availability state code
        increased, or which have no paid-order count yet — and only when the
        waiting list was included in this run (a run without it would cache
        incomplete numbers).
        """
        # We used to also delete item_quota_cache:* from the event cache here, but as the cache
        # gets more complex, this does not seem worth it. The cache is only present for up to
        # 5 seconds to prevent high peaks, and a 5-second delay in availability is usually
        # tolerable
        update = []
        for q in quotas:
            rewrite_cache = self._count_waitinglist and (
                not q.cache_is_hot(now_dt) or self.results[q][0] > q.cached_availability_state
                or q.cached_availability_paid_orders is None
            )
            if rewrite_cache:
                q.cached_availability_state = self.results[q][0]
                q.cached_availability_number = self.results[q][1]
                q.cached_availability_time = now_dt
                if q in self.count_paid_orders:
                    q.cached_availability_paid_orders = self.count_paid_orders[q]
                update.append(q)
        if update:
            Quota.objects.using('default').bulk_update(update, [
                'cached_availability_state', 'cached_availability_number', 'cached_availability_time',
                'cached_availability_paid_orders'
            ], batch_size=50)
    def _close(self, quotas):
        """Close (and log) sold-out quotas that are configured with
        ``close_when_sold_out``; state codes at or below ORDERED count as sold out."""
        for q in quotas:
            if self.results[q][0] <= Quota.AVAILABILITY_ORDERED and q.close_when_sold_out and not q.closed:
                q.closed = True
                q.save(update_fields=['closed'])
                q.log_action('pretix.event.quota.closed')
    def _compute(self, quotas, now_dt):
        """
        Run the counting passes in order — orders, vouchers, carts, waiting
        list — narrowing the working set after each pass to the quotas whose
        result is still undecided (unless ``full_results`` is set).
        """
        # Quotas we want to look at now
        self.sizes.update({q: q.size for q in quotas})
        # Some helpful caches
        self._quota_objects.update({q.pk: q for q in quotas})
        # Compute result for closed or unlimited
        self._compute_early_outs(quotas)
        if self._early_out:
            if not self._full_results:
                quotas = [q for q in quotas if q not in self.results]
                if not quotas:
                    return
        # Unlimited quotas get a practically infinite budget.
        size_left = Counter({q: (sys.maxsize if s is None else s) for q, s in self.sizes.items()})
        for q in quotas:
            self.count_paid_orders[q] = 0
            self.count_pending_orders[q] = 0
            self.count_cart[q] = 0
            self.count_vouchers[q] = 0
            self.count_waitinglist[q] = 0
        # Fetch which quotas belong to which items and variations
        q_items = Quota.items.through.objects.filter(
            quota_id__in=[q.pk for q in quotas]
        ).values('quota_id', 'item_id')
        for m in q_items:
            self._item_to_quotas[m['item_id']].append(self._quota_objects[m['quota_id']])
        q_vars = Quota.variations.through.objects.filter(
            quota_id__in=[q.pk for q in quotas]
        ).values('quota_id', 'itemvariation_id')
        for m in q_vars:
            self._var_to_quotas[m['itemvariation_id']].append(self._quota_objects[m['quota_id']])
        self._compute_orders(quotas, q_items, q_vars, size_left)
        if not self._full_results:
            quotas = [q for q in quotas if q not in self.results]
            if not quotas:
                return
        self._compute_vouchers(quotas, q_items, q_vars, size_left, now_dt)
        if not self._full_results:
            quotas = [q for q in quotas if q not in self.results]
            if not quotas:
                return
        self._compute_carts(quotas, q_items, q_vars, size_left, now_dt)
        if self._count_waitinglist:
            if not self._full_results:
                quotas = [q for q in quotas if q not in self.results]
                if not quotas:
                    return
            self._compute_waitinglist(quotas, q_items, q_vars, size_left)
        for q in quotas:
            if q not in self.results:
                if size_left[q] > 0:
                    self.results[q] = Quota.AVAILABILITY_OK, size_left[q]
                else:
                    # Every pass should have produced a result once the size is
                    # exhausted; reaching this point indicates a logic error.
                    raise ValueError("inconclusive quota")
    def _compute_orders(self, quotas, q_items, q_vars, size_left):
        """
        Subtract paid and pending order positions from ``size_left``.

        Paid positions are processed before pending ones so that a sell-out
        caused by paid orders is reported as GONE rather than ORDERED. When a
        quota has ``release_after_exit``, positions whose latest check-in is an
        exit are tallied in ``count_exited_orders`` instead of consuming
        capacity.
        """
        events = {q.event_id for q in quotas}
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        op_lookup = OrderPosition.objects.filter(
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING],
            order__event_id__in=events,
        ).filter(seq).filter(
            Q(
                Q(variation_id__isnull=True) &
                Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
            ) | Q(
                variation_id__in={i['itemvariation_id'] for i in q_vars if self._quota_objects[i['quota_id']] in quotas})
        ).order_by()
        if any(q.release_after_exit for q in quotas):
            # Determine per position whether the attendee currently counts as
            # "exited": last exit scan strictly after the last entry scan, on
            # lists that do not allow re-entry after exit.
            op_lookup = op_lookup.annotate(
                last_entry=Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        list__allow_entry_after_exit=False,
                        type=Checkin.TYPE_ENTRY,
                    ).order_by().values('position_id').annotate(
                        m=Max('datetime')
                    ).values('m')
                ),
                last_exit=Subquery(
                    Checkin.objects.filter(
                        position_id=OuterRef('pk'),
                        list__allow_entry_after_exit=False,
                        type=Checkin.TYPE_EXIT,
                    ).order_by().values('position_id').annotate(
                        m=Max('datetime')
                    ).values('m')
                ),
            ).annotate(
                is_exited=Case(
                    When(
                        Q(last_entry__isnull=False) & Q(last_exit__isnull=False) & Q(last_exit__gt=F('last_entry')),
                        then=Value(1, output_field=models.IntegerField()),
                    ),
                    default=Value(0, output_field=models.IntegerField()),
                    output_field=models.IntegerField(),
                ),
            )
        else:
            op_lookup = op_lookup.annotate(
                is_exited=Value(0, output_field=models.IntegerField())
            )
        op_lookup = op_lookup.values('order__status', 'item_id', 'subevent_id', 'variation_id', 'is_exited').annotate(c=Count('*'))
        for line in sorted(op_lookup, key=lambda li: (int(li['is_exited']), li['order__status']), reverse=True):  # p before n, exited before non-exited
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            else:
                qs = self._item_to_quotas[line['item_id']]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    if line['order__status'] == Order.STATUS_PAID:
                        self.count_paid_orders[q] += line['c']
                        # Keep the in-memory attribute in sync so _write_cache
                        # can persist it later.
                        q.cached_availability_paid_orders = self.count_paid_orders[q]
                    elif line['order__status'] == Order.STATUS_PENDING:
                        self.count_pending_orders[q] += line['c']
                    if q.release_after_exit and line['is_exited']:
                        self.count_exited_orders[q] += line['c']
                    else:
                        size_left[q] -= line['c']
                        if size_left[q] <= 0 and q not in self.results:
                            if line['order__status'] == Order.STATUS_PAID:
                                self.results[q] = Quota.AVAILABILITY_GONE, 0
                            else:
                                self.results[q] = Quota.AVAILABILITY_ORDERED, 0
    def _compute_vouchers(self, quotas, q_items, q_vars, size_left, now_dt):
        """
        Subtract blocking, unexpired vouchers from ``size_left``; marks a quota
        as ORDERED once its capacity is used up.
        """
        events = {q.event_id for q in quotas}
        # SQLite has no GREATEST(); its variadic MAX() is the equivalent.
        if 'sqlite3' in settings.DATABASES['default']['ENGINE']:
            func = 'MAX'
        else:  # NOQA
            func = 'GREATEST'
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        v_lookup = Voucher.objects.filter(
            Q(event_id__in=events) &
            seq &
            Q(block_quota=True) &
            Q(Q(valid_until__isnull=True) | Q(valid_until__gte=now_dt)) &
            Q(
                Q(
                    Q(variation_id__isnull=True) &
                    Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
                ) | Q(
                    variation_id__in={i['itemvariation_id'] for i in q_vars if
                                      self._quota_objects[i['quota_id']] in quotas}
                ) | Q(
                    quota_id__in=[q.pk for q in quotas]
                )
            )
        ).order_by().values('subevent_id', 'item_id', 'quota_id', 'variation_id').annotate(
            # GREATEST/MAX(..., 0) clamps at zero so over-redeemed vouchers
            # never add capacity back.
            free=Sum(Func(F('max_usages') - F('redeemed'), 0, function=func))
        )
        for line in v_lookup:
            # Variation link wins over item link, which wins over a direct
            # quota link.
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            elif line['item_id']:
                qs = self._item_to_quotas[line['item_id']]
            else:
                qs = [self._quota_objects[line['quota_id']]]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    size_left[q] -= line['free']
                    self.count_vouchers[q] += line['free']
                    if q not in self.results and size_left[q] <= 0:
                        self.results[q] = Quota.AVAILABILITY_ORDERED, 0
    def _compute_carts(self, quotas, q_items, q_vars, size_left, now_dt):
        """
        Subtract unexpired cart positions from ``size_left``. Positions backed
        by a still-valid blocking voucher are excluded (that capacity is
        handled by the voucher pass). Exhausted quotas are marked RESERVED.
        """
        events = {q.event_id for q in quotas}
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        cart_lookup = CartPosition.objects.filter(
            Q(event_id__in=events) &
            seq &
            Q(expires__gte=now_dt) &
            Q(
                Q(voucher__isnull=True)
                | Q(voucher__block_quota=False)
                | Q(voucher__valid_until__lt=now_dt)
            ) &
            Q(
                Q(
                    Q(variation_id__isnull=True) &
                    Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
                ) | Q(
                    variation_id__in={i['itemvariation_id'] for i in q_vars if self._quota_objects[i['quota_id']] in quotas}
                )
            )
        ).order_by().values('item_id', 'subevent_id', 'variation_id').annotate(c=Count('*'))
        for line in cart_lookup:
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            else:
                qs = self._item_to_quotas[line['item_id']]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    size_left[q] -= line['c']
                    self.count_cart[q] += line['c']
                    if q not in self.results and size_left[q] <= 0:
                        self.results[q] = Quota.AVAILABILITY_RESERVED, 0
    def _compute_waitinglist(self, quotas, q_items, q_vars, size_left):
        """
        Subtract waiting list entries without an assigned voucher from
        ``size_left``; marks exhausted quotas as ORDERED.
        """
        events = {q.event_id for q in quotas}
        subevents = {q.subevent_id for q in quotas}
        seq = Q(subevent_id__in=subevents)
        if None in subevents:
            seq |= Q(subevent__isnull=True)
        w_lookup = WaitingListEntry.objects.filter(
            Q(event_id__in=events) &
            Q(voucher__isnull=True) &
            seq &
            Q(
                Q(
                    Q(variation_id__isnull=True) &
                    Q(item_id__in={i['item_id'] for i in q_items if self._quota_objects[i['quota_id']] in quotas})
                ) | Q(variation_id__in={i['itemvariation_id'] for i in q_vars if
                                        self._quota_objects[i['quota_id']] in quotas})
            )
        ).order_by().values('item_id', 'subevent_id', 'variation_id').annotate(c=Count('*'))
        for line in w_lookup:
            if line['variation_id']:
                qs = self._var_to_quotas[line['variation_id']]
            else:
                qs = self._item_to_quotas[line['item_id']]
            for q in qs:
                if q.subevent_id == line['subevent_id']:
                    size_left[q] -= line['c']
                    self.count_waitinglist[q] += line['c']
                    if q not in self.results and size_left[q] <= 0:
                        self.results[q] = Quota.AVAILABILITY_ORDERED, 0
    def _compute_early_outs(self, quotas):
        """Record results that need no database query: closed quotas read as
        sold out (unless ``ignore_closed``), unlimited quotas as available,
        and zero-size quotas as gone."""
        for q in quotas:
            if q.closed and not self._ignore_closed:
                self.results[q] = Quota.AVAILABILITY_ORDERED, 0
            elif q.size is None:
                self.results[q] = Quota.AVAILABILITY_OK, None
            elif q.size == 0:
                self.results[q] = Quota.AVAILABILITY_GONE, 0
@receiver(signal=periodic_task)
@minimum_interval(minutes_after_success=60)
def build_all_quota_caches(sender, **kwargs):
    """Periodic-task hook: trigger the quota cache refresh at most once per hour."""
    # .apply() executes the Celery task synchronously in this process.
    refresh_quota_caches.apply()
def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks or blocks.

    grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
    """
    # The same iterator repeated n times yields consecutive n-tuples.
    chunks = [iter(iterable)] * n
    return zip_longest(*chunks, fillvalue=fillvalue)
@app.task
@scopes_disabled()
def refresh_quota_caches():
    """
    Recompute stale quota caches for recently active events.

    "Active" means the event has log entries within the last 7 days. A quota is
    refreshed when its cache is missing, older than the event's last activity,
    or older than two hours; quotas belonging to long-past subevents (ended or
    started more than 14 days ago) are skipped.
    """
    # Active events
    active = LogEntry.objects.using(settings.DATABASE_REPLICA).filter(
        datetime__gt=now() - timedelta(days=7)
    ).order_by().values('event').annotate(
        last_activity=Max('datetime')
    )
    for a in active:
        try:
            e = Event.objects.using(settings.DATABASE_REPLICA).get(pk=a['event'])
        except Event.DoesNotExist:
            continue
        quotas = e.quotas.filter(
            Q(cached_availability_time__isnull=True) |
            Q(cached_availability_time__lt=a['last_activity']) |
            Q(cached_availability_time__lt=now() - timedelta(hours=2))
        ).filter(
            Q(subevent__isnull=True) |
            Q(subevent__date_to__isnull=False, subevent__date_to__gte=now() - timedelta(days=14)) |
            Q(subevent__date_from__gte=now() - timedelta(days=14))
        )
        # Batches of 100; early_out=False forces counting even for closed or
        # unlimited quotas so the database-level cache gets refreshed.
        for qs in grouper(quotas, 100, None):
            qa = QuotaAvailability(early_out=False)
            qa.queue(*[q for q in qs if q is not None])
            qa.compute()
| en | 0.906071 | This special object allows so compute the availability of multiple quotas, even across events, and inspect their results. The maximum number of SQL queries is constant and not dependent on the number of quotas. Usage example:: qa = QuotaAvailability() qa.queue(quota1, quota2, …) qa.compute() print(qa.results) Properties you can access after computation. * results (dict mapping quotas to availability tuples) * count_paid_orders (dict mapping quotas to ints) * count_paid_orders (dict mapping quotas to ints) * count_pending_orders (dict mapping quotas to ints) * count_vouchers (dict mapping quotas to ints) * count_waitinglist (dict mapping quotas to ints) * count_cart (dict mapping quotas to ints) Initialize a new quota availability calculator :param count_waitinglist: If ``True`` (default), the waiting list is considered. If ``False``, it is ignored. :param ignore_closed: Quotas have a ``closed`` state that always makes the quota return as sold out. If you set ``ignore_closed`` to ``True``, we will ignore this completely. Default is ``False``. :param full_results: Usually, the computation is as efficient as possible, i.e. if after counting the sold orders we already see that the quota is sold out, we're not going to count the carts, since it does not matter. This also means that you will not be able to get that number from ``.count_cart``. If you want all parts to be calculated (i.e. because you want to show statistics to the user), pass ``full_results`` and we'll skip that optimization. items :param early_out: Usually, if a quota is ``closed`` or if its ``size`` is ``None`` (i.e. unlimited), we will not need database access to determine the availability and return it right away. If you set this to ``False``, however, we will *still* count the number of orders, which is required to keep the database-level quota cache up to date so backend overviews render quickly. 
If you do not care about keeping the cache up to date, you can set this to ``False`` for further performance improvements. # Ignore deadlocks when multiple threads try to write to the cache # We used to also delete item_quota_cache:* from the event cache here, but as the cache # gets more complex, this does not seem worth it. The cache is only present for up to # 5 seconds to prevent high peaks, and a 5-second delay in availability is usually # tolerable # Quotas we want to look at now # Some helpful caches # Compute result for closed or unlimited # Fetch which quotas belong to which items and variations # p before n, exited before non-exited # NOQA Collect data into fixed-length chunks or blocks # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx # Active events | 2.055242 | 2 |
gamesrc/food.py | Adi-UA/AI-Plays-Snake | 4 | 6624499 | <gh_stars>1-10
import pygame
class Food:
    """A single piece of food on the snake game's grid."""

    def __init__(self, grid, x, y):
        """Create a food item and register it on *grid* at (x, y).

        Arguments:
            grid {Grid} -- The grid on which this food item is to be placed.
            x {int} -- X coordinate in the standard coordinate system
            y {int} -- Y coordinate in the standard coordinate system
        """
        self.x, self.y = x, y
        # By project convention, a grid cell holding food stores the value 2.
        grid.update_pos(x, y, 2)
| import pygame
class Food:
    """
    This class represents a food object in the snake game.
    """

    def __init__(self, grid, x, y):
        """
        This function instantiates the Food object.

        Arguments:
            grid {Grid} -- The grid on which this food item is to be placed.
            x {int} -- X coordinate in the standard coordinate system
            y {int} -- Y coordinate in the standard coordinate system
        """
        self.x = x
        self.y = y
        # Project's convention is to map food as a value of 2 on the grid.
        grid.update_pos(x, y, 2)
cm/services/data/transient_storage.py | adrianppg/cloudman | 0 | 6624500 | <reponame>adrianppg/cloudman
"""
A wrapper class around instance's transient storage. This class exposes that
storage over NFS to the rest of the cluster.
.. important::
The file system behind this device is transient, meaning that it will
    disappear at instance termination and it cannot be recovered.
"""
import commands
import grp
import logging
import os
import pwd
from cm.services import ServiceRole, service_states
from cm.services.data import BlockStorage
from cm.util import misc
log = logging.getLogger('cloudman')
class TransientStorage(BlockStorage):
    """
    Instance's transient storage exposed over NFS.

    The backing device disappears when the instance terminates, so removal
    only needs to unshare the mount point rather than detach any volume.
    """

    def __init__(self, filesystem, from_archive=None):
        """
        Instance's transient storage exposed over NFS.

        :param filesystem: the file system service this device backs
        :param from_archive: optional dict with ``url`` and ``md5_sum`` keys;
            if given, the archive is extracted onto this storage in ``add()``
        """
        super(TransientStorage, self).__init__(filesystem.app)
        self.fs = filesystem
        self.app = self.fs.app
        self.device = None
        self.from_archive = from_archive
        self.svc_roles = [ServiceRole.TRANSIENT_NFS]
        self.name = ServiceRole.to_string(ServiceRole.TRANSIENT_NFS)

    def __repr__(self):
        return self.get_full_name()

    def get_full_name(self):
        """Return a human-readable name including the mount point."""
        return "Transient storage @ {0}".format(self.fs.mount_point)

    def _get_details(self, details):
        """
        Transient storage-specific file system details.
        """
        details['DoT'] = "Yes"
        details['device'] = self.device
        # TODO: keep track of any errors
        details['err_msg'] = None if details.get(
            'err_msg', '') == '' else details['err_msg']
        return details

    def add(self):
        """
        Add this file system by creating a dedicated path (i.e., self.fs.mount_point)
        and exporting it over NFS. Set the owner of the repo as ``ubuntu`` user.
        """
        try:
            log.debug("Adding transient file system at {0}".format(self.fs.mount_point))
            if not os.path.exists(self.fs.mount_point):
                os.mkdir(self.fs.mount_point)
            os.chown(self.fs.mount_point, pwd.getpwnam("ubuntu")[2],
                     grp.getgrnam("ubuntu")[2])
            # Record the device backing the mount point, as reported by df.
            self.device = commands.getoutput("df -h %s | grep -v Filesystem | awk '{print $1}'"
                                             % self.fs.mount_point)
            # If based on bucket, extract bucket contents onto new volume
            try:
                if self.from_archive:
                    log.info("Extracting archive url: {0} to mount point: {1}. This could take a while...".format(self.from_archive['url'], self.fs.mount_point))
                    misc.extract_archive_content_to_path(self.from_archive['url'], self.fs.mount_point, self.from_archive['md5_sum'])
                    self.fs.persistent = True
            except Exception as e:  # was py2-only 'except Exception, e'
                log.error("Error while extracting archive: {0}".format(e))
                return False
            if self.fs.add_nfs_share(self.fs.mount_point):
                self.fs.state = service_states.RUNNING
            else:
                log.warning('Trouble sharing {0} over NFS?'.format(
                    self.fs.mount_point))
        except OSError as e:  # was py2-only 'except OSError, e'
            log.debug("Trouble adding transient file system: {0}".format(e))

    def remove(self):
        """
        Initiate removal of this file system from the system.
        Because the backend storage will be gone after an instance is terminated,
        here we just need to remove the NFS share point.
        """
        log.debug("Removing transient instance storage from {0}".format(
            self.fs.mount_point))
        self.fs.remove_nfs_share()
        # NOTE(review): this sets self.state, not self.fs.state (unlike the
        # other methods) — confirm that is intended.
        self.state = service_states.SHUT_DOWN

    def status(self):
        """
        Update the status of this data service: make sure the mount point exists
        and that it is in /etc/exports for NFS.
        """
        # log.debug("Checking the status of {0}".format(self.fs.mount_point))
        if self.fs._service_transitioning():
            # log.debug("Data service {0}
            # transitioning".format(self.fs.get_full_name()))
            pass
        elif self.fs._service_starting():
            # log.debug("Data service {0}
            # starting".format(self.fs.get_full_name()))
            pass
        elif not os.path.exists(self.fs.mount_point):
            # log.debug("Data service {0} dir {1} not there?".format(self.fs.get_full_name(),\
            # self.fs.mount_point))
            self.fs.state = service_states.UNSTARTED
        else:
            ee_file = '/etc/exports'
            try:
                # This does read the file every time the service status is
                # updated. Is this really necessary?
                with open(ee_file, 'r') as f:
                    shared_paths = f.readlines()
                for shared_path in shared_paths:
                    if self.fs.mount_point in shared_path:
                        self.fs.state = service_states.RUNNING
                        # Transient storage needs to be special-cased because
                        # it's not a mounted disk per se but a disk on an
                        # otherwise default device for an instance (i.e., /mnt)
                        update_size_cmd = "df --block-size 1 | grep /mnt$ | awk '{print $2, $3, $5}'"
                        self.fs._update_size(cmd=update_size_cmd)
                        return
                # Or should this set it to UNSTARTED? Because this FS is just an
                # NFS-exported file path...
                log.warning("Data service {0} not found in {1}; error!"
                            .format(self.fs.get_full_name(), ee_file))
                self.fs.state = service_states.ERROR
            except Exception as e:  # was py2-only 'except Exception, e'
                log.error("Error checking the status of {0} service: {1}".format(
                    self.fs.get_full_name(), e))
                self.fs.state = service_states.ERROR
| """
A wrapper class around instance's transient storage. This class exposes that
storage over NFS to the rest of the cluster.
.. important::
The file system behind this device is transient, meaning that it will
    disappear at instance termination and it cannot be recovered.
"""
import commands
import grp
import logging
import os
import pwd
from cm.services import ServiceRole, service_states
from cm.services.data import BlockStorage
from cm.util import misc
log = logging.getLogger('cloudman')
class TransientStorage(BlockStorage):
    """
    Instance's transient storage exposed over NFS.

    The backing device disappears when the instance terminates, so removal
    only needs to unshare the mount point rather than detach any volume.
    """

    def __init__(self, filesystem, from_archive=None):
        """
        Instance's transient storage exposed over NFS.

        :param filesystem: the file system service this device backs
        :param from_archive: optional dict with ``url`` and ``md5_sum`` keys;
            if given, the archive is extracted onto this storage in ``add()``
        """
        super(TransientStorage, self).__init__(filesystem.app)
        self.fs = filesystem
        self.app = self.fs.app
        self.device = None
        self.from_archive = from_archive
        self.svc_roles = [ServiceRole.TRANSIENT_NFS]
        self.name = ServiceRole.to_string(ServiceRole.TRANSIENT_NFS)

    def __repr__(self):
        return self.get_full_name()

    def get_full_name(self):
        """Return a human-readable name including the mount point."""
        return "Transient storage @ {0}".format(self.fs.mount_point)

    def _get_details(self, details):
        """
        Transient storage-specific file system details.
        """
        details['DoT'] = "Yes"
        details['device'] = self.device
        # TODO: keep track of any errors
        details['err_msg'] = None if details.get(
            'err_msg', '') == '' else details['err_msg']
        return details

    def add(self):
        """
        Add this file system by creating a dedicated path (i.e., self.fs.mount_point)
        and exporting it over NFS. Set the owner of the repo as ``ubuntu`` user.
        """
        try:
            log.debug("Adding transient file system at {0}".format(self.fs.mount_point))
            if not os.path.exists(self.fs.mount_point):
                os.mkdir(self.fs.mount_point)
            os.chown(self.fs.mount_point, pwd.getpwnam("ubuntu")[2],
                     grp.getgrnam("ubuntu")[2])
            # Record the device backing the mount point, as reported by df.
            self.device = commands.getoutput("df -h %s | grep -v Filesystem | awk '{print $1}'"
                                             % self.fs.mount_point)
            # If based on bucket, extract bucket contents onto new volume
            try:
                if self.from_archive:
                    log.info("Extracting archive url: {0} to mount point: {1}. This could take a while...".format(self.from_archive['url'], self.fs.mount_point))
                    misc.extract_archive_content_to_path(self.from_archive['url'], self.fs.mount_point, self.from_archive['md5_sum'])
                    self.fs.persistent = True
            except Exception as e:  # was py2-only 'except Exception, e'
                log.error("Error while extracting archive: {0}".format(e))
                return False
            if self.fs.add_nfs_share(self.fs.mount_point):
                self.fs.state = service_states.RUNNING
            else:
                log.warning('Trouble sharing {0} over NFS?'.format(
                    self.fs.mount_point))
        except OSError as e:  # was py2-only 'except OSError, e'
            log.debug("Trouble adding transient file system: {0}".format(e))

    def remove(self):
        """
        Initiate removal of this file system from the system.
        Because the backend storage will be gone after an instance is terminated,
        here we just need to remove the NFS share point.
        """
        log.debug("Removing transient instance storage from {0}".format(
            self.fs.mount_point))
        self.fs.remove_nfs_share()
        # NOTE(review): this sets self.state, not self.fs.state (unlike the
        # other methods) — confirm that is intended.
        self.state = service_states.SHUT_DOWN

    def status(self):
        """
        Update the status of this data service: make sure the mount point exists
        and that it is in /etc/exports for NFS.
        """
        # log.debug("Checking the status of {0}".format(self.fs.mount_point))
        if self.fs._service_transitioning():
            # log.debug("Data service {0}
            # transitioning".format(self.fs.get_full_name()))
            pass
        elif self.fs._service_starting():
            # log.debug("Data service {0}
            # starting".format(self.fs.get_full_name()))
            pass
        elif not os.path.exists(self.fs.mount_point):
            # log.debug("Data service {0} dir {1} not there?".format(self.fs.get_full_name(),\
            # self.fs.mount_point))
            self.fs.state = service_states.UNSTARTED
        else:
            ee_file = '/etc/exports'
            try:
                # This does read the file every time the service status is
                # updated. Is this really necessary?
                with open(ee_file, 'r') as f:
                    shared_paths = f.readlines()
                for shared_path in shared_paths:
                    if self.fs.mount_point in shared_path:
                        self.fs.state = service_states.RUNNING
                        # Transient storage needs to be special-cased because
                        # it's not a mounted disk per se but a disk on an
                        # otherwise default device for an instance (i.e., /mnt)
                        update_size_cmd = "df --block-size 1 | grep /mnt$ | awk '{print $2, $3, $5}'"
                        self.fs._update_size(cmd=update_size_cmd)
                        return
                # Or should this set it to UNSTARTED? Because this FS is just an
                # NFS-exported file path...
                log.warning("Data service {0} not found in {1}; error!"
                            .format(self.fs.get_full_name(), ee_file))
                self.fs.state = service_states.ERROR
            except Exception as e:  # was py2-only 'except Exception, e'
                log.error("Error checking the status of {0} service: {1}".format(
                    self.fs.get_full_name(), e))
                self.fs.state = service_states.ERROR
html_parsing/pravicon_com__s/upload_to_modx.py | DazEB2/SimplePyScripts | 117 | 6624501 | <gh_stars>100-1000
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import json
import time
import traceback
# pip install selenium
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from config import DIR_DUMP, URL_MODIX_BASE, URL_MODIX_CREATE, LOGIN, PASSWORD
from main import secure_filename
options = Options()
# options.add_argument('--headless')
driver = webdriver.Firefox(options=options)
try:
driver.implicitly_wait(5)
driver.get(URL_MODIX_BASE)
print(f'Title: {driver.title!r}')
driver.find_element_by_id('modx-login-username').send_keys(LOGIN)
driver.find_element_by_id('modx-login-password').send_keys(PASSWORD)
driver.find_element_by_id("modx-login-btn").click()
# Папки с сортировкой по имени
items = sorted(DIR_DUMP.glob('icon-*'), key=lambda x: x.stem.split('__', maxsplit=1)[1])
for i, path_dir in enumerate(items, 1):
path_ignore = path_dir / 'ignore'
if path_ignore.exists():
continue
driver.get(URL_MODIX_CREATE)
path_info = path_dir / 'Информация.json'
data_info = json.loads(path_info.read_text('utf-8'))
title = data_info['title']
print(f'#{i} / {len(items)}. {title}')
file_name_img = f"{secure_filename(title)}.jpg"
driver.find_element_by_id('modx-resource-pagetitle').send_keys(title)
path_description = path_dir / 'Описания иконы.txt'
description = path_description.read_text('utf-8')
if 'В этом разделе записей пока нет.' not in description:
driver.find_element_by_id('ta').send_keys(description)
# Тип содержимого JSON
# driver.find_element_by_css_selector('[name=content_type]').get_attribute()
driver.execute_script(
"arguments[0].setAttribute('value', arguments[1])",
driver.find_element_by_css_selector('[name=content_type]'),
'7' # JSON
)
driver.find_element_by_id('modx-resource-tabs__modx-panel-resource-tv').click()
driver.find_element_by_id('tvbrowser1').send_keys(f'manager/иконы святых/{file_name_img}')
driver.find_element_by_id('tv3').send_keys('Описание')
path_liturgical_texts = path_dir / 'Богослужебные тексты.txt'
if path_liturgical_texts.exists():
liturgical_texts = path_liturgical_texts.read_text('utf-8')
driver.find_element_by_id('tv5').send_keys(liturgical_texts)
driver.find_element_by_id('modx-abtn-save').click()
path_ignore.touch()
time.sleep(5)
except:
print(traceback.format_exc())
finally:
# driver.quit()
pass
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import json
import time
import traceback
# pip install selenium
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from config import DIR_DUMP, URL_MODIX_BASE, URL_MODIX_CREATE, LOGIN, PASSWORD
from main import secure_filename
options = Options()
# options.add_argument('--headless')
driver = webdriver.Firefox(options=options)
try:
driver.implicitly_wait(5)
driver.get(URL_MODIX_BASE)
print(f'Title: {driver.title!r}')
driver.find_element_by_id('modx-login-username').send_keys(LOGIN)
driver.find_element_by_id('modx-login-password').send_keys(PASSWORD)
driver.find_element_by_id("modx-login-btn").click()
# Папки с сортировкой по имени
items = sorted(DIR_DUMP.glob('icon-*'), key=lambda x: x.stem.split('__', maxsplit=1)[1])
for i, path_dir in enumerate(items, 1):
path_ignore = path_dir / 'ignore'
if path_ignore.exists():
continue
driver.get(URL_MODIX_CREATE)
path_info = path_dir / 'Информация.json'
data_info = json.loads(path_info.read_text('utf-8'))
title = data_info['title']
print(f'#{i} / {len(items)}. {title}')
file_name_img = f"{secure_filename(title)}.jpg"
driver.find_element_by_id('modx-resource-pagetitle').send_keys(title)
path_description = path_dir / 'Описания иконы.txt'
description = path_description.read_text('utf-8')
if 'В этом разделе записей пока нет.' not in description:
driver.find_element_by_id('ta').send_keys(description)
# Тип содержимого JSON
# driver.find_element_by_css_selector('[name=content_type]').get_attribute()
driver.execute_script(
"arguments[0].setAttribute('value', arguments[1])",
driver.find_element_by_css_selector('[name=content_type]'),
'7' # JSON
)
driver.find_element_by_id('modx-resource-tabs__modx-panel-resource-tv').click()
driver.find_element_by_id('tvbrowser1').send_keys(f'manager/иконы святых/{file_name_img}')
driver.find_element_by_id('tv3').send_keys('Описание')
path_liturgical_texts = path_dir / 'Богослужебные тексты.txt'
if path_liturgical_texts.exists():
liturgical_texts = path_liturgical_texts.read_text('utf-8')
driver.find_element_by_id('tv5').send_keys(liturgical_texts)
driver.find_element_by_id('modx-abtn-save').click()
path_ignore.touch()
time.sleep(5)
except:
print(traceback.format_exc())
finally:
# driver.quit()
pass | ru | 0.324161 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- # pip install selenium # options.add_argument('--headless') # Папки с сортировкой по имени # Тип содержимого JSON # driver.find_element_by_css_selector('[name=content_type]').get_attribute() # JSON # driver.quit() | 2.393347 | 2 |
tests/formatters/test_results.py | washingtonpost/elex-clarity | 9 | 6624502 | <gh_stars>1-10
from elexclarity.formatters.results import ClarityDetailXMLConverter
def test_georgia_precinct_formatting_basic(atkinson_precincts, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
atkinson_precincts,
level="precinct"
)
assert len(results) == 25
assert results["2020-11-03_GA_G_P_13003"]["precinctsReportingPct"] == 100
assert results["2020-11-03_GA_G_P_13003"]["lastUpdated"] == "2020-11-06T18:05:50Z"
assert len(results["2020-11-03_GA_G_P_13003"]["subunits"]) == 4
# Top level counts for this county
counts = results["2020-11-03_GA_G_P_13003"]["counts"]
assert counts["donald_j_trump_i_rep"] == 2300
assert counts["joseph_r_biden_dem"] == 825
assert counts["jo_jorgensen_lib"] == 30
# Pearson City precinct
pearson = results["2020-11-03_GA_G_P_13003"]["subunits"]["pearson_city"]
assert pearson["precinctsReportingPct"] == 100
assert pearson["expectedVotes"] == 564
assert pearson["counts"]["donald_j_trump_i_rep"] == 229
assert pearson["counts"]["joseph_r_biden_dem"] == 329
assert pearson["counts"]["jo_jorgensen_lib"] == 6
# Willacoochee precinct
willacoochee = results["2020-11-03_GA_G_P_13003"]["subunits"]["willacoochee"]
assert willacoochee["precinctsReportingPct"] == 100
assert willacoochee["expectedVotes"] == 522
assert willacoochee["counts"]["donald_j_trump_i_rep"] == 342
assert willacoochee["counts"]["joseph_r_biden_dem"] == 174
assert willacoochee["counts"]["jo_jorgensen_lib"] == 6
def test_georgia_precinct_formatting_vote_types_completion_mode(atkinson_precincts, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
atkinson_precincts,
level="precinct",
office_id="P",
vote_completion_mode="voteTypes"
)
# Pearson City precinct
pearson = results["2020-11-03_GA_G_P_13003"]["subunits"]["pearson_city"]
assert pearson["precinctsReportingPct"] == 100
assert pearson["expectedVotes"] == 564
# Willacoochee precinct
willacoochee = results["2020-11-03_GA_G_P_13003"]["subunits"]["willacoochee"]
assert willacoochee["precinctsReportingPct"] == 0
assert willacoochee.get("expectedVotes") is None
def test_georgia_precinct_formatting_race_name_mapping(gwinnett_precincts, ga_county_mapping_fips):
'''
Gwinnett has some special contest names so this test makes sure that those get mapped
to the right office IDs
'''
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
gwinnett_precincts,
level="precinct"
)
# President
assert "2020-11-03_GA_G_P_13135" in results
# Loeffler
assert "2020-11-03_GA_G_S2_13135" in results
# Perdue
assert "2020-11-03_GA_G_S_13135" in results
def test_georgia_state_formatting_basic(ga_counties, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
ga_counties,
level="state"
)
assert len(results) == 2
assert "2020-11-03_GA_G_P" in results
assert "2020-11-03_GA_G_state_senate_district_1" in results
assert not results["2020-11-03_GA_G_P"].get("subunits")
assert results["2020-11-03_GA_G_P"]["precinctsReportingPct"] == 100
assert results["2020-11-03_GA_G_P"]["lastUpdated"] == "2020-11-20T20:37:06Z"
counts = results["2020-11-03_GA_G_P"]["counts"]
assert counts["donald_j_trump_i_rep"] == 2461837
assert counts["joseph_r_biden_dem"] == 2474507
assert counts["jo_jorgensen_lib"] == 62138
def test_georgia_county_formatting_basic(ga_counties, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
ga_counties,
level="county"
)
assert len(results["2020-11-03_GA_G_P"]["subunits"]) == 159
# County-level counts
wilcox_county = results["2020-11-03_GA_G_P"]["subunits"]["13315"]
assert wilcox_county["id"] == "13315"
assert wilcox_county["counts"]["donald_j_trump_i_rep"] == 2403
assert wilcox_county["counts"]["joseph_r_biden_dem"] == 862
assert wilcox_county["counts"]["jo_jorgensen_lib"] == 16
def test_georgia_county_formatting_alternate_county_mapping(ga_counties, ga_county_mapping_alternate):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_alternate).convert(
ga_counties,
level="county"
)
assert len(results) == 2
catoosa_county = results["2020-11-03_GA_G_P"]["subunits"]["22"]
assert catoosa_county["id"] == "22"
assert len(catoosa_county["counts"].keys()) == 3
assert catoosa_county["counts"]["donald_j_trump_i_rep"] == 25167
assert catoosa_county["counts"]["joseph_r_biden_dem"] == 6932
assert catoosa_county["counts"]["jo_jorgensen_lib"] == 494
def test_county_formatting_no_county_mapping(wv_counties):
results = ClarityDetailXMLConverter("WV").convert(wv_counties, level="county")
assert len(results) == 2
marshall_county = results["2020-11-03_WV_G_P"]["subunits"]["marshall"]
assert marshall_county["id"] == "marshall"
assert len(marshall_county["counts"].keys()) == 4
assert marshall_county["counts"]["donald_j_trump"] == 10435
assert marshall_county["counts"]["joseph_r_biden"] == 3455
assert marshall_county["counts"]["jo_jorgensen"] == 143
assert marshall_county["counts"]["howie_hawkins"] == 47
| from elexclarity.formatters.results import ClarityDetailXMLConverter
def test_georgia_precinct_formatting_basic(atkinson_precincts, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
atkinson_precincts,
level="precinct"
)
assert len(results) == 25
assert results["2020-11-03_GA_G_P_13003"]["precinctsReportingPct"] == 100
assert results["2020-11-03_GA_G_P_13003"]["lastUpdated"] == "2020-11-06T18:05:50Z"
assert len(results["2020-11-03_GA_G_P_13003"]["subunits"]) == 4
# Top level counts for this county
counts = results["2020-11-03_GA_G_P_13003"]["counts"]
assert counts["donald_j_trump_i_rep"] == 2300
assert counts["joseph_r_biden_dem"] == 825
assert counts["jo_jorgensen_lib"] == 30
# Pearson City precinct
pearson = results["2020-11-03_GA_G_P_13003"]["subunits"]["pearson_city"]
assert pearson["precinctsReportingPct"] == 100
assert pearson["expectedVotes"] == 564
assert pearson["counts"]["donald_j_trump_i_rep"] == 229
assert pearson["counts"]["joseph_r_biden_dem"] == 329
assert pearson["counts"]["jo_jorgensen_lib"] == 6
# Willacoochee precinct
willacoochee = results["2020-11-03_GA_G_P_13003"]["subunits"]["willacoochee"]
assert willacoochee["precinctsReportingPct"] == 100
assert willacoochee["expectedVotes"] == 522
assert willacoochee["counts"]["donald_j_trump_i_rep"] == 342
assert willacoochee["counts"]["joseph_r_biden_dem"] == 174
assert willacoochee["counts"]["jo_jorgensen_lib"] == 6
def test_georgia_precinct_formatting_vote_types_completion_mode(atkinson_precincts, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
atkinson_precincts,
level="precinct",
office_id="P",
vote_completion_mode="voteTypes"
)
# Pearson City precinct
pearson = results["2020-11-03_GA_G_P_13003"]["subunits"]["pearson_city"]
assert pearson["precinctsReportingPct"] == 100
assert pearson["expectedVotes"] == 564
# Willacoochee precinct
willacoochee = results["2020-11-03_GA_G_P_13003"]["subunits"]["willacoochee"]
assert willacoochee["precinctsReportingPct"] == 0
assert willacoochee.get("expectedVotes") is None
def test_georgia_precinct_formatting_race_name_mapping(gwinnett_precincts, ga_county_mapping_fips):
'''
Gwinnett has some special contest names so this test makes sure that those get mapped
to the right office IDs
'''
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
gwinnett_precincts,
level="precinct"
)
# President
assert "2020-11-03_GA_G_P_13135" in results
# Loeffler
assert "2020-11-03_GA_G_S2_13135" in results
# Perdue
assert "2020-11-03_GA_G_S_13135" in results
def test_georgia_state_formatting_basic(ga_counties, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
ga_counties,
level="state"
)
assert len(results) == 2
assert "2020-11-03_GA_G_P" in results
assert "2020-11-03_GA_G_state_senate_district_1" in results
assert not results["2020-11-03_GA_G_P"].get("subunits")
assert results["2020-11-03_GA_G_P"]["precinctsReportingPct"] == 100
assert results["2020-11-03_GA_G_P"]["lastUpdated"] == "2020-11-20T20:37:06Z"
counts = results["2020-11-03_GA_G_P"]["counts"]
assert counts["donald_j_trump_i_rep"] == 2461837
assert counts["joseph_r_biden_dem"] == 2474507
assert counts["jo_jorgensen_lib"] == 62138
def test_georgia_county_formatting_basic(ga_counties, ga_county_mapping_fips):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_fips).convert(
ga_counties,
level="county"
)
assert len(results["2020-11-03_GA_G_P"]["subunits"]) == 159
# County-level counts
wilcox_county = results["2020-11-03_GA_G_P"]["subunits"]["13315"]
assert wilcox_county["id"] == "13315"
assert wilcox_county["counts"]["donald_j_trump_i_rep"] == 2403
assert wilcox_county["counts"]["joseph_r_biden_dem"] == 862
assert wilcox_county["counts"]["jo_jorgensen_lib"] == 16
def test_georgia_county_formatting_alternate_county_mapping(ga_counties, ga_county_mapping_alternate):
results = ClarityDetailXMLConverter("GA", county_lookup=ga_county_mapping_alternate).convert(
ga_counties,
level="county"
)
assert len(results) == 2
catoosa_county = results["2020-11-03_GA_G_P"]["subunits"]["22"]
assert catoosa_county["id"] == "22"
assert len(catoosa_county["counts"].keys()) == 3
assert catoosa_county["counts"]["donald_j_trump_i_rep"] == 25167
assert catoosa_county["counts"]["joseph_r_biden_dem"] == 6932
assert catoosa_county["counts"]["jo_jorgensen_lib"] == 494
def test_county_formatting_no_county_mapping(wv_counties):
results = ClarityDetailXMLConverter("WV").convert(wv_counties, level="county")
assert len(results) == 2
marshall_county = results["2020-11-03_WV_G_P"]["subunits"]["marshall"]
assert marshall_county["id"] == "marshall"
assert len(marshall_county["counts"].keys()) == 4
assert marshall_county["counts"]["donald_j_trump"] == 10435
assert marshall_county["counts"]["joseph_r_biden"] == 3455
assert marshall_county["counts"]["jo_jorgensen"] == 143
assert marshall_county["counts"]["howie_hawkins"] == 47 | en | 0.852287 | # Top level counts for this county # Pearson City precinct # Willacoochee precinct # Pearson City precinct # Willacoochee precinct Gwinnett has some special contest names so this test makes sure that those get mapped to the right office IDs # President # Loeffler # Perdue # County-level counts | 2.616725 | 3 |
benchmarks/ltl_timed_automata/fischer/f3/fischer_0010.py | EnricoMagnago/F3 | 3 | 6624503 | <filename>benchmarks/ltl_timed_automata/fischer/f3/fischer_0010.py
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_rational_type, msat_get_integer_type, \
msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next
num_procs = 10
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type):
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
real_type = msat_get_rational_type(menv)
int_type = msat_get_integer_type(menv)
delta, x_delta = decl_consts(menv, delta_name, real_type)
s_id, x_id = decl_consts(menv, "id", int_type)
turn, x_turn = decl_consts(menv, "turn", int_type)
curr2next = {delta: x_delta, s_id: x_id, turn: x_turn}
procs = [P("p{}".format(idx), menv, enc,
msat_make_number(menv, str(idx + 1)), s_id, x_id, turn, delta)
for idx in range(num_procs)]
for p in procs:
for s, x_s in p.symb2next.items():
assert s not in curr2next.keys()
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
init = msat_make_geq(menv, delta, zero)
# bound id
bound_id = msat_make_equal(menv, s_id, zero)
x_bound_id = msat_make_equal(menv, x_id, zero)
for idx in range(1, num_procs + 1):
num = msat_make_number(menv, str(idx))
bound_id = msat_make_or(menv, bound_id,
msat_make_equal(menv, s_id, num))
x_bound_id = msat_make_or(menv, x_bound_id,
msat_make_equal(menv, x_id, num))
init = msat_make_and(menv, init, bound_id)
trans = bound_id
# bound turn
bound_turn = msat_make_equal(menv, turn, one)
x_bound_turn = msat_make_equal(menv, x_turn, one)
for idx in range(2, num_procs + 1):
num = msat_make_number(menv, str(idx))
bound_turn = msat_make_or(menv, bound_turn,
msat_make_equal(menv, turn, num))
x_bound_turn = msat_make_or(menv, x_bound_turn,
msat_make_equal(menv, x_turn, num))
init = msat_make_and(menv, init, bound_turn)
trans = msat_make_and(menv, trans, x_bound_turn)
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> id' = id & turn' = turn
curr = msat_make_impl(menv, msat_make_gt(menv, delta, zero),
msat_make_and(menv,
msat_make_equal(menv, x_id, s_id),
msat_make_equal(menv, x_turn, turn)))
trans = msat_make_and(menv, curr, trans)
for p in procs:
init = msat_make_and(menv, init, p.init)
trans = msat_make_and(menv, trans, p.trans)
init = msat_make_and(menv, init, msat_make_equal(menv, s_id, zero))
# (G F P0.location = wait) -> (G F P0.location = cs)
ltl = msat_make_impl(menv,
enc.make_G(enc.make_F(procs[0].wait)),
enc.make_G(enc.make_F(procs[0].cs)))
return TermMap(curr2next), init, trans, ltl
class Module:
"""Synchronous component"""
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
bool_type = msat_get_bool_type(self.menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(self._symb(c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(self.menv, b_vars[idx][0]),
msat_make_not(self.menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(self.menv, pred, it[0])
x_pred = msat_make_and(self.menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
class P(Module):
"""Process module"""
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
pid, s_id, x_id, turn, delta):
super().__init__(name, menv, enc)
# int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
# loc, x_loc = self._symb("l", int_type)
loc_symbs, locs, x_locs = self._enum("l", 4)
x, x_x = self._symb("x", real_type)
self.symb2next = {x: x_x}
for s, x_s in loc_symbs:
assert s not in self.symb2next
self.symb2next[s] = x_s
nums = [msat_make_number(menv, str(n))
for n in range(4)]
k = nums[2]
self.idle = locs[0]
self.wait = locs[1]
self.req = locs[2]
self.cs = locs[3]
self.x_idle = x_locs[0]
self.x_wait = x_locs[1]
self.x_req = x_locs[2]
self.x_cs = x_locs[3]
same_loc = msat_make_iff(menv, loc_symbs[0][1], loc_symbs[0][0])
for s, x_s in loc_symbs[1:]:
same_loc = msat_make_and(menv, same_loc,
msat_make_iff(menv, x_s, s))
# l = idle & x = 0
self.init = msat_make_and(menv, self.idle,
msat_make_equal(menv, x, nums[0]))
# bound l
bound_l = msat_make_or(menv,
msat_make_or(menv, self.idle, self.wait),
msat_make_or(menv, self.req, self.cs))
self.init = msat_make_and(menv, self.init, bound_l)
# invars
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.req, msat_make_leq(menv, x, k)))
# bound l
bound_l = msat_make_or(menv,
msat_make_or(menv, self.x_idle, self.x_wait),
msat_make_or(menv, self.x_req, self.x_cs))
self.trans = msat_make_and(menv, self.trans, bound_l)
# invars
self.trans = msat_make_and(
menv, self.trans,
msat_make_impl(menv, self.x_req,
msat_make_leq(menv, x_x, k)))
lhs = msat_make_or(
menv,
msat_make_gt(menv, delta, nums[0]),
msat_make_not(menv, msat_make_equal(menv, turn, pid)))
rhs = msat_make_and(menv,
same_loc,
msat_make_equal(menv, x_x,
msat_make_plus(menv, x, delta)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, msat_make_equal(menv, delta, nums[0]),
msat_make_equal(menv, turn, pid))
# (l = idle) -> l' = req & id = 0 & x' = 0 & id' = id
lhs = msat_make_and(menv, disc_t, self.idle)
rhs = msat_make_and(
menv,
msat_make_and(menv, self.x_req,
msat_make_equal(menv, s_id, nums[0])),
msat_make_and(menv,
msat_make_equal(menv, x_x, nums[0]),
msat_make_equal(menv, x_id, s_id)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (l = req) -> l' = wait & x <= k & x' = 0 & id' = pid
lhs = msat_make_and(menv, disc_t, self.req)
rhs = msat_make_and(
menv,
msat_make_and(menv, self.x_wait,
msat_make_leq(menv, x, k)),
msat_make_and(menv,
msat_make_equal(menv, x_x, nums[0]),
msat_make_equal(menv, x_id, pid)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (l = wait) -> (l' = idle | l' = cs)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_or(menv, self.x_idle, self.x_cs)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (l = wait & l' = idle) -> x' = 0 & id' = id & x > k & id != pid
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_idle))
rhs = msat_make_and(menv, msat_make_equal(menv, x_x, nums[0]),
msat_make_equal(menv, x_id, s_id))
rhs = msat_make_and(
menv, rhs,
msat_make_and(
menv,
msat_make_gt(menv, x, k),
msat_make_not(menv, msat_make_equal(menv, s_id, pid))))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (l = wait & l' = cs) -> x' = x & id' = id & x > k & id = pid
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_cs))
rhs = msat_make_and(menv, msat_make_equal(menv, x_x, x),
msat_make_equal(menv, x_id, s_id))
rhs = msat_make_and(menv, rhs,
msat_make_and(
menv,
msat_make_gt(menv, x, k),
msat_make_equal(menv, s_id, pid)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (l = cs) -> l' = idle & x' = x & id' = 0
lhs = msat_make_and(menv, disc_t, self.cs)
rhs = msat_make_and(menv, self.x_idle,
msat_make_and(menv,
msat_make_equal(menv, x_x, x),
msat_make_equal(menv, x_id,
nums[0])))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
| <filename>benchmarks/ltl_timed_automata/fischer/f3/fischer_0010.py
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_rational_type, msat_get_integer_type, \
msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next
num_procs = 10
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type):
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
real_type = msat_get_rational_type(menv)
int_type = msat_get_integer_type(menv)
delta, x_delta = decl_consts(menv, delta_name, real_type)
s_id, x_id = decl_consts(menv, "id", int_type)
turn, x_turn = decl_consts(menv, "turn", int_type)
curr2next = {delta: x_delta, s_id: x_id, turn: x_turn}
procs = [P("p{}".format(idx), menv, enc,
msat_make_number(menv, str(idx + 1)), s_id, x_id, turn, delta)
for idx in range(num_procs)]
for p in procs:
for s, x_s in p.symb2next.items():
assert s not in curr2next.keys()
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
init = msat_make_geq(menv, delta, zero)
# bound id
bound_id = msat_make_equal(menv, s_id, zero)
x_bound_id = msat_make_equal(menv, x_id, zero)
for idx in range(1, num_procs + 1):
num = msat_make_number(menv, str(idx))
bound_id = msat_make_or(menv, bound_id,
msat_make_equal(menv, s_id, num))
x_bound_id = msat_make_or(menv, x_bound_id,
msat_make_equal(menv, x_id, num))
init = msat_make_and(menv, init, bound_id)
trans = bound_id
# bound turn
bound_turn = msat_make_equal(menv, turn, one)
x_bound_turn = msat_make_equal(menv, x_turn, one)
for idx in range(2, num_procs + 1):
num = msat_make_number(menv, str(idx))
bound_turn = msat_make_or(menv, bound_turn,
msat_make_equal(menv, turn, num))
x_bound_turn = msat_make_or(menv, x_bound_turn,
msat_make_equal(menv, x_turn, num))
init = msat_make_and(menv, init, bound_turn)
trans = msat_make_and(menv, trans, x_bound_turn)
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> id' = id & turn' = turn
curr = msat_make_impl(menv, msat_make_gt(menv, delta, zero),
msat_make_and(menv,
msat_make_equal(menv, x_id, s_id),
msat_make_equal(menv, x_turn, turn)))
trans = msat_make_and(menv, curr, trans)
for p in procs:
init = msat_make_and(menv, init, p.init)
trans = msat_make_and(menv, trans, p.trans)
init = msat_make_and(menv, init, msat_make_equal(menv, s_id, zero))
# (G F P0.location = wait) -> (G F P0.location = cs)
ltl = msat_make_impl(menv,
enc.make_G(enc.make_F(procs[0].wait)),
enc.make_G(enc.make_F(procs[0].cs)))
return TermMap(curr2next), init, trans, ltl
class Module:
    """Synchronous component of the MathSAT transition system.

    Holds the component's name, the MathSAT environment and LTL encoder,
    a map from current-state symbols to their next-state counterparts
    (``symb2next``), and the component's ``init``/``trans`` formulae,
    which start as ``true`` and are strengthened by subclasses.
    """
    def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
                 *args, **kwargs):
        self.name = name
        self.menv = menv
        self.enc = enc
        self.symb2next = {}
        true = msat_make_true(menv)
        self.init = true
        self.trans = true

    def _symb(self, v_name, v_type):
        # Declare a (current, next) constant pair, namespaced by module name
        # to avoid clashes between processes.
        v_name = "{}_{}".format(self.name, v_name)
        return decl_consts(self.menv, v_name, v_type)

    def _enum(self, v_name: str, enum_size: int):
        """Encode an enum with ``enum_size`` values over ceil(log2) booleans.

        Returns ``(b_vars, vals, x_vals)``: ``b_vars`` are the
        (current, next) boolean symbol pairs, and ``vals[i]`` / ``x_vals[i]``
        are the predicates "encoded value == i" over the current / next
        symbols respectively.
        """
        bool_type = msat_get_bool_type(self.menv)
        num_bits = ceil(log(enum_size, 2))
        b_vars = []
        for idx in range(num_bits):
            c_name = "{}{}".format(v_name, idx)
            b_vars.append(tuple(self._symb(c_name, bool_type)))
        vals = []
        x_vals = []
        for enum_val in range(enum_size):
            # Fixed-width binary encoding of this enum value.
            bit_val = format(enum_val, '0{}b'.format(num_bits))
            assert len(bit_val) == num_bits
            assert all(c in {'0', '1'} for c in bit_val)
            # reversed() so index 0 is the least-significant bit; a '0' bit
            # contributes the negated (current, next) literal pair.
            assign = [b_vars[idx] if c == '1' else
                      (msat_make_not(self.menv, b_vars[idx][0]),
                       msat_make_not(self.menv, b_vars[idx][1]))
                      for idx, c in enumerate(reversed(bit_val))]
            pred = assign[0][0]
            x_pred = assign[0][1]
            for it in assign[1:]:
                pred = msat_make_and(self.menv, pred, it[0])
                x_pred = msat_make_and(self.menv, x_pred, it[1])
            vals.append(pred)
            x_vals.append(x_pred)
        assert len(vals) == enum_size
        assert len(x_vals) == enum_size
        return b_vars, vals, x_vals
class P(Module):
    """Process module of a timed mutual-exclusion protocol.

    Appears to encode a Fischer-style process: a real-valued clock ``x``,
    four locations (idle -> req -> wait -> cs), a shared lock variable
    ``s_id``/``x_id``, a scheduler variable ``turn`` and the time-elapse
    amount ``delta``.  Discrete steps fire only when ``delta = 0`` and
    ``turn = pid``; otherwise time elapses and the location is preserved.
    (NOTE(review): protocol identification inferred from structure --
    confirm against the surrounding model builder.)
    """
    def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
                 pid, s_id, x_id, turn, delta):
        super().__init__(name, menv, enc)
        # int_type = msat_get_integer_type(menv)
        real_type = msat_get_rational_type(menv)
        # loc, x_loc = self._symb("l", int_type)
        # Location is a 4-valued enum encoded over booleans (see Module._enum).
        loc_symbs, locs, x_locs = self._enum("l", 4)
        x, x_x = self._symb("x", real_type)
        self.symb2next = {x: x_x}
        for s, x_s in loc_symbs:
            assert s not in self.symb2next
            self.symb2next[s] = x_s
        nums = [msat_make_number(menv, str(n))
                for n in range(4)]
        # k is the protocol's timing constant (2 here).
        k = nums[2]
        self.idle = locs[0]
        self.wait = locs[1]
        self.req = locs[2]
        self.cs = locs[3]
        self.x_idle = x_locs[0]
        self.x_wait = x_locs[1]
        self.x_req = x_locs[2]
        self.x_cs = x_locs[3]
        # same_loc: every location bit keeps its value in the next state.
        same_loc = msat_make_iff(menv, loc_symbs[0][1], loc_symbs[0][0])
        for s, x_s in loc_symbs[1:]:
            same_loc = msat_make_and(menv, same_loc,
                                     msat_make_iff(menv, x_s, s))
        # l = idle & x = 0
        self.init = msat_make_and(menv, self.idle,
                                  msat_make_equal(menv, x, nums[0]))
        # bound l
        bound_l = msat_make_or(menv,
                               msat_make_or(menv, self.idle, self.wait),
                               msat_make_or(menv, self.req, self.cs))
        self.init = msat_make_and(menv, self.init, bound_l)
        # invars
        self.init = msat_make_and(
            menv, self.init,
            msat_make_impl(menv, self.req, msat_make_leq(menv, x, k)))
        # bound l
        bound_l = msat_make_or(menv,
                               msat_make_or(menv, self.x_idle, self.x_wait),
                               msat_make_or(menv, self.x_req, self.x_cs))
        self.trans = msat_make_and(menv, self.trans, bound_l)
        # invars
        self.trans = msat_make_and(
            menv, self.trans,
            msat_make_impl(menv, self.x_req,
                           msat_make_leq(menv, x_x, k)))
        # Timed step (delta > 0 or not our turn): keep location, advance clock.
        lhs = msat_make_or(
            menv,
            msat_make_gt(menv, delta, nums[0]),
            msat_make_not(menv, msat_make_equal(menv, turn, pid)))
        rhs = msat_make_and(menv,
                            same_loc,
                            msat_make_equal(menv, x_x,
                                            msat_make_plus(menv, x, delta)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # Discrete step guard: no time elapse and it is this process's turn.
        disc_t = msat_make_and(menv, msat_make_equal(menv, delta, nums[0]),
                               msat_make_equal(menv, turn, pid))
        # (l = idle) -> l' = req & id = 0 & x' = 0 & id' = id
        lhs = msat_make_and(menv, disc_t, self.idle)
        rhs = msat_make_and(
            menv,
            msat_make_and(menv, self.x_req,
                          msat_make_equal(menv, s_id, nums[0])),
            msat_make_and(menv,
                          msat_make_equal(menv, x_x, nums[0]),
                          msat_make_equal(menv, x_id, s_id)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (l = req) -> l' = wait & x <= k & x' = 0 & id' = pid
        lhs = msat_make_and(menv, disc_t, self.req)
        rhs = msat_make_and(
            menv,
            msat_make_and(menv, self.x_wait,
                          msat_make_leq(menv, x, k)),
            msat_make_and(menv,
                          msat_make_equal(menv, x_x, nums[0]),
                          msat_make_equal(menv, x_id, pid)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (l = wait) -> (l' = idle | l' = cs)
        lhs = msat_make_and(menv, disc_t, self.wait)
        rhs = msat_make_or(menv, self.x_idle, self.x_cs)
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (l = wait & l' = idle) -> x' = 0 & id' = id & x > k & id != pid
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.wait, self.x_idle))
        rhs = msat_make_and(menv, msat_make_equal(menv, x_x, nums[0]),
                            msat_make_equal(menv, x_id, s_id))
        rhs = msat_make_and(
            menv, rhs,
            msat_make_and(
                menv,
                msat_make_gt(menv, x, k),
                msat_make_not(menv, msat_make_equal(menv, s_id, pid))))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (l = wait & l' = cs) -> x' = x & id' = id & x > k & id = pid
        lhs = msat_make_and(menv, disc_t,
                            msat_make_and(menv, self.wait, self.x_cs))
        rhs = msat_make_and(menv, msat_make_equal(menv, x_x, x),
                            msat_make_equal(menv, x_id, s_id))
        rhs = msat_make_and(menv, rhs,
                            msat_make_and(
                                menv,
                                msat_make_gt(menv, x, k),
                                msat_make_equal(menv, s_id, pid)))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
        # (l = cs) -> l' = idle & x' = x & id' = 0
        lhs = msat_make_and(menv, disc_t, self.cs)
        rhs = msat_make_and(menv, self.x_idle,
                            msat_make_and(menv,
                                          msat_make_equal(menv, x_x, x),
                                          msat_make_equal(menv, x_id,
                                                          nums[0])))
        self.trans = msat_make_and(menv, self.trans,
                                   msat_make_impl(menv, lhs, rhs))
| en | 0.457951 | # bound id # bound turn # delta > 0 -> id' = id & turn' = turn # (G F P0.location = wait) -> (G F P0.location = cs) Synchronous component Process module # int_type = msat_get_integer_type(menv) # loc, x_loc = self._symb("l", int_type) # l = idle & x = 0 # bound l # invars # bound l # invars # (l = idle) -> l' = req & id = 0 & x' = 0 & id' = id # (l = req) -> l' = wait & x <= k & x' = 0 & id' = pid # (l = wait) -> (l' = idle | l' = cs) # (l = wait & l' = idle) -> x' = 0 & id' = id & x > k & id != pid # (l = wait & l' = cs) -> x' = x & id' = id & x > k & id = pid # (l = cs) -> l' = idle & x' = x & id' = 0 | 2.420731 | 2 |
token_management_system/token_manager/models.py | pawanvirsingh/token_management | 0 | 6624504 | from datetime import datetime
import uuid

from dateutil.relativedelta import relativedelta
from django.db import models
from django.db import transaction
from django.db.models import CASCADE
class BaseModel(models.Model):
    """Abstract base model: UUID primary key plus created/modified stamps."""
    id = models.UUIDField(primary_key=True, verbose_name='id', default=uuid.uuid4, editable=False)
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    modified = models.DateTimeField(auto_now=True)  # refreshed on every save()
    class Meta:
        abstract = True
class Pool(BaseModel):
    """A pool grouping tokens (see Token.pool)."""
    # Presumably the target number of tokens kept in this pool -- confirm.
    pool_size = models.PositiveIntegerField(default=5)
class Token(BaseModel):
    """A claimable token with a 60-second booking lease."""
    FREE = 'F'
    BOOKED = 'B'
    EXPIRED = 'E'
    STATUS_CHOICES = [
        (FREE, 'FREE'), (BOOKED, 'BOOKED'), (EXPIRED, 'EXPIRED'),
    ]
    # Seconds a token stays booked/alive before it may be reclaimed.
    LEASE_SECONDS = 60
    expires_at = models.DateTimeField(null=True)
    status = models.CharField(choices=STATUS_CHOICES, default=FREE, max_length=1)
    pool = models.ForeignKey(Pool, on_delete=CASCADE, null=True, blank=True)

    def unblock_token(self):
        """Return this token to the free pool and clear its expiry."""
        self.status = self.FREE
        self.expires_at = None
        self.save()

    def mark_token_alive(self):
        """Mark this token FREE and extend its expiry by one lease period."""
        self.status = self.FREE
        # NOTE(review): naive local time; if the project sets USE_TZ,
        # django.utils.timezone.now() is likely intended -- confirm.
        self.expires_at = datetime.now() + relativedelta(seconds=self.LEASE_SECONDS)
        self.save()

    @classmethod
    def assign(cls):
        """Atomically book one FREE token and return its id (None if none free).

        BUG FIX: the original filtered then booked without locking, so two
        concurrent callers could both read the same FREE token and book it
        twice.  select_for_update(skip_locked=True) inside a transaction
        makes the claim atomic; skip_locked lets concurrent callers take
        different tokens instead of blocking on the same row.
        """
        with transaction.atomic():
            token = (cls.objects.select_for_update(skip_locked=True)
                     .filter(status=cls.FREE)
                     .first())
            if token is None:
                return None  # matches original implicit-None behaviour
            token.expires_at = datetime.now() + relativedelta(seconds=cls.LEASE_SECONDS)
            token.status = cls.BOOKED
            token.save()
            return token.id
| from datetime import datetime
from dateutil.relativedelta import relativedelta
import uuid
from django.db import models
from django.db.models import CASCADE
class BaseModel(models.Model):
    """Abstract base model: UUID primary key plus created/modified stamps."""
    id = models.UUIDField(primary_key=True, verbose_name='id', default=uuid.uuid4, editable=False)
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    modified = models.DateTimeField(auto_now=True)  # refreshed on every save()
    class Meta:
        abstract = True
class Pool(BaseModel):
    """A pool grouping tokens (see Token.pool)."""
    # Presumably the target number of tokens kept in this pool -- confirm.
    pool_size = models.PositiveIntegerField(default=5)
class Token(BaseModel):
    """A claimable token with a 60-second booking lease."""
    FREE = 'F'
    BOOKED = 'B'
    EXPIRED = 'E'
    STATUS_CHOICES = [
        (FREE, 'FREE'), (BOOKED, 'BOOKED'), (EXPIRED, 'EXPIRED'),
    ]
    expires_at = models.DateTimeField(null=True)
    status = models.CharField(choices=STATUS_CHOICES, default='F', max_length=1)
    pool = models.ForeignKey(Pool, on_delete=CASCADE, null=True, blank=True)
    def unblock_token(self):
        """Return this token to the free pool and clear its expiry."""
        self.status = self.FREE
        self.expires_at = None
        self.save()
    def mark_token_alive(self):
        """Mark this token FREE and extend its expiry by 60 seconds."""
        # NOTE(review): naive local time; if USE_TZ is set,
        # django.utils.timezone.now() is likely intended -- confirm.
        self.expires_at = datetime.now() + relativedelta(seconds=60)
        self.status = self.FREE
        self.save()
    @classmethod
    def assign(cls):
        """Book one FREE token and return its id; returns None if none free.

        NOTE(review): check-then-act without row locking -- two concurrent
        callers can book the same token.  Consider transaction.atomic()
        with select_for_update(skip_locked=True).
        """
        tokens = Token.objects.filter(status=Token.FREE)
        if tokens.exists():
            expires_at = datetime.now() + relativedelta(seconds=60)
            token = tokens.first()
            token.expires_at = expires_at
            token.status = Token.BOOKED
            token.save()
            return token.id
| none | 1 | 2.10592 | 2 | |
pyscope/submission.py | clusterhack/python-autograde | 1 | 6624505 | <reponame>clusterhack/python-autograde<filename>pyscope/submission.py
# (c) 2022- <NAME> <<EMAIL>>
#
# This file is released under the MIT License:
# https://opensource.org/licenses/MIT
# This software is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied.
import shutil
from typing import Dict, List, Literal, Optional, Tuple, Union
import zipfile as zipf
import os
import os.path
from pathspec import PathSpec
from .util import StrOrPath, validate_zip, ensure_directory
_NAMED_GLOBS: Dict[str, PathSpec] = {
'default': PathSpec.from_lines('gitwildmatch', ['/*.py', '!test*.py', '!.*']),
'recursive': PathSpec.from_lines('gitwildmatch', ['*.py', '!/tests/*', '!/util/*', '!test*.py', '!.*']),
}
# TODO Add some msg logging facility (that gets included in Gradescope's result.json)
def _prepare_zip(dir: str, glob: PathSpec) -> Optional[List[str]]:
    """Prepare a single-zipfile submission.

    Expects *dir* (an existing directory) to contain exactly one file, a
    zipfile whose members all live under a single top-level folder.  Members
    matching *glob* (after stripping that folder prefix) are extracted into
    *dir* and their prefix-relative names returned.  Returns None when this
    preparer does not apply or extraction fails, so the caller can try the
    next preparer; on failure any partially extracted files are removed.
    """
    # 1. Check that dir contains a single file that is a zipfile
    dir_files = os.listdir(dir)
    if not dir_files:
        return None  # Nothing submitted
    # BUG FIX: this used to test `len(dir_files) > 0`, which is always true
    # once we know the directory is non-empty, so the function rejected
    # EVERY zip submission unconditionally.  The intent is "more than one
    # file is extraneous".
    if len(dir_files) > 1:
        return None  # Extraneous files (should be just a zipfile)
    zip_file = os.path.join(dir, dir_files[0])
    if not zipf.is_zipfile(zip_file):
        return None  # Single file, but not a zipfile
    with zipf.ZipFile(zip_file, 'r') as zfp:
        # 2. Do some basic checks on zipfile contents
        if not validate_zip(zfp):
            return None  # Zipfile contents may be dangerous
        # 3. Find common path prefix of all files in zipfile
        zip_namelist = zfp.namelist()
        prefix = os.path.commonpath(zip_namelist)
        if not prefix or prefix == '/':
            return None  # Zipfile does not contain a single top-level directory
        # commonpath() strips any trailing separator, so re-add one before
        # using len(prefix) to strip it from member names below.
        # BUG FIX: the original tested os.pathsep (the PATH-list separator,
        # ':'/';'), not a path separator; zip member names always use '/'.
        if not prefix.endswith('/'):
            prefix += '/'
        # 4. Extract matching files (into dir) and add their names to returned list
        files: List[str] = []
        try:
            match: str  # pathspec lib has no type annotations
            # Glob is applied to member names with the prefix stripped;
            # the walrus filter also drops the empty prefix entry itself.
            for match in glob.match_files(ntrim for n in zip_namelist if (ntrim := n[len(prefix):])):
                # We need prefix-stripped destinations, so zfp.extract* is unusable.
                # Zip member names use '/', so plain concatenation (not
                # os.path.join, which uses os.sep) rebuilds the member name.
                zinfo = zfp.getinfo(prefix + match)
                if zinfo.is_dir():
                    continue  # No reason to create (potentially blank) dirs separately
                dest_path = os.path.normpath(os.path.join(dir, match))
                ensure_directory(dest_path)
                with zfp.open(zinfo, 'r') as fsrc, open(dest_path, 'wb') as fdst:
                    # Appended before the copy so a failed copy is still
                    # cleaned up by the rollback below.
                    files.append(match)
                    shutil.copyfileobj(fsrc, fdst)
        except (zipf.BadZipFile, KeyError, ValueError, IOError):
            # Roll back: delete everything already extracted.
            for fn in files:
                os.unlink(os.path.join(dir, fn))
            return None
    return files
def _prepare_files(dir: str, glob: PathSpec) -> Optional[List[str]]:
    """Prepare a loose-files (or Gradescope-pre-unzipped) submission.

    Flattens any chain of single-subdirectory wrappers up into *dir*, then
    returns the glob-matching filenames relative to *dir*, or None when
    nothing matches.
    """
    # Assumes dir is existing directory
    # 1a. Figure out appropriate top-level directory
    # (GradeScope unzips zipfile uploads, with no way to prevent, so this is necessary)
    # Heuristic to find path prefix: while directory contains just one subdir, keep descending
    prefix_parts = []
    # TODO? Good grief, is this loop fugly.. when time, rewrite after defining
    # aux function (or generator?) get_only_subdir_or_none(..) instead..
    # XXX Also, list() is a bit dangerous, in principle, but.. eh, will just hose Gradescope VM
    # If a student is clever enough to construct tiny zip with huge dirs (or Gradescope careless
    # enough to not set appropriate file upload size limits), then.. so be it.
    while (
        len(dirents := list(os.scandir(os.path.join(dir, *prefix_parts)))) == 1 and
        (subdirent := dirents[0]).is_dir()
    ):
        prefix_parts.append(subdirent.name)
    # 1b. If the actual submission workspace is within subfolder(s), move everything
    # up to the actual dir (arg), i.e., our "canonical form" for submission dir.
    # When the loop above exits, `dirents` holds the entries of the deepest
    # single-subdir directory (the final, failing condition evaluation), so
    # those are exactly the workspace entries to move up.
    if prefix_parts:
        # dirents has exactly what we need to move up
        for de in dirents:
            shutil.move(de.path, dir)
        # XXX It's (probably?) ok to not rmtree the empty subdir chain
    # 2. Find all matching filenames in the submission directory
    files: List[str] = list(glob.match_tree(dir))
    # 3. If empty, return None instead (to indicate failure)
    if not files:
        return None
    return files
class StudentSubmission:
    """
    Encapsulates a student submission on Gradescope, attempting to provide
    unified handling of multiple submission options:
    First, students may submit a single zipfile with all their files
    contained within a *single* top-level folder.
    Second, students may submit multiple individual files (we don't use modules for
    any homeworks, so that's possible).
    """
    # General (i.e., assignment-level) parameters
    _dir: str  # absolute path of the Gradescope submission directory
    _glob: PathSpec  # pattern selecting which submitted files count
    # Matching files for particular submission; set by .prepare()
    _files: Optional[Tuple[str,...]]  # Should be relative to _dir
    def __init__(
        self,
        gradescope_submission_dir: StrOrPath = '/autograder/submission',
        glob: Union[Literal['default'], Literal['recursive'], PathSpec] = 'default',
    ):
        self._dir = os.fspath(gradescope_submission_dir)
        self._files = None
        if not os.path.isdir(self._dir):
            raise ValueError(f'Submission path {self._dir} is not existing directory')
        if not os.path.isabs(self._dir):  # Play it safe..
            raise ValueError(f'Submission path {self._dir} is not absolute')
        if isinstance(glob, str):
            # Named preset ('default' or 'recursive') -> concrete PathSpec.
            glob = _NAMED_GLOBS[glob]
        self._glob = glob
    @property
    def dir(self) -> str:
        """Absolute submission directory path."""
        return self._dir
    @property
    def files(self) -> Optional[Tuple[str,...]]:
        """Prepared filenames relative to dir, or None before prepare()."""
        return self._files
    def prepare(self) -> bool:
        """
        Validate and (if necessary) prepare submission files (e.g., extract from zipfile).
        Returns True if preparation was successful. If unsuccessful, submission directory
        contents should not be changed.
        Validation: Checks student files conform to (subset of) submission requirements.
        Preparation: Transform student files to "canonical form" (a list of .py files, which
        should be found in the submission directory (self._dir), and accessible as paths
        relative to self._dir and stored in self._files.
        Preparation may involve, e.g., extracting files from a zipfile (possibly stripping
        a common path prefix).
        """
        # Try the zipfile preparer first, then the loose-files preparer;
        # the first one that returns a file list wins.
        for prep_func in (_prepare_zip, _prepare_files):
            files = prep_func(self.dir, self._glob)
            if files is not None:
                self._files = tuple(files)
                return True
        return False
    def install(self, dest_dir: StrOrPath = "/autograder/source/assignment") -> None:
        """Copy the prepared files into dest_dir, preserving relative paths."""
        if self.files is None:
            raise RuntimeError("Internal error: submission files not prepared?")
        dest_dir = os.fspath(dest_dir)
        for fname in self.files:
            src = os.path.join(self.dir, fname)
            dst = os.path.join(dest_dir, fname)
            # NOTE(review): assumes ensure_directory(dst) creates dst's
            # parent directory -- confirm against util.ensure_directory.
            ensure_directory(dst)
            shutil.copyfile(src, dst)
| # (c) 2022- <NAME> <<EMAIL>>
#
# This file is released under the MIT License:
# https://opensource.org/licenses/MIT
# This software is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied.
import shutil
from typing import Dict, List, Literal, Optional, Tuple, Union
import zipfile as zipf
import os
import os.path
from pathspec import PathSpec
from .util import StrOrPath, validate_zip, ensure_directory
_NAMED_GLOBS: Dict[str, PathSpec] = {
'default': PathSpec.from_lines('gitwildmatch', ['/*.py', '!test*.py', '!.*']),
'recursive': PathSpec.from_lines('gitwildmatch', ['*.py', '!/tests/*', '!/util/*', '!test*.py', '!.*']),
}
# TODO Add some msg logging facility (that gets included in Gradescope's result.json)
def _prepare_zip(dir: str, glob: PathSpec) -> Optional[List[str]]:
    """Prepare a single-zipfile submission.

    Expects *dir* (an existing directory) to contain exactly one file, a
    zipfile whose members all live under a single top-level folder.  Members
    matching *glob* (after stripping that folder prefix) are extracted into
    *dir* and their prefix-relative names returned.  Returns None when this
    preparer does not apply or extraction fails, so the caller can try the
    next preparer; on failure any partially extracted files are removed.
    """
    # 1. Check that dir contains a single file that is a zipfile
    dir_files = os.listdir(dir)
    if not dir_files:
        return None  # Nothing submitted
    # BUG FIX: this used to test `len(dir_files) > 0`, which is always true
    # once we know the directory is non-empty, so the function rejected
    # EVERY zip submission unconditionally.  The intent is "more than one
    # file is extraneous".
    if len(dir_files) > 1:
        return None  # Extraneous files (should be just a zipfile)
    zip_file = os.path.join(dir, dir_files[0])
    if not zipf.is_zipfile(zip_file):
        return None  # Single file, but not a zipfile
    with zipf.ZipFile(zip_file, 'r') as zfp:
        # 2. Do some basic checks on zipfile contents
        if not validate_zip(zfp):
            return None  # Zipfile contents may be dangerous
        # 3. Find common path prefix of all files in zipfile
        zip_namelist = zfp.namelist()
        prefix = os.path.commonpath(zip_namelist)
        if not prefix or prefix == '/':
            return None  # Zipfile does not contain a single top-level directory
        # commonpath() strips any trailing separator, so re-add one before
        # using len(prefix) to strip it from member names below.
        # BUG FIX: the original tested os.pathsep (the PATH-list separator,
        # ':'/';'), not a path separator; zip member names always use '/'.
        if not prefix.endswith('/'):
            prefix += '/'
        # 4. Extract matching files (into dir) and add their names to returned list
        files: List[str] = []
        try:
            match: str  # pathspec lib has no type annotations
            # Glob is applied to member names with the prefix stripped;
            # the walrus filter also drops the empty prefix entry itself.
            for match in glob.match_files(ntrim for n in zip_namelist if (ntrim := n[len(prefix):])):
                # We need prefix-stripped destinations, so zfp.extract* is unusable.
                # Zip member names use '/', so plain concatenation (not
                # os.path.join, which uses os.sep) rebuilds the member name.
                zinfo = zfp.getinfo(prefix + match)
                if zinfo.is_dir():
                    continue  # No reason to create (potentially blank) dirs separately
                dest_path = os.path.normpath(os.path.join(dir, match))
                ensure_directory(dest_path)
                with zfp.open(zinfo, 'r') as fsrc, open(dest_path, 'wb') as fdst:
                    # Appended before the copy so a failed copy is still
                    # cleaned up by the rollback below.
                    files.append(match)
                    shutil.copyfileobj(fsrc, fdst)
        except (zipf.BadZipFile, KeyError, ValueError, IOError):
            # Roll back: delete everything already extracted.
            for fn in files:
                os.unlink(os.path.join(dir, fn))
            return None
    return files
def _prepare_files(dir: str, glob: PathSpec) -> Optional[List[str]]:
    """Prepare a loose-files (or Gradescope-pre-unzipped) submission.

    Flattens any chain of single-subdirectory wrappers up into *dir*, then
    returns the glob-matching filenames relative to *dir*, or None when
    nothing matches.
    """
    # Assumes dir is existing directory
    # 1a. Figure out appropriate top-level directory
    # (GradeScope unzips zipfile uploads, with no way to prevent, so this is necessary)
    # Heuristic to find path prefix: while directory contains just one subdir, keep descending
    prefix_parts = []
    # TODO? Good grief, is this loop fugly.. when time, rewrite after defining
    # aux function (or generator?) get_only_subdir_or_none(..) instead..
    # XXX Also, list() is a bit dangerous, in principle, but.. eh, will just hose Gradescope VM
    # If a student is clever enough to construct tiny zip with huge dirs (or Gradescope careless
    # enough to not set appropriate file upload size limits), then.. so be it.
    while (
        len(dirents := list(os.scandir(os.path.join(dir, *prefix_parts)))) == 1 and
        (subdirent := dirents[0]).is_dir()
    ):
        prefix_parts.append(subdirent.name)
    # 1b. If the actual submission workspace is within subfolder(s), move everything
    # up to the actual dir (arg), i.e., our "canonical form" for submission dir.
    # When the loop above exits, `dirents` holds the entries of the deepest
    # single-subdir directory (the final, failing condition evaluation), so
    # those are exactly the workspace entries to move up.
    if prefix_parts:
        # dirents has exactly what we need to move up
        for de in dirents:
            shutil.move(de.path, dir)
        # XXX It's (probably?) ok to not rmtree the empty subdir chain
    # 2. Find all matching filenames in the submission directory
    files: List[str] = list(glob.match_tree(dir))
    # 3. If empty, return None instead (to indicate failure)
    if not files:
        return None
    return files
class StudentSubmission:
    """
    Encapsulates a student submission on Gradescope, attempting to provide
    unified handling of multiple submission options:
    First, students may submit a single zipfile with all their files
    contained within a *single* top-level folder.
    Second, students may submit multiple individual files (we don't use modules for
    any homeworks, so that's possible).
    """
    # General (i.e., assignment-level) parameters
    _dir: str  # absolute path of the Gradescope submission directory
    _glob: PathSpec  # pattern selecting which submitted files count
    # Matching files for particular submission; set by .prepare()
    _files: Optional[Tuple[str,...]]  # Should be relative to _dir
    def __init__(
        self,
        gradescope_submission_dir: StrOrPath = '/autograder/submission',
        glob: Union[Literal['default'], Literal['recursive'], PathSpec] = 'default',
    ):
        self._dir = os.fspath(gradescope_submission_dir)
        self._files = None
        if not os.path.isdir(self._dir):
            raise ValueError(f'Submission path {self._dir} is not existing directory')
        if not os.path.isabs(self._dir):  # Play it safe..
            raise ValueError(f'Submission path {self._dir} is not absolute')
        if isinstance(glob, str):
            # Named preset ('default' or 'recursive') -> concrete PathSpec.
            glob = _NAMED_GLOBS[glob]
        self._glob = glob
    @property
    def dir(self) -> str:
        """Absolute submission directory path."""
        return self._dir
    @property
    def files(self) -> Optional[Tuple[str,...]]:
        """Prepared filenames relative to dir, or None before prepare()."""
        return self._files
    def prepare(self) -> bool:
        """
        Validate and (if necessary) prepare submission files (e.g., extract from zipfile).
        Returns True if preparation was successful. If unsuccessful, submission directory
        contents should not be changed.
        Validation: Checks student files conform to (subset of) submission requirements.
        Preparation: Transform student files to "canonical form" (a list of .py files, which
        should be found in the submission directory (self._dir), and accessible as paths
        relative to self._dir and stored in self._files.
        Preparation may involve, e.g., extracting files from a zipfile (possibly stripping
        a common path prefix).
        """
        # Try the zipfile preparer first, then the loose-files preparer;
        # the first one that returns a file list wins.
        for prep_func in (_prepare_zip, _prepare_files):
            files = prep_func(self.dir, self._glob)
            if files is not None:
                self._files = tuple(files)
                return True
        return False
    def install(self, dest_dir: StrOrPath = "/autograder/source/assignment") -> None:
        """Copy the prepared files into dest_dir, preserving relative paths."""
        if self.files is None:
            raise RuntimeError("Internal error: submission files not prepared?")
        dest_dir = os.fspath(dest_dir)
        for fname in self.files:
            src = os.path.join(self.dir, fname)
            dst = os.path.join(dest_dir, fname)
            # NOTE(review): assumes ensure_directory(dst) creates dst's
            # parent directory -- confirm against util.ensure_directory.
            ensure_directory(dst)
            shutil.copyfile(src, dst)
# XXX Also, list() is a bit dangerous, in principle, but.. eh, will just hose Gradescope VM # If a student is clever enough to construct tiny zip with huge dirs (or Gradescope careless # enough to not set appropriate file upload size limits), then.. so be it. # 1b. If the actual submission workspace is within subfolder(s), move everything # up to the actual dir (arg), i.e., our "canonical form" for submission dir # dirents has exactly what we need to move up # XXX It's (probably?) ok to not rmtree the empty subdir chain # 2. Find all matching filenames in the submission directory # 3. If empty, return None instead (to indicate failure) Encapsulates a student submission on Gradescope, attempting to provide unified handling of multiple submission options: First, students may submit a single zipfile with all their files contained within a *single* top-level folder. Second, students may submit multiple individual files (we don't use modules for any homeworks, so that's possible). # General (i.e., assignment-level) parameters # Matching files for particular submission; set by .prepare() # Should be relative to _dir # Play it safe.. Validate and (if necessary) prepare submission files (e.g., extract from zipfile). Returns True if preparation was successful. If unsuccessful, submission directory contents should not be changed. Validation: Checks student files conform to (subset of) submission requirements. Preparation: Transform student files to "canonical form" (a list of .py files, which should be found in the submission directory (self._dir), and accessible as paths relative to self._dir and stored in self._files. Preparation may involve, e.g., extracting files from a zipfile (possibly stripping a common path prefix). | 2.280663 | 2 |
adv/marth.py | pfleg/dl | 0 | 6624506 | from core.advbase import *
from slot.d import *
from slot.a import *
def module():
    # Entry point used by the simulator to discover this adventurer class.
    return Marth
class Marth(Adv):
    """Combat-simulator configuration for the adventurer Marth."""
    comment = 'last boost once at start (team DPS not considered)'
    a1 = ('prep',100)  # ability 1: 'prep' 100 -- presumably full skill gauge at start; confirm
    a3 = ('cc',0.13,'hit10')  # ability 3: +13% crit chance under the 'hit10' condition -- confirm
    conf = {}
    conf['slots.a'] = The_Shining_Overlord()+Me_and_My_Bestie()
    # Action control list: skill/dragon priority rules for the simulator.
    conf['acl'] = """
        `dragon.act('c3 s s end'),s=2
        `s3, not self.s3_buff
        `s2, self.afflics.burn.get()
        `s1, fsc
        `s4, fsc
        `fs, x=3
        """
    coab = ['Blade', 'Wand', 'Joe']
    share = ['Kleimann']
    def d_coabs(self):
        # Shorter fights swap Joe's co-ability for Gala Sarisse's.
        if self.duration <= 60:
            self.coab = ['Blade','Wand','Gala_Sarisse']
    def init(self):
        # s2 buff-rotation phase counter (cycles 0/1/2).
        self.phase['s2'] = 0
    def s1_proc(self, e):
        # S1 applies burn (base 120, 97% chance).
        self.afflics.burn(e.name,120,0.97)
    def s2_proc(self, e):
        # S2 damage with a burn-killer modifier active.
        with KillerModifier('s2_killer', 'hit', 1.0, ['burn']):
            self.dmg_make(e.name, 8.99)
        self.phase[e.name] += 1
        # NOTE(review): phase is incremented before this check, so the value
        # here is always 1, 2 or 3 -- the == 0 Selfbuff branch looks
        # unreachable.  Confirm the intended buff rotation.
        if self.phase[e.name] == 0:
            Selfbuff(e.name,0.1,10).on()
        elif self.phase[e.name] == 1:
            Teambuff(e.name,0.1,10).on()
        elif self.phase[e.name] == 2:
            Teambuff(e.name,0.1,10).on()
            Spdbuff(f'{e.name}_spd',0.3,10, wide='team').on()
        self.phase[e.name] %= 3
if __name__ == '__main__':
from core.simulate import test_with_argv
test_with_argv(None, *sys.argv) | from core.advbase import *
from slot.d import *
from slot.a import *
def module():
    # Entry point used by the simulator to discover this adventurer class.
    return Marth
class Marth(Adv):
    """Combat-simulator configuration for the adventurer Marth."""
    comment = 'last boost once at start (team DPS not considered)'
    a1 = ('prep',100)  # ability 1: 'prep' 100 -- presumably full skill gauge at start; confirm
    a3 = ('cc',0.13,'hit10')  # ability 3: +13% crit chance under the 'hit10' condition -- confirm
    conf = {}
    conf['slots.a'] = The_Shining_Overlord()+Me_and_My_Bestie()
    # Action control list: skill/dragon priority rules for the simulator.
    conf['acl'] = """
        `dragon.act('c3 s s end'),s=2
        `s3, not self.s3_buff
        `s2, self.afflics.burn.get()
        `s1, fsc
        `s4, fsc
        `fs, x=3
        """
    coab = ['Blade', 'Wand', 'Joe']
    share = ['Kleimann']
    def d_coabs(self):
        # Shorter fights swap Joe's co-ability for Gala Sarisse's.
        if self.duration <= 60:
            self.coab = ['Blade','Wand','Gala_Sarisse']
    def init(self):
        # s2 buff-rotation phase counter (cycles 0/1/2).
        self.phase['s2'] = 0
    def s1_proc(self, e):
        # S1 applies burn (base 120, 97% chance).
        self.afflics.burn(e.name,120,0.97)
    def s2_proc(self, e):
        # S2 damage with a burn-killer modifier active.
        with KillerModifier('s2_killer', 'hit', 1.0, ['burn']):
            self.dmg_make(e.name, 8.99)
        self.phase[e.name] += 1
        # NOTE(review): phase is incremented before this check, so the value
        # here is always 1, 2 or 3 -- the == 0 Selfbuff branch looks
        # unreachable.  Confirm the intended buff rotation.
        if self.phase[e.name] == 0:
            Selfbuff(e.name,0.1,10).on()
        elif self.phase[e.name] == 1:
            Teambuff(e.name,0.1,10).on()
        elif self.phase[e.name] == 2:
            Teambuff(e.name,0.1,10).on()
            Spdbuff(f'{e.name}_spd',0.3,10, wide='team').on()
        self.phase[e.name] %= 3
if __name__ == '__main__':
from core.simulate import test_with_argv
test_with_argv(None, *sys.argv) | en | 0.155663 | `dragon.act('c3 s s end'),s=2 `s3, not self.s3_buff `s2, self.afflics.burn.get() `s1, fsc `s4, fsc `fs, x=3 | 2.113624 | 2 |
chapter01/00/code_test.py | riseshia/nlp-100-knock | 0 | 6624507 | <gh_stars>0
import unittest
from code import reverse
class CodeTest(unittest.TestCase):
    """Tests for the reverse() helper from the local `code` module."""
    def test_reverse(self):
        # 'stressed' reversed character-by-character is 'desserts'.
        self.assertEqual('desserts',
                         reverse('stressed'))
if __name__ == '__main__':
    # Allow running this test module directly: python code_test.py
    unittest.main()
| import unittest
from code import reverse
class CodeTest(unittest.TestCase):
    """Tests for the reverse() helper from the local `code` module."""
    def test_reverse(self):
        # 'stressed' reversed character-by-character is 'desserts'.
        self.assertEqual('desserts',
                         reverse('stressed'))
if __name__ == '__main__':
    # Allow running this test module directly: python code_test.py
    unittest.main()
happy_adventure/pygame_api/pysurface.py | vieirafrancisco/happy-adventure | 3 | 6624508 | <filename>happy_adventure/pygame_api/pysurface.py
import pygame
from pygame import Surface,surfarray
from .pyevent import Events
class Canvas(Surface, Events):
    """A pygame Surface that owns child canvases and dispatches pygame events.

    A Canvas either builds a fresh surface from ``surf_kwargs``
    (width/height/flags/depth) or clones the pixels of an existing
    ``surface``.  ``master`` is the surface this canvas blits itself onto;
    it defaults to the current display surface.
    """

    def __init__(self, master=None, surface=None, **surf_kwargs):
        self.children = []
        if not surface:
            self._build_surface(**surf_kwargs)
        else:
            surf_rect = surface.get_rect()
            # BUG FIX: the unbound Surface.__init__ call was missing `self`,
            # so this instance was never initialised; additionally `masks`
            # was given a pygame.mask.Mask (from_surface) where Surface
            # expects the RGBA colour-mask 4-tuple (surface.get_masks()).
            Surface.__init__(self, surf_rect.size, masks=surface.get_masks())
            # NOTE(review): array2d copies one mapped int per pixel, so
            # per-pixel alpha of the source surface may be lost -- confirm.
            array = surfarray.array2d(surface)
            surfarray.blit_array(self, array)
        if not master:
            self.master = pygame.display.get_surface()
        else:
            self.master = master
        Events.__init__(self)

    def _build_surface(self, width, height, flags=pygame.SRCALPHA, depth=32):
        # Initialise the underlying Surface for the "no source surface" case.
        Surface.__init__(self, (width, height), flags, depth)

    def add_child(self, canvas):
        """Register a child canvas so it is updated/dispatched with this one."""
        self.children.append(canvas)

    def pack(self, posX, posY):
        """Place this canvas at (posX, posY) on its master and register it."""
        self.posX, self.posY = posX, posY
        if isinstance(self.master, Canvas):
            self.master.add_child(self)
        # NOTE(review): when master is the raw display surface the canvas is
        # not auto-registered anywhere; the caller must drive update().

    def update(self):
        """Update children depth-first, then redraw self if anything changed."""
        for child in self.children:
            child.update()
        if self.has_change():
            self.draw()

    def draw(self):
        """Blit this canvas onto its master at its packed position."""
        self.master.blit(self, (self.posX, self.posY))

    def has_change(self):
        # Always redraw; subclasses may override with real dirty tracking.
        return True

    def event_call(self, event):
        """Dispatch *event* to this canvas's handler, then to all children."""
        if event.type in self.dict_events:
            self.dict_events[event.type](event)
        for child in self.children:
            child.event_call(event)

    @property
    def array2d(self):
        """2-D mapped-pixel array copy of this canvas (pygame.surfarray.array2d)."""
        return surfarray.array2d(self)

    def blit_array(self, array):
        """Overwrite this canvas's pixels from a 2-D array."""
        surfarray.blit_array(self, array)
import pygame
from pygame import Surface,surfarray
from .pyevent import Events
class Canvas(Surface, Events):
    """A pygame Surface that owns child canvases and dispatches pygame events.

    A Canvas either builds a fresh surface from ``surf_kwargs``
    (width/height/flags/depth) or clones the pixels of an existing
    ``surface``.  ``master`` is the surface this canvas blits itself onto;
    it defaults to the current display surface.
    """

    def __init__(self, master=None, surface=None, **surf_kwargs):
        self.children = []
        if not surface:
            self._build_surface(**surf_kwargs)
        else:
            surf_rect = surface.get_rect()
            # BUG FIX: the unbound Surface.__init__ call was missing `self`,
            # so this instance was never initialised; additionally `masks`
            # was given a pygame.mask.Mask (from_surface) where Surface
            # expects the RGBA colour-mask 4-tuple (surface.get_masks()).
            Surface.__init__(self, surf_rect.size, masks=surface.get_masks())
            # NOTE(review): array2d copies one mapped int per pixel, so
            # per-pixel alpha of the source surface may be lost -- confirm.
            array = surfarray.array2d(surface)
            surfarray.blit_array(self, array)
        if not master:
            self.master = pygame.display.get_surface()
        else:
            self.master = master
        Events.__init__(self)

    def _build_surface(self, width, height, flags=pygame.SRCALPHA, depth=32):
        # Initialise the underlying Surface for the "no source surface" case.
        Surface.__init__(self, (width, height), flags, depth)

    def add_child(self, canvas):
        """Register a child canvas so it is updated/dispatched with this one."""
        self.children.append(canvas)

    def pack(self, posX, posY):
        """Place this canvas at (posX, posY) on its master and register it."""
        self.posX, self.posY = posX, posY
        if isinstance(self.master, Canvas):
            self.master.add_child(self)
        # NOTE(review): when master is the raw display surface the canvas is
        # not auto-registered anywhere; the caller must drive update().

    def update(self):
        """Update children depth-first, then redraw self if anything changed."""
        for child in self.children:
            child.update()
        if self.has_change():
            self.draw()

    def draw(self):
        """Blit this canvas onto its master at its packed position."""
        self.master.blit(self, (self.posX, self.posY))

    def has_change(self):
        # Always redraw; subclasses may override with real dirty tracking.
        return True

    def event_call(self, event):
        """Dispatch *event* to this canvas's handler, then to all children."""
        if event.type in self.dict_events:
            self.dict_events[event.type](event)
        for child in self.children:
            child.event_call(event)

    @property
    def array2d(self):
        """2-D mapped-pixel array copy of this canvas (pygame.surfarray.array2d)."""
        return surfarray.array2d(self)

    def blit_array(self, array):
        """Overwrite this canvas's pixels from a 2-D array."""
        surfarray.blit_array(self, array)
src/download_json.py | aws-samples/amazon-sagemaker-predict-accessibility | 12 | 6624509 | import sys
from datetime import date
import requests
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
from pyspark.sql.functions import col, expr, when, round
from pyspark.sql.types import LongType
# --- Glue job bootstrap ---------------------------------------------------
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
# Build a Hive-style partition suffix (year=YYYY/month=M/day=D/) for today.
# NOTE(review): month/day are not zero-padded — partitions sort lexically.
current_date=date.today()
path="year="+str(current_date.year)+"/month="+str(current_date.month)+"/day="+str(current_date.day)+"/"
# Download the NYC playgrounds JSON, wrap it in a one-element RDD so the
# DataFrame JSON reader can parse it, and land it as a single Parquet file.
playgrounds_raw_dir="s3://<<userid>>-raw/playgrounds/" +path # replace with your userid here
playgrounds_url='https://www.nycgovparks.org/bigapps/DPR_Playgrounds_001.json'
playgroundsRDD = sc.parallelize([ requests.get(playgrounds_url).text])
playgrounds_df = spark.read.json(playgroundsRDD)
playgrounds_df.coalesce(1).write.format("parquet").mode("overwrite").save(playgrounds_raw_dir)
# Same pipeline for the NYC parks dataset.
parks_raw_dir="s3://<<userid>>-raw/parks/" +path ## replace with your userid here
parks_url='https://www.nycgovparks.org/bigapps/DPR_Parks_001.json'
parksRDD = sc.parallelize([ requests.get(parks_url).text])
parks_df = spark.read.json(parksRDD)
parks_df.coalesce(1).write.format("parquet").mode("overwrite").save(parks_raw_dir)
| import sys
from datetime import date
import requests
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
from pyspark.sql.functions import col, expr, when, round
from pyspark.sql.types import LongType
# Spark/Glue context setup for this ETL job.
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
# Today's run is written under a year=/month=/day= partition path.
current_date=date.today()
path="year="+str(current_date.year)+"/month="+str(current_date.month)+"/day="+str(current_date.day)+"/"
# Fetch the playgrounds JSON over HTTP, parse it via a single-element RDD,
# and write one Parquet file to the raw S3 bucket.
playgrounds_raw_dir="s3://<<userid>>-raw/playgrounds/" +path # replace with your userid here
playgrounds_url='https://www.nycgovparks.org/bigapps/DPR_Playgrounds_001.json'
playgroundsRDD = sc.parallelize([ requests.get(playgrounds_url).text])
playgrounds_df = spark.read.json(playgroundsRDD)
playgrounds_df.coalesce(1).write.format("parquet").mode("overwrite").save(playgrounds_raw_dir)
# Repeat for the parks JSON feed.
parks_raw_dir="s3://<<userid>>-raw/parks/" +path ## replace with your userid here
parks_url='https://www.nycgovparks.org/bigapps/DPR_Parks_001.json'
parksRDD = sc.parallelize([ requests.get(parks_url).text])
parks_df = spark.read.json(parksRDD)
parks_df.coalesce(1).write.format("parquet").mode("overwrite").save(parks_raw_dir)
| en | 0.673372 | # replace with your userid here ## replace with your userid here | 2.366992 | 2 |
office__excel__openpyxl__xlwt/hidden_columns.py | DazEB2/SimplePyScripts | 117 | 6624510 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# SOURCE: http://openpyxl.readthedocs.io/en/stable/usage.html#fold-columns-outline
import openpyxl
# Create a workbook, group columns B..D into a hidden outline, and save it.
wb = openpyxl.Workbook()
ws = wb.get_active_sheet()  # NOTE(review): deprecated accessor — wb.active in current openpyxl
ws.column_dimensions.group('B', 'D', hidden=True)  # collapse the B-D outline
wb.save('excel.xlsx')
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# SOURCE: http://openpyxl.readthedocs.io/en/stable/usage.html#fold-columns-outline
import openpyxl
# Demo: hide columns B through D of a new workbook via a grouped outline.
wb = openpyxl.Workbook()
ws = wb.get_active_sheet()  # NOTE(review): deprecated accessor — wb.active in current openpyxl
ws.column_dimensions.group('B', 'D', hidden=True)
wb.save('excel.xlsx')
| en | 0.34604 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- # SOURCE: http://openpyxl.readthedocs.io/en/stable/usage.html#fold-columns-outline | 2.569902 | 3 |
parser/fase2/team12/src/Start/Start.py | Gabriel-15/tytus | 35 | 6624511 | import sys, os.path
import json
nodo_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\Start\\')
sys.path.append(nodo_dir)
c3d_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\C3D\\')
sys.path.append(c3d_dir)
nodo_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\ENTORNO\\')
sys.path.append(nodo_dir)
storage = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\typeChecker')
sys.path.append(storage)
variables_globales = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..\..')))
sys.path.append(variables_globales)
from VariablesGlobales import *
from prettytable import PrettyTable
from Libraries import Nodo
from Libraries import Database
from Libraries import Table
from Libraries import Use
from Libraries import Type
from Libraries import Select
from Libraries import InsertTable
from Libraries import UnionAll
from Libraries import Union
from Libraries import Intersect
from Libraries import Except
from Libraries import UpdateTable
from Libraries import AlterTable
from Libraries import Index
from Libraries import Procedure
from Libraries import Execute
from Traduccion import *
from Label import *
from Temporal import *
from Entorno import *
from typeChecker.typeChecker import TypeChecker
# Shared TypeChecker instance; Start.crearListaProcedimientos uses it to fetch
# stored procedures (and their C3D) for the active database.
tc = TypeChecker()
# Imports of the classes used by execute/compile below.
class Start(Nodo):
    """Root AST node of the SQL/PLSQL interpreter.

    Holds one child node per parsed statement and dispatches on each
    child's ``nombreNodo``: ``execute`` interprets statements directly,
    while ``compile`` emits three-address code (C3D) into
    ``src/C3D/CompileFile.py``.  Outcomes are accumulated in
    ``self.listaSemanticos`` as ``{"Code", "Message", "Data"}`` dicts.
    """
    def __init__(self, nombreNodo, fila = -1, columna = -1, valor = None):
        """Create a node with name, position (fila/columna) and optional value."""
        Nodo.__init__(self,nombreNodo, fila, columna, valor)
        # Accumulated per-statement results / semantic errors.
        self.listaSemanticos = []
    def addChild(self, node):
        """Append an existing node as a child."""
        self.hijos.append(node)
    def createChild(self, nombreNodo, fila = -1, columna =-1, valor = None):
        """Create a new Start node and append it as a child."""
        nuevo = Start(nombreNodo,fila,columna,valor)
        self.hijos.append(nuevo)
    def createTerminal(self, lexToken):
        """Append a child node built from a PLY lexer token."""
        nuevo = Start(lexToken.type, lexToken.lineno, lexToken.lexpos, lexToken.value)
        self.hijos.append(nuevo)
    def tabular_data(self, encabezados : list, data : list) -> str:
        """Render *data* rows under *encabezados* as an ASCII table string.

        Anonymous "?column?" headers are disambiguated with their index
        (PrettyTable requires unique field names).  Rows whose length does
        not match the header count are silently dropped.
        """
        print(encabezados)
        index = 0
        for i in encabezados:
            if i == "?column?":
                encabezados[index] = "?column?"+str(index)
            index += 1
        x = PrettyTable()
        x.field_names = encabezados
        for item in data:
            if len(item) == len(encabezados):
                x.add_row(item)
        return x.get_string()
    # Left-recursive traversal: children are visited in parse order.
    def execute(self, enviroment):
        """Interpret every child statement, appending results to listaSemanticos."""
        # Fresh global environment for this run; functions are registered first
        # so later statements can call them.
        entornoGlobal = Entorno(None)
        entornoGlobal.Global = entornoGlobal
        entornoGlobal.nombreEntorno = 'Global'
        for hijo in self.hijos:
            if hijo.nombreNodo == 'SENTENCIA_FUNCTION':
                hijo.execute(entornoGlobal)
        #entornoGlobal.recorrerEntorno()
        # Second pass: dispatch each statement by node name.
        for hijo in self.hijos:
            if hijo.nombreNodo == 'CREATE_DATABASE':
                nuevaBase=Database()
                # Receives a json-style result dict
                message = nuevaBase.execute(hijo)
                self.listaSemanticos.append(message)
            elif hijo.nombreNodo == 'SENTENCIA_USE':
                useDB = Use()
                message = useDB.execute(hijo)
                self.listaSemanticos.append(message)
            elif hijo.nombreNodo == 'CREATE_TABLE':
                nuevaTabla = Table()
                res = nuevaTabla.execute(hijo, enviroment)
                # "00000" is the success code; anything else carries an error object.
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
            elif hijo.nombreNodo == 'CREATE_TYPE_ENUM':
                nuevoEnum = Type()
                nuevoEnum.execute(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT':
                hijo.execute(entornoGlobal)
                respuesta = hijo.dataResult
                if respuesta.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(respuesta.encabezados, respuesta.data)})
            elif hijo.nombreNodo == 'E':
                # Bare expression: evaluate and print its type and value.
                hijo.execute(entornoGlobal)
                print("Tipo Expresion: "+str(hijo.tipo.data_type))
                print("Expresion valor: "+str(hijo.valorExpresion))
            elif hijo.nombreNodo == 'SENTENCIA_INSERT':
                nuevoInsert = InsertTable()
                res = nuevoInsert.execute(hijo,enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
            elif hijo.nombreNodo == "SENTENCIA_SHOW":
                self.listaSemanticos.append(hijo.execute(None))
            elif hijo.nombreNodo == "SENTENCIA_ALTER_INDEX":
                self.listaSemanticos.append(hijo.execute(None))
            elif hijo.nombreNodo == "SENTENCIA_DROP":
                self.listaSemanticos.append(hijo.execute(None))
            elif hijo.nombreNodo == "SENTENCIA_DELETE":
                self.listaSemanticos.append(hijo.execute(None))
            elif hijo.nombreNodo == 'SENTENCIA_UNION_ALL':
                nuevoUnionAll = UnionAll()
                resp = nuevoUnionAll.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
            elif hijo.nombreNodo == 'SENTENCIA_UNION':
                nuevoUnion = Union()
                resp = nuevoUnion.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
            elif hijo.nombreNodo == 'SENTENCIA_INTERSECT':
                nuevoIntersect = Intersect()
                resp = nuevoIntersect.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
            elif hijo.nombreNodo == 'SENTENCIA_EXCEPT':
                nuevoExcept = Except()
                resp = nuevoExcept.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
            elif hijo.nombreNodo == 'SENTENCIA_UPDATE':
                nuevoUpdate = UpdateTable()
                res = nuevoUpdate.execute(hijo,enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
            elif hijo.nombreNodo == 'SENTENCIA_ALTER_TABLE':
                nuevoAlterT = AlterTable()
                res = nuevoAlterT.execute(hijo,enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
            elif hijo.nombreNodo == 'CREATE_INDEX' or hijo.nombreNodo == 'CREATE_UNIQUE_INDEX':
                nuevoIndex = Index()
                resp = nuevoIndex.execute(hijo)
        entornoGlobal.recorrerEntorno()
    def compile(self, enviroment):
        """Execute statements and emit their three-address code (C3D).

        Builds ``listaInstrucciones`` (the generated program), prepends the
        fixed header and any stored-procedure C3D, and writes everything to
        ``src/C3D/CompileFile.py`` via crearArchivo().
        """
        global variablesProcedure
        #region Execute (SQL) variable declarations
        entornoGlobal = Entorno(None)
        entornoGlobal.Global = entornoGlobal
        entornoGlobal.nombreEntorno = 'Global'
        #endregion
        #region Pass over FUNCTION statements (registered before anything else)
        for hijo in self.hijos:
            if hijo.nombreNodo == 'SENTENCIA_FUNCTION':
                hijo.execute(entornoGlobal)
        #endregion
        #region Emit the generated execute() helper that pops and runs display[p]
        listaInstrucciones = []
        listaInstrucciones.append("")
        listaInstrucciones.append("")
        listaInstrucciones.append("def execute():")
        listaInstrucciones.append("\tglobal p")
        listaInstrucciones.append("\tp=p-1")
        #listaInstrucciones.append("\tprint(display[p])")
        listaInstrucciones.append("\tresp = run_method(display[p])")
        listaInstrucciones.append("\tresp.execute(None)")
        listaInstrucciones.append("\tp=p+1")
        listaInstrucciones.append("")
        listaInstrucciones.append("")
        #endregion
        #region Execute of non-PLSQL statements and some PLSQL
        # (only EXECUTE emits C3D here, since procedures are created first)
        for hijo in self.hijos:
            if hijo.nombreNodo == 'CREATE_DATABASE':
                nuevaBase=Database()
                message = nuevaBase.execute(hijo)
                self.listaSemanticos.append(message)
                listaInstrucciones += self.compile1(nuevaBase.compile(hijo)).splitlines()
            elif hijo.nombreNodo == 'DECLARACION_VARIABLE':
                print(hijo.compile(entornoGlobal))
            elif hijo.nombreNodo == 'SENTENCIA_USE':
                useDB = Use()
                message = useDB.execute(hijo)
                self.listaSemanticos.append(message)
                listaInstrucciones += self.compile1(useDB.compile(hijo)).splitlines()
            elif hijo.nombreNodo == 'CREATE_TABLE':
                nuevaTabla = Table()
                res = nuevaTabla.execute(hijo, enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
                listaInstrucciones += self.compile1(self.getText()).splitlines()
            elif hijo.nombreNodo == 'CREATE_TYPE_ENUM':
                nuevoEnum = Type()
                nuevoEnum.execute(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT':
                hijo.execute(entornoGlobal)
                respuesta = hijo.dataResult
                if respuesta.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(respuesta.encabezados, respuesta.data)})
                listaInstrucciones += self.compile1(self.getText()).splitlines()
            elif hijo.nombreNodo == 'E':
                hijo.execute(entornoGlobal)
                print("Tipo Expresion: "+str(hijo.tipo.data_type))
                print("Expresion valor: "+str(hijo.valorExpresion))
                listaInstrucciones += self.compile1(hijo.getText()).splitlines()
            elif hijo.nombreNodo == 'SENTENCIA_INSERT':
                nuevoInsert = InsertTable()
                res = nuevoInsert.execute(hijo,enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
                listaInstrucciones += hijo.compile().splitlines()
            elif hijo.nombreNodo == "SENTENCIA_SHOW":
                self.listaSemanticos.append(hijo.execute(None))
                listaInstrucciones += hijo.compile().splitlines()
            elif hijo.nombreNodo == "SENTENCIA_ALTER_INDEX":
                self.listaSemanticos.append(hijo.execute(None))
                listaInstrucciones += hijo.compile().splitlines()
            elif hijo.nombreNodo == "SENTENCIA_DROP":
                self.listaSemanticos.append(hijo.execute(None))
                listaInstrucciones += hijo.compile().splitlines()
            elif hijo.nombreNodo == "SENTENCIA_DELETE":
                self.listaSemanticos.append(hijo.execute(None))
                listaInstrucciones += hijo.compile().splitlines()
            elif hijo.nombreNodo == 'SENTENCIA_UNION_ALL':
                nuevoUnionAll = UnionAll()
                resp = nuevoUnionAll.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
                # Strip the first two ';' separators from the generated code.
                a = nuevoUnionAll.compile(hijo).replace(";"," ",1)
                b = a.replace(";"," ",1)
                listaInstrucciones += b.splitlines()
            elif hijo.nombreNodo == 'SENTENCIA_UNION':
                nuevoUnion = Union()
                resp = nuevoUnion.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
                a = nuevoUnion.compile(hijo).replace(";"," ",1)
                b = a.replace(";"," ",1)
                listaInstrucciones += b.splitlines()
            elif hijo.nombreNodo == 'SENTENCIA_INTERSECT':
                nuevoIntersect = Intersect()
                resp = nuevoIntersect.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
                a = nuevoIntersect.compile(hijo).replace(";"," ",1)
                b = a.replace(";"," ",1)
                listaInstrucciones += b.splitlines()
            elif hijo.nombreNodo == 'SENTENCIA_EXCEPT':
                nuevoExcept = Except()
                resp = nuevoExcept.execute(hijo)
                if resp.data != None:
                    self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
                a = nuevoExcept.compile(hijo).replace(";"," ",1)
                b = a.replace(";"," ",1)
                listaInstrucciones += b.splitlines()
            elif hijo.nombreNodo == 'SENTENCIA_UPDATE':
                nuevoUpdate = UpdateTable()
                res = nuevoUpdate.execute(hijo,enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
                # NOTE(review): bare except silently discards C3D generation errors.
                try:
                    listaInstrucciones += hijo.compile().splitlines()
                except:
                    pass
            elif hijo.nombreNodo == 'SENTENCIA_ALTER_TABLE':
                nuevoAlterT = AlterTable()
                res = nuevoAlterT.execute(hijo,enviroment)
                if res.code != "00000":
                    self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
                else:
                    self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
            elif hijo.nombreNodo == 'CREATE_INDEX' or hijo.nombreNodo == 'CREATE_UNIQUE_INDEX':
                nuevoIndex = Index()
                resp = nuevoIndex.execute(hijo)
                # NOTE(review): bare except silently discards C3D generation errors.
                try:
                    listaInstrucciones += self.compile1(self.getText()).splitlines()
                except:
                    pass
            elif hijo.nombreNodo == 'SENTENCIA_PROCEDURE':
                print("Sentencia Procedure")
                nuevoProcedure = Procedure()
                nuevoProcedure.execute(hijo,entornoGlobal)
            elif hijo.nombreNodo == 'EXECUTE':
                nuevoExecute = Execute()
                listaInstrucciones = listaInstrucciones + nuevoExecute.compile(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_CASE':
                cod = hijo.compile(enviroment)
                listaInstrucciones = listaInstrucciones + cod.splitlines()
        entornoGlobal.recorrerEntorno()
        #endregion
        # Assemble: fixed header + stored-procedure C3D (if any) + instructions.
        encabezados = self.crearEncabezado()
        procedimientos = self.crearListaProcedimientos()
        if procedimientos != None:
            finalList = encabezados + procedimientos + listaInstrucciones
        else:
            finalList = encabezados + listaInstrucciones
        self.crearArchivo(finalList)
    # NOTE: previous implementation of compile, kept as a dead string literal.
    '''def compile(self,enviroment = None):
        pilaInstrucciones = []
        pilaProcedimientos = []
        instanceLabel.labelActual = 1
        instanceTemporal.temporalActual = 1
        entornoGlobal = Entorno(None)
        entornoGlobal.Global = entornoGlobal
        entornoGlobal.nombreEntorno = 'Global'
        for hijo in self.hijos:
            if hijo.nombreNodo == 'CREATE_DATABASE':
                nuevaDB = Database()
                texto = "listaParams.append(\""
                texto = texto + nuevaDB.compile(hijo)
                texto = texto + "\")"
                pilaInstrucciones.append(texto)
            elif hijo.nombreNodo == 'SENTENCIA_USE':
                nuevoUse = Use()
                texto = "listaParams.append(\""
                texto = texto + nuevoUse.compile(hijo)
                texto = texto + "\")"
                pilaInstrucciones.append(texto)
            elif hijo.nombreNodo == 'E':
                cod = hijo.compile(enviroment)
                print(cod)
            elif hijo.nombreNodo == 'SENTENCIA_PROCEDURE':
                nuevoProcedure = Procedure()
                nuevoProcedure.compile(hijo, entornoGlobal)
                pilaInstrucciones = nuevoProcedure.cuerpoResult
            elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT':
                respuesta = hijo.compile(enviroment)
            elif hijo.nombreNodo == 'SENTENCIA_IF':
                print(hijo.compile(entornoGlobal))
            elif hijo.nombreNodo == 'EXECUTE':
                nuevoExecute = Execute()
                nuevoExecute.compile(hijo)
                if nuevoExecute.procedimiento != None:
                    pilaProcedimientos+=nuevoExecute.codigo3Dimensiones
                    pilaInstrucciones.append(nuevoExecute.procedimiento)
        pilaFinal = pilaEncabezados
        pilaFinal += pilaProcedimientos
        pilaFinal += pilaInstrucciones
        archivo = open('src/C3D/CompileFile.py',"w")
        for line in pilaFinal:
            archivo.write(line)
            archivo.write("\n")
        archivo.close()
    '''
    def getText(self):
        """Return the reconstructed SQL text of all children (used for C3D payloads)."""
        textoEntrada = ''
        for hijo in self.hijos:
            if hijo.nombreNodo == 'SENTENCIA_SELECT':
                textoEntrada += traduccionSelect(hijo)
            elif hijo.nombreNodo == 'CREATE_TYPE_ENUM':
                pass
            elif hijo.nombreNodo == 'CREATE_DATABASE':
                textoEntrada += traduccionCreate_database(hijo)
            elif hijo.nombreNodo == 'CREATE_TABLE':
                textoEntrada += traduccion_create_table(hijo)
            elif hijo.nombreNodo == 'CREATE_INDEX':
                textoEntrada += traduccion_index(hijo)
            elif hijo.nombreNodo == 'CREATE_UNIQUE_INDEX':
                textoEntrada += traduccion_unique_index(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_UNION':
                ne = Union()
                textoEntrada += ne.getText(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_UNION_ALL':
                ne = UnionAll()
                textoEntrada += ne.getText(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_INTERSECT':
                ne = Intersect()
                textoEntrada += ne.getText(hijo)
            elif hijo.nombreNodo == 'SENTENCIA_EXCEPT':
                ne = Except()
                textoEntrada += ne.getText(hijo)
        return textoEntrada
    def crearEncabezado(self):
        """Return the fixed header lines of the generated C3D file."""
        pilaEncabezados = []
        pilaEncabezados.append("# Seccion de Imports")
        pilaEncabezados.append("import sys, os.path")
        # NOTE(review): the import line above is appended twice — looks like an
        # unintentional duplicate; harmless but worth confirming.
        pilaEncabezados.append("import sys, os.path")
        pilaEncabezados.append("gramaticaDir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))")
        pilaEncabezados.append("sys.path.append(gramaticaDir)")
        pilaEncabezados.append("from goto import with_goto")
        pilaEncabezados.append("from gramatica import run_method")
        pilaEncabezados.append("")
        pilaEncabezados.append("")
        pilaEncabezados.append("#Declaracion de variables")
        pilaEncabezados.append("display = {}")
        pilaEncabezados.append("p = 0")
        return pilaEncabezados
    def crearListaProcedimientos(self):
        """Return the concatenated C3D of all stored procedures, or None.

        Returns None when no database is selected in src/Config/Config.json.
        """
        # Check whether a database is active; required before any lookup.
        with open('src/Config/Config.json') as file:
            config = json.load(file)
        dbUse = config['databaseIndex']
        if dbUse == None:
            #print("Se debe seleccionar una base de datos")
            return None
        listaProcedimientos = []
        res = tc.get_all_procedure(dbUse.upper())
        if res != None:
            for pro in res:
                listaProcedimientos = listaProcedimientos + pro['C3D']
        return listaProcedimientos
    def crearArchivo(self,listaTexto):
        """Write *listaTexto* (one string per line) to src/C3D/CompileFile.py."""
        archivo = open('src/C3D/CompileFile.py',"w")
        for line in listaTexto:
            archivo.write(line)
            archivo.write("\n")
        archivo.close()
    def compile1(self,texto):
        """Wrap *texto* as C3D: assign it to a fresh temporal, push onto display.

        Note: the local name ``dir`` shadows the builtin (scope-local only).
        """
        tmp = instanceTemporal.getTemporal()
        dir = f"{tmp} = \"{texto}\"\n"
        dir += f'display[p] = {tmp}\n'
        dir += 'p = p + 1\n'
        print("EL TEMPORAL",dir)
        return dir
| import sys, os.path
import json
nodo_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\Start\\')
sys.path.append(nodo_dir)
c3d_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\C3D\\')
sys.path.append(c3d_dir)
nodo_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\ENTORNO\\')
sys.path.append(nodo_dir)
storage = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + '\\typeChecker')
sys.path.append(storage)
variables_globales = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..\..')))
sys.path.append(variables_globales)
from VariablesGlobales import *
from prettytable import PrettyTable
from Libraries import Nodo
from Libraries import Database
from Libraries import Table
from Libraries import Use
from Libraries import Type
from Libraries import Select
from Libraries import InsertTable
from Libraries import UnionAll
from Libraries import Union
from Libraries import Intersect
from Libraries import Except
from Libraries import UpdateTable
from Libraries import AlterTable
from Libraries import Index
from Libraries import Procedure
from Libraries import Execute
from Traduccion import *
from Label import *
from Temporal import *
from Entorno import *
from typeChecker.typeChecker import TypeChecker
tc = TypeChecker()
# Importación de Clases para Execute
class Start(Nodo):
def __init__(self, nombreNodo, fila = -1, columna = -1, valor = None):
Nodo.__init__(self,nombreNodo, fila, columna, valor)
self.listaSemanticos = []
def addChild(self, node):
self.hijos.append(node)
def createChild(self, nombreNodo, fila = -1, columna =-1, valor = None):
nuevo = Start(nombreNodo,fila,columna,valor)
self.hijos.append(nuevo)
def createTerminal(self, lexToken):
nuevo = Start(lexToken.type, lexToken.lineno, lexToken.lexpos, lexToken.value)
self.hijos.append(nuevo)
def tabular_data(self, encabezados : list, data : list) -> str:
print(encabezados)
index = 0
for i in encabezados:
if i == "?column?":
encabezados[index] = "?column?"+str(index)
index += 1
x = PrettyTable()
x.field_names = encabezados
for item in data:
if len(item) == len(encabezados):
x.add_row(item)
return x.get_string()
# recursiva por la izquierda
def execute(self, enviroment):
entornoGlobal = Entorno(None)
entornoGlobal.Global = entornoGlobal
entornoGlobal.nombreEntorno = 'Global'
for hijo in self.hijos:
if hijo.nombreNodo == 'SENTENCIA_FUNCTION':
hijo.execute(entornoGlobal)
#entornoGlobal.recorrerEntorno()
for hijo in self.hijos:
if hijo.nombreNodo == 'CREATE_DATABASE':
nuevaBase=Database()
# Recibe un json
message = nuevaBase.execute(hijo)
self.listaSemanticos.append(message)
elif hijo.nombreNodo == 'SENTENCIA_USE':
useDB = Use()
message = useDB.execute(hijo)
self.listaSemanticos.append(message)
elif hijo.nombreNodo == 'CREATE_TABLE':
nuevaTabla = Table()
res = nuevaTabla.execute(hijo, enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
elif hijo.nombreNodo == 'CREATE_TYPE_ENUM':
nuevoEnum = Type()
nuevoEnum.execute(hijo)
elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT':
hijo.execute(entornoGlobal)
respuesta = hijo.dataResult
if respuesta.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(respuesta.encabezados, respuesta.data)})
elif hijo.nombreNodo == 'E':
hijo.execute(entornoGlobal)
print("Tipo Expresion: "+str(hijo.tipo.data_type))
print("Expresion valor: "+str(hijo.valorExpresion))
elif hijo.nombreNodo == 'SENTENCIA_INSERT':
nuevoInsert = InsertTable()
res = nuevoInsert.execute(hijo,enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
elif hijo.nombreNodo == "SENTENCIA_SHOW":
self.listaSemanticos.append(hijo.execute(None))
elif hijo.nombreNodo == "SENTENCIA_ALTER_INDEX":
self.listaSemanticos.append(hijo.execute(None))
elif hijo.nombreNodo == "SENTENCIA_DROP":
self.listaSemanticos.append(hijo.execute(None))
elif hijo.nombreNodo == "SENTENCIA_DELETE":
self.listaSemanticos.append(hijo.execute(None))
elif hijo.nombreNodo == 'SENTENCIA_UNION_ALL':
nuevoUnionAll = UnionAll()
resp = nuevoUnionAll.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
elif hijo.nombreNodo == 'SENTENCIA_UNION':
nuevoUnion = Union()
resp = nuevoUnion.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
elif hijo.nombreNodo == 'SENTENCIA_INTERSECT':
nuevoIntersect = Intersect()
resp = nuevoIntersect.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
elif hijo.nombreNodo == 'SENTENCIA_EXCEPT':
nuevoExcept = Except()
resp = nuevoExcept.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
elif hijo.nombreNodo == 'SENTENCIA_UPDATE':
nuevoUpdate = UpdateTable()
res = nuevoUpdate.execute(hijo,enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
elif hijo.nombreNodo == 'SENTENCIA_ALTER_TABLE':
nuevoAlterT = AlterTable()
res = nuevoAlterT.execute(hijo,enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
elif hijo.nombreNodo == 'CREATE_INDEX' or hijo.nombreNodo == 'CREATE_UNIQUE_INDEX':
nuevoIndex = Index()
resp = nuevoIndex.execute(hijo)
entornoGlobal.recorrerEntorno()
def compile(self, enviroment):
global variablesProcedure
#region Declaracion de Variables Execute (SQL)
entornoGlobal = Entorno(None)
entornoGlobal.Global = entornoGlobal
entornoGlobal.nombreEntorno = 'Global'
#endregion
#region Recorrido para la sentencia Function
for hijo in self.hijos:
if hijo.nombreNodo == 'SENTENCIA_FUNCTION':
hijo.execute(entornoGlobal)
#endregion
#region Declaracion de las variables
listaInstrucciones = []
listaInstrucciones.append("")
listaInstrucciones.append("")
listaInstrucciones.append("def execute():")
listaInstrucciones.append("\tglobal p")
listaInstrucciones.append("\tp=p-1")
#listaInstrucciones.append("\tprint(display[p])")
listaInstrucciones.append("\tresp = run_method(display[p])")
listaInstrucciones.append("\tresp.execute(None)")
listaInstrucciones.append("\tp=p+1")
listaInstrucciones.append("")
listaInstrucciones.append("")
#endregion
#region Execute de las sentencias no PLSQL y algunas PLSQL
# (solo el exec tendrá C3D ya que se crean primero los procedures)
for hijo in self.hijos:
if hijo.nombreNodo == 'CREATE_DATABASE':
nuevaBase=Database()
message = nuevaBase.execute(hijo)
self.listaSemanticos.append(message)
listaInstrucciones += self.compile1(nuevaBase.compile(hijo)).splitlines()
elif hijo.nombreNodo == 'DECLARACION_VARIABLE':
print(hijo.compile(entornoGlobal))
elif hijo.nombreNodo == 'SENTENCIA_USE':
useDB = Use()
message = useDB.execute(hijo)
self.listaSemanticos.append(message)
listaInstrucciones += self.compile1(useDB.compile(hijo)).splitlines()
elif hijo.nombreNodo == 'CREATE_TABLE':
nuevaTabla = Table()
res = nuevaTabla.execute(hijo, enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
listaInstrucciones += self.compile1(self.getText()).splitlines()
elif hijo.nombreNodo == 'CREATE_TYPE_ENUM':
nuevoEnum = Type()
nuevoEnum.execute(hijo)
elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT':
hijo.execute(entornoGlobal)
respuesta = hijo.dataResult
if respuesta.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(respuesta.encabezados, respuesta.data)})
listaInstrucciones += self.compile1(self.getText()).splitlines()
elif hijo.nombreNodo == 'E':
hijo.execute(entornoGlobal)
print("Tipo Expresion: "+str(hijo.tipo.data_type))
print("Expresion valor: "+str(hijo.valorExpresion))
listaInstrucciones += self.compile1(hijo.getText()).splitlines()
elif hijo.nombreNodo == 'SENTENCIA_INSERT':
nuevoInsert = InsertTable()
res = nuevoInsert.execute(hijo,enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
listaInstrucciones += hijo.compile().splitlines()
elif hijo.nombreNodo == "SENTENCIA_SHOW":
self.listaSemanticos.append(hijo.execute(None))
listaInstrucciones += hijo.compile().splitlines()
elif hijo.nombreNodo == "SENTENCIA_ALTER_INDEX":
self.listaSemanticos.append(hijo.execute(None))
listaInstrucciones += hijo.compile().splitlines()
elif hijo.nombreNodo == "SENTENCIA_DROP":
self.listaSemanticos.append(hijo.execute(None))
listaInstrucciones += hijo.compile().splitlines()
elif hijo.nombreNodo == "SENTENCIA_DELETE":
self.listaSemanticos.append(hijo.execute(None))
listaInstrucciones += hijo.compile().splitlines()
elif hijo.nombreNodo == 'SENTENCIA_UNION_ALL':
nuevoUnionAll = UnionAll()
resp = nuevoUnionAll.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
a = nuevoUnionAll.compile(hijo).replace(";"," ",1)
b = a.replace(";"," ",1)
listaInstrucciones += b.splitlines()
elif hijo.nombreNodo == 'SENTENCIA_UNION':
nuevoUnion = Union()
resp = nuevoUnion.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
a = nuevoUnion.compile(hijo).replace(";"," ",1)
b = a.replace(";"," ",1)
listaInstrucciones += b.splitlines()
elif hijo.nombreNodo == 'SENTENCIA_INTERSECT':
nuevoIntersect = Intersect()
resp = nuevoIntersect.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
a = nuevoIntersect.compile(hijo).replace(";"," ",1)
b = a.replace(";"," ",1)
listaInstrucciones += b.splitlines()
elif hijo.nombreNodo == 'SENTENCIA_EXCEPT':
nuevoExcept = Except()
resp = nuevoExcept.execute(hijo)
if resp.data != None:
self.listaSemanticos.append({"Code":"0000","Message": " rows returned", "Data" : self.tabular_data(resp.encabezados, resp.data)})
a = nuevoExcept.compile(hijo).replace(";"," ",1)
b = a.replace(";"," ",1)
listaInstrucciones += b.splitlines()
elif hijo.nombreNodo == 'SENTENCIA_UPDATE':
nuevoUpdate = UpdateTable()
res = nuevoUpdate.execute(hijo,enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
try:
listaInstrucciones += hijo.compile().splitlines()
except:
pass
elif hijo.nombreNodo == 'SENTENCIA_ALTER_TABLE':
nuevoAlterT = AlterTable()
res = nuevoAlterT.execute(hijo,enviroment)
if res.code != "00000":
self.listaSemanticos.append({"Code":res.code,"Message": res.responseObj.descripcion, "Data" : ""})
else:
self.listaSemanticos.append({"Code":"0000","Message": res.responseObj, "Data" : ""})
elif hijo.nombreNodo == 'CREATE_INDEX' or hijo.nombreNodo == 'CREATE_UNIQUE_INDEX':
nuevoIndex = Index()
resp = nuevoIndex.execute(hijo)
try:
listaInstrucciones += self.compile1(self.getText()).splitlines()
except:
pass
elif hijo.nombreNodo == 'SENTENCIA_PROCEDURE':
print("Sentencia Procedure")
nuevoProcedure = Procedure()
nuevoProcedure.execute(hijo,entornoGlobal)
elif hijo.nombreNodo == 'EXECUTE':
nuevoExecute = Execute()
listaInstrucciones = listaInstrucciones + nuevoExecute.compile(hijo)
elif hijo.nombreNodo == 'SENTENCIA_CASE':
cod = hijo.compile(enviroment)
listaInstrucciones = listaInstrucciones + cod.splitlines()
entornoGlobal.recorrerEntorno()
#endregion
encabezados = self.crearEncabezado()
procedimientos = self.crearListaProcedimientos()
if procedimientos != None:
finalList = encabezados + procedimientos + listaInstrucciones
else:
finalList = encabezados + listaInstrucciones
self.crearArchivo(finalList)
'''def compile(self,enviroment = None):
pilaInstrucciones = []
pilaProcedimientos = []
instanceLabel.labelActual = 1
instanceTemporal.temporalActual = 1
entornoGlobal = Entorno(None)
entornoGlobal.Global = entornoGlobal
entornoGlobal.nombreEntorno = 'Global'
for hijo in self.hijos:
if hijo.nombreNodo == 'CREATE_DATABASE':
nuevaDB = Database()
texto = "listaParams.append(\""
texto = texto + nuevaDB.compile(hijo)
texto = texto + "\")"
pilaInstrucciones.append(texto)
elif hijo.nombreNodo == 'SENTENCIA_USE':
nuevoUse = Use()
texto = "listaParams.append(\""
texto = texto + nuevoUse.compile(hijo)
texto = texto + "\")"
pilaInstrucciones.append(texto)
elif hijo.nombreNodo == 'E':
cod = hijo.compile(enviroment)
print(cod)
elif hijo.nombreNodo == 'SENTENCIA_PROCEDURE':
nuevoProcedure = Procedure()
nuevoProcedure.compile(hijo, entornoGlobal)
pilaInstrucciones = nuevoProcedure.cuerpoResult
elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT':
respuesta = hijo.compile(enviroment)
elif hijo.nombreNodo == 'SENTENCIA_IF':
print(hijo.compile(entornoGlobal))
elif hijo.nombreNodo == 'EXECUTE':
nuevoExecute = Execute()
nuevoExecute.compile(hijo)
if nuevoExecute.procedimiento != None:
pilaProcedimientos+=nuevoExecute.codigo3Dimensiones
pilaInstrucciones.append(nuevoExecute.procedimiento)
pilaFinal = pilaEncabezados
pilaFinal += pilaProcedimientos
pilaFinal += pilaInstrucciones
archivo = open('src/C3D/CompileFile.py',"w")
for line in pilaFinal:
archivo.write(line)
archivo.write("\n")
archivo.close()
'''
def getText(self):
textoEntrada = ''
for hijo in self.hijos:
if hijo.nombreNodo == 'SENTENCIA_SELECT':
textoEntrada += traduccionSelect(hijo)
elif hijo.nombreNodo == 'CREATE_TYPE_ENUM':
pass
elif hijo.nombreNodo == 'CREATE_DATABASE':
textoEntrada += traduccionCreate_database(hijo)
elif hijo.nombreNodo == 'CREATE_TABLE':
textoEntrada += traduccion_create_table(hijo)
elif hijo.nombreNodo == 'CREATE_INDEX':
textoEntrada += traduccion_index(hijo)
elif hijo.nombreNodo == 'CREATE_UNIQUE_INDEX':
textoEntrada += traduccion_unique_index(hijo)
elif hijo.nombreNodo == 'SENTENCIA_UNION':
ne = Union()
textoEntrada += ne.getText(hijo)
elif hijo.nombreNodo == 'SENTENCIA_UNION_ALL':
ne = UnionAll()
textoEntrada += ne.getText(hijo)
elif hijo.nombreNodo == 'SENTENCIA_INTERSECT':
ne = Intersect()
textoEntrada += ne.getText(hijo)
elif hijo.nombreNodo == 'SENTENCIA_EXCEPT':
ne = Except()
textoEntrada += ne.getText(hijo)
return textoEntrada
def crearEncabezado(self):
pilaEncabezados = []
pilaEncabezados.append("# Seccion de Imports")
pilaEncabezados.append("import sys, os.path")
pilaEncabezados.append("import sys, os.path")
pilaEncabezados.append("gramaticaDir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))")
pilaEncabezados.append("sys.path.append(gramaticaDir)")
pilaEncabezados.append("from goto import with_goto")
pilaEncabezados.append("from gramatica import run_method")
pilaEncabezados.append("")
pilaEncabezados.append("")
pilaEncabezados.append("#Declaracion de variables")
pilaEncabezados.append("display = {}")
pilaEncabezados.append("p = 0")
return pilaEncabezados
def crearListaProcedimientos(self):
# Verifica si hay una base de datos activa, se utiliza para cualquier instrucción
with open('src/Config/Config.json') as file:
config = json.load(file)
dbUse = config['databaseIndex']
if dbUse == None:
#print("Se debe seleccionar una base de datos")
return None
listaProcedimientos = []
res = tc.get_all_procedure(dbUse.upper())
if res != None:
for pro in res:
listaProcedimientos = listaProcedimientos + pro['C3D']
return listaProcedimientos
def crearArchivo(self,listaTexto):
archivo = open('src/C3D/CompileFile.py',"w")
for line in listaTexto:
archivo.write(line)
archivo.write("\n")
archivo.close()
def compile1(self,texto):
tmp = instanceTemporal.getTemporal()
dir = f"{tmp} = \"{texto}\"\n"
dir += f'display[p] = {tmp}\n'
dir += 'p = p + 1\n'
print("EL TEMPORAL",dir)
return dir
| es | 0.834053 | # Importación de Clases para Execute # recursiva por la izquierda #entornoGlobal.recorrerEntorno() # Recibe un json #region Declaracion de Variables Execute (SQL) #endregion #region Recorrido para la sentencia Function #endregion #region Declaracion de las variables #listaInstrucciones.append("\tprint(display[p])") #endregion #region Execute de las sentencias no PLSQL y algunas PLSQL # (solo el exec tendrá C3D ya que se crean primero los procedures) #endregion def compile(self,enviroment = None): pilaInstrucciones = [] pilaProcedimientos = [] instanceLabel.labelActual = 1 instanceTemporal.temporalActual = 1 entornoGlobal = Entorno(None) entornoGlobal.Global = entornoGlobal entornoGlobal.nombreEntorno = 'Global' for hijo in self.hijos: if hijo.nombreNodo == 'CREATE_DATABASE': nuevaDB = Database() texto = "listaParams.append(\"" texto = texto + nuevaDB.compile(hijo) texto = texto + "\")" pilaInstrucciones.append(texto) elif hijo.nombreNodo == 'SENTENCIA_USE': nuevoUse = Use() texto = "listaParams.append(\"" texto = texto + nuevoUse.compile(hijo) texto = texto + "\")" pilaInstrucciones.append(texto) elif hijo.nombreNodo == 'E': cod = hijo.compile(enviroment) print(cod) elif hijo.nombreNodo == 'SENTENCIA_PROCEDURE': nuevoProcedure = Procedure() nuevoProcedure.compile(hijo, entornoGlobal) pilaInstrucciones = nuevoProcedure.cuerpoResult elif hijo.nombreNodo == 'SENTENCIA_SELECT' or hijo.nombreNodo == 'SENTENCIA_SELECT_DISTINCT': respuesta = hijo.compile(enviroment) elif hijo.nombreNodo == 'SENTENCIA_IF': print(hijo.compile(entornoGlobal)) elif hijo.nombreNodo == 'EXECUTE': nuevoExecute = Execute() nuevoExecute.compile(hijo) if nuevoExecute.procedimiento != None: pilaProcedimientos+=nuevoExecute.codigo3Dimensiones pilaInstrucciones.append(nuevoExecute.procedimiento) pilaFinal = pilaEncabezados pilaFinal += pilaProcedimientos pilaFinal += pilaInstrucciones archivo = open('src/C3D/CompileFile.py',"w") for line in pilaFinal: archivo.write(line) 
archivo.write("\n") archivo.close() # Verifica si hay una base de datos activa, se utiliza para cualquier instrucción #print("Se debe seleccionar una base de datos") | 2.127127 | 2 |
electrumfairchains/network.py | TonyFord/electrumfairchains | 1 | 6624512 | <gh_stars>1-10
# Electrum - Lightweight Bitcoin Client
# Copyright (c) 2011-2016 <NAME>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
import queue
import os
import random
import re
from collections import defaultdict
import threading
import socket
import json
import sys
import ipaddress
import asyncio
from typing import NamedTuple, Optional, Sequence, List, Dict, Tuple
import traceback
import dns
import dns.resolver
import aiorpcx
from aiorpcx import TaskGroup
from aiohttp import ClientResponse
from . import util
from .util import (PrintError, print_error, log_exceptions, ignore_exceptions,
bfh, SilentTaskGroup, make_aiohttp_session, send_exception_to_crash_reporter,
is_hash256_str, is_non_negative_integer)
from .bitcoin import COIN
# from . import constants
from . import blockchain
from . import bitcoin
from .blockchain import Blockchain, HEADER_SIZE
from .interface import (Interface, serialize_server, deserialize_server,
RequestTimedOut, NetworkTimeout)
from .version import PROTOCOL_VERSION
from .simple_config import SimpleConfig, FairChains
from .i18n import _
# Retry intervals for the reconnect logic; presumably seconds — compare the
# time.time()-based retry timestamps set in Network.__init__ (confirm at use site).
NODES_RETRY_INTERVAL = 60
SERVER_RETRY_INTERVAL = 10
def parse_servers(result: Sequence[Tuple[str, str, List[str]]]) -> Dict[str, dict]:
    """Convert a raw peers list into a {host: features} dict.

    Each input item is (ip, hostname, feature-strings); feature strings are
    e.g. 's50002' (SSL port), 't...' (TCP port), 'v1.4' (protocol version),
    'p100' (pruning level). Hosts advertising no usable port are dropped.
    """
    servers = {}
    for entry in result:
        hostname = entry[1]
        features = entry[2] if len(entry) > 2 else []
        ports = {}
        version = None
        pruning = '-'
        for feat in features:
            if re.match(r"[st]\d*", feat):
                proto = feat[0]
                # empty port string means "use the default port for this protocol"
                ports[proto] = feat[1:] or FairChains.DEFAULT_PORTS[proto]
            elif re.match("v(.?)+", feat):
                version = feat[1:]
            elif re.match(r"p\d*", feat):
                pruning = feat[1:] or '0'
        if ports:
            ports['pruning'] = pruning
            ports['version'] = version
            servers[hostname] = ports
    return servers
def filter_version(servers):
    """Keep only servers whose advertised version is >= our PROTOCOL_VERSION."""
    def is_recent(version):
        # unparseable/missing versions are treated as too old
        try:
            return util.versiontuple(version) >= util.versiontuple(PROTOCOL_VERSION)
        except Exception:
            return False
    return {host: info for host, info in servers.items()
            if is_recent(info.get('version'))}
def filter_noonion(servers):
    """Drop Tor hidden-service (.onion) hosts from a servers dict."""
    return {host: info for host, info in servers.items()
            if not host.endswith('.onion')}
def filter_protocol(hostmap, protocol='s'):
    '''Filters the hostmap for those implementing protocol.
    The result is a list in serialized form.'''
    eligible = []
    for host, portmap in hostmap.items():
        # servers without a (truthy) port for this protocol are skipped
        port = portmap.get(protocol)
        if not port:
            continue
        eligible.append(serialize_server(host, port, protocol))
    return eligible
def pick_random_server(hostmap=None, protocol='s', exclude_set=frozenset()):
    """Pick a random eligible serialized server string, or None if none qualify.

    Args:
        hostmap: host -> features dict; defaults to the hardcoded server list.
        protocol: single-letter protocol id the server must offer (default 's').
        exclude_set: serialized servers that must not be returned.
    """
    # frozenset default avoids the shared-mutable-default pitfall of `set()`
    if hostmap is None:
        hostmap = FairChains.DEFAULT_SERVERS
    eligible = list(set(filter_protocol(hostmap, protocol)) - exclude_set)
    return random.choice(eligible) if eligible else None
class NetworkParameters(NamedTuple):
    # User-visible connection settings, snapshotted/applied via
    # Network.get_parameters() and Network.set_parameters().
    host: str
    port: str  # kept as a string (matches serialize_server/deserialize_server)
    protocol: str  # single-letter protocol id, e.g. 's' (see filter_protocol)
    proxy: Optional[dict]  # see serialize_proxy/deserialize_proxy for the shape
    auto_connect: bool
    oneserver: bool = False
# Proxy modes accepted in serialized proxy strings.
proxy_modes = ['socks4', 'socks5']

def serialize_proxy(p):
    """Encode a proxy dict as 'mode:host:port:user:password', or None."""
    if not isinstance(p, dict):
        return None
    fields = (p.get('mode'), p.get('host'), p.get('port'),
              p.get('user', ''), p.get('password', ''))
    return ':'.join(fields)

def deserialize_proxy(s: str) -> Optional[dict]:
    """Parse a 'mode:host:port:user:password' string back into a proxy dict.

    Leading fields are optional; anything missing falls back to defaults.
    Returns None for non-strings and for the literal string 'none'
    (case-insensitive).
    """
    if not isinstance(s, str):
        return None
    if s.lower() == 'none':
        return None
    proxy = {"mode": "socks5", "host": "localhost"}
    # FIXME raw IPv6 address fails here
    parts = s.split(':')
    # optional leading mode
    if proxy_modes.count(parts[0]) == 1:
        proxy["mode"] = parts[0]
        parts = parts[1:]
    if parts:
        proxy["host"] = parts.pop(0)
    if parts:
        proxy["port"] = parts.pop(0)
    else:
        proxy["port"] = "8080" if proxy["mode"] == "http" else "1080"
    if parts:
        proxy["user"] = parts.pop(0)
    if parts:
        proxy["password"] = parts.pop(0)
    return proxy
# Raised by best_effort_reliable() after exhausting all retry attempts.
class BestEffortRequestFailed(Exception): pass
# Base class for transaction-broadcast failures; subclasses provide a
# user-facing message via get_message_for_gui().
class TxBroadcastError(Exception):
    def get_message_for_gui(self):
        raise NotImplementedError()
# Server accepted the broadcast but returned a txid different from ours.
class TxBroadcastHashMismatch(TxBroadcastError):
    def get_message_for_gui(self):
        return "{}\n{}\n\n{}" \
            .format(_("The server returned an unexpected transaction ID when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating ElectrumFairChains."),
                    str(self))
# Server explicitly rejected the broadcast; str(self) carries the sanitized reason.
class TxBroadcastServerReturnedError(TxBroadcastError):
    def get_message_for_gui(self):
        return "{}\n{}\n\n{}" \
            .format(_("The server returned an error when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating ElectrumFairChains."),
                    str(self))
class TxBroadcastUnknownError(TxBroadcastError):
    def get_message_for_gui(self):
        return "{}\n{}" \
            .format(_("Unknown error when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating ElectrumFairChains."))
# Wraps a server-sent error; the original message is untrusted input and is
# therefore not exposed via __str__ (only via repr/original_exception).
class UntrustedServerReturnedError(Exception):
    def __init__(self, *, original_exception):
        self.original_exception = original_exception
    def __str__(self):
        return _("The server returned an error.")
    def __repr__(self):
        return f"<UntrustedServerReturnedError original_exception: {repr(self.original_exception)}>"
# Module-level singleton, set by Network.__init__ and read by Network.get_instance().
INSTANCE = None
class Network(PrintError):
"""The Network class manages a set of connections to remote electrumfairchains
servers, each connected socket is handled by an Interface() object.
"""
verbosity_filter = 'n'
    def __init__(self, config: SimpleConfig=None):
        """Set up the singleton network manager.

        Must be called with an already-running asyncio event loop. Initializes
        blockchains from disk, picks the default server (from config or at
        random), and creates all shared state: locks, server lists, callbacks.
        """
        global INSTANCE
        INSTANCE = self
        self.asyncio_loop = asyncio.get_event_loop()
        assert self.asyncio_loop.is_running(), "event loop not running"
        self._loop_thread = None  # type: threading.Thread  # set by caller; only used for sanity checks
        if config is None:
            config = {}  # Do not use mutables as default values!
        self.config = SimpleConfig(config) if isinstance(config, dict) else config  # type: SimpleConfig
        blockchain.read_blockchains(self.config)
        self.print_error("blockchains", list(map(lambda b: b.forkpoint, blockchain.blockchains.values())))
        self._blockchain_preferred_block = self.config.get('blockchain_preferred_block', None)  # type: Optional[Dict]
        self._blockchain = blockchain.get_best_chain()
        # Server for addresses and transactions
        self.default_server = self.config.get('server', None)
        # Sanitize default server
        if self.default_server:
            try:
                deserialize_server(self.default_server)
            except:
                self.print_error('Warning: failed to parse server-string; falling back to random.')
                self.default_server = None
        if not self.default_server:
            self.default_server = pick_random_server()
        self.main_taskgroup = None  # type: TaskGroup
        # locks
        self.restart_lock = asyncio.Lock()
        self.bhi_lock = asyncio.Lock()
        self.callback_lock = threading.Lock()
        self.recent_servers_lock = threading.RLock()  # <- re-entrant
        self.interfaces_lock = threading.Lock()  # for mutating/iterating self.interfaces
        self.server_peers = {}  # returned by interface (servers that the main interface knows about)
        self.recent_servers = self._read_recent_servers()  # note: needs self.recent_servers_lock
        self.banner = ''
        self.donation_address = ''
        self.relay_fee = None  # type: Optional[int]
        # callbacks set by the GUI
        self.callbacks = defaultdict(list)  # note: needs self.callback_lock
        # directory for per-server TLS certificates
        dir_path = os.path.join(self.config.path, 'certs')
        util.make_dir(dir_path)
        # retry times
        self.server_retry_time = time.time()
        self.nodes_retry_time = time.time()
        # the main server we are currently communicating with
        self.interface = None  # type: Interface
        # set of servers we have an ongoing connection with
        self.interfaces = {}  # type: Dict[str, Interface]
        self.auto_connect = self.config.get('auto_connect', True)
        self.connecting = set()
        self.server_queue = None
        self.proxy = None
        # Dump network messages (all interfaces). Set at runtime from the console.
        self.debug = False
        self._set_status('disconnected')
def run_from_another_thread(self, coro):
assert self._loop_thread != threading.current_thread(), 'must not be called from network thread'
fut = asyncio.run_coroutine_threadsafe(coro, self.asyncio_loop)
return fut.result()
    @staticmethod
    def get_instance() -> Optional["Network"]:
        # Process-wide singleton accessor; None until Network() has been constructed.
        return INSTANCE
def with_recent_servers_lock(func):
def func_wrapper(self, *args, **kwargs):
with self.recent_servers_lock:
return func(self, *args, **kwargs)
return func_wrapper
def register_callback(self, callback, events):
with self.callback_lock:
for event in events:
self.callbacks[event].append(callback)
def unregister_callback(self, callback):
with self.callback_lock:
for callbacks in self.callbacks.values():
if callback in callbacks:
callbacks.remove(callback)
def trigger_callback(self, event, *args):
with self.callback_lock:
callbacks = self.callbacks[event][:]
for callback in callbacks:
# FIXME: if callback throws, we will lose the traceback
if asyncio.iscoroutinefunction(callback):
asyncio.run_coroutine_threadsafe(callback(event, *args), self.asyncio_loop)
else:
self.asyncio_loop.call_soon_threadsafe(callback, event, *args)
def _read_recent_servers(self):
if not self.config.path:
return []
path = os.path.join(self.config.path, "recent_servers")
try:
with open(path, "r", encoding='utf-8') as f:
data = f.read()
return json.loads(data)
except:
return []
@with_recent_servers_lock
def _save_recent_servers(self):
if not self.config.path:
return
path = os.path.join(self.config.path, "recent_servers")
s = json.dumps(self.recent_servers, indent=4, sort_keys=True)
try:
with open(path, "w", encoding='utf-8') as f:
f.write(s)
except:
pass
def get_server_height(self):
interface = self.interface
return interface.tip if interface else 0
async def _server_is_lagging(self):
sh = self.get_server_height()
if not sh:
self.print_error('no height for main interface')
return True
lh = self.get_local_height()
result = (lh - sh) > 1
if result:
self.print_error(f'{self.default_server} is lagging ({sh} vs {lh})')
return result
    def _set_status(self, status):
        # Update the connection state ('connecting'/'connected'/'disconnected')
        # and notify GUI listeners via the 'status' callback.
        self.connection_status = status
        self.notify('status')
def is_connected(self):
interface = self.interface
return interface is not None and interface.ready.done()
    def is_connecting(self):
        # True while we are still trying to reach the default server.
        return self.connection_status == 'connecting'
    async def _request_server_info(self, interface):
        """Fetch banner, donation address, peer list and fee data from the
        freshly selected main interface; all requests run concurrently."""
        await interface.ready
        session = interface.session
        async def get_banner():
            self.banner = await session.send_request('server.banner')
            self.notify('banner')
        async def get_donation_address():
            addr = await session.send_request('server.donation_address')
            # addr is untrusted server input: only keep it if it parses as an address
            if not bitcoin.is_address(addr):
                if addr:  # ignore empty string
                    self.print_error(f"invalid donation address from server: {repr(addr)}")
                addr = ''
            self.donation_address = addr
        async def get_server_peers():
            self.server_peers = parse_servers(await session.send_request('server.peers.subscribe'))
            self.notify('servers')
        async def get_relay_fee():
            relayfee = await session.send_request('blockchain.relayfee')
            if relayfee is None:
                self.relay_fee = None
            else:
                # convert to integer base units (relayfee * COIN), clamped at 0
                relayfee = int(relayfee * COIN)
                self.relay_fee = max(0, relayfee)
        async with TaskGroup() as group:
            await group.spawn(get_banner)
            await group.spawn(get_donation_address)
            await group.spawn(get_server_peers)
            await group.spawn(get_relay_fee)
            await group.spawn(self._request_fee_estimates(interface))
    async def _request_fee_estimates(self, interface):
        """Query the mempool fee histogram and per-ETA-target fee estimates,
        store them in the config, and notify listeners."""
        session = interface.session
        from .simple_config import FEE_ETA_TARGETS
        self.config.requested_fee_estimates()
        # fire all fee requests concurrently
        async with TaskGroup() as group:
            histogram_task = await group.spawn(session.send_request('mempool.get_fee_histogram'))
            fee_tasks = []
            for i in FEE_ETA_TARGETS:
                fee_tasks.append((i, await group.spawn(session.send_request('blockchain.estimatefee', [i]))))
        self.config.mempool_fees = histogram = histogram_task.result()
        self.print_error(f'fee_histogram {histogram}')
        self.notify('fee_histogram')
        fee_estimates_eta = {}
        for nblock_target, task in fee_tasks:
            # convert to integer base units (estimate * COIN)
            fee = int(task.result() * COIN)
            fee_estimates_eta[nblock_target] = fee
            # negative estimates are kept for logging but not stored in config
            if fee < 0: continue
            self.config.update_fee_estimates(nblock_target, fee)
        self.print_error(f'fee_estimates {fee_estimates_eta}')
        self.notify('fee')
def get_status_value(self, key):
if key == 'status':
value = self.connection_status
elif key == 'banner':
value = self.banner
elif key == 'fee':
value = self.config.fee_estimates
elif key == 'fee_histogram':
value = self.config.mempool_fees
elif key == 'servers':
value = self.get_servers()
else:
raise Exception('unexpected trigger key {}'.format(key))
return value
def notify(self, key):
if key in ['status', 'updated']:
self.trigger_callback(key)
else:
self.trigger_callback(key, self.get_status_value(key))
def get_parameters(self) -> NetworkParameters:
host, port, protocol = deserialize_server(self.default_server)
return NetworkParameters(host=host,
port=port,
protocol=protocol,
proxy=self.proxy,
auto_connect=self.auto_connect,
oneserver=self.oneserver)
def get_donation_address(self):
if self.is_connected():
return self.donation_address
def get_interfaces(self) -> List[str]:
"""The list of servers for the connected interfaces."""
with self.interfaces_lock:
return list(self.interfaces)
@with_recent_servers_lock
def get_servers(self):
# start with hardcoded servers
out = dict(FairChains.DEFAULT_SERVERS) # copy
# add recent servers
for s in self.recent_servers:
try:
host, port, protocol = deserialize_server(s)
except:
continue
if host not in out:
out[host] = {protocol: port}
# add servers received from main interface
server_peers = self.server_peers
if server_peers:
out.update(filter_version(server_peers.copy()))
# potentially filter out some
if self.config.get('noonion'):
out = filter_noonion(out)
return out
def _start_interface(self, server: str):
if server not in self.interfaces and server not in self.connecting:
if server == self.default_server:
self.print_error(f"connecting to {server} as new interface")
self._set_status('connecting')
self.connecting.add(server)
self.server_queue.put(server)
def _start_random_interface(self):
with self.interfaces_lock:
exclude_set = self.disconnected_servers | set(self.interfaces) | self.connecting
server = pick_random_server(self.get_servers(), self.protocol, exclude_set)
if server:
self._start_interface(server)
return server
    def _set_proxy(self, proxy: Optional[dict]):
        """Apply (or clear) the proxy and monkey-patch DNS resolution accordingly."""
        self.proxy = proxy
        # Store these somewhere so we can un-monkey-patch
        if not hasattr(socket, "_getaddrinfo"):
            socket._getaddrinfo = socket.getaddrinfo
        if proxy:
            self.print_error('setting proxy', proxy)
            # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy
            socket.getaddrinfo = lambda *args: [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
        else:
            if sys.platform == 'win32':
                # On Windows, socket.getaddrinfo takes a mutex, and might hold it for up to 10 seconds
                # when dns-resolving. To speed it up drastically, we resolve dns ourselves, outside that lock.
                # see #4421
                socket.getaddrinfo = self._fast_getaddrinfo
            else:
                socket.getaddrinfo = socket._getaddrinfo
        self.trigger_callback('proxy_set', self.proxy)
    @staticmethod
    def _fast_getaddrinfo(host, *args, **kwargs):
        """Drop-in replacement for socket.getaddrinfo that resolves DNS via
        dnspython first (outside the OS resolver), falling back to the
        original implementation when dnspython cannot help."""
        def needs_dns_resolving(host):
            # IP literals and localhost need no lookup at all
            try:
                ipaddress.ip_address(host)
                return False  # already valid IP
            except ValueError:
                pass  # not an IP
            if str(host) in ('localhost', 'localhost.',):
                return False
            return True
        def resolve_with_dnspython(host):
            addrs = []
            # try IPv6
            try:
                answers = dns.resolver.query(host, dns.rdatatype.AAAA)
                addrs += [str(answer) for answer in answers]
            except dns.exception.DNSException as e:
                pass
            except BaseException as e:
                print_error(f'dnspython failed to resolve dns (AAAA) with error: {e}')
            # try IPv4
            try:
                answers = dns.resolver.query(host, dns.rdatatype.A)
                addrs += [str(answer) for answer in answers]
            except dns.exception.DNSException as e:
                # dns failed for some reason, e.g. dns.resolver.NXDOMAIN this is normal.
                # Simply report back failure; except if we already have some results.
                if not addrs:
                    raise socket.gaierror(11001, 'getaddrinfo failed') from e
            except BaseException as e:
                # Possibly internal error in dnspython :( see #4483
                print_error(f'dnspython failed to resolve dns (A) with error: {e}')
            if addrs:
                return addrs
            # Fall back to original socket.getaddrinfo to resolve dns.
            return [host]
        addrs = [host]
        if needs_dns_resolving(host):
            addrs = resolve_with_dnspython(host)
        # resolve each candidate address with the original getaddrinfo and
        # flatten the per-address result lists into one
        list_of_list_of_socketinfos = [socket._getaddrinfo(addr, *args, **kwargs) for addr in addrs]
        list_of_socketinfos = [item for lst in list_of_list_of_socketinfos for item in lst]
        return list_of_socketinfos
    @log_exceptions
    async def set_parameters(self, net_params: NetworkParameters):
        """Validate and persist new network settings, then reconnect as needed.

        A proxy/protocol/oneserver change restarts the whole network; a mere
        server change just switches the main interface. Invalid parameters,
        or parameters the config refuses to store, are silently ignored.
        """
        proxy = net_params.proxy
        proxy_str = serialize_proxy(proxy)
        host, port, protocol = net_params.host, net_params.port, net_params.protocol
        server_str = serialize_server(host, port, protocol)
        # sanitize parameters
        try:
            deserialize_server(serialize_server(host, port, protocol))
            if proxy:
                # raises if mode unknown or port not an int
                proxy_modes.index(proxy['mode']) + 1
                int(proxy['port'])
        except:
            return
        self.config.set_key('auto_connect', net_params.auto_connect, False)
        self.config.set_key('oneserver', net_params.oneserver, False)
        self.config.set_key('proxy', proxy_str, False)
        self.config.set_key('server', server_str, True)
        # abort if changes were not allowed by config
        if self.config.get('server') != server_str \
                or self.config.get('proxy') != proxy_str \
                or self.config.get('oneserver') != net_params.oneserver:
            return
        async with self.restart_lock:
            self.auto_connect = net_params.auto_connect
            if self.proxy != proxy or self.protocol != protocol or self.oneserver != net_params.oneserver:
                # Restart the network defaulting to the given server
                await self._stop()
                self.default_server = server_str
                await self._start()
            elif self.default_server != server_str:
                await self.switch_to_interface(server_str)
            else:
                await self.switch_lagging_interface()
def _set_oneserver(self, oneserver: bool):
self.num_server = 10 if not oneserver else 0
self.oneserver = bool(oneserver)
async def _switch_to_random_interface(self):
'''Switch to a random connected server other than the current one'''
servers = self.get_interfaces() # Those in connected state
if self.default_server in servers:
servers.remove(self.default_server)
if servers:
await self.switch_to_interface(random.choice(servers))
async def switch_lagging_interface(self):
'''If auto_connect and lagging, switch interface'''
if self.auto_connect and await self._server_is_lagging():
# switch to one that has the correct header (not height)
best_header = self.blockchain().read_header(self.get_local_height())
with self.interfaces_lock: interfaces = list(self.interfaces.values())
filtered = list(filter(lambda iface: iface.tip_header == best_header, interfaces))
if filtered:
chosen_iface = random.choice(filtered)
await self.switch_to_interface(chosen_iface.server)
    async def switch_unwanted_fork_interface(self):
        """If auto_connect and main interface is not on preferred fork,
        try to switch to preferred fork.
        """
        if not self.auto_connect or not self.interface:
            return
        with self.interfaces_lock: interfaces = list(self.interfaces.values())
        # try to switch to preferred fork
        if self._blockchain_preferred_block:
            pref_height = self._blockchain_preferred_block['height']
            pref_hash = self._blockchain_preferred_block['hash']
            if self.interface.blockchain.check_hash(pref_height, pref_hash):
                return  # already on preferred fork
            # pick any connected interface whose chain contains the preferred block
            filtered = list(filter(lambda iface: iface.blockchain.check_hash(pref_height, pref_hash),
                                   interfaces))
            if filtered:
                self.print_error("switching to preferred fork")
                chosen_iface = random.choice(filtered)
                await self.switch_to_interface(chosen_iface.server)
                return
            else:
                self.print_error("tried to switch to preferred fork but no interfaces are on it")
        # try to switch to best chain (a chain with no parent is the best chain)
        if self.blockchain().parent is None:
            return  # already on best chain
        filtered = list(filter(lambda iface: iface.blockchain.parent is None,
                               interfaces))
        if filtered:
            self.print_error("switching to best chain")
            chosen_iface = random.choice(filtered)
            await self.switch_to_interface(chosen_iface.server)
        else:
            # FIXME switch to best available?
            self.print_error("tried to switch to best chain but no interfaces are on it")
    async def switch_to_interface(self, server: str):
        """Switch to server as our main interface. If no connection exists,
        queue interface to be started. The actual switch will
        happen when the interface becomes ready.
        """
        self.default_server = server
        old_interface = self.interface
        old_server = old_interface.server if old_interface else None
        # Stop any current interface in order to terminate subscriptions,
        # and to cancel tasks in interface.group.
        # However, for headers sub, give preference to this interface
        # over unknown ones, i.e. start it again right away.
        if old_server and old_server != server:
            await self._close_interface(old_interface)
            if len(self.interfaces) <= self.num_server:
                self._start_interface(old_server)
        if server not in self.interfaces:
            # not connected yet: queue it and switch once it becomes ready
            self.interface = None
            self._start_interface(server)
            return
        i = self.interfaces[server]
        if old_interface != i:
            self.print_error("switching to", server)
            # note whether the followed chain changes, to notify the GUI below
            blockchain_updated = i.blockchain != self.blockchain()
            self.interface = i
            await i.group.spawn(self._request_server_info(i))
            self.trigger_callback('default_server_changed')
            self._set_status('connected')
            self.trigger_callback('network_updated')
            if blockchain_updated: self.trigger_callback('blockchain_updated')
async def _close_interface(self, interface):
if interface:
with self.interfaces_lock:
if self.interfaces.get(interface.server) == interface:
self.interfaces.pop(interface.server)
if interface.server == self.default_server:
self.interface = None
await interface.close()
@with_recent_servers_lock
def _add_recent_server(self, server):
# list is ordered
if server in self.recent_servers:
self.recent_servers.remove(server)
self.recent_servers.insert(0, server)
self.recent_servers = self.recent_servers[0:20]
self._save_recent_servers()
    async def connection_down(self, interface: Interface):
        '''A connection to server either went down, or was never made.
        We distinguish by whether it is in self.interfaces.'''
        if not interface: return
        server = interface.server
        # remember the failure so _start_random_interface excludes this server
        self.disconnected_servers.add(server)
        if server == self.default_server:
            self._set_status('disconnected')
        await self._close_interface(interface)
        self.trigger_callback('network_updated')
def get_network_timeout_seconds(self, request_type=NetworkTimeout.Generic) -> int:
if self.oneserver and not self.auto_connect:
return request_type.MOST_RELAXED
if self.proxy:
return request_type.RELAXED
return request_type.NORMAL
    @ignore_exceptions  # do not kill main_taskgroup
    @log_exceptions
    async def _run_new_interface(self, server):
        """Open a connection to *server*; register the interface on success,
        give up silently on failure (other servers will be tried)."""
        interface = Interface(self, server, self.proxy)
        timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent)
        try:
            await asyncio.wait_for(interface.ready, timeout)
        except BaseException as e:
            #traceback.print_exc()
            self.print_error(f"couldn't launch iface {server} -- {repr(e)}")
            await interface.close()
            return
        else:
            with self.interfaces_lock:
                assert server not in self.interfaces
                self.interfaces[server] = interface
        finally:
            # whatever happened, the server is no longer "connecting"
            try: self.connecting.remove(server)
            except KeyError: pass
        if server == self.default_server:
            await self.switch_to_interface(server)
        self._add_recent_server(server)
        self.trigger_callback('network_updated')
    async def _init_headers_file(self):
        """Pre-allocate the headers file up to the checkpointed height (sparse)."""
        b = blockchain.get_best_chain()
        filename = b.path()
        # one chunk of 2016 headers per checkpoint entry
        length = HEADER_SIZE * len(FairChains.CHECKPOINTS) * 2016
        if not os.path.exists(filename) or os.path.getsize(filename) < length:
            # seek-and-write a single byte so the file is allocated sparsely
            with open(filename, 'wb') as f:
                if length > 0:
                    f.seek(length-1)
                    f.write(b'\x00')
            util.ensure_sparse_file(filename)
        with b.lock:
            b.update_size()
    def best_effort_reliable(func):
        """Decorator: retry *func* across interface switches and disconnects.

        Makes up to 10 attempts, each waiting for a usable main interface;
        raises BestEffortRequestFailed when all attempts are exhausted.
        """
        async def make_reliable_wrapper(self, *args, **kwargs):
            for i in range(10):
                iface = self.interface
                # retry until there is a main interface
                if not iface:
                    await asyncio.sleep(0.1)
                    continue  # try again
                # wait for it to be usable
                iface_ready = iface.ready
                iface_disconnected = iface.got_disconnected
                await asyncio.wait([iface_ready, iface_disconnected], return_when=asyncio.FIRST_COMPLETED)
                if not iface_ready.done() or iface_ready.cancelled():
                    await asyncio.sleep(0.1)
                    continue  # try again
                # try actual request
                success_fut = asyncio.ensure_future(func(self, *args, **kwargs))
                await asyncio.wait([success_fut, iface_disconnected], return_when=asyncio.FIRST_COMPLETED)
                if success_fut.done() and not success_fut.cancelled():
                    if success_fut.exception():
                        # timeouts are retried on a fresh interface;
                        # any other exception propagates to the caller
                        try:
                            raise success_fut.exception()
                        except RequestTimedOut:
                            await iface.close()
                            await iface_disconnected
                            continue  # try again
                    return success_fut.result()
                # otherwise; try again
            raise BestEffortRequestFailed('no interface to do request on... gave up.')
        return make_reliable_wrapper
def catch_server_exceptions(func):
async def wrapper(self, *args, **kwargs):
try:
return await func(self, *args, **kwargs)
except aiorpcx.jsonrpc.CodeMessageError as e:
raise UntrustedServerReturnedError(original_exception=e) from e
return wrapper
    @best_effort_reliable
    @catch_server_exceptions
    async def get_merkle_for_transaction(self, tx_hash: str, tx_height: int) -> dict:
        """Ask the server for the merkle proof of tx_hash at tx_height."""
        # validate inputs before sending them to the server
        if not is_hash256_str(tx_hash):
            raise Exception(f"{repr(tx_hash)} is not a txid")
        if not is_non_negative_integer(tx_height):
            raise Exception(f"{repr(tx_height)} is not a block height")
        return await self.interface.session.send_request('blockchain.transaction.get_merkle', [tx_hash, tx_height])
    @best_effort_reliable
    async def broadcast_transaction(self, tx, *, timeout=None) -> None:
        """Broadcast *tx* via the main server.

        Raises a TxBroadcastError subclass on failure; timeouts and
        cancellations pass through so best_effort_reliable can retry.
        """
        if timeout is None:
            timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent)
        try:
            out = await self.interface.session.send_request('blockchain.transaction.broadcast', [str(tx)], timeout=timeout)
            # note: both 'out' and exception messages are untrusted input from the server
        except (RequestTimedOut, asyncio.CancelledError, asyncio.TimeoutError):
            raise  # pass-through
        except aiorpcx.jsonrpc.CodeMessageError as e:
            self.print_error(f"broadcast_transaction error: {repr(e)}")
            # server message is sanitized before being shown to the user
            raise TxBroadcastServerReturnedError(self.sanitize_tx_broadcast_response(e.message)) from e
        except BaseException as e:  # intentional BaseException for sanity!
            self.print_error(f"broadcast_transaction error2: {repr(e)}")
            send_exception_to_crash_reporter(e)
            raise TxBroadcastUnknownError() from e
        if out != tx.txid():
            # server echoed back a different txid than we computed locally
            self.print_error(f"unexpected txid for broadcast_transaction: {out} != {tx.txid()}")
            raise TxBroadcastHashMismatch(_("Server returned unexpected transaction ID."))
    @staticmethod
    def sanitize_tx_broadcast_response(server_msg) -> str:
        """Map an untrusted server error message to a safe, known string.

        Matches known bitcoind error substrings (policy, script, validation,
        RPC, consensus) and returns either a translated explanation or the
        matched substring itself; unknown messages become "Unknown error".
        """
        # Unfortunately, bitcoind and hence the Electrum protocol doesn't return a useful error code.
        # So, we use substring matching to grok the error message.
        # server_msg is untrusted input so it should not be shown to the user. see #4968
        server_msg = str(server_msg)
        server_msg = server_msg.replace("\n", r"\n")
        # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/policy/policy.cpp
        # grep "reason ="
        policy_error_messages = {
            r"version": _("Transaction uses non-standard version."),
            r"tx-size": _("The transaction was rejected because it is too large (in bytes)."),
            r"scriptsig-size": None,
            r"scriptsig-not-pushonly": None,
            r"scriptpubkey": None,
            r"bare-multisig": None,
            r"dust": _("Transaction could not be broadcast due to dust outputs."),
            r"multi-op-return": _("The transaction was rejected because it contains multiple OP_RETURN outputs."),
        }
        # a None value means: return the matched substring itself
        for substring in policy_error_messages:
            if substring in server_msg:
                msg = policy_error_messages[substring]
                return msg if msg else substring
        # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/script/script_error.cpp
        script_error_messages = {
            r"Script evaluated without error but finished with a false/empty top stack element",
            r"Script failed an OP_VERIFY operation",
            r"Script failed an OP_EQUALVERIFY operation",
            r"Script failed an OP_CHECKMULTISIGVERIFY operation",
            r"Script failed an OP_CHECKSIGVERIFY operation",
            r"Script failed an OP_NUMEQUALVERIFY operation",
            r"Script is too big",
            r"Push value size limit exceeded",
            r"Operation limit exceeded",
            r"Stack size limit exceeded",
            r"Signature count negative or greater than pubkey count",
            r"Pubkey count negative or limit exceeded",
            r"Opcode missing or not understood",
            r"Attempted to use a disabled opcode",
            r"Operation not valid with the current stack size",
            r"Operation not valid with the current altstack size",
            r"OP_RETURN was encountered",
            r"Invalid OP_IF construction",
            r"Negative locktime",
            r"Locktime requirement not satisfied",
            r"Signature hash type missing or not understood",
            r"Non-canonical DER signature",
            r"Data push larger than necessary",
            r"Only non-push operators allowed in signatures",
            r"Non-canonical signature: S value is unnecessarily high",
            r"Dummy CHECKMULTISIG argument must be zero",
            r"OP_IF/NOTIF argument must be minimal",
            r"Signature must be zero for failed CHECK(MULTI)SIG operation",
            r"NOPx reserved for soft-fork upgrades",
            r"Witness version reserved for soft-fork upgrades",
            r"Public key is neither compressed or uncompressed",
            r"Extra items left on stack after execution",
            r"Witness program has incorrect length",
            r"Witness program was passed an empty witness",
            r"Witness program hash mismatch",
            r"Witness requires empty scriptSig",
            r"Witness requires only-redeemscript scriptSig",
            r"Witness provided for non-witness script",
            r"Using non-compressed keys in segwit",
            r"Using OP_CODESEPARATOR in non-witness script",
            r"Signature is found in scriptCode",
        }
        for substring in script_error_messages:
            if substring in server_msg:
                return substring
        # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/validation.cpp
        # grep "REJECT_"
        # should come after script_error.cpp (due to e.g. non-mandatory-script-verify-flag)
        validation_error_messages = {
            r"coinbase",
            r"tx-size-small",
            r"non-final",
            r"txn-already-in-mempool",
            r"txn-mempool-conflict",
            r"txn-already-known",
            r"non-BIP68-final",
            r"bad-txns-nonstandard-inputs",
            r"bad-witness-nonstandard",
            r"bad-txns-too-many-sigops",
            r"mempool min fee not met",
            r"min relay fee not met",
            r"absurdly-high-fee",
            r"too-long-mempool-chain",
            r"bad-txns-spends-conflicting-tx",
            r"insufficient fee",
            r"too many potential replacements",
            r"replacement-adds-unconfirmed",
            r"mempool full",
            r"non-mandatory-script-verify-flag",
            r"mandatory-script-verify-flag-failed",
        }
        for substring in validation_error_messages:
            if substring in server_msg:
                return substring
        # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/rpc/rawtransaction.cpp
        # grep "RPC_TRANSACTION"
        # grep "RPC_DESERIALIZATION_ERROR"
        rawtransaction_error_messages = {
            r"Missing inputs",
            r"transaction already in block chain",
            r"TX decode failed",
        }
        for substring in rawtransaction_error_messages:
            if substring in server_msg:
                return substring
        # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/consensus/tx_verify.cpp
        # grep "REJECT_"
        tx_verify_error_messages = {
            r"bad-txns-vin-empty",
            r"bad-txns-vout-empty",
            r"bad-txns-oversize",
            r"bad-txns-vout-negative",
            r"bad-txns-vout-toolarge",
            r"bad-txns-txouttotal-toolarge",
            r"bad-txns-inputs-duplicate",
            r"bad-cb-length",
            r"bad-txns-prevout-null",
            r"bad-txns-inputs-missingorspent",
            r"bad-txns-premature-spend-of-coinbase",
            r"bad-txns-inputvalues-outofrange",
            r"bad-txns-in-belowout",
            r"bad-txns-fee-outofrange",
        }
        for substring in tx_verify_error_messages:
            if substring in server_msg:
                return substring
        # otherwise:
        return _("Unknown error")
@best_effort_reliable
@catch_server_exceptions
async def request_chunk(self, height: int, tip=None, *, can_return_early=False):
if not is_non_negative_integer(height):
raise Exception(f"{repr(height)} is not a block height")
return await self.interface.request_chunk(height, tip=tip, can_return_early=can_return_early)
@best_effort_reliable
@catch_server_exceptions
async def get_transaction(self, tx_hash: str, *, timeout=None) -> str:
if not is_hash256_str(tx_hash):
raise Exception(f"{repr(tx_hash)} is not a txid")
return await self.interface.session.send_request('blockchain.transaction.get', [tx_hash],
timeout=timeout)
@best_effort_reliable
@catch_server_exceptions
async def get_history_for_scripthash(self, sh: str) -> List[dict]:
if not is_hash256_str(sh):
raise Exception(f"{repr(sh)} is not a scripthash")
return await self.interface.session.send_request('blockchain.scripthash.get_history', [sh])
@best_effort_reliable
@catch_server_exceptions
async def listunspent_for_scripthash(self, sh: str) -> List[dict]:
if not is_hash256_str(sh):
raise Exception(f"{repr(sh)} is not a scripthash")
return await self.interface.session.send_request('blockchain.scripthash.listunspent', [sh])
@best_effort_reliable
@catch_server_exceptions
async def get_balance_for_scripthash(self, sh: str) -> dict:
if not is_hash256_str(sh):
raise Exception(f"{repr(sh)} is not a scripthash")
return await self.interface.session.send_request('blockchain.scripthash.get_balance', [sh])
def blockchain(self) -> Blockchain:
interface = self.interface
if interface and interface.blockchain is not None:
self._blockchain = interface.blockchain
return self._blockchain
    def get_blockchains(self):
        """Group connected interfaces by the chain they follow.

        Returns a dict mapping blockchain_id -> list of Interface objects.
        Snapshots are taken under the respective locks to avoid mutation
        during iteration.
        """
        out = {}  # blockchain_id -> list(interfaces)
        with blockchain.blockchains_lock: blockchain_items = list(blockchain.blockchains.items())
        with self.interfaces_lock: interfaces_values = list(self.interfaces.values())
        for chain_id, bc in blockchain_items:
            r = list(filter(lambda i: i.blockchain==bc, interfaces_values))
            if r:
                out[chain_id] = r
        return out
def _set_preferred_chain(self, chain: Blockchain):
height = chain.get_max_forkpoint()
header_hash = chain.get_hash(height)
self._blockchain_preferred_block = {
'height': height,
'hash': header_hash,
}
self.config.set_key('blockchain_preferred_block', self._blockchain_preferred_block)
    async def follow_chain_given_id(self, chain_id: str) -> None:
        """Switch to a random connected server that follows chain *chain_id*
        and record the chain as preferred. No-op if no such server exists."""
        bc = blockchain.blockchains.get(chain_id)
        if not bc:
            raise Exception('blockchain {} not found'.format(chain_id))
        self._set_preferred_chain(bc)
        # select server on this chain
        with self.interfaces_lock: interfaces = list(self.interfaces.values())
        interfaces_on_selected_chain = list(filter(lambda iface: iface.blockchain == bc, interfaces))
        if len(interfaces_on_selected_chain) == 0: return
        chosen_iface = random.choice(interfaces_on_selected_chain)
        # switch to server (and save to config)
        net_params = self.get_parameters()
        host, port, protocol = deserialize_server(chosen_iface.server)
        net_params = net_params._replace(host=host, port=port, protocol=protocol)
        await self.set_parameters(net_params)
    async def follow_chain_given_server(self, server_str: str) -> None:
        """Make *server_str* the main server and prefer its chain.

        Silently returns if we have no connected interface for that server.
        """
        # note that server_str should correspond to a connected interface
        iface = self.interfaces.get(server_str)
        if iface is None:
            return
        self._set_preferred_chain(iface.blockchain)
        # switch to server (and save to config)
        net_params = self.get_parameters()
        host, port, protocol = deserialize_server(server_str)
        net_params = net_params._replace(host=host, port=port, protocol=protocol)
        await self.set_parameters(net_params)
    def get_local_height(self):
        """Height of our locally-verified chain tip."""
        return self.blockchain().height()
def export_checkpoints(self, path):
"""Run manually to generate blockchain checkpoints.
Kept for console use only.
"""
cp = self.blockchain().get_checkpoints()
with open(path, 'w', encoding='utf-8') as f:
f.write(json.dumps(cp, indent=4))
    async def _start(self):
        """Bring the network up: reset state, connect to the default server
        and spawn the maintenance loop plus any caller-supplied jobs on the
        main task group."""
        assert not self.main_taskgroup
        self.main_taskgroup = main_taskgroup = SilentTaskGroup()
        assert not self.interface and not self.interfaces
        assert not self.connecting and not self.server_queue
        self.print_error('starting network')
        self.disconnected_servers = set([])
        self.protocol = deserialize_server(self.default_server)[2]
        self.server_queue = queue.Queue()
        self._set_proxy(deserialize_proxy(self.config.get('proxy')))
        # NOTE(review): _set_oneserver is defined outside this excerpt
        self._set_oneserver(self.config.get('oneserver', False))
        self._start_interface(self.default_server)
        async def main():
            try:
                await self._init_headers_file()
                # note: if a task finishes with CancelledError, that
                # will NOT raise, and the group will keep the other tasks running
                async with main_taskgroup as group:
                    await group.spawn(self._maintain_sessions())
                    [await group.spawn(job) for job in self._jobs]
            except Exception as e:
                traceback.print_exc(file=sys.stderr)
                raise e
        asyncio.run_coroutine_threadsafe(main(), self.asyncio_loop)
        self.trigger_callback('network_updated')
    def start(self, jobs: List=None):
        """Thread-safe entry point: start the network, optionally running the
        given coroutine *jobs* on the main task group."""
        self._jobs = jobs or []
        asyncio.run_coroutine_threadsafe(self._start(), self.asyncio_loop)
    @log_exceptions
    async def _stop(self, full_shutdown=False):
        """Cancel all network tasks and reset connection state.

        With full_shutdown=False (a restart), listeners are notified via
        'network_updated' so GUIs can refresh.
        """
        self.print_error("stopping network")
        try:
            # give outstanding tasks 2s to unwind before giving up
            await asyncio.wait_for(self.main_taskgroup.cancel_remaining(), timeout=2)
        except (asyncio.TimeoutError, asyncio.CancelledError) as e:
            self.print_error(f"exc during main_taskgroup cancellation: {repr(e)}")
        self.main_taskgroup = None  # type: TaskGroup
        self.interface = None  # type: Interface
        self.interfaces = {}  # type: Dict[str, Interface]
        self.connecting.clear()
        self.server_queue = None
        if not full_shutdown:
            self.trigger_callback('network_updated')
    def stop(self):
        """Blocking full shutdown; must be called from outside the network
        thread (waits up to 2s for the event loop to finish _stop)."""
        assert self._loop_thread != threading.current_thread(), 'must not be called from network thread'
        fut = asyncio.run_coroutine_threadsafe(self._stop(full_shutdown=True), self.asyncio_loop)
        try:
            fut.result(timeout=2)
        except (asyncio.TimeoutError, asyncio.CancelledError): pass
    async def _ensure_there_is_a_main_interface(self):
        """Keep a main server connection alive.

        With auto_connect, hops to a random interface; otherwise retries the
        configured default server, rate-limited by SERVER_RETRY_INTERVAL.
        """
        if self.is_connected():
            return
        now = time.time()
        # if auto_connect is set, try a different server
        if self.auto_connect and not self.is_connecting():
            await self._switch_to_random_interface()
        # if auto_connect is not set, or still no main interface, retry current
        if not self.is_connected() and not self.is_connecting():
            if self.default_server in self.disconnected_servers:
                if now - self.server_retry_time > SERVER_RETRY_INTERVAL:
                    # allow the default server to be retried again
                    self.disconnected_servers.remove(self.default_server)
                    self.server_retry_time = now
            else:
                await self.switch_to_interface(self.default_server)
    async def _maintain_sessions(self):
        """Main maintenance loop: launch queued interfaces, top up the pool of
        connections, and keep the main interface healthy. Runs forever on the
        main task group (ticks every 0.1s)."""
        async def launch_already_queued_up_new_interfaces():
            while self.server_queue.qsize() > 0:
                server = self.server_queue.get()
                await self.main_taskgroup.spawn(self._run_new_interface(server))
        async def maybe_queue_new_interfaces_to_be_launched_later():
            now = time.time()
            # NOTE(review): self.num_server is set outside this excerpt
            for i in range(self.num_server - len(self.interfaces) - len(self.connecting)):
                self._start_random_interface()
            if now - self.nodes_retry_time > NODES_RETRY_INTERVAL:
                # periodically forget past failures and retry everything
                self.print_error('network: retrying connections')
                self.disconnected_servers = set([])
                self.nodes_retry_time = now
        async def maintain_main_interface():
            await self._ensure_there_is_a_main_interface()
            if self.is_connected():
                if self.config.is_fee_estimates_update_required():
                    await self.interface.group.spawn(self._request_fee_estimates, self.interface)
        while True:
            try:
                await launch_already_queued_up_new_interfaces()
                await maybe_queue_new_interfaces_to_be_launched_later()
                await maintain_main_interface()
            except asyncio.CancelledError:
                # suppress spurious cancellations
                group = self.main_taskgroup
                if not group or group._closed:
                    raise
            await asyncio.sleep(0.1)
    @classmethod
    async def _send_http_on_proxy(cls, method: str, url: str, params: str = None,
                                  body: bytes = None, json: dict = None, headers=None,
                                  on_finish=None, timeout=None):
        """Perform an HTTP GET/POST through the network's proxy (if any).

        *on_finish* receives the aiohttp ClientResponse; by default the
        response is status-checked and returned as text. For POST, exactly
        one of *body* (raw bytes) or *json* must be given.
        """
        async def default_on_finish(resp: ClientResponse):
            resp.raise_for_status()
            return await resp.text()
        if headers is None:
            headers = {}
        if on_finish is None:
            on_finish = default_on_finish
        # use the singleton's proxy settings when a Network exists
        network = cls.get_instance()
        proxy = network.proxy if network else None
        async with make_aiohttp_session(proxy, timeout=timeout) as session:
            if method == 'get':
                async with session.get(url, params=params, headers=headers) as resp:
                    return await on_finish(resp)
            elif method == 'post':
                assert body is not None or json is not None, 'body or json must be supplied if method is post'
                if body is not None:
                    async with session.post(url, data=body, headers=headers) as resp:
                        return await on_finish(resp)
                elif json is not None:
                    async with session.post(url, json=json, headers=headers) as resp:
                        return await on_finish(resp)
        else:
            assert False
@classmethod
def send_http_on_proxy(cls, method, url, **kwargs):
network = cls.get_instance()
if network:
assert network._loop_thread is not threading.currentThread()
loop = network.asyncio_loop
else:
loop = asyncio.get_event_loop()
coro = asyncio.run_coroutine_threadsafe(cls._send_http_on_proxy(method, url, **kwargs), loop)
# note: _send_http_on_proxy has its own timeout, so no timeout here:
return coro.result()
# methods used in scripts
    async def get_peers(self):
        """Return the peer-server dict advertised by the main server,
        waiting (polling 1s) until we are connected."""
        while not self.is_connected():
            await asyncio.sleep(1)
        session = self.interface.session
        return parse_servers(await session.send_request('server.peers.subscribe'))
    async def send_multiple_requests(self, servers: List[str], method: str, params: Sequence):
        """Send one RPC to each of *servers* concurrently.

        Returns {server: result-or-Exception}; servers that failed to become
        ready are simply omitted.
        """
        responses = dict()
        async def get_response(server):
            interface = Interface(self, server, self.proxy)
            timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent)
            try:
                await asyncio.wait_for(interface.ready, timeout)
            except BaseException as e:
                await interface.close()
                return
            try:
                res = await interface.session.send_request(method, params, timeout=10)
            except Exception as e:
                # store the exception as the "response" for this server
                res = e
            responses[interface.server] = res
            # NOTE(review): interfaces that completed the request are not
            # closed here -- confirm they are cleaned up elsewhere
        async with TaskGroup() as group:
            for server in servers:
                await group.spawn(get_response(server))
        return responses
| # Electrum - Lightweight Bitcoin Client
# Copyright (c) 2011-2016 <NAME>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
import queue
import os
import random
import re
from collections import defaultdict
import threading
import socket
import json
import sys
import ipaddress
import asyncio
from typing import NamedTuple, Optional, Sequence, List, Dict, Tuple
import traceback
import dns
import dns.resolver
import aiorpcx
from aiorpcx import TaskGroup
from aiohttp import ClientResponse
from . import util
from .util import (PrintError, print_error, log_exceptions, ignore_exceptions,
bfh, SilentTaskGroup, make_aiohttp_session, send_exception_to_crash_reporter,
is_hash256_str, is_non_negative_integer)
from .bitcoin import COIN
# from . import constants
from . import blockchain
from . import bitcoin
from .blockchain import Blockchain, HEADER_SIZE
from .interface import (Interface, serialize_server, deserialize_server,
RequestTimedOut, NetworkTimeout)
from .version import PROTOCOL_VERSION
from .simple_config import SimpleConfig, FairChains
from .i18n import _
NODES_RETRY_INTERVAL = 60  # seconds between rounds of retrying non-main servers
SERVER_RETRY_INTERVAL = 10  # seconds before the main server may be retried
def parse_servers(result: Sequence[Tuple[str, str, List[str]]]) -> Dict[str, dict]:
    """Convert an Electrum-protocol server list into {host: features}.

    Each feature token is one of: '<s|t><port>' (protocol + port, empty port
    meaning the default), 'v<version>' or 'p<pruning-level>'. Hosts without
    any recognised protocol token are dropped.
    """
    servers = {}
    for entry in result:
        hostname = entry[1]
        features = {}
        version = None
        pruning_level = '-'
        tokens = entry[2] if len(entry) > 2 else []
        for token in tokens:
            if re.match(r"[st]\d*", token):
                protocol = token[0]
                # empty port means "use the default for this protocol"
                features[protocol] = token[1:] or FairChains.DEFAULT_PORTS[protocol]
            elif re.match("v(.?)+", token):
                version = token[1:]
            elif re.match(r"p\d*", token):
                pruning_level = token[1:] or '0'
        if features:
            features['pruning'] = pruning_level
            features['version'] = version
            servers[hostname] = features
    return servers
def filter_version(servers):
    """Drop servers whose advertised protocol version is older than ours
    (or missing/unparseable)."""
    def recent_enough(ver):
        try:
            return util.versiontuple(ver) >= util.versiontuple(PROTOCOL_VERSION)
        except Exception:
            return False
    return {host: info for host, info in servers.items()
            if recent_enough(info.get('version'))}
def filter_noonion(servers):
    """Return *servers* minus Tor hidden-service (.onion) hosts."""
    return {host: info for host, info in servers.items()
            if not host.endswith('.onion')}
def filter_protocol(hostmap, protocol='s'):
    """Return the serialized server strings for the hosts in *hostmap*
    that advertise a port for *protocol*."""
    return [serialize_server(host, portmap[protocol], protocol)
            for host, portmap in hostmap.items()
            if portmap.get(protocol)]
def pick_random_server(hostmap=None, protocol='s', exclude_set=None):
    """Pick a random eligible serialized server string, or None if none.

    Args:
        hostmap: host -> portmap dict; defaults to the hardcoded server list.
        protocol: protocol letter the server must support.
        exclude_set: serialized server strings to skip (default: none).

    Note: the default for exclude_set was a mutable ``set()`` literal; ``None``
    is behaviourally identical and avoids the shared-mutable-default pitfall.
    """
    if exclude_set is None:
        exclude_set = set()
    if hostmap is None:
        hostmap = FairChains.DEFAULT_SERVERS
    eligible = list(set(filter_protocol(hostmap, protocol)) - exclude_set)
    return random.choice(eligible) if eligible else None
class NetworkParameters(NamedTuple):
    """User-visible network settings: main server, proxy and connection policy."""
    host: str
    port: str
    protocol: str  # protocol letter, e.g. 's' or 't' (see parse_servers)
    proxy: Optional[dict]  # as produced by deserialize_proxy, or None
    auto_connect: bool  # if True, automatically hop to another server when needed
    oneserver: bool = False  # if True, connect only to the main server
proxy_modes = ['socks4', 'socks5']  # accepted values for proxy['mode']
def serialize_proxy(p):
    """Encode a proxy dict as 'mode:host:port:user:password'; None if *p*
    is not a dict."""
    if not isinstance(p, dict):
        return None
    fields = [p.get('mode'), p.get('host'), p.get('port'),
              p.get('user', ''), p.get('password', '')]
    return ':'.join(fields)
def deserialize_proxy(s: str) -> Optional[dict]:
    """Parse a 'mode:host:port:user:password' string into a proxy dict.

    The leading mode and everything after the host are optional; missing
    fields fall back to socks5/localhost and a mode-dependent default port.
    Returns None for non-strings and the literal "none".
    """
    if not isinstance(s, str):
        return None
    if s.lower() == 'none':
        return None
    proxy = { "mode":"socks5", "host":"localhost" }
    # FIXME: a raw IPv6 address contains ':' and is mis-split here
    fields = s.split(':')
    if fields and proxy_modes.count(fields[0]) == 1:
        proxy["mode"] = fields.pop(0)
    if fields:
        proxy["host"] = fields.pop(0)
    if fields:
        proxy["port"] = fields.pop(0)
    else:
        proxy["port"] = "8080" if proxy["mode"] == "http" else "1080"
    if fields:
        proxy["user"] = fields.pop(0)
    if fields:
        proxy["password"] = fields.pop(0)
    return proxy
class BestEffortRequestFailed(Exception): pass  # best_effort_reliable exhausted its retries
class TxBroadcastError(Exception):
    """Base class for transaction-broadcast failures."""
    def get_message_for_gui(self):
        # subclasses provide a user-presentable, translated message
        raise NotImplementedError()
class TxBroadcastHashMismatch(TxBroadcastError):
    """Server acknowledged the broadcast but returned a different txid."""
    def get_message_for_gui(self):
        return "{}\n{}\n\n{}" \
            .format(_("The server returned an unexpected transaction ID when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating ElectrumFairChains."),
                    str(self))
class TxBroadcastServerReturnedError(TxBroadcastError):
    """Server rejected the broadcast; str(self) holds the sanitized reason."""
    def get_message_for_gui(self):
        return "{}\n{}\n\n{}" \
            .format(_("The server returned an error when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating ElectrumFairChains."),
                    str(self))
class TxBroadcastUnknownError(TxBroadcastError):
    """Broadcast failed for an unexpected (non-server) reason."""
    def get_message_for_gui(self):
        return "{}\n{}" \
            .format(_("Unknown error when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating ElectrumFairChains."))
class UntrustedServerReturnedError(Exception):
    """Wraps a raw server JSON-RPC error so untrusted server text is never
    shown to the user directly (the original is kept for logging)."""
    def __init__(self, *, original_exception):
        self.original_exception = original_exception
    def __str__(self):
        return _("The server returned an error.")
    def __repr__(self):
        return f"<UntrustedServerReturnedError original_exception: {repr(self.original_exception)}>"
INSTANCE = None  # singleton Network instance; set in Network.__init__
class Network(PrintError):
    """The Network class manages a set of connections to remote electrumfairchains
    servers, each connected socket is handled by an Interface() object.
    """
    verbosity_filter = 'n'  # log-filter tag used by the PrintError base class
    def __init__(self, config: SimpleConfig=None):
        """Initialise the singleton and all connection/callback state.

        Must be called with the asyncio event loop already running; the
        caller is expected to set self._loop_thread afterwards.
        """
        global INSTANCE
        INSTANCE = self
        self.asyncio_loop = asyncio.get_event_loop()
        assert self.asyncio_loop.is_running(), "event loop not running"
        self._loop_thread = None  # type: threading.Thread  # set by caller; only used for sanity checks
        if config is None:
            config = {}  # Do not use mutables as default values!
        self.config = SimpleConfig(config) if isinstance(config, dict) else config  # type: SimpleConfig
        blockchain.read_blockchains(self.config)
        self.print_error("blockchains", list(map(lambda b: b.forkpoint, blockchain.blockchains.values())))
        self._blockchain_preferred_block = self.config.get('blockchain_preferred_block', None)  # type: Optional[Dict]
        self._blockchain = blockchain.get_best_chain()
        # Server for addresses and transactions
        self.default_server = self.config.get('server', None)
        # Sanitize default server
        if self.default_server:
            try:
                deserialize_server(self.default_server)
            except:
                self.print_error('Warning: failed to parse server-string; falling back to random.')
                self.default_server = None
        if not self.default_server:
            self.default_server = pick_random_server()
        self.main_taskgroup = None  # type: TaskGroup
        # locks
        self.restart_lock = asyncio.Lock()
        self.bhi_lock = asyncio.Lock()
        self.callback_lock = threading.Lock()
        self.recent_servers_lock = threading.RLock()  # <- re-entrant
        self.interfaces_lock = threading.Lock()  # for mutating/iterating self.interfaces
        self.server_peers = {}  # returned by interface (servers that the main interface knows about)
        self.recent_servers = self._read_recent_servers()  # note: needs self.recent_servers_lock
        self.banner = ''
        self.donation_address = ''
        self.relay_fee = None  # type: Optional[int]
        # callbacks set by the GUI
        self.callbacks = defaultdict(list)  # note: needs self.callback_lock
        dir_path = os.path.join(self.config.path, 'certs')
        util.make_dir(dir_path)
        # retry times
        self.server_retry_time = time.time()
        self.nodes_retry_time = time.time()
        # the main server we are currently communicating with
        self.interface = None  # type: Interface
        # set of servers we have an ongoing connection with
        self.interfaces = {}  # type: Dict[str, Interface]
        self.auto_connect = self.config.get('auto_connect', True)
        self.connecting = set()
        self.server_queue = None
        self.proxy = None
        # Dump network messages (all interfaces). Set at runtime from the console.
        self.debug = False
        self._set_status('disconnected')
    def run_from_another_thread(self, coro):
        """Run *coro* on the network's event loop and block for its result.
        Must not be called from the network thread itself (would deadlock)."""
        assert self._loop_thread != threading.current_thread(), 'must not be called from network thread'
        fut = asyncio.run_coroutine_threadsafe(coro, self.asyncio_loop)
        return fut.result()
    @staticmethod
    def get_instance() -> Optional["Network"]:
        """Return the singleton Network, or None if none was created yet."""
        return INSTANCE
    def with_recent_servers_lock(func):
        """Decorator: run the wrapped method while holding
        self.recent_servers_lock (re-entrant).

        NOTE(review): no functools.wraps, so the wrapped method loses its
        __name__/__doc__ metadata.
        """
        def func_wrapper(self, *args, **kwargs):
            with self.recent_servers_lock:
                return func(self, *args, **kwargs)
        return func_wrapper
def register_callback(self, callback, events):
with self.callback_lock:
for event in events:
self.callbacks[event].append(callback)
def unregister_callback(self, callback):
with self.callback_lock:
for callbacks in self.callbacks.values():
if callback in callbacks:
callbacks.remove(callback)
    def trigger_callback(self, event, *args):
        """Invoke all callbacks registered for *event* on the event loop.

        Coroutine callbacks are scheduled thread-safely; plain callables are
        run via call_soon_threadsafe. The callback list is copied under the
        lock so callbacks may (un)register during dispatch.
        """
        with self.callback_lock:
            callbacks = self.callbacks[event][:]
        for callback in callbacks:
            # FIXME: if callback throws, we will lose the traceback
            if asyncio.iscoroutinefunction(callback):
                asyncio.run_coroutine_threadsafe(callback(event, *args), self.asyncio_loop)
            else:
                self.asyncio_loop.call_soon_threadsafe(callback, event, *args)
def _read_recent_servers(self):
if not self.config.path:
return []
path = os.path.join(self.config.path, "recent_servers")
try:
with open(path, "r", encoding='utf-8') as f:
data = f.read()
return json.loads(data)
except:
return []
@with_recent_servers_lock
def _save_recent_servers(self):
if not self.config.path:
return
path = os.path.join(self.config.path, "recent_servers")
s = json.dumps(self.recent_servers, indent=4, sort_keys=True)
try:
with open(path, "w", encoding='utf-8') as f:
f.write(s)
except:
pass
def get_server_height(self):
interface = self.interface
return interface.tip if interface else 0
    async def _server_is_lagging(self):
        """True if the main server's tip is more than one block behind our
        locally-verified chain (or it reports no height at all)."""
        sh = self.get_server_height()
        if not sh:
            self.print_error('no height for main interface')
            return True
        lh = self.get_local_height()
        result = (lh - sh) > 1
        if result:
            self.print_error(f'{self.default_server} is lagging ({sh} vs {lh})')
        return result
    def _set_status(self, status):
        # record the new connection state and notify 'status' listeners
        self.connection_status = status
        self.notify('status')
def is_connected(self):
interface = self.interface
return interface is not None and interface.ready.done()
    def is_connecting(self):
        """True while the main server connection is being (re)established."""
        return self.connection_status == 'connecting'
    async def _request_server_info(self, interface):
        """Once *interface* is ready, fetch banner, donation address, peer
        list, relay fee and fee estimates from it, concurrently."""
        await interface.ready
        session = interface.session
        async def get_banner():
            self.banner = await session.send_request('server.banner')
            self.notify('banner')
        async def get_donation_address():
            addr = await session.send_request('server.donation_address')
            if not bitcoin.is_address(addr):
                if addr:  # ignore empty string
                    self.print_error(f"invalid donation address from server: {repr(addr)}")
                addr = ''
            self.donation_address = addr
        async def get_server_peers():
            self.server_peers = parse_servers(await session.send_request('server.peers.subscribe'))
            self.notify('servers')
        async def get_relay_fee():
            relayfee = await session.send_request('blockchain.relayfee')
            if relayfee is None:
                self.relay_fee = None
            else:
                # convert coin-denominated fee to integer base units, floored at 0
                relayfee = int(relayfee * COIN)
                self.relay_fee = max(0, relayfee)
        async with TaskGroup() as group:
            await group.spawn(get_banner)
            await group.spawn(get_donation_address)
            await group.spawn(get_server_peers)
            await group.spawn(get_relay_fee)
            await group.spawn(self._request_fee_estimates(interface))
    async def _request_fee_estimates(self, interface):
        """Fetch the mempool fee histogram and per-target fee estimates from
        *interface*, updating the config and notifying listeners."""
        session = interface.session
        from .simple_config import FEE_ETA_TARGETS
        self.config.requested_fee_estimates()
        async with TaskGroup() as group:
            histogram_task = await group.spawn(session.send_request('mempool.get_fee_histogram'))
            fee_tasks = []
            for i in FEE_ETA_TARGETS:
                fee_tasks.append((i, await group.spawn(session.send_request('blockchain.estimatefee', [i]))))
        self.config.mempool_fees = histogram = histogram_task.result()
        self.print_error(f'fee_histogram {histogram}')
        self.notify('fee_histogram')
        fee_estimates_eta = {}
        for nblock_target, task in fee_tasks:
            # convert to integer base units; negative means "no estimate"
            fee = int(task.result() * COIN)
            fee_estimates_eta[nblock_target] = fee
            if fee < 0: continue
            self.config.update_fee_estimates(nblock_target, fee)
        self.print_error(f'fee_estimates {fee_estimates_eta}')
        self.notify('fee')
def get_status_value(self, key):
if key == 'status':
value = self.connection_status
elif key == 'banner':
value = self.banner
elif key == 'fee':
value = self.config.fee_estimates
elif key == 'fee_histogram':
value = self.config.mempool_fees
elif key == 'servers':
value = self.get_servers()
else:
raise Exception('unexpected trigger key {}'.format(key))
return value
def notify(self, key):
if key in ['status', 'updated']:
self.trigger_callback(key)
else:
self.trigger_callback(key, self.get_status_value(key))
    def get_parameters(self) -> NetworkParameters:
        """Snapshot the current user-facing network settings."""
        host, port, protocol = deserialize_server(self.default_server)
        return NetworkParameters(host=host,
                                 port=port,
                                 protocol=protocol,
                                 proxy=self.proxy,
                                 auto_connect=self.auto_connect,
                                 oneserver=self.oneserver)
    def get_donation_address(self):
        """Donation address announced by the main server; implicitly returns
        None when not connected."""
        if self.is_connected():
            return self.donation_address
    def get_interfaces(self) -> List[str]:
        """The list of servers for the connected interfaces."""
        # copy under the lock so callers get a stable snapshot
        with self.interfaces_lock:
            return list(self.interfaces)
    @with_recent_servers_lock
    def get_servers(self):
        """Merge hardcoded, recently-used and peer-announced servers into one
        host -> portmap dict, optionally filtering out .onion hosts."""
        # start with hardcoded servers
        out = dict(FairChains.DEFAULT_SERVERS)  # copy
        # add recent servers
        for s in self.recent_servers:
            try:
                host, port, protocol = deserialize_server(s)
            except:
                continue
            if host not in out:
                out[host] = {protocol: port}
        # add servers received from main interface
        server_peers = self.server_peers
        if server_peers:
            out.update(filter_version(server_peers.copy()))
        # potentially filter out some
        if self.config.get('noonion'):
            out = filter_noonion(out)
        return out
    def _start_interface(self, server: str):
        """Queue a connection attempt to *server* unless one is already
        connected or in progress."""
        if server not in self.interfaces and server not in self.connecting:
            if server == self.default_server:
                self.print_error(f"connecting to {server} as new interface")
                self._set_status('connecting')
            self.connecting.add(server)
            self.server_queue.put(server)
    def _start_random_interface(self):
        """Pick a random not-yet-tried server and queue a connection to it.
        Returns the chosen server string, or None if none was eligible."""
        with self.interfaces_lock:
            # avoid servers we failed on, are connected to, or are connecting to
            exclude_set = self.disconnected_servers | set(self.interfaces) | self.connecting
        server = pick_random_server(self.get_servers(), self.protocol, exclude_set)
        if server:
            self._start_interface(server)
        return server
    def _set_proxy(self, proxy: Optional[dict]):
        """Apply proxy settings, monkey-patching socket.getaddrinfo to avoid
        DNS leaks when a proxy is active, and notify 'proxy_set' listeners."""
        self.proxy = proxy
        # Store these somewhere so we can un-monkey-patch
        if not hasattr(socket, "_getaddrinfo"):
            socket._getaddrinfo = socket.getaddrinfo
        if proxy:
            self.print_error('setting proxy', proxy)
            # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy
            socket.getaddrinfo = lambda *args: [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
        else:
            if sys.platform == 'win32':
                # On Windows, socket.getaddrinfo takes a mutex, and might hold it for up to 10 seconds
                # when dns-resolving. To speed it up drastically, we resolve dns ourselves, outside that lock.
                # see #4421
                socket.getaddrinfo = self._fast_getaddrinfo
            else:
                socket.getaddrinfo = socket._getaddrinfo
        self.trigger_callback('proxy_set', self.proxy)
    @staticmethod
    def _fast_getaddrinfo(host, *args, **kwargs):
        """getaddrinfo replacement that resolves DNS via dnspython first
        (outside the Windows getaddrinfo mutex), then delegates to the
        original socket._getaddrinfo for the socket-level lookup."""
        def needs_dns_resolving(host):
            try:
                ipaddress.ip_address(host)
                return False  # already valid IP
            except ValueError:
                pass  # not an IP
            if str(host) in ('localhost', 'localhost.',):
                return False
            return True
        def resolve_with_dnspython(host):
            addrs = []
            # try IPv6
            try:
                answers = dns.resolver.query(host, dns.rdatatype.AAAA)
                addrs += [str(answer) for answer in answers]
            except dns.exception.DNSException as e:
                pass
            except BaseException as e:
                print_error(f'dnspython failed to resolve dns (AAAA) with error: {e}')
            # try IPv4
            try:
                answers = dns.resolver.query(host, dns.rdatatype.A)
                addrs += [str(answer) for answer in answers]
            except dns.exception.DNSException as e:
                # dns failed for some reason, e.g. dns.resolver.NXDOMAIN this is normal.
                # Simply report back failure; except if we already have some results.
                if not addrs:
                    raise socket.gaierror(11001, 'getaddrinfo failed') from e
            except BaseException as e:
                # Possibly internal error in dnspython :( see #4483
                print_error(f'dnspython failed to resolve dns (A) with error: {e}')
            if addrs:
                return addrs
            # Fall back to original socket.getaddrinfo to resolve dns.
            return [host]
        addrs = [host]
        if needs_dns_resolving(host):
            addrs = resolve_with_dnspython(host)
        # resolve each address via the original implementation and flatten
        list_of_list_of_socketinfos = [socket._getaddrinfo(addr, *args, **kwargs) for addr in addrs]
        list_of_socketinfos = [item for lst in list_of_list_of_socketinfos for item in lst]
        return list_of_socketinfos
@log_exceptions
async def set_parameters(self, net_params: NetworkParameters):
proxy = net_params.proxy
proxy_str = serialize_proxy(proxy)
host, port, protocol = net_params.host, net_params.port, net_params.protocol
server_str = serialize_server(host, port, protocol)
# sanitize parameters
try:
deserialize_server(serialize_server(host, port, protocol))
if proxy:
proxy_modes.index(proxy['mode']) + 1
int(proxy['port'])
except:
return
self.config.set_key('auto_connect', net_params.auto_connect, False)
self.config.set_key('oneserver', net_params.oneserver, False)
self.config.set_key('proxy', proxy_str, False)
self.config.set_key('server', server_str, True)
# abort if changes were not allowed by config
if self.config.get('server') != server_str \
or self.config.get('proxy') != proxy_str \
or self.config.get('oneserver') != net_params.oneserver:
return
async with self.restart_lock:
self.auto_connect = net_params.auto_connect
if self.proxy != proxy or self.protocol != protocol or self.oneserver != net_params.oneserver:
# Restart the network defaulting to the given server
await self._stop()
self.default_server = server_str
await self._start()
elif self.default_server != server_str:
await self.switch_to_interface(server_str)
else:
await self.switch_lagging_interface()
def _set_oneserver(self, oneserver: bool):
self.num_server = 10 if not oneserver else 0
self.oneserver = bool(oneserver)
async def _switch_to_random_interface(self):
'''Switch to a random connected server other than the current one'''
servers = self.get_interfaces() # Those in connected state
if self.default_server in servers:
servers.remove(self.default_server)
if servers:
await self.switch_to_interface(random.choice(servers))
async def switch_lagging_interface(self):
'''If auto_connect and lagging, switch interface'''
if self.auto_connect and await self._server_is_lagging():
# switch to one that has the correct header (not height)
best_header = self.blockchain().read_header(self.get_local_height())
with self.interfaces_lock: interfaces = list(self.interfaces.values())
filtered = list(filter(lambda iface: iface.tip_header == best_header, interfaces))
if filtered:
chosen_iface = random.choice(filtered)
await self.switch_to_interface(chosen_iface.server)
async def switch_unwanted_fork_interface(self):
"""If auto_connect and main interface is not on preferred fork,
try to switch to preferred fork.
"""
if not self.auto_connect or not self.interface:
return
with self.interfaces_lock: interfaces = list(self.interfaces.values())
# try to switch to preferred fork
if self._blockchain_preferred_block:
pref_height = self._blockchain_preferred_block['height']
pref_hash = self._blockchain_preferred_block['hash']
if self.interface.blockchain.check_hash(pref_height, pref_hash):
return # already on preferred fork
filtered = list(filter(lambda iface: iface.blockchain.check_hash(pref_height, pref_hash),
interfaces))
if filtered:
self.print_error("switching to preferred fork")
chosen_iface = random.choice(filtered)
await self.switch_to_interface(chosen_iface.server)
return
else:
self.print_error("tried to switch to preferred fork but no interfaces are on it")
# try to switch to best chain
if self.blockchain().parent is None:
return # already on best chain
filtered = list(filter(lambda iface: iface.blockchain.parent is None,
interfaces))
if filtered:
self.print_error("switching to best chain")
chosen_iface = random.choice(filtered)
await self.switch_to_interface(chosen_iface.server)
else:
# FIXME switch to best available?
self.print_error("tried to switch to best chain but no interfaces are on it")
async def switch_to_interface(self, server: str):
"""Switch to server as our main interface. If no connection exists,
queue interface to be started. The actual switch will
happen when the interface becomes ready.
"""
self.default_server = server
old_interface = self.interface
old_server = old_interface.server if old_interface else None
# Stop any current interface in order to terminate subscriptions,
# and to cancel tasks in interface.group.
# However, for headers sub, give preference to this interface
# over unknown ones, i.e. start it again right away.
if old_server and old_server != server:
await self._close_interface(old_interface)
if len(self.interfaces) <= self.num_server:
self._start_interface(old_server)
if server not in self.interfaces:
self.interface = None
self._start_interface(server)
return
i = self.interfaces[server]
if old_interface != i:
self.print_error("switching to", server)
blockchain_updated = i.blockchain != self.blockchain()
self.interface = i
await i.group.spawn(self._request_server_info(i))
self.trigger_callback('default_server_changed')
self._set_status('connected')
self.trigger_callback('network_updated')
if blockchain_updated: self.trigger_callback('blockchain_updated')
async def _close_interface(self, interface):
if interface:
with self.interfaces_lock:
if self.interfaces.get(interface.server) == interface:
self.interfaces.pop(interface.server)
if interface.server == self.default_server:
self.interface = None
await interface.close()
@with_recent_servers_lock
def _add_recent_server(self, server):
# list is ordered
if server in self.recent_servers:
self.recent_servers.remove(server)
self.recent_servers.insert(0, server)
self.recent_servers = self.recent_servers[0:20]
self._save_recent_servers()
async def connection_down(self, interface: Interface):
'''A connection to server either went down, or was never made.
We distinguish by whether it is in self.interfaces.'''
if not interface: return
server = interface.server
self.disconnected_servers.add(server)
if server == self.default_server:
self._set_status('disconnected')
await self._close_interface(interface)
self.trigger_callback('network_updated')
def get_network_timeout_seconds(self, request_type=NetworkTimeout.Generic) -> int:
if self.oneserver and not self.auto_connect:
return request_type.MOST_RELAXED
if self.proxy:
return request_type.RELAXED
return request_type.NORMAL
@ignore_exceptions # do not kill main_taskgroup
@log_exceptions
async def _run_new_interface(self, server):
interface = Interface(self, server, self.proxy)
timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent)
try:
await asyncio.wait_for(interface.ready, timeout)
except BaseException as e:
#traceback.print_exc()
self.print_error(f"couldn't launch iface {server} -- {repr(e)}")
await interface.close()
return
else:
with self.interfaces_lock:
assert server not in self.interfaces
self.interfaces[server] = interface
finally:
try: self.connecting.remove(server)
except KeyError: pass
if server == self.default_server:
await self.switch_to_interface(server)
self._add_recent_server(server)
self.trigger_callback('network_updated')
async def _init_headers_file(self):
b = blockchain.get_best_chain()
filename = b.path()
length = HEADER_SIZE * len(FairChains.CHECKPOINTS) * 2016
if not os.path.exists(filename) or os.path.getsize(filename) < length:
with open(filename, 'wb') as f:
if length > 0:
f.seek(length-1)
f.write(b'\x00')
util.ensure_sparse_file(filename)
with b.lock:
b.update_size()
def best_effort_reliable(func):
async def make_reliable_wrapper(self, *args, **kwargs):
for i in range(10):
iface = self.interface
# retry until there is a main interface
if not iface:
await asyncio.sleep(0.1)
continue # try again
# wait for it to be usable
iface_ready = iface.ready
iface_disconnected = iface.got_disconnected
await asyncio.wait([iface_ready, iface_disconnected], return_when=asyncio.FIRST_COMPLETED)
if not iface_ready.done() or iface_ready.cancelled():
await asyncio.sleep(0.1)
continue # try again
# try actual request
success_fut = asyncio.ensure_future(func(self, *args, **kwargs))
await asyncio.wait([success_fut, iface_disconnected], return_when=asyncio.FIRST_COMPLETED)
if success_fut.done() and not success_fut.cancelled():
if success_fut.exception():
try:
raise success_fut.exception()
except RequestTimedOut:
await iface.close()
await iface_disconnected
continue # try again
return success_fut.result()
# otherwise; try again
raise BestEffortRequestFailed('no interface to do request on... gave up.')
return make_reliable_wrapper
def catch_server_exceptions(func):
async def wrapper(self, *args, **kwargs):
try:
return await func(self, *args, **kwargs)
except aiorpcx.jsonrpc.CodeMessageError as e:
raise UntrustedServerReturnedError(original_exception=e) from e
return wrapper
@best_effort_reliable
@catch_server_exceptions
async def get_merkle_for_transaction(self, tx_hash: str, tx_height: int) -> dict:
if not is_hash256_str(tx_hash):
raise Exception(f"{repr(tx_hash)} is not a txid")
if not is_non_negative_integer(tx_height):
raise Exception(f"{repr(tx_height)} is not a block height")
return await self.interface.session.send_request('blockchain.transaction.get_merkle', [tx_hash, tx_height])
@best_effort_reliable
async def broadcast_transaction(self, tx, *, timeout=None) -> None:
if timeout is None:
timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent)
try:
out = await self.interface.session.send_request('blockchain.transaction.broadcast', [str(tx)], timeout=timeout)
# note: both 'out' and exception messages are untrusted input from the server
except (RequestTimedOut, asyncio.CancelledError, asyncio.TimeoutError):
raise # pass-through
except aiorpcx.jsonrpc.CodeMessageError as e:
self.print_error(f"broadcast_transaction error: {repr(e)}")
raise TxBroadcastServerReturnedError(self.sanitize_tx_broadcast_response(e.message)) from e
except BaseException as e: # intentional BaseException for sanity!
self.print_error(f"broadcast_transaction error2: {repr(e)}")
send_exception_to_crash_reporter(e)
raise TxBroadcastUnknownError() from e
if out != tx.txid():
self.print_error(f"unexpected txid for broadcast_transaction: {out} != {tx.txid()}")
raise TxBroadcastHashMismatch(_("Server returned unexpected transaction ID."))
@staticmethod
def sanitize_tx_broadcast_response(server_msg) -> str:
# Unfortunately, bitcoind and hence the Electrum protocol doesn't return a useful error code.
# So, we use substring matching to grok the error message.
# server_msg is untrusted input so it should not be shown to the user. see #4968
server_msg = str(server_msg)
server_msg = server_msg.replace("\n", r"\n")
# https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/policy/policy.cpp
# grep "reason ="
policy_error_messages = {
r"version": _("Transaction uses non-standard version."),
r"tx-size": _("The transaction was rejected because it is too large (in bytes)."),
r"scriptsig-size": None,
r"scriptsig-not-pushonly": None,
r"scriptpubkey": None,
r"bare-multisig": None,
r"dust": _("Transaction could not be broadcast due to dust outputs."),
r"multi-op-return": _("The transaction was rejected because it contains multiple OP_RETURN outputs."),
}
for substring in policy_error_messages:
if substring in server_msg:
msg = policy_error_messages[substring]
return msg if msg else substring
# https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/script/script_error.cpp
script_error_messages = {
r"Script evaluated without error but finished with a false/empty top stack element",
r"Script failed an OP_VERIFY operation",
r"Script failed an OP_EQUALVERIFY operation",
r"Script failed an OP_CHECKMULTISIGVERIFY operation",
r"Script failed an OP_CHECKSIGVERIFY operation",
r"Script failed an OP_NUMEQUALVERIFY operation",
r"Script is too big",
r"Push value size limit exceeded",
r"Operation limit exceeded",
r"Stack size limit exceeded",
r"Signature count negative or greater than pubkey count",
r"Pubkey count negative or limit exceeded",
r"Opcode missing or not understood",
r"Attempted to use a disabled opcode",
r"Operation not valid with the current stack size",
r"Operation not valid with the current altstack size",
r"OP_RETURN was encountered",
r"Invalid OP_IF construction",
r"Negative locktime",
r"Locktime requirement not satisfied",
r"Signature hash type missing or not understood",
r"Non-canonical DER signature",
r"Data push larger than necessary",
r"Only non-push operators allowed in signatures",
r"Non-canonical signature: S value is unnecessarily high",
r"Dummy CHECKMULTISIG argument must be zero",
r"OP_IF/NOTIF argument must be minimal",
r"Signature must be zero for failed CHECK(MULTI)SIG operation",
r"NOPx reserved for soft-fork upgrades",
r"Witness version reserved for soft-fork upgrades",
r"Public key is neither compressed or uncompressed",
r"Extra items left on stack after execution",
r"Witness program has incorrect length",
r"Witness program was passed an empty witness",
r"Witness program hash mismatch",
r"Witness requires empty scriptSig",
r"Witness requires only-redeemscript scriptSig",
r"Witness provided for non-witness script",
r"Using non-compressed keys in segwit",
r"Using OP_CODESEPARATOR in non-witness script",
r"Signature is found in scriptCode",
}
for substring in script_error_messages:
if substring in server_msg:
return substring
# https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/validation.cpp
# grep "REJECT_"
# should come after script_error.cpp (due to e.g. non-mandatory-script-verify-flag)
validation_error_messages = {
r"coinbase",
r"tx-size-small",
r"non-final",
r"txn-already-in-mempool",
r"txn-mempool-conflict",
r"txn-already-known",
r"non-BIP68-final",
r"bad-txns-nonstandard-inputs",
r"bad-witness-nonstandard",
r"bad-txns-too-many-sigops",
r"mempool min fee not met",
r"min relay fee not met",
r"absurdly-high-fee",
r"too-long-mempool-chain",
r"bad-txns-spends-conflicting-tx",
r"insufficient fee",
r"too many potential replacements",
r"replacement-adds-unconfirmed",
r"mempool full",
r"non-mandatory-script-verify-flag",
r"mandatory-script-verify-flag-failed",
}
for substring in validation_error_messages:
if substring in server_msg:
return substring
# https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/rpc/rawtransaction.cpp
# grep "RPC_TRANSACTION"
# grep "RPC_DESERIALIZATION_ERROR"
rawtransaction_error_messages = {
r"Missing inputs",
r"transaction already in block chain",
r"TX decode failed",
}
for substring in rawtransaction_error_messages:
if substring in server_msg:
return substring
# https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/consensus/tx_verify.cpp
# grep "REJECT_"
tx_verify_error_messages = {
r"bad-txns-vin-empty",
r"bad-txns-vout-empty",
r"bad-txns-oversize",
r"bad-txns-vout-negative",
r"bad-txns-vout-toolarge",
r"bad-txns-txouttotal-toolarge",
r"bad-txns-inputs-duplicate",
r"bad-cb-length",
r"bad-txns-prevout-null",
r"bad-txns-inputs-missingorspent",
r"bad-txns-premature-spend-of-coinbase",
r"bad-txns-inputvalues-outofrange",
r"bad-txns-in-belowout",
r"bad-txns-fee-outofrange",
}
for substring in tx_verify_error_messages:
if substring in server_msg:
return substring
# otherwise:
return _("Unknown error")
@best_effort_reliable
@catch_server_exceptions
async def request_chunk(self, height: int, tip=None, *, can_return_early=False):
if not is_non_negative_integer(height):
raise Exception(f"{repr(height)} is not a block height")
return await self.interface.request_chunk(height, tip=tip, can_return_early=can_return_early)
@best_effort_reliable
@catch_server_exceptions
async def get_transaction(self, tx_hash: str, *, timeout=None) -> str:
if not is_hash256_str(tx_hash):
raise Exception(f"{repr(tx_hash)} is not a txid")
return await self.interface.session.send_request('blockchain.transaction.get', [tx_hash],
timeout=timeout)
@best_effort_reliable
@catch_server_exceptions
async def get_history_for_scripthash(self, sh: str) -> List[dict]:
if not is_hash256_str(sh):
raise Exception(f"{repr(sh)} is not a scripthash")
return await self.interface.session.send_request('blockchain.scripthash.get_history', [sh])
@best_effort_reliable
@catch_server_exceptions
async def listunspent_for_scripthash(self, sh: str) -> List[dict]:
if not is_hash256_str(sh):
raise Exception(f"{repr(sh)} is not a scripthash")
return await self.interface.session.send_request('blockchain.scripthash.listunspent', [sh])
@best_effort_reliable
@catch_server_exceptions
async def get_balance_for_scripthash(self, sh: str) -> dict:
if not is_hash256_str(sh):
raise Exception(f"{repr(sh)} is not a scripthash")
return await self.interface.session.send_request('blockchain.scripthash.get_balance', [sh])
def blockchain(self) -> Blockchain:
interface = self.interface
if interface and interface.blockchain is not None:
self._blockchain = interface.blockchain
return self._blockchain
def get_blockchains(self):
out = {} # blockchain_id -> list(interfaces)
with blockchain.blockchains_lock: blockchain_items = list(blockchain.blockchains.items())
with self.interfaces_lock: interfaces_values = list(self.interfaces.values())
for chain_id, bc in blockchain_items:
r = list(filter(lambda i: i.blockchain==bc, interfaces_values))
if r:
out[chain_id] = r
return out
def _set_preferred_chain(self, chain: Blockchain):
height = chain.get_max_forkpoint()
header_hash = chain.get_hash(height)
self._blockchain_preferred_block = {
'height': height,
'hash': header_hash,
}
self.config.set_key('blockchain_preferred_block', self._blockchain_preferred_block)
async def follow_chain_given_id(self, chain_id: str) -> None:
bc = blockchain.blockchains.get(chain_id)
if not bc:
raise Exception('blockchain {} not found'.format(chain_id))
self._set_preferred_chain(bc)
# select server on this chain
with self.interfaces_lock: interfaces = list(self.interfaces.values())
interfaces_on_selected_chain = list(filter(lambda iface: iface.blockchain == bc, interfaces))
if len(interfaces_on_selected_chain) == 0: return
chosen_iface = random.choice(interfaces_on_selected_chain)
# switch to server (and save to config)
net_params = self.get_parameters()
host, port, protocol = deserialize_server(chosen_iface.server)
net_params = net_params._replace(host=host, port=port, protocol=protocol)
await self.set_parameters(net_params)
async def follow_chain_given_server(self, server_str: str) -> None:
# note that server_str should correspond to a connected interface
iface = self.interfaces.get(server_str)
if iface is None:
return
self._set_preferred_chain(iface.blockchain)
# switch to server (and save to config)
net_params = self.get_parameters()
host, port, protocol = deserialize_server(server_str)
net_params = net_params._replace(host=host, port=port, protocol=protocol)
await self.set_parameters(net_params)
def get_local_height(self):
return self.blockchain().height()
def export_checkpoints(self, path):
"""Run manually to generate blockchain checkpoints.
Kept for console use only.
"""
cp = self.blockchain().get_checkpoints()
with open(path, 'w', encoding='utf-8') as f:
f.write(json.dumps(cp, indent=4))
async def _start(self):
assert not self.main_taskgroup
self.main_taskgroup = main_taskgroup = SilentTaskGroup()
assert not self.interface and not self.interfaces
assert not self.connecting and not self.server_queue
self.print_error('starting network')
self.disconnected_servers = set([])
self.protocol = deserialize_server(self.default_server)[2]
self.server_queue = queue.Queue()
self._set_proxy(deserialize_proxy(self.config.get('proxy')))
self._set_oneserver(self.config.get('oneserver', False))
self._start_interface(self.default_server)
async def main():
try:
await self._init_headers_file()
# note: if a task finishes with CancelledError, that
# will NOT raise, and the group will keep the other tasks running
async with main_taskgroup as group:
await group.spawn(self._maintain_sessions())
[await group.spawn(job) for job in self._jobs]
except Exception as e:
traceback.print_exc(file=sys.stderr)
raise e
asyncio.run_coroutine_threadsafe(main(), self.asyncio_loop)
self.trigger_callback('network_updated')
def start(self, jobs: List=None):
self._jobs = jobs or []
asyncio.run_coroutine_threadsafe(self._start(), self.asyncio_loop)
@log_exceptions
async def _stop(self, full_shutdown=False):
self.print_error("stopping network")
try:
await asyncio.wait_for(self.main_taskgroup.cancel_remaining(), timeout=2)
except (asyncio.TimeoutError, asyncio.CancelledError) as e:
self.print_error(f"exc during main_taskgroup cancellation: {repr(e)}")
self.main_taskgroup = None # type: TaskGroup
self.interface = None # type: Interface
self.interfaces = {} # type: Dict[str, Interface]
self.connecting.clear()
self.server_queue = None
if not full_shutdown:
self.trigger_callback('network_updated')
def stop(self):
assert self._loop_thread != threading.current_thread(), 'must not be called from network thread'
fut = asyncio.run_coroutine_threadsafe(self._stop(full_shutdown=True), self.asyncio_loop)
try:
fut.result(timeout=2)
except (asyncio.TimeoutError, asyncio.CancelledError): pass
async def _ensure_there_is_a_main_interface(self):
if self.is_connected():
return
now = time.time()
# if auto_connect is set, try a different server
if self.auto_connect and not self.is_connecting():
await self._switch_to_random_interface()
# if auto_connect is not set, or still no main interface, retry current
if not self.is_connected() and not self.is_connecting():
if self.default_server in self.disconnected_servers:
if now - self.server_retry_time > SERVER_RETRY_INTERVAL:
self.disconnected_servers.remove(self.default_server)
self.server_retry_time = now
else:
await self.switch_to_interface(self.default_server)
async def _maintain_sessions(self):
async def launch_already_queued_up_new_interfaces():
while self.server_queue.qsize() > 0:
server = self.server_queue.get()
await self.main_taskgroup.spawn(self._run_new_interface(server))
async def maybe_queue_new_interfaces_to_be_launched_later():
now = time.time()
for i in range(self.num_server - len(self.interfaces) - len(self.connecting)):
self._start_random_interface()
if now - self.nodes_retry_time > NODES_RETRY_INTERVAL:
self.print_error('network: retrying connections')
self.disconnected_servers = set([])
self.nodes_retry_time = now
async def maintain_main_interface():
await self._ensure_there_is_a_main_interface()
if self.is_connected():
if self.config.is_fee_estimates_update_required():
await self.interface.group.spawn(self._request_fee_estimates, self.interface)
while True:
try:
await launch_already_queued_up_new_interfaces()
await maybe_queue_new_interfaces_to_be_launched_later()
await maintain_main_interface()
except asyncio.CancelledError:
# suppress spurious cancellations
group = self.main_taskgroup
if not group or group._closed:
raise
await asyncio.sleep(0.1)
@classmethod
async def _send_http_on_proxy(cls, method: str, url: str, params: str = None,
body: bytes = None, json: dict = None, headers=None,
on_finish=None, timeout=None):
async def default_on_finish(resp: ClientResponse):
resp.raise_for_status()
return await resp.text()
if headers is None:
headers = {}
if on_finish is None:
on_finish = default_on_finish
network = cls.get_instance()
proxy = network.proxy if network else None
async with make_aiohttp_session(proxy, timeout=timeout) as session:
if method == 'get':
async with session.get(url, params=params, headers=headers) as resp:
return await on_finish(resp)
elif method == 'post':
assert body is not None or json is not None, 'body or json must be supplied if method is post'
if body is not None:
async with session.post(url, data=body, headers=headers) as resp:
return await on_finish(resp)
elif json is not None:
async with session.post(url, json=json, headers=headers) as resp:
return await on_finish(resp)
else:
assert False
@classmethod
def send_http_on_proxy(cls, method, url, **kwargs):
network = cls.get_instance()
if network:
assert network._loop_thread is not threading.currentThread()
loop = network.asyncio_loop
else:
loop = asyncio.get_event_loop()
coro = asyncio.run_coroutine_threadsafe(cls._send_http_on_proxy(method, url, **kwargs), loop)
# note: _send_http_on_proxy has its own timeout, so no timeout here:
return coro.result()
# methods used in scripts
async def get_peers(self):
while not self.is_connected():
await asyncio.sleep(1)
session = self.interface.session
return parse_servers(await session.send_request('server.peers.subscribe'))
async def send_multiple_requests(self, servers: List[str], method: str, params: Sequence):
responses = dict()
async def get_response(server):
interface = Interface(self, server, self.proxy)
timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent)
try:
await asyncio.wait_for(interface.ready, timeout)
except BaseException as e:
await interface.close()
return
try:
res = await interface.session.send_request(method, params, timeout=10)
except Exception as e:
res = e
responses[interface.server] = res
async with TaskGroup() as group:
for server in servers:
await group.spawn(get_response(server))
return responses | en | 0.779847 | # Electrum - Lightweight Bitcoin Client # Copyright (c) 2011-2016 <NAME> # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files # (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, # publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # from . import constants parse servers list into dict format Filters the hostmap for those implementing protocol. The result is a list in serialized form. # FIXME raw IPv6 address fails here The Network class manages a set of connections to remote electrumfairchains servers, each connected socket is handled by an Interface() object. # type: threading.Thread # set by caller; only used for sanity checks # Do not use mutables as default values! 
# type: SimpleConfig # type: Optional[Dict] # Server for addresses and transactions # Sanitize default server # type: TaskGroup # locks # <- re-entrant # for mutating/iterating self.interfaces # returned by interface (servers that the main interface knows about) # note: needs self.recent_servers_lock # type: Optional[int] # callbacks set by the GUI # note: needs self.callback_lock # retry times # the main server we are currently communicating with # type: Interface # set of servers we have an ongoing connection with # type: Dict[str, Interface] # Dump network messages (all interfaces). Set at runtime from the console. # FIXME: if callback throws, we will lose the traceback # ignore empty string The list of servers for the connected interfaces. # start with hardcoded servers # copy # add recent servers # add servers received from main interface # potentially filter out some # Store these somewhere so we can un-monkey-patch # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy # On Windows, socket.getaddrinfo takes a mutex, and might hold it for up to 10 seconds # when dns-resolving. To speed it up drastically, we resolve dns ourselves, outside that lock. # see #4421 # already valid IP # not an IP # try IPv6 # try IPv4 # dns failed for some reason, e.g. dns.resolver.NXDOMAIN this is normal. # Simply report back failure; except if we already have some results. # Possibly internal error in dnspython :( see #4483 # Fall back to original socket.getaddrinfo to resolve dns. # sanitize parameters # abort if changes were not allowed by config # Restart the network defaulting to the given server Switch to a random connected server other than the current one # Those in connected state If auto_connect and lagging, switch interface # switch to one that has the correct header (not height) If auto_connect and main interface is not on preferred fork, try to switch to preferred fork. 
# try to switch to preferred fork # already on preferred fork # try to switch to best chain # already on best chain # FIXME switch to best available? Switch to server as our main interface. If no connection exists, queue interface to be started. The actual switch will happen when the interface becomes ready. # Stop any current interface in order to terminate subscriptions, # and to cancel tasks in interface.group. # However, for headers sub, give preference to this interface # over unknown ones, i.e. start it again right away. # list is ordered A connection to server either went down, or was never made. We distinguish by whether it is in self.interfaces. # do not kill main_taskgroup #traceback.print_exc() # retry until there is a main interface # try again # wait for it to be usable # try again # try actual request # try again # otherwise; try again # note: both 'out' and exception messages are untrusted input from the server # pass-through # intentional BaseException for sanity! # Unfortunately, bitcoind and hence the Electrum protocol doesn't return a useful error code. # So, we use substring matching to grok the error message. # server_msg is untrusted input so it should not be shown to the user. see #4968 # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/policy/policy.cpp # grep "reason =" # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/script/script_error.cpp # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/validation.cpp # grep "REJECT_" # should come after script_error.cpp (due to e.g. 
non-mandatory-script-verify-flag) # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/rpc/rawtransaction.cpp # grep "RPC_TRANSACTION" # grep "RPC_DESERIALIZATION_ERROR" # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/consensus/tx_verify.cpp # grep "REJECT_" # otherwise: # blockchain_id -> list(interfaces) # select server on this chain # switch to server (and save to config) # note that server_str should correspond to a connected interface # switch to server (and save to config) Run manually to generate blockchain checkpoints. Kept for console use only. # note: if a task finishes with CancelledError, that # will NOT raise, and the group will keep the other tasks running # type: TaskGroup # type: Interface # type: Dict[str, Interface] # if auto_connect is set, try a different server # if auto_connect is not set, or still no main interface, retry current # suppress spurious cancellations # note: _send_http_on_proxy has its own timeout, so no timeout here: # methods used in scripts | 1.454109 | 1 |
auth.py | girish97115/anonymail | 0 | 6624513 | <reponame>girish97115/anonymail
import functools
from flask import (Blueprint, flash, g, redirect, render_template, request, session, url_for, current_app)
from werkzeug.security import check_password_hash, generate_password_hash
from werkzeug.exceptions import abort
import smtplib,time
from itsdangerous import URLSafeTimedSerializer
from anonymail.db import get_db
import datetime
# Year captured once at import time, used by templates to render the footer.
now = datetime.datetime.now()
current_year = now.year  # NOTE(review): frozen at import; long-running servers will show a stale year
#Retrieving Gmail credentials
# NOTE(review): this resolves to a local secrets.py exposing EMAIL/PASSWORD and
# shadows the stdlib `secrets` module -- confirm.
import secrets
# All routes below are served under the /auth URL prefix.
bp = Blueprint('auth', __name__, url_prefix='/auth')
@bp.route('/recover', methods=('GET', 'POST'))
def recover():
    """Password-recovery endpoint.

    GET  -- render the recovery form (logged-in users go to the index).
    POST -- look up the account's e-mail address and send it a signed,
            time-limited password-reset link.
    """
    if request.method == 'POST':
        username = request.form['username']
        db = get_db()
        dest = db.execute(
            'SELECT email from user where username= ?', (username,)
        ).fetchone()
        if dest is None:
            # NOTE(review): a 404 here lets callers probe which usernames
            # exist (user enumeration); consider a generic response.
            abort(404)
        rec = dest['email']
        serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
        token = serializer.dumps(rec, salt='password-reset-salt')
        # Include the scheme so the emailed link is actually clickable
        # (the original concatenated request.host with no "http://").
        password_reset_url = '{}://{}/auth/reset/{}'.format(request.scheme, request.host, token)
        SUBJECT = 'Password Recovery - Anonymail'
        TEXT = 'Hello {}, \n\nPlease click the link below to reset your password. \n{} \n\nRegards,\nAnonymail'.format(username, password_reset_url)
        message = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
        # Context manager guarantees the SMTP connection is closed even if
        # sending fails (the original leaked the socket).
        with smtplib.SMTP(host='smtp.gmail.com', port=587) as s:
            s.starttls()
            s.login(secrets.EMAIL, secrets.PASSWORD)
            s.sendmail(secrets.EMAIL, rec, message)
        return render_template('auth/mailsent.html', user=username)
    # GET: logged-in users have no use for password recovery.
    if session.get('user_id'):
        return redirect(url_for('index'))
    return render_template('auth/recover.html', year=current_year)
@bp.route('/register', methods=('GET', 'POST'))
def register():
    """Create a new account.

    POST validates the form, stores the user with a hashed password and
    redirects to the login page; on validation failure the error is
    flashed and the form is re-rendered.  Logged-in users are redirected
    to the index instead of seeing the form.
    """
    # Explicit session check replaces the original bare `except:` around
    # session['user_id'], which silently swallowed every error.
    if session.get('user_id'):
        return redirect(url_for('index'))
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        name = request.form['name']
        email = request.form['email']
        db = get_db()
        error = None
        if not username:
            error = 'Username is required.'
        elif not password:
            error = 'Password is required.'
        elif db.execute(
            'SELECT id FROM user WHERE username = ?', (username,)
        ).fetchone() is not None:
            error = 'User {} is already registered.'.format(username)
        if error is None:
            # Never store the plaintext password, only its hash.
            db.execute(
                'INSERT INTO user (username, password, name, email) VALUES (?, ?, ?, ?)',
                (username, generate_password_hash(password), name, email)
            )
            db.commit()
            return redirect(url_for('auth.login'))
        flash(error)
    return render_template('auth/register.html', year=current_year)
@bp.route('/login', methods=('GET', 'POST'))
def login():
    """Authenticate a user and start a session.

    POST checks the submitted credentials against the `user` table; on
    success the session is reset and keyed to the user's id.  Logged-in
    users are redirected straight to the index.  (The original ran the
    whole login path inside a bare `except:` triggered by the missing
    session key; this uses session.get instead.)
    """
    if session.get('user_id'):
        return redirect(url_for('index'))
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        db = get_db()
        error = None
        user = db.execute(
            'SELECT * FROM user WHERE username = ?', (username,)
        ).fetchone()
        if user is None:
            error = 'Incorrect username.'
        elif not check_password_hash(user['password'], password):
            error = 'Incorrect password.'
        if error is None:
            # Drop any stale session data before storing the new identity.
            session.clear()
            session['user_id'] = user['id']
            return redirect(url_for('index'))
        flash(error)
    return render_template('auth/login.html', year=current_year)
@bp.before_app_request
def load_logged_in_user():
    """Before every request, attach the logged-in user's row (or None) to g.user."""
    uid = session.get('user_id')
    g.user = (
        get_db().execute('SELECT * FROM user WHERE id = ?', (uid,)).fetchone()
        if uid is not None
        else None
    )
@bp.route('/logout')
def logout():
    """Clear the session and send the visitor back to the index page."""
    target = url_for('index')
    session.clear()
    return redirect(target)
def login_required(view):
    """Decorator: let authenticated users through, redirect everyone else to login."""
    @functools.wraps(view)
    def wrapped_view(**kwargs):
        if g.user is not None:
            return view(**kwargs)
        return redirect(url_for('auth.login'))
    return wrapped_view
@bp.route('/reset/<string:token>', methods=('GET', 'POST'))
def reset(token):
    """Password-reset endpoint reached from the emailed link.

    The token is a URLSafeTimedSerializer payload holding the account's
    e-mail address; it expires one hour after being issued.  GET renders
    the form, POST stores the new (hashed) password.
    """
    serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
    try:
        # Validate on GET as well, so a tampered or expired link fails fast
        # (the original only checked the token on POST).
        email = serializer.loads(token, salt='password-reset-salt', max_age=3600)
    except Exception:
        # Do not echo the exception text back to the client; the original
        # `abort(403, str(e))` leaked internal details.
        abort(403)
    if request.method == 'POST':
        password = request.form['password']
        repassword = request.form['repassword']
        if password == repassword:
            db = get_db()
            db.execute('UPDATE user set password = ? where email = ?',
                       (generate_password_hash(password), email))
            db.commit()
            return redirect(url_for('auth.login'))
        flash('Both passwords are not same')
    return render_template('auth/reset.html', year=current_year)
from flask import (Blueprint, flash, g, redirect, render_template, request, session, url_for, current_app)
from werkzeug.security import check_password_hash, generate_password_hash
from werkzeug.exceptions import abort
import smtplib,time
from itsdangerous import URLSafeTimedSerializer
from anonymail.db import get_db
import datetime
# Year captured once at import time, used by templates to render the footer.
now = datetime.datetime.now()
current_year = now.year  # NOTE(review): frozen at import; long-running servers will show a stale year
#Retrieving Gmail credentials
# NOTE(review): this resolves to a local secrets.py exposing EMAIL/PASSWORD and
# shadows the stdlib `secrets` module -- confirm.
import secrets
# All routes below are served under the /auth URL prefix.
bp = Blueprint('auth', __name__, url_prefix='/auth')
@bp.route('/recover', methods=('GET', 'POST'))
def recover():
    """Password-recovery endpoint.

    GET  -- render the recovery form (logged-in users go to the index).
    POST -- look up the account's e-mail address and send it a signed,
            time-limited password-reset link.
    """
    if request.method == 'POST':
        username = request.form['username']
        db = get_db()
        dest = db.execute(
            'SELECT email from user where username= ?', (username,)
        ).fetchone()
        if dest is None:
            # NOTE(review): a 404 here lets callers probe which usernames
            # exist (user enumeration); consider a generic response.
            abort(404)
        rec = dest['email']
        serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
        token = serializer.dumps(rec, salt='password-reset-salt')
        # Include the scheme so the emailed link is actually clickable
        # (the original concatenated request.host with no "http://").
        password_reset_url = '{}://{}/auth/reset/{}'.format(request.scheme, request.host, token)
        SUBJECT = 'Password Recovery - Anonymail'
        TEXT = 'Hello {}, \n\nPlease click the link below to reset your password. \n{} \n\nRegards,\nAnonymail'.format(username, password_reset_url)
        message = 'Subject: {}\n\n{}'.format(SUBJECT, TEXT)
        # Context manager guarantees the SMTP connection is closed even if
        # sending fails (the original leaked the socket).
        with smtplib.SMTP(host='smtp.gmail.com', port=587) as s:
            s.starttls()
            s.login(secrets.EMAIL, secrets.PASSWORD)
            s.sendmail(secrets.EMAIL, rec, message)
        return render_template('auth/mailsent.html', user=username)
    # GET: logged-in users have no use for password recovery.
    if session.get('user_id'):
        return redirect(url_for('index'))
    return render_template('auth/recover.html', year=current_year)
@bp.route('/register', methods=('GET', 'POST'))
def register():
    """Create a new account.

    POST validates the form, stores the user with a hashed password and
    redirects to the login page; on validation failure the error is
    flashed and the form is re-rendered.  Logged-in users are redirected
    to the index instead of seeing the form.
    """
    # Explicit session check replaces the original bare `except:` around
    # session['user_id'], which silently swallowed every error.
    if session.get('user_id'):
        return redirect(url_for('index'))
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        name = request.form['name']
        email = request.form['email']
        db = get_db()
        error = None
        if not username:
            error = 'Username is required.'
        elif not password:
            error = 'Password is required.'
        elif db.execute(
            'SELECT id FROM user WHERE username = ?', (username,)
        ).fetchone() is not None:
            error = 'User {} is already registered.'.format(username)
        if error is None:
            # Never store the plaintext password, only its hash.
            db.execute(
                'INSERT INTO user (username, password, name, email) VALUES (?, ?, ?, ?)',
                (username, generate_password_hash(password), name, email)
            )
            db.commit()
            return redirect(url_for('auth.login'))
        flash(error)
    return render_template('auth/register.html', year=current_year)
@bp.route('/login', methods=('GET', 'POST'))
def login():
    """Authenticate a user and start a session.

    POST checks the submitted credentials against the `user` table; on
    success the session is reset and keyed to the user's id.  Logged-in
    users are redirected straight to the index.  (The original ran the
    whole login path inside a bare `except:` triggered by the missing
    session key; this uses session.get instead.)
    """
    if session.get('user_id'):
        return redirect(url_for('index'))
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        db = get_db()
        error = None
        user = db.execute(
            'SELECT * FROM user WHERE username = ?', (username,)
        ).fetchone()
        if user is None:
            error = 'Incorrect username.'
        elif not check_password_hash(user['password'], password):
            error = 'Incorrect password.'
        if error is None:
            # Drop any stale session data before storing the new identity.
            session.clear()
            session['user_id'] = user['id']
            return redirect(url_for('index'))
        flash(error)
    return render_template('auth/login.html', year=current_year)
@bp.before_app_request
def load_logged_in_user():
    """Before every request, attach the logged-in user's row (or None) to g.user."""
    uid = session.get('user_id')
    g.user = (
        get_db().execute('SELECT * FROM user WHERE id = ?', (uid,)).fetchone()
        if uid is not None
        else None
    )
@bp.route('/logout')
def logout():
    """Clear the session and send the visitor back to the index page."""
    target = url_for('index')
    session.clear()
    return redirect(target)
def login_required(view):
    """Decorator: let authenticated users through, redirect everyone else to login."""
    @functools.wraps(view)
    def wrapped_view(**kwargs):
        if g.user is not None:
            return view(**kwargs)
        return redirect(url_for('auth.login'))
    return wrapped_view
@bp.route('/reset/<string:token>', methods=('GET', 'POST'))
def reset(token):
    """Password-reset endpoint reached from the emailed link.

    The token is a URLSafeTimedSerializer payload holding the account's
    e-mail address; it expires one hour after being issued.  GET renders
    the form, POST stores the new (hashed) password.
    """
    serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
    try:
        # Validate on GET as well, so a tampered or expired link fails fast
        # (the original only checked the token on POST).
        email = serializer.loads(token, salt='password-reset-salt', max_age=3600)
    except Exception:
        # Do not echo the exception text back to the client; the original
        # `abort(403, str(e))` leaked internal details.
        abort(403)
    if request.method == 'POST':
        password = request.form['password']
        repassword = request.form['repassword']
        if password == repassword:
            db = get_db()
            db.execute('UPDATE user set password = ? where email = ?',
                       (generate_password_hash(password), email))
            db.commit()
            return redirect(url_for('auth.login'))
        flash('Both passwords are not same')
    return render_template('auth/reset.html', year=current_year)
phyton/Exercicio021_Musica_ pygame.py | felipebaloneker/Practice | 0 | 6624514 | <reponame>felipebaloneker/Practice
# Plays "song021.mp3" (expected in the current working directory) via pygame.
import pygame
pygame.init()
pygame.mixer.init()
pygame.mixer.music.load('song021.mp3')
# Playback is asynchronous; play() returns immediately.
pygame.mixer.music.play()
# Block until the first pygame event so the process does not exit at once.
# NOTE(review): without a window there may be no events, and exiting can cut
# playback short -- confirm whether a "wait until song ends" loop was intended.
pygame.event.wait()
| import pygame
# Plays "song021.mp3" via pygame (duplicate copy; `import pygame` sits on the
# preceding dataset-boundary line).
pygame.init()
pygame.mixer.init()
pygame.mixer.music.load('song021.mp3')
# Playback is asynchronous; play() returns immediately.
pygame.mixer.music.play()
# Blocks until the first pygame event; the trailing "| none | ..." text on the
# final line is dataset-export residue, not code.
pygame.event.wait() | none | 1 | 2.179234 | 2 |
code/dependency/prepare_norm_data.py | adapt-sjtu/commonsense-locatednear | 1 | 6624515 | import os
import networkx as nx
import sys
from pycorenlp import StanfordCoreNLP
from pprint import pprint
import json
# CLI usage: python prepare_norm_data.py <subdir> <corenlp-port>
subdir = sys.argv[1]  # subdirectory of the pair_sentences corpus to process
port = sys.argv[2]  # port of a locally running Stanford CoreNLP server
nlp = StanfordCoreNLP('http://localhost:{0}'.format(port))
def get_vocab_dic_split(filename):
    """Map every whitespace-separated token in *filename* to its line index.

    Tokens that appear on several lines keep the index of the last line
    they occur on.
    """
    mapping = {}
    with open(filename) as handle:
        for line_no, line in enumerate(handle):
            for token in line.split():
                mapping[token] = line_no
        return mapping
def get_vocab_dic(filename):
    """Map each stripped line of *filename* to its 0-based line index.

    Duplicate lines keep the index of their last occurrence.
    """
    with open(filename) as handle:
        return {line.strip(): idx for idx, line in enumerate(handle)}
def get_stanford_annotations(text, port=9000,
                             annotators='tokenize,ssplit,pos,lemma,depparse,parse'):
    """Annotate *text* via the module-level CoreNLP client and return the raw response.

    NOTE(review): the ``port`` parameter is unused -- requests always go to
    the module-level ``nlp`` client built from ``sys.argv[2]``.
    """
    output = nlp.annotate(text, properties={
        "timeout": "10000",
        "ssplit.isOneSentence": "true",
        'annotators': annotators,
    })
    return output
# Lookup tables driving the normalization rules in the main loop below.
adv_dic = get_vocab_dic('adverbs.txt')  # adverbs eligible for rewriting
prep1_dic = get_vocab_dic_split('prep-1.txt')  # single-word prepositions
prep2_dic = get_vocab_dic('prep-2.txt')  # NOTE(review): unused in the code visible here
DIR_NAME = os.path.join('/home/frank/relation/dataset/gutenberg/pair_sentences', subdir)
# For every "<terma>_<termb>.txt" pair file: parse each sentence with CoreNLP
# and, when both target terms occur as lemmas, emit three parallel output
# lines -- lemmas (.lemma), coarse POS tags (.pos) and a normalized sentence
# (.norm_termab) where the targets, verbs (+ subject/object), known
# prepositions (+ case head) and known adverbs are rewritten.
# CoreNLP token/dependency indices are 1-based while the Python lists are
# 0-based, hence the `i + 1` / `child - 1` arithmetic below.
for filename in os.listdir(DIR_NAME):
    if filename.endswith('.txt'):
        file = os.path.join(DIR_NAME, filename)
        with open(file, encoding='utf-8') as in_file, \
                open(os.path.splitext(file)[0] + '.norm_termab', 'w', encoding='utf-8') as norm_termab_file, \
                open(os.path.splitext(file)[0] + '.pos', 'w', encoding='utf-8') as pos_file, \
                open(os.path.splitext(file)[0] + '.lemma', 'w', encoding='utf-8') as lemma_file:
            print(filename)
            # The two target terms are encoded in the file name: "a_b.txt".
            termab = filename.split('.txt')[0]
            token1 = termab.split('_')[0]
            token2 = termab.split("_")[1]
            for line in in_file:
                # Each input line is "<sent_id>\t<sentence>".
                ls = line.strip().split('\t')
                sent_id = ls[0]
                document = ls[1].strip()
                # The code expects the document to contains exactly one sentence.
                # document = 'The men, crowded upon each other, stared stupidly like a flock of sheep.'
                # print('document: {0}'.format(document))
                # Parse the text
                annotations = get_stanford_annotations(document, port=9000,
                                                       annotators='tokenize,ssplit,pos,lemma,depparse')
                # NOTE(review): the `encoding` kwarg of json.loads was removed
                # in Python 3.9 -- this raises TypeError on modern interpreters.
                annotations = json.loads(annotations, encoding="utf-8", strict=False)
                tokens = annotations['sentences'][0]['tokens']
                # Load Stanford CoreNLP's dependency tree into a networkx graph
                edges = []
                dependencies = {}
                root_index = annotations['sentences'][0]['basic-dependencies'][0]["dependent"]
                for edge in annotations['sentences'][0]['basic-dependencies']:
                    edges.append((edge['governor'], edge['dependent']))
                    dependencies[(edge['governor'], edge['dependent'])] = edge
                graph = nx.DiGraph(edges)
                # Find the shortest path
                # print(token1)
                # print(token2)
                token_list = [token['originalText'].lower() for token in tokens]
                pos_list = [token['pos'] for token in tokens]
                lemma_list = [token['lemma'].lower() for token in tokens]
                if token1 in lemma_list and token2 in lemma_list:
                    lemma_file.write(' '.join(lemma_list) + '\n')
                    # self-designed sentence normalization
                    # norm changed termab
                    # Start from the 2-letter coarse POS tag for every token.
                    sent_norm = [pos[:2] for pos in pos_list]
                    pos_file.write(' '.join(sent_norm) + '\n')
                    # `changed` guards each position against being rewritten twice.
                    changed = [False] * len(pos_list)
                    changeda = False
                    changedb = False
                    for i, pos in enumerate(pos_list):
                        # change terma and termb
                        if lemma_list[i] == token1 and not changeda:
                            sent_norm[i] = 'terma'
                            changeda = True
                            changed[i] = True
                        elif lemma_list[i] == token2 and not changedb:
                            sent_norm[i] = 'termb'
                            changedb = True
                            changed[i] = True
                        # change verb and nsubj and dobj back
                        elif pos.startswith('V') and not changed[i]:
                            verb = lemma_list[i]
                            sent_norm[i] = verb
                            changed[i] = True
                            for child in graph.successors(i + 1):
                                if dependencies[(i + 1, child)]['dep'] == "nsubj" and not changed[child - 1]:
                                    sent_norm[child - 1] = verb + 'xxs'
                                    changed[child - 1] = True
                                elif dependencies[(i + 1, child)]['dep'] == "dobj" and not changed[child - 1]:
                                    sent_norm[child - 1] = verb + 'xxo'
                                    changed[child - 1] = True
                        # change prep and its father back
                        elif pos.startswith('IN') and not changed[i] and lemma_list[i] in prep1_dic:
                            prep = lemma_list[i]
                            sent_norm[i] = prep
                            changed[i] = True
                            # NOTE(review): under networkx>=2.0 predecessors()
                            # returns an iterator and is not subscriptable.
                            father = graph.predecessors(i + 1)[0]
                            if dependencies[(father, i + 1)]['dep'] == 'case' and not changed[father - 1]:
                                sent_norm[father - 1] = prep + 'xxc'
                                changed[father - 1] = True
                        # change adv and its father back
                        elif pos.startswith('RB') and not changed[i] and lemma_list[i] in adv_dic:
                            adv = lemma_list[i]
                            sent_norm[i] = adv
                            changed[i] = True
                            # father = graph.predecessors(i + 1)[0]
                            # sent_norm[father - 1] = adv + '#predecessor'
                            # changed[father - 1] = True
                    norm_termab_file.write(' '.join(sent_norm) + '\n')
| import os
import networkx as nx
import sys
from pycorenlp import StanfordCoreNLP
from pprint import pprint
import json
# CLI usage: python prepare_norm_data.py <subdir> <corenlp-port>
subdir = sys.argv[1]  # subdirectory of the pair_sentences corpus to process
port = sys.argv[2]  # port of a locally running Stanford CoreNLP server
nlp = StanfordCoreNLP('http://localhost:{0}'.format(port))
def get_vocab_dic_split(filename):
    """Map every whitespace-separated token in *filename* to its line index.

    Tokens that appear on several lines keep the index of the last line
    they occur on.
    """
    mapping = {}
    with open(filename) as handle:
        for line_no, line in enumerate(handle):
            for token in line.split():
                mapping[token] = line_no
        return mapping
def get_vocab_dic(filename):
    """Map each stripped line of *filename* to its 0-based line index.

    Duplicate lines keep the index of their last occurrence.
    """
    with open(filename) as handle:
        return {line.strip(): idx for idx, line in enumerate(handle)}
def get_stanford_annotations(text, port=9000,
                             annotators='tokenize,ssplit,pos,lemma,depparse,parse'):
    """Annotate *text* via the module-level CoreNLP client and return the raw response.

    NOTE(review): the ``port`` parameter is unused -- requests always go to
    the module-level ``nlp`` client built from ``sys.argv[2]``.
    """
    output = nlp.annotate(text, properties={
        "timeout": "10000",
        "ssplit.isOneSentence": "true",
        'annotators': annotators,
    })
    return output
# Lookup tables driving the normalization rules in the main loop below.
adv_dic = get_vocab_dic('adverbs.txt')  # adverbs eligible for rewriting
prep1_dic = get_vocab_dic_split('prep-1.txt')  # single-word prepositions
prep2_dic = get_vocab_dic('prep-2.txt')  # NOTE(review): unused in the code visible here
DIR_NAME = os.path.join('/home/frank/relation/dataset/gutenberg/pair_sentences', subdir)
# For every "<terma>_<termb>.txt" pair file: parse each sentence with CoreNLP
# and, when both target terms occur as lemmas, emit three parallel output
# lines -- lemmas (.lemma), coarse POS tags (.pos) and a normalized sentence
# (.norm_termab) where the targets, verbs (+ subject/object), known
# prepositions (+ case head) and known adverbs are rewritten.
# CoreNLP token/dependency indices are 1-based while the Python lists are
# 0-based, hence the `i + 1` / `child - 1` arithmetic below.
for filename in os.listdir(DIR_NAME):
    if filename.endswith('.txt'):
        file = os.path.join(DIR_NAME, filename)
        with open(file, encoding='utf-8') as in_file, \
                open(os.path.splitext(file)[0] + '.norm_termab', 'w', encoding='utf-8') as norm_termab_file, \
                open(os.path.splitext(file)[0] + '.pos', 'w', encoding='utf-8') as pos_file, \
                open(os.path.splitext(file)[0] + '.lemma', 'w', encoding='utf-8') as lemma_file:
            print(filename)
            # The two target terms are encoded in the file name: "a_b.txt".
            termab = filename.split('.txt')[0]
            token1 = termab.split('_')[0]
            token2 = termab.split("_")[1]
            for line in in_file:
                # Each input line is "<sent_id>\t<sentence>".
                ls = line.strip().split('\t')
                sent_id = ls[0]
                document = ls[1].strip()
                # The code expects the document to contains exactly one sentence.
                # document = 'The men, crowded upon each other, stared stupidly like a flock of sheep.'
                # print('document: {0}'.format(document))
                # Parse the text
                annotations = get_stanford_annotations(document, port=9000,
                                                       annotators='tokenize,ssplit,pos,lemma,depparse')
                # NOTE(review): the `encoding` kwarg of json.loads was removed
                # in Python 3.9 -- this raises TypeError on modern interpreters.
                annotations = json.loads(annotations, encoding="utf-8", strict=False)
                tokens = annotations['sentences'][0]['tokens']
                # Load Stanford CoreNLP's dependency tree into a networkx graph
                edges = []
                dependencies = {}
                root_index = annotations['sentences'][0]['basic-dependencies'][0]["dependent"]
                for edge in annotations['sentences'][0]['basic-dependencies']:
                    edges.append((edge['governor'], edge['dependent']))
                    dependencies[(edge['governor'], edge['dependent'])] = edge
                graph = nx.DiGraph(edges)
                # Find the shortest path
                # print(token1)
                # print(token2)
                token_list = [token['originalText'].lower() for token in tokens]
                pos_list = [token['pos'] for token in tokens]
                lemma_list = [token['lemma'].lower() for token in tokens]
                if token1 in lemma_list and token2 in lemma_list:
                    lemma_file.write(' '.join(lemma_list) + '\n')
                    # self-designed sentence normalization
                    # norm changed termab
                    # Start from the 2-letter coarse POS tag for every token.
                    sent_norm = [pos[:2] for pos in pos_list]
                    pos_file.write(' '.join(sent_norm) + '\n')
                    # `changed` guards each position against being rewritten twice.
                    changed = [False] * len(pos_list)
                    changeda = False
                    changedb = False
                    for i, pos in enumerate(pos_list):
                        # change terma and termb
                        if lemma_list[i] == token1 and not changeda:
                            sent_norm[i] = 'terma'
                            changeda = True
                            changed[i] = True
                        elif lemma_list[i] == token2 and not changedb:
                            sent_norm[i] = 'termb'
                            changedb = True
                            changed[i] = True
                        # change verb and nsubj and dobj back
                        elif pos.startswith('V') and not changed[i]:
                            verb = lemma_list[i]
                            sent_norm[i] = verb
                            changed[i] = True
                            for child in graph.successors(i + 1):
                                if dependencies[(i + 1, child)]['dep'] == "nsubj" and not changed[child - 1]:
                                    sent_norm[child - 1] = verb + 'xxs'
                                    changed[child - 1] = True
                                elif dependencies[(i + 1, child)]['dep'] == "dobj" and not changed[child - 1]:
                                    sent_norm[child - 1] = verb + 'xxo'
                                    changed[child - 1] = True
                        # change prep and its father back
                        elif pos.startswith('IN') and not changed[i] and lemma_list[i] in prep1_dic:
                            prep = lemma_list[i]
                            sent_norm[i] = prep
                            changed[i] = True
                            # NOTE(review): under networkx>=2.0 predecessors()
                            # returns an iterator and is not subscriptable.
                            father = graph.predecessors(i + 1)[0]
                            if dependencies[(father, i + 1)]['dep'] == 'case' and not changed[father - 1]:
                                sent_norm[father - 1] = prep + 'xxc'
                                changed[father - 1] = True
                        # change adv and its father back
                        elif pos.startswith('RB') and not changed[i] and lemma_list[i] in adv_dic:
                            adv = lemma_list[i]
                            sent_norm[i] = adv
                            changed[i] = True
                            # father = graph.predecessors(i + 1)[0]
                            # sent_norm[father - 1] = adv + '#predecessor'
                            # changed[father - 1] = True
                    norm_termab_file.write(' '.join(sent_norm) + '\n')
| en | 0.859561 | # The code expects the document to contains exactly one sentence. # document = 'The men, crowded upon each other, stared stupidly like a flock of sheep.' # print('document: {0}'.format(document)) # Parse the text # Load Stanford CoreNLP's dependency tree into a networkx graph # Find the shortest path # print(token1) # print(token2) # self-designed sentence normalization # norm changed termab # change terma and termb # change verb and nsubj and dobj back # change prep and its father back # change adv and its father back # father = graph.predecessors(i + 1)[0] # sent_norm[father - 1] = adv + '#predecessor' # changed[father - 1] = True | 2.608255 | 3 |
preprocessing_add_features.py | mr-cloud/tollgate | 0 | 6624516 | import numpy as np
import os
import time
import pandas as pd
# 2016 Chinese holiday windows used as binary features below ("end" bounds
# are treated as exclusive by the comparisons that use them).
holidays = {
    'moon': {
        'begin': time.strptime('2016-09-15', '%Y-%m-%d'),
        'end': time.strptime('2016-09-17 23:59:59', '%Y-%m-%d %H:%M:%S')
    },
    'national': {
        'begin': time.strptime('2016-10-01', '%Y-%m-%d'),
        'end': time.strptime('2016-10-07 23:59:59', '%Y-%m-%d %H:%M:%S')
    }
}
# routes: "intersection_id,tollgate_id" -> list of link ids along the route.
routes_df = pd.read_csv('dataSets/training/routes (table 4).csv')
routes = dict()
for i in range(routes_df.shape[0]):
    key = str(routes_df.loc[i, 'intersection_id']) + ',' + str(routes_df.loc[i, 'tollgate_id'])
    val = str(routes_df.loc[i, 'link_seq']).split(',')
    routes[key] = val
# links: link id -> [length, width, lanes] as floats.
links_df = pd.read_csv('dataSets/training/links (table 3).csv')
links = dict()
for i in range(links_df.shape[0]):
    key = str(links_df.loc[i, 'link_id'])
    val = [float(links_df.loc[i, 'length']), float(links_df.loc[i, 'width']), float(links_df.loc[i, 'lanes'])]
    links[key] = val
# Number of feature columns (excluding the target) in each output dataset.
travel_features_ndim = 9
volume_features_ndim = 10
def get_weather(in_filename):
    """Load the weather table that lives next to *in_filename*.

    The sibling file is named ``weather (table 7)_<prefix>.csv`` where
    ``<prefix>`` is the part of ``in_filename``'s basename before the first
    underscore.  Returns a dict mapping ``"<date>,<hour>"`` to
    ``[wind_direction, wind_speed, temperature, precipitation]`` as floats.
    """
    prefix = os.path.basename(in_filename).split('_')[0]
    folder = os.path.abspath(os.path.join(in_filename, os.pardir))
    table = pd.read_csv(os.path.join(folder, 'weather (table 7)_' + prefix + '.csv'))
    lookup = {}
    for idx in range(table.shape[0]):
        key = str(table.loc[idx, 'date']) + ',' + str(table.loc[idx, 'hour'])
        lookup[key] = [float(table.loc[idx, col])
                       for col in ('wind_direction', 'wind_speed', 'temperature', 'precipitation')]
    return lookup
def build_travel_time_examples(in_filename):
    """Convert a 20-min average-travel-time CSV into a numeric feature table.

    Features per row: route_quality, wind_direction, wind_speed, temperature,
    precipitation, weekend, time_of_day, holiday, free -- followed by the
    target avg_travel_time.  Output is written next to the input as
    ``<prefix>_travel_time_dataset.csv``.
    """
    file_suffix = '_travel_time_dataset.csv'
    out_prefix = in_filename.split('_')[0]
    out_filename = out_prefix + file_suffix
    weather = get_weather(in_filename)
    travel_df = pd.read_csv(in_filename, dtype=str)
    print('dataset size: ', travel_df.shape)
    print('cleaning...')
    # np.float was removed from NumPy (>=1.24); the builtin float is the
    # documented replacement and is what np.float aliased anyway.
    travel_ds = np.ndarray(shape=[travel_df.shape[0], travel_features_ndim + 1], dtype=float)
    for i in range(0, travel_df.shape[0]):
        # Route quality: sum over the route's links of length/(width*lanes);
        # longer or narrower links make the route "worse".
        link_seq = routes.get(travel_df.loc[i, 'intersection_id'] + ',' + travel_df.loc[i, 'tollgate_id'])
        links_quality = []
        for link_id in link_seq:
            links_quality.append(links.get(link_id)[0] / (links.get(link_id)[1] * links.get(link_id)[2]))
        travel_ds[i, 0] = float('%.2f' % np.sum(links_quality))
        time_window = travel_df.loc[i, 'time_window'][1: -1].split(',')
        start_time = time.strptime(time_window[0], '%Y-%m-%d %H:%M:%S')
        # Weather is keyed by date and 3-hour bucket.
        travel_ds[i, 1:5] = weather.get(time_window[0].split(' ')[0] + ',' + str(start_time.tm_hour // 3 * 3))
        travel_ds[i, 5] = 1 if start_time.tm_wday in [5, 6] else 0
        # 20-minute slot index within the day: 0..71.
        travel_ds[i, 6] = start_time.tm_hour * 3 + start_time.tm_min // 20
        travel_ds[i, 7] = 1 if (start_time >= holidays['moon']['begin'] and start_time < holidays['moon']['end'])\
            or (start_time >= holidays['national']['begin'] and start_time < holidays['national']['end']) else 0
        # "free" marks the toll-free National Day week only.
        travel_ds[i, 8] = 1 if start_time >= holidays['national']['begin'] and start_time < holidays['national']['end'] else 0
        travel_ds[i, 9] = float(travel_df.loc[i, 'avg_travel_time'])
    dataset = pd.DataFrame(data=travel_ds, dtype=float, columns=['route_quality', 'wind_direction',
                                                                 'wind_speed', 'temperature',
                                                                 'precipitation', 'weekend',
                                                                 'time_of_day', 'holiday',
                                                                 'free', 'avg_travel_time'
                                                                 ])
    dataset.to_csv(path_or_buf=out_filename)
    print('finished.')
# Build the travel-time feature tables for the test and training splits.
build_travel_time_examples('dataSets/testing-phase1/test1_20min_avg_travel_time.csv')
build_travel_time_examples('dataSets/training/training_20min_avg_travel_time.csv')
def build_volume_examples(in_filename):
    """Convert a 20-min average-volume CSV into a numeric feature table.

    Features per row: tollgate_scale, wind_direction, wind_speed,
    temperature, precipitation, weekend, time_of_day, direction, holiday,
    free -- followed by the target volume.  Output is written next to the
    input as ``<prefix>_volume_dataset.csv``.
    """
    file_suffix = '_volume_dataset.csv'
    out_prefix = in_filename.split('_')[0]
    out_filename = out_prefix + file_suffix
    weather = get_weather(in_filename)
    volume_df = pd.read_csv(in_filename, dtype=str)
    print('dataset size: ', volume_df.shape)
    print('cleaning...')
    # np.float was removed from NumPy (>=1.24); builtin float is equivalent.
    volume_ds = np.ndarray(shape=[volume_df.shape[0], volume_features_ndim + 1], dtype=float)
    # Tollgate "scale": sum over routes ending at the gate of the inverse
    # route quality (shorter/wider routes contribute more).
    tollgate_scale = dict()
    for inter_toll, link_seq in routes.items():
        links_quality = []  # the shorter, the better
        for link_id in link_seq:
            links_quality.append(links.get(link_id)[0] / (links.get(link_id)[1] * links.get(link_id)[2]))
        tollgate_id = inter_toll.split(',')[1]
        if tollgate_id in tollgate_scale.keys():
            tollgate_scale[tollgate_id] += np.divide(1.0, np.sum(links_quality))
        else:
            tollgate_scale[tollgate_id] = np.divide(1.0, np.sum(links_quality))
    for i in range(volume_df.shape[0]):
        volume_ds[i, 0] = float('%.3f' % tollgate_scale.get(volume_df.loc[i, 'tollgate_id']))
        time_window = volume_df.loc[i, 'time_window'][1: -1].split(',')
        start_time = time.strptime(time_window[0], '%Y-%m-%d %H:%M:%S')
        # Weather is keyed by date and 3-hour bucket.
        volume_ds[i, 1:5] = weather.get(time_window[0].split(' ')[0] + ',' + str(start_time.tm_hour // 3 * 3))
        volume_ds[i, 5] = 1 if start_time.tm_wday in [5, 6] else 0
        # 20-minute slot index within the day: 0..71.
        volume_ds[i, 6] = start_time.tm_hour * 3 + start_time.tm_min // 20
        volume_ds[i, 7] = float(volume_df.loc[i, 'direction'])
        volume_ds[i, 8] = 1 if (start_time >= holidays['moon']['begin'] and start_time < holidays['moon']['end'])\
            or (start_time >= holidays['national']['begin'] and start_time < holidays['national']['end']) else 0
        # "free" marks the toll-free National Day week only.
        volume_ds[i, 9] = 1 if start_time >= holidays['national']['begin'] and start_time < holidays['national']['end'] else 0
        volume_ds[i, 10] = float(volume_df.loc[i, 'volume'])
    dataset = pd.DataFrame(data=volume_ds, dtype=float, columns=['tollgate_scale', 'wind_direction',
                                                                 'wind_speed', 'temperature',
                                                                 'precipitation', 'weekend',
                                                                 'time_of_day', 'direction',
                                                                 'holiday', 'free',
                                                                 'volume'
                                                                 ])
    dataset.to_csv(path_or_buf=out_filename)
    print('finished.')
# Build the volume feature tables, then feature tables for both submission templates.
build_volume_examples('dataSets/testing-phase1/test1_20min_avg_volume.csv')
build_volume_examples('dataSets/training/training_20min_avg_volume.csv')
build_travel_time_examples('dataSets/testing-phase1/submission_sample_travelTime.csv')
build_volume_examples('dataSets/testing-phase1/submission_sample_volume.csv')
| import numpy as np
import os
import time
import pandas as pd
# 2016 Chinese holiday windows used as binary features below ("end" bounds
# are treated as exclusive by the comparisons that use them).
holidays = {
    'moon': {
        'begin': time.strptime('2016-09-15', '%Y-%m-%d'),
        'end': time.strptime('2016-09-17 23:59:59', '%Y-%m-%d %H:%M:%S')
    },
    'national': {
        'begin': time.strptime('2016-10-01', '%Y-%m-%d'),
        'end': time.strptime('2016-10-07 23:59:59', '%Y-%m-%d %H:%M:%S')
    }
}
# routes: "intersection_id,tollgate_id" -> list of link ids along the route.
routes_df = pd.read_csv('dataSets/training/routes (table 4).csv')
routes = dict()
for i in range(routes_df.shape[0]):
    key = str(routes_df.loc[i, 'intersection_id']) + ',' + str(routes_df.loc[i, 'tollgate_id'])
    val = str(routes_df.loc[i, 'link_seq']).split(',')
    routes[key] = val
# links: link id -> [length, width, lanes] as floats.
links_df = pd.read_csv('dataSets/training/links (table 3).csv')
links = dict()
for i in range(links_df.shape[0]):
    key = str(links_df.loc[i, 'link_id'])
    val = [float(links_df.loc[i, 'length']), float(links_df.loc[i, 'width']), float(links_df.loc[i, 'lanes'])]
    links[key] = val
# Number of feature columns (excluding the target) in each output dataset.
travel_features_ndim = 9
volume_features_ndim = 10
def get_weather(in_filename):
    """Load the weather table that lives next to *in_filename*.

    The sibling file is named ``weather (table 7)_<prefix>.csv`` where
    ``<prefix>`` is the part of ``in_filename``'s basename before the first
    underscore.  Returns a dict mapping ``"<date>,<hour>"`` to
    ``[wind_direction, wind_speed, temperature, precipitation]`` as floats.
    """
    prefix = os.path.basename(in_filename).split('_')[0]
    folder = os.path.abspath(os.path.join(in_filename, os.pardir))
    table = pd.read_csv(os.path.join(folder, 'weather (table 7)_' + prefix + '.csv'))
    lookup = {}
    for idx in range(table.shape[0]):
        key = str(table.loc[idx, 'date']) + ',' + str(table.loc[idx, 'hour'])
        lookup[key] = [float(table.loc[idx, col])
                       for col in ('wind_direction', 'wind_speed', 'temperature', 'precipitation')]
    return lookup
def build_travel_time_examples(in_filename):
    """Convert a 20-min average-travel-time CSV into a numeric feature table.

    Features per row: route_quality, wind_direction, wind_speed, temperature,
    precipitation, weekend, time_of_day, holiday, free -- followed by the
    target avg_travel_time.  Output is written next to the input as
    ``<prefix>_travel_time_dataset.csv``.
    """
    file_suffix = '_travel_time_dataset.csv'
    out_prefix = in_filename.split('_')[0]
    out_filename = out_prefix + file_suffix
    weather = get_weather(in_filename)
    travel_df = pd.read_csv(in_filename, dtype=str)
    print('dataset size: ', travel_df.shape)
    print('cleaning...')
    # np.float was removed from NumPy (>=1.24); the builtin float is the
    # documented replacement and is what np.float aliased anyway.
    travel_ds = np.ndarray(shape=[travel_df.shape[0], travel_features_ndim + 1], dtype=float)
    for i in range(0, travel_df.shape[0]):
        # Route quality: sum over the route's links of length/(width*lanes);
        # longer or narrower links make the route "worse".
        link_seq = routes.get(travel_df.loc[i, 'intersection_id'] + ',' + travel_df.loc[i, 'tollgate_id'])
        links_quality = []
        for link_id in link_seq:
            links_quality.append(links.get(link_id)[0] / (links.get(link_id)[1] * links.get(link_id)[2]))
        travel_ds[i, 0] = float('%.2f' % np.sum(links_quality))
        time_window = travel_df.loc[i, 'time_window'][1: -1].split(',')
        start_time = time.strptime(time_window[0], '%Y-%m-%d %H:%M:%S')
        # Weather is keyed by date and 3-hour bucket.
        travel_ds[i, 1:5] = weather.get(time_window[0].split(' ')[0] + ',' + str(start_time.tm_hour // 3 * 3))
        travel_ds[i, 5] = 1 if start_time.tm_wday in [5, 6] else 0
        # 20-minute slot index within the day: 0..71.
        travel_ds[i, 6] = start_time.tm_hour * 3 + start_time.tm_min // 20
        travel_ds[i, 7] = 1 if (start_time >= holidays['moon']['begin'] and start_time < holidays['moon']['end'])\
            or (start_time >= holidays['national']['begin'] and start_time < holidays['national']['end']) else 0
        # "free" marks the toll-free National Day week only.
        travel_ds[i, 8] = 1 if start_time >= holidays['national']['begin'] and start_time < holidays['national']['end'] else 0
        travel_ds[i, 9] = float(travel_df.loc[i, 'avg_travel_time'])
    dataset = pd.DataFrame(data=travel_ds, dtype=float, columns=['route_quality', 'wind_direction',
                                                                 'wind_speed', 'temperature',
                                                                 'precipitation', 'weekend',
                                                                 'time_of_day', 'holiday',
                                                                 'free', 'avg_travel_time'
                                                                 ])
    dataset.to_csv(path_or_buf=out_filename)
    print('finished.')
# Build the travel-time feature tables for the test and training splits.
build_travel_time_examples('dataSets/testing-phase1/test1_20min_avg_travel_time.csv')
build_travel_time_examples('dataSets/training/training_20min_avg_travel_time.csv')
def build_volume_examples(in_filename):
    """Convert a 20-min average-volume CSV into a numeric feature table.

    Features per row: tollgate_scale, wind_direction, wind_speed,
    temperature, precipitation, weekend, time_of_day, direction, holiday,
    free -- followed by the target volume.  Output is written next to the
    input as ``<prefix>_volume_dataset.csv``.
    """
    file_suffix = '_volume_dataset.csv'
    out_prefix = in_filename.split('_')[0]
    out_filename = out_prefix + file_suffix
    weather = get_weather(in_filename)
    volume_df = pd.read_csv(in_filename, dtype=str)
    print('dataset size: ', volume_df.shape)
    print('cleaning...')
    # np.float was removed from NumPy (>=1.24); builtin float is equivalent.
    volume_ds = np.ndarray(shape=[volume_df.shape[0], volume_features_ndim + 1], dtype=float)
    # Tollgate "scale": sum over routes ending at the gate of the inverse
    # route quality (shorter/wider routes contribute more).
    tollgate_scale = dict()
    for inter_toll, link_seq in routes.items():
        links_quality = []  # the shorter, the better
        for link_id in link_seq:
            links_quality.append(links.get(link_id)[0] / (links.get(link_id)[1] * links.get(link_id)[2]))
        tollgate_id = inter_toll.split(',')[1]
        if tollgate_id in tollgate_scale.keys():
            tollgate_scale[tollgate_id] += np.divide(1.0, np.sum(links_quality))
        else:
            tollgate_scale[tollgate_id] = np.divide(1.0, np.sum(links_quality))
    for i in range(volume_df.shape[0]):
        volume_ds[i, 0] = float('%.3f' % tollgate_scale.get(volume_df.loc[i, 'tollgate_id']))
        time_window = volume_df.loc[i, 'time_window'][1: -1].split(',')
        start_time = time.strptime(time_window[0], '%Y-%m-%d %H:%M:%S')
        # Weather is keyed by date and 3-hour bucket.
        volume_ds[i, 1:5] = weather.get(time_window[0].split(' ')[0] + ',' + str(start_time.tm_hour // 3 * 3))
        volume_ds[i, 5] = 1 if start_time.tm_wday in [5, 6] else 0
        # 20-minute slot index within the day: 0..71.
        volume_ds[i, 6] = start_time.tm_hour * 3 + start_time.tm_min // 20
        volume_ds[i, 7] = float(volume_df.loc[i, 'direction'])
        volume_ds[i, 8] = 1 if (start_time >= holidays['moon']['begin'] and start_time < holidays['moon']['end'])\
            or (start_time >= holidays['national']['begin'] and start_time < holidays['national']['end']) else 0
        # "free" marks the toll-free National Day week only.
        volume_ds[i, 9] = 1 if start_time >= holidays['national']['begin'] and start_time < holidays['national']['end'] else 0
        volume_ds[i, 10] = float(volume_df.loc[i, 'volume'])
    dataset = pd.DataFrame(data=volume_ds, dtype=float, columns=['tollgate_scale', 'wind_direction',
                                                                 'wind_speed', 'temperature',
                                                                 'precipitation', 'weekend',
                                                                 'time_of_day', 'direction',
                                                                 'holiday', 'free',
                                                                 'volume'
                                                                 ])
    dataset.to_csv(path_or_buf=out_filename)
    print('finished.')
# Build the volume feature tables, then feature tables for both submission templates.
build_volume_examples('dataSets/testing-phase1/test1_20min_avg_volume.csv')
build_volume_examples('dataSets/training/training_20min_avg_volume.csv')
build_travel_time_examples('dataSets/testing-phase1/submission_sample_travelTime.csv')
build_volume_examples('dataSets/testing-phase1/submission_sample_volume.csv')
| en | 0.781732 | # features: <route_quality, # wind_direction, wind_speed, temperature, precipitation, # weekend, time_of_day> # calculate the route's quality. # quality = length / (width * lanes) # features: <tollgate_scale, # wind_direction, wind_speed, temperature, precipitation, # weekend, time_of_day, # direction> # the shorter, the better # calculate the scale of tollgate | 2.638321 | 3 |
tccli/services/gaap/gaap_client.py | ws0416/tencentcloud-cli-intl-en | 0 | 6624517 | <reponame>ws0416/tencentcloud-cli-intl-en<filename>tccli/services/gaap/gaap_client.py
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.gaap.v20180529 import gaap_client as gaap_client_v20180529
from tencentcloud.gaap.v20180529 import models as models_v20180529
def doDescribeProxyGroupList(args, parsed_globals):
    """Call the GAAP DescribeProxyGroupList API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyGroupListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyGroupList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenSecurityPolicy(args, parsed_globals):
    """Call the GAAP OpenSecurityPolicy API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenSecurityPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCertificates(args, parsed_globals):
    """Call the GAAP DescribeCertificates API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCertificatesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeCertificates(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityRules(args, parsed_globals):
    """Call the GAAP CreateSecurityRules API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateSecurityRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRemoveRealServers(args, parsed_globals):
    """Call the GAAP RemoveRealServers API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RemoveRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RemoveRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHTTPSListeners(args, parsed_globals):
    """Call the GAAP DescribeHTTPSListeners API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeHTTPSListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeHTTPSListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateHTTPSListener(args, parsed_globals):
    """Call the GAAP CreateHTTPSListener API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateHTTPSListenerRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateHTTPSListener(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityPolicy(args, parsed_globals):
    """Call the GAAP DeleteSecurityPolicy API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteSecurityPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetAuthentication(args, parsed_globals):
    """Call the GAAP SetAuthentication API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SetAuthenticationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SetAuthentication(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRule(args, parsed_globals):
    """Call the GAAP DeleteRule API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDomainErrorPageInfo(args, parsed_globals):
    """Call the GAAP DeleteDomainErrorPageInfo API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDomainErrorPageInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCertificate(args, parsed_globals):
    """Call the GAAP ModifyCertificate API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyCertificateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyCertificate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyStatistics(args, parsed_globals):
    """Call the GAAP DescribeProxyStatistics API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRule(args, parsed_globals):
    """Call the GAAP CreateRule API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDestRegions(args, parsed_globals):
    """Call the GAAP DescribeDestRegions API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDestRegionsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDestRegions(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDomainErrorPageInfo(args, parsed_globals):
    """Call the GAAP CreateDomainErrorPageInfo API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateDomainErrorPageInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyGroupStatistics(args, parsed_globals):
    """Call the GAAP DescribeProxyGroupStatistics API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyGroupStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyGroupStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityPolicyDetail(args, parsed_globals):
    """Call the GAAP DescribeSecurityPolicyDetail API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSecurityPolicyDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeSecurityPolicyDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDomain(args, parsed_globals):
    """Call the GAAP ModifyDomain API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyDomainRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyDomain(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCertificateAttributes(args, parsed_globals):
    """Call the GAAP ModifyCertificateAttributes API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyCertificateAttributesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyCertificateAttributes(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseProxies(args, parsed_globals):
    """Call the GAAP CloseProxies API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CloseProxiesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CloseProxies(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenProxies(args, parsed_globals):
    """Call the GAAP OpenProxies API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenProxiesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenProxies(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRealServerName(args, parsed_globals):
    """Call the GAAP ModifyRealServerName API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRealServerNameRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyRealServerName(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHTTPListeners(args, parsed_globals):
    """Call the GAAP DescribeHTTPListeners API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeHTTPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeHTTPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCheckProxyCreate(args, parsed_globals):
    """Call the GAAP CheckProxyCreate API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CheckProxyCreateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CheckProxyCreate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenProxyGroup(args, parsed_globals):
    """Call the GAAP OpenProxyGroup API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenProxyGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccessRegions(args, parsed_globals):
    """Call the GAAP DescribeAccessRegions API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAccessRegionsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAccessRegions(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityRules(args, parsed_globals):
    """Call the GAAP DeleteSecurityRules API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteSecurityRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyGroupDetails(args, parsed_globals):
    """Call the GAAP DescribeProxyGroupDetails API and print its JSON response.

    Args:
        args: request parameters (dict) parsed from the command line.
        parsed_globals: global CLI options (credentials, region, endpoint,
            timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag requests as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyGroupDetailsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyGroupDetails(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python/SDK versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxy(args, parsed_globals):
    """Invoke the Gaap CreateProxy API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CreateProxyRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.CreateProxy(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDeleteCertificate(args, parsed_globals):
    """Invoke the Gaap DeleteCertificate API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DeleteCertificateRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DeleteCertificate(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateSecurityPolicy(args, parsed_globals):
    """Invoke the Gaap CreateSecurityPolicy API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CreateSecurityPolicyRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.CreateSecurityPolicy(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeProxies(args, parsed_globals):
    """Invoke the Gaap DescribeProxies API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeProxiesRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeProxies(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyHTTPListenerAttribute(args, parsed_globals):
    """Invoke the Gaap ModifyHTTPListenerAttribute API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyHTTPListenerAttributeRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyHTTPListenerAttribute(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyProxiesProject(args, parsed_globals):
    """Invoke the Gaap ModifyProxiesProject API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyProxiesProjectRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyProxiesProject(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeGroupDomainConfig(args, parsed_globals):
    """Invoke the Gaap DescribeGroupDomainConfig API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeGroupDomainConfigRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeGroupDomainConfig(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doBindRuleRealServers(args, parsed_globals):
    """Invoke the Gaap BindRuleRealServers API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].BindRuleRealServersRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.BindRuleRealServers(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDeleteProxyGroup(args, parsed_globals):
    """Invoke the Gaap DeleteProxyGroup API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DeleteProxyGroupRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DeleteProxyGroup(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeAccessRegionsByDestRegion(args, parsed_globals):
    """Invoke the Gaap DescribeAccessRegionsByDestRegion API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeAccessRegionsByDestRegionRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeAccessRegionsByDestRegion(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyHTTPSListenerAttribute(args, parsed_globals):
    """Invoke the Gaap ModifyHTTPSListenerAttribute API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyHTTPSListenerAttributeRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyHTTPSListenerAttribute(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateDomain(args, parsed_globals):
    """Invoke the Gaap CreateDomain API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CreateDomainRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.CreateDomain(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyRuleAttribute(args, parsed_globals):
    """Invoke the Gaap ModifyRuleAttribute API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyRuleAttributeRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyRuleAttribute(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeCertificateDetail(args, parsed_globals):
    """Invoke the Gaap DescribeCertificateDetail API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeCertificateDetailRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeCertificateDetail(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCloseSecurityPolicy(args, parsed_globals):
    """Invoke the Gaap CloseSecurityPolicy API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CloseSecurityPolicyRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.CloseSecurityPolicy(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyGroupDomainConfig(args, parsed_globals):
    """Invoke the Gaap ModifyGroupDomainConfig API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyGroupDomainConfigRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyGroupDomainConfig(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeTCPListeners(args, parsed_globals):
    """Invoke the Gaap DescribeTCPListeners API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeTCPListenersRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeTCPListeners(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDomainErrorPageInfo(args, parsed_globals):
    """Invoke the Gaap DescribeDomainErrorPageInfo API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDomainErrorPageInfoRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeDomainErrorPageInfo(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeRealServers(args, parsed_globals):
    """Invoke the Gaap DescribeRealServers API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeRealServersRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeRealServers(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeDomainErrorPageInfoByIds(args, parsed_globals):
    """Invoke the Gaap DescribeDomainErrorPageInfoByIds API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeDomainErrorPageInfoByIdsRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeDomainErrorPageInfoByIds(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyUDPListenerAttribute(args, parsed_globals):
    """Invoke the Gaap ModifyUDPListenerAttribute API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyUDPListenerAttributeRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyUDPListenerAttribute(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeProxyAndStatisticsListeners(args, parsed_globals):
    """Invoke the Gaap DescribeProxyAndStatisticsListeners API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeProxyAndStatisticsListenersRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeProxyAndStatisticsListeners(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCreateHTTPListener(args, parsed_globals):
    """Invoke the Gaap CreateHTTPListener API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CreateHTTPListenerRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.CreateHTTPListener(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeUDPListeners(args, parsed_globals):
    """Invoke the Gaap DescribeUDPListeners API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeUDPListenersRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeUDPListeners(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyProxyConfiguration(args, parsed_globals):
    """Invoke the Gaap ModifyProxyConfiguration API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyProxyConfigurationRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyProxyConfiguration(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doCloseProxyGroup(args, parsed_globals):
    """Invoke the Gaap CloseProxyGroup API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].CloseProxyGroupRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.CloseProxyGroup(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyTCPListenerAttribute(args, parsed_globals):
    """Invoke the Gaap ModifyTCPListenerAttribute API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyTCPListenerAttributeRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyTCPListenerAttribute(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doModifyProxyGroupAttribute(args, parsed_globals):
    """Invoke the Gaap ModifyProxyGroupAttribute API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].ModifyProxyGroupAttributeRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.ModifyProxyGroupAttribute(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeGroupAndStatisticsProxy(args, parsed_globals):
    """Invoke the Gaap DescribeGroupAndStatisticsProxy API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeGroupAndStatisticsProxyRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeGroupAndStatisticsProxy(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doDescribeRealServerStatistics(args, parsed_globals):
    """Invoke the Gaap DescribeRealServerStatistics API and print the formatted response."""
    params = parse_global_arg(parsed_globals)
    timeout = params[OptionsDefine.Timeout]
    creds = credential.Credential(
        params[OptionsDefine.SecretId],
        params[OptionsDefine.SecretKey],
        params[OptionsDefine.Token],
    )
    http_conf = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=params[OptionsDefine.Endpoint],
    )
    client_conf = ClientProfile(httpProfile=http_conf, signMethod="HmacSHA256")
    sdk_module = CLIENT_MAP[params[OptionsDefine.Version]]
    api_client = sdk_module.GaapClient(creds, params[OptionsDefine.Region], client_conf)
    api_client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[params[OptionsDefine.Version]].DescribeRealServerStatisticsRequest()
    request.from_json_string(json.dumps(args))
    raw = api_client.DescribeRealServerStatistics(request).to_json_string()
    try:
        payload = json.loads(raw)
    except TypeError:
        payload = json.loads(raw.decode('utf-8'))  # python3.3 may return bytes
    FormatOutput.output("action", payload, params[OptionsDefine.Output], params[OptionsDefine.Filter])
def doBindListenerRealServers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BindListenerRealServersRequest()
model.from_json_string(json.dumps(args))
rsp = client.BindListenerRealServers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxyGroupDomain(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateProxyGroupDomainRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateProxyGroupDomain(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxyGroup(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateProxyGroupRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateProxyGroup(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateUDPListeners(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateUDPListenersRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateUDPListeners(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRegionAndPrice(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeRegionAndPriceRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeRegionAndPrice(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySecurityRule(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifySecurityRuleRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifySecurityRule(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyDetail(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProxyDetailRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProxyDetail(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListenerStatistics(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeListenerStatisticsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeListenerStatistics(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRuleRealServers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeRuleRealServersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeRuleRealServers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddRealServers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.AddRealServersRequest()
model.from_json_string(json.dumps(args))
rsp = client.AddRealServers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxiesStatus(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProxiesStatusRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProxiesStatus(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityRules(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSecurityRulesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeSecurityRules(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDomain(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteDomainRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteDomain(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateCertificate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateCertificateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateCertificate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTCPListeners(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateTCPListenersRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateTCPListeners(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRules(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeRulesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeRules(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxiesAttribute(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyProxiesAttributeRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyProxiesAttribute(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDestroyProxies(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DestroyProxiesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DestroyProxies(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourcesByTag(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeResourcesByTagRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeResourcesByTag(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCountryAreaMapping(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCountryAreaMappingRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCountryAreaMapping(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListenerRealServers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeListenerRealServersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeListenerRealServers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteListeners(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteListenersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteListeners(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServersStatus(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeRealServersStatusRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeRealServersStatus(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRulesByRuleIds(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeRulesByRuleIdsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeRulesByRuleIds(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInquiryPriceCreateProxy(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.InquiryPriceCreateProxyRequest()
model.from_json_string(json.dumps(args))
rsp = client.InquiryPriceCreateProxy(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps a supported API version string to the generated SDK client module
# that implements it; keyed by the values in AVAILABLE_VERSION_LIST.
CLIENT_MAP = {
    "v20180529": gaap_client_v20180529,
}
# Maps a supported API version string to the generated request/response
# model module matching the client module above.
MODELS_MAP = {
    "v20180529": models_v20180529,
}
# Dispatch table: CLI action name -> handler function taking
# (args, parsed_globals). Consumed by action_caller() below; every handler
# forwards to the shared request/response boilerplate for its action.
ACTION_MAP = {
    "DescribeProxyGroupList": doDescribeProxyGroupList,
    "OpenSecurityPolicy": doOpenSecurityPolicy,
    "DescribeCertificates": doDescribeCertificates,
    "CreateSecurityRules": doCreateSecurityRules,
    "RemoveRealServers": doRemoveRealServers,
    "DescribeHTTPSListeners": doDescribeHTTPSListeners,
    "CreateHTTPSListener": doCreateHTTPSListener,
    "DeleteSecurityPolicy": doDeleteSecurityPolicy,
    "SetAuthentication": doSetAuthentication,
    "DeleteRule": doDeleteRule,
    "DeleteDomainErrorPageInfo": doDeleteDomainErrorPageInfo,
    "ModifyCertificate": doModifyCertificate,
    "DescribeProxyStatistics": doDescribeProxyStatistics,
    "CreateRule": doCreateRule,
    "DescribeDestRegions": doDescribeDestRegions,
    "CreateDomainErrorPageInfo": doCreateDomainErrorPageInfo,
    "DescribeProxyGroupStatistics": doDescribeProxyGroupStatistics,
    "DescribeSecurityPolicyDetail": doDescribeSecurityPolicyDetail,
    "ModifyDomain": doModifyDomain,
    "ModifyCertificateAttributes": doModifyCertificateAttributes,
    "CloseProxies": doCloseProxies,
    "OpenProxies": doOpenProxies,
    "ModifyRealServerName": doModifyRealServerName,
    "DescribeHTTPListeners": doDescribeHTTPListeners,
    "CheckProxyCreate": doCheckProxyCreate,
    "OpenProxyGroup": doOpenProxyGroup,
    "DescribeAccessRegions": doDescribeAccessRegions,
    "DeleteSecurityRules": doDeleteSecurityRules,
    "DescribeProxyGroupDetails": doDescribeProxyGroupDetails,
    "CreateProxy": doCreateProxy,
    "DeleteCertificate": doDeleteCertificate,
    "CreateSecurityPolicy": doCreateSecurityPolicy,
    "DescribeProxies": doDescribeProxies,
    "ModifyHTTPListenerAttribute": doModifyHTTPListenerAttribute,
    "ModifyProxiesProject": doModifyProxiesProject,
    "DescribeGroupDomainConfig": doDescribeGroupDomainConfig,
    "BindRuleRealServers": doBindRuleRealServers,
    "DeleteProxyGroup": doDeleteProxyGroup,
    "DescribeAccessRegionsByDestRegion": doDescribeAccessRegionsByDestRegion,
    "ModifyHTTPSListenerAttribute": doModifyHTTPSListenerAttribute,
    "CreateDomain": doCreateDomain,
    "ModifyRuleAttribute": doModifyRuleAttribute,
    "DescribeCertificateDetail": doDescribeCertificateDetail,
    "CloseSecurityPolicy": doCloseSecurityPolicy,
    "ModifyGroupDomainConfig": doModifyGroupDomainConfig,
    "DescribeTCPListeners": doDescribeTCPListeners,
    "DescribeDomainErrorPageInfo": doDescribeDomainErrorPageInfo,
    "DescribeRealServers": doDescribeRealServers,
    "DescribeDomainErrorPageInfoByIds": doDescribeDomainErrorPageInfoByIds,
    "ModifyUDPListenerAttribute": doModifyUDPListenerAttribute,
    "DescribeProxyAndStatisticsListeners": doDescribeProxyAndStatisticsListeners,
    "CreateHTTPListener": doCreateHTTPListener,
    "DescribeUDPListeners": doDescribeUDPListeners,
    "ModifyProxyConfiguration": doModifyProxyConfiguration,
    "CloseProxyGroup": doCloseProxyGroup,
    "ModifyTCPListenerAttribute": doModifyTCPListenerAttribute,
    "ModifyProxyGroupAttribute": doModifyProxyGroupAttribute,
    "DescribeGroupAndStatisticsProxy": doDescribeGroupAndStatisticsProxy,
    "DescribeRealServerStatistics": doDescribeRealServerStatistics,
    "BindListenerRealServers": doBindListenerRealServers,
    "CreateProxyGroupDomain": doCreateProxyGroupDomain,
    "CreateProxyGroup": doCreateProxyGroup,
    "CreateUDPListeners": doCreateUDPListeners,
    "DescribeRegionAndPrice": doDescribeRegionAndPrice,
    "ModifySecurityRule": doModifySecurityRule,
    "DescribeProxyDetail": doDescribeProxyDetail,
    "DescribeListenerStatistics": doDescribeListenerStatistics,
    "DescribeRuleRealServers": doDescribeRuleRealServers,
    "AddRealServers": doAddRealServers,
    "DescribeProxiesStatus": doDescribeProxiesStatus,
    "DescribeSecurityRules": doDescribeSecurityRules,
    "DeleteDomain": doDeleteDomain,
    "CreateCertificate": doCreateCertificate,
    "CreateTCPListeners": doCreateTCPListeners,
    "DescribeRules": doDescribeRules,
    "ModifyProxiesAttribute": doModifyProxiesAttribute,
    "DestroyProxies": doDestroyProxies,
    "DescribeResourcesByTag": doDescribeResourcesByTag,
    "DescribeCountryAreaMapping": doDescribeCountryAreaMapping,
    "DescribeListenerRealServers": doDescribeListenerRealServers,
    "DeleteListeners": doDeleteListeners,
    "DescribeRealServersStatus": doDescribeRealServersStatus,
    "DescribeRulesByRuleIds": doDescribeRulesByRuleIds,
    "InquiryPriceCreateProxy": doInquiryPriceCreateProxy,
}
# API versions this command module supports; the version actually used is
# resolved in parse_global_arg (from --service-version or the profile config)
# and must be a key of CLIENT_MAP / MODELS_MAP.
AVAILABLE_VERSION_LIST = [
    "v20180529",
]
def action_caller():
    """Return the mapping of CLI action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve the global CLI parameters (credentials, region, output, version, endpoint).

    Fills any ``None`` entries of ``parsed_globals`` in place, in this precedence order:
    explicit command-line value > environment variables (only when no --profile was
    given) > the profile's ``.configure``/``.credential`` files under ``~/.tccli``.

    Raises:
        ConfigurationError: if the profile files are not valid JSON, a required
            parameter cannot be resolved, or the service section is malformed.
        Exception: if the resolved API version is not in AVAILABLE_VERSION_LIST.
    """
    g_param = parsed_globals
    # Fall back to the "default" profile when the user did not pass --profile.
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables only override the profile files when no explicit
    # profile was requested on the command line.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Fill every still-unset global from the credential or configure maps.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # Normalize the version to the "vYYYYMMDD" module-key form, preferring
        # an explicit --service-version over the configure file's value.
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["gaap"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["gaap"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| # -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.gaap.v20180529 import gaap_client as gaap_client_v20180529
from tencentcloud.gaap.v20180529 import models as models_v20180529
def doDescribeProxyGroupList(args, parsed_globals):
    """Call the gaap DescribeProxyGroupList API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyGroupListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyGroupList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenSecurityPolicy(args, parsed_globals):
    """Call the gaap OpenSecurityPolicy API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenSecurityPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCertificates(args, parsed_globals):
    """Call the gaap DescribeCertificates API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCertificatesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeCertificates(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityRules(args, parsed_globals):
    """Call the gaap CreateSecurityRules API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateSecurityRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRemoveRealServers(args, parsed_globals):
    """Call the gaap RemoveRealServers API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RemoveRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RemoveRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHTTPSListeners(args, parsed_globals):
    """Call the gaap DescribeHTTPSListeners API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeHTTPSListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeHTTPSListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateHTTPSListener(args, parsed_globals):
    """Call the gaap CreateHTTPSListener API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateHTTPSListenerRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateHTTPSListener(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityPolicy(args, parsed_globals):
    """Call the gaap DeleteSecurityPolicy API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteSecurityPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSetAuthentication(args, parsed_globals):
    """Call the gaap SetAuthentication API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SetAuthenticationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SetAuthentication(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteRule(args, parsed_globals):
    """Call the gaap DeleteRule API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDomainErrorPageInfo(args, parsed_globals):
    """Call the gaap DeleteDomainErrorPageInfo API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDomainErrorPageInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCertificate(args, parsed_globals):
    """Call the gaap ModifyCertificate API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyCertificateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyCertificate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyStatistics(args, parsed_globals):
    """Call the gaap DescribeProxyStatistics API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRule(args, parsed_globals):
    """Call the gaap CreateRule API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDestRegions(args, parsed_globals):
    """Call the gaap DescribeDestRegions API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDestRegionsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDestRegions(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDomainErrorPageInfo(args, parsed_globals):
    """Call the gaap CreateDomainErrorPageInfo API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateDomainErrorPageInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyGroupStatistics(args, parsed_globals):
    """Call the gaap DescribeProxyGroupStatistics API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyGroupStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyGroupStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityPolicyDetail(args, parsed_globals):
    """Call the gaap DescribeSecurityPolicyDetail API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSecurityPolicyDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeSecurityPolicyDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDomain(args, parsed_globals):
    """Call the gaap ModifyDomain API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyDomainRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyDomain(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyCertificateAttributes(args, parsed_globals):
    """Call the gaap ModifyCertificateAttributes API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyCertificateAttributesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyCertificateAttributes(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseProxies(args, parsed_globals):
    """Call the gaap CloseProxies API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CloseProxiesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CloseProxies(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenProxies(args, parsed_globals):
    """Call the gaap OpenProxies API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenProxiesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenProxies(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRealServerName(args, parsed_globals):
    """Call the gaap ModifyRealServerName API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRealServerNameRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyRealServerName(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeHTTPListeners(args, parsed_globals):
    """Call the gaap DescribeHTTPListeners API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeHTTPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeHTTPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCheckProxyCreate(args, parsed_globals):
    """Call the gaap CheckProxyCreate API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CheckProxyCreateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CheckProxyCreate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenProxyGroup(args, parsed_globals):
    """Call the gaap OpenProxyGroup API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenProxyGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccessRegions(args, parsed_globals):
    """Call the gaap DescribeAccessRegions API and print the formatted response.

    Args:
        args: dict of request parameters parsed from the command line.
        parsed_globals: dict of global CLI options (credentials, region, output, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAccessRegionsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAccessRegions(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string may return bytes on some interpreters; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSecurityRules(args, parsed_globals):
    """Call the GAAP DeleteSecurityRules API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteSecurityRules(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyGroupDetails(args, parsed_globals):
    """Call the GAAP DescribeProxyGroupDetails API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyGroupDetailsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyGroupDetails(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxy(args, parsed_globals):
    """Call the GAAP CreateProxy API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateProxyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateProxy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteCertificate(args, parsed_globals):
    """Call the GAAP DeleteCertificate API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteCertificateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteCertificate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSecurityPolicy(args, parsed_globals):
    """Call the GAAP CreateSecurityPolicy API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateSecurityPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxies(args, parsed_globals):
    """Call the GAAP DescribeProxies API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxiesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxies(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyHTTPListenerAttribute(args, parsed_globals):
    """Call the GAAP ModifyHTTPListenerAttribute API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyHTTPListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyHTTPListenerAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxiesProject(args, parsed_globals):
    """Call the GAAP ModifyProxiesProject API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProxiesProjectRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyProxiesProject(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeGroupDomainConfig(args, parsed_globals):
    """Call the GAAP DescribeGroupDomainConfig API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeGroupDomainConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeGroupDomainConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindRuleRealServers(args, parsed_globals):
    """Call the GAAP BindRuleRealServers API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindRuleRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindRuleRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProxyGroup(args, parsed_globals):
    """Call the GAAP DeleteProxyGroup API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteProxyGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccessRegionsByDestRegion(args, parsed_globals):
    """Call the GAAP DescribeAccessRegionsByDestRegion API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAccessRegionsByDestRegionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAccessRegionsByDestRegion(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyHTTPSListenerAttribute(args, parsed_globals):
    """Call the GAAP ModifyHTTPSListenerAttribute API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyHTTPSListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyHTTPSListenerAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDomain(args, parsed_globals):
    """Call the GAAP CreateDomain API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDomainRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateDomain(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRuleAttribute(args, parsed_globals):
    """Call the GAAP ModifyRuleAttribute API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRuleAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyRuleAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCertificateDetail(args, parsed_globals):
    """Call the GAAP DescribeCertificateDetail API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeCertificateDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeCertificateDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseSecurityPolicy(args, parsed_globals):
    """Call the GAAP CloseSecurityPolicy API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CloseSecurityPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CloseSecurityPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyGroupDomainConfig(args, parsed_globals):
    """Call the GAAP ModifyGroupDomainConfig API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyGroupDomainConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyGroupDomainConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTCPListeners(args, parsed_globals):
    """Call the GAAP DescribeTCPListeners API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTCPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTCPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDomainErrorPageInfo(args, parsed_globals):
    """Call the GAAP DescribeDomainErrorPageInfo API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDomainErrorPageInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDomainErrorPageInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServers(args, parsed_globals):
    """Call the GAAP DescribeRealServers API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRealServersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRealServers(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDomainErrorPageInfoByIds(args, parsed_globals):
    """Call the GAAP DescribeDomainErrorPageInfoByIds API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDomainErrorPageInfoByIdsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDomainErrorPageInfoByIds(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyUDPListenerAttribute(args, parsed_globals):
    """Call the GAAP ModifyUDPListenerAttribute API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyUDPListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyUDPListenerAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyAndStatisticsListeners(args, parsed_globals):
    """Call the GAAP DescribeProxyAndStatisticsListeners API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProxyAndStatisticsListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProxyAndStatisticsListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateHTTPListener(args, parsed_globals):
    """Call the GAAP CreateHTTPListener API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateHTTPListenerRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateHTTPListener(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUDPListeners(args, parsed_globals):
    """Call the GAAP DescribeUDPListeners API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeUDPListenersRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeUDPListeners(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxyConfiguration(args, parsed_globals):
    """Call the GAAP ModifyProxyConfiguration API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProxyConfigurationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyProxyConfiguration(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseProxyGroup(args, parsed_globals):
    """Call the GAAP CloseProxyGroup API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CloseProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CloseProxyGroup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTCPListenerAttribute(args, parsed_globals):
    """Call the GAAP ModifyTCPListenerAttribute API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTCPListenerAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyTCPListenerAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxyGroupAttribute(args, parsed_globals):
    """Call the GAAP ModifyProxyGroupAttribute API and print the formatted response.

    args: dict of request parameters, serialized into the request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # default request timeout is 60s unless overridden on the command line
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyProxyGroupAttributeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyProxyGroupAttribute(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python versions (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeGroupAndStatisticsProxy(args, parsed_globals):
    """Invoke the GAAP DescribeGroupAndStatisticsProxy API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeGroupAndStatisticsProxyRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeGroupAndStatisticsProxy(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServerStatistics(args, parsed_globals):
    """Invoke the GAAP DescribeRealServerStatistics API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeRealServerStatisticsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeRealServerStatistics(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindListenerRealServers(args, parsed_globals):
    """Invoke the GAAP BindListenerRealServers API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].BindListenerRealServersRequest()
    model.from_json_string(json.dumps(args))
    result = client.BindListenerRealServers(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxyGroupDomain(args, parsed_globals):
    """Invoke the GAAP CreateProxyGroupDomain API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].CreateProxyGroupDomainRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateProxyGroupDomain(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProxyGroup(args, parsed_globals):
    """Invoke the GAAP CreateProxyGroup API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].CreateProxyGroupRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateProxyGroup(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateUDPListeners(args, parsed_globals):
    """Invoke the GAAP CreateUDPListeners API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].CreateUDPListenersRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateUDPListeners(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRegionAndPrice(args, parsed_globals):
    """Invoke the GAAP DescribeRegionAndPrice API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeRegionAndPriceRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeRegionAndPrice(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySecurityRule(args, parsed_globals):
    """Invoke the GAAP ModifySecurityRule API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].ModifySecurityRuleRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifySecurityRule(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxyDetail(args, parsed_globals):
    """Invoke the GAAP DescribeProxyDetail API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeProxyDetailRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProxyDetail(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListenerStatistics(args, parsed_globals):
    """Invoke the GAAP DescribeListenerStatistics API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeListenerStatisticsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeListenerStatistics(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRuleRealServers(args, parsed_globals):
    """Invoke the GAAP DescribeRuleRealServers API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeRuleRealServersRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeRuleRealServers(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddRealServers(args, parsed_globals):
    """Invoke the GAAP AddRealServers API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].AddRealServersRequest()
    model.from_json_string(json.dumps(args))
    result = client.AddRealServers(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProxiesStatus(args, parsed_globals):
    """Invoke the GAAP DescribeProxiesStatus API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeProxiesStatusRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProxiesStatus(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSecurityRules(args, parsed_globals):
    """Invoke the GAAP DescribeSecurityRules API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeSecurityRulesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeSecurityRules(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDomain(args, parsed_globals):
    """Invoke the GAAP DeleteDomain API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DeleteDomainRequest()
    model.from_json_string(json.dumps(args))
    result = client.DeleteDomain(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateCertificate(args, parsed_globals):
    """Invoke the GAAP CreateCertificate API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].CreateCertificateRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateCertificate(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTCPListeners(args, parsed_globals):
    """Invoke the GAAP CreateTCPListeners API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].CreateTCPListenersRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateTCPListeners(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRules(args, parsed_globals):
    """Invoke the GAAP DescribeRules API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeRulesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeRules(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProxiesAttribute(args, parsed_globals):
    """Invoke the GAAP ModifyProxiesAttribute API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].ModifyProxiesAttributeRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyProxiesAttribute(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDestroyProxies(args, parsed_globals):
    """Invoke the GAAP DestroyProxies API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DestroyProxiesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DestroyProxies(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourcesByTag(args, parsed_globals):
    """Invoke the GAAP DescribeResourcesByTag API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeResourcesByTagRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeResourcesByTag(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCountryAreaMapping(args, parsed_globals):
    """Invoke the GAAP DescribeCountryAreaMapping API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeCountryAreaMappingRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeCountryAreaMapping(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeListenerRealServers(args, parsed_globals):
    """Invoke the GAAP DescribeListenerRealServers API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeListenerRealServersRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeListenerRealServers(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteListeners(args, parsed_globals):
    """Invoke the GAAP DeleteListeners API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DeleteListenersRequest()
    model.from_json_string(json.dumps(args))
    result = client.DeleteListeners(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRealServersStatus(args, parsed_globals):
    """Invoke the GAAP DescribeRealServersStatus API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeRealServersStatusRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeRealServersStatus(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRulesByRuleIds(args, parsed_globals):
    """Invoke the GAAP DescribeRulesByRuleIds API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].DescribeRulesByRuleIdsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeRulesByRuleIds(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInquiryPriceCreateProxy(args, parsed_globals):
    """Invoke the GAAP InquiryPriceCreateProxy API and print the formatted response.

    *args* holds the request parameters already parsed from the command line;
    *parsed_globals* holds global CLI options (credentials, region, output format).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].GaapClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    model = MODELS_MAP[version].InquiryPriceCreateProxyRequest()
    model.from_json_string(json.dumps(args))
    result = client.InquiryPriceCreateProxy(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (e.g. Python 3.3) return bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps API version string to the SDK client module for that version.
CLIENT_MAP = {
    "v20180529": gaap_client_v20180529,
}
# Maps API version string to the SDK request/response models module.
MODELS_MAP = {
    "v20180529": models_v20180529,
}
# Dispatch table: CLI action name -> handler function defined in this module.
ACTION_MAP = {
    "DescribeProxyGroupList": doDescribeProxyGroupList,
    "OpenSecurityPolicy": doOpenSecurityPolicy,
    "DescribeCertificates": doDescribeCertificates,
    "CreateSecurityRules": doCreateSecurityRules,
    "RemoveRealServers": doRemoveRealServers,
    "DescribeHTTPSListeners": doDescribeHTTPSListeners,
    "CreateHTTPSListener": doCreateHTTPSListener,
    "DeleteSecurityPolicy": doDeleteSecurityPolicy,
    "SetAuthentication": doSetAuthentication,
    "DeleteRule": doDeleteRule,
    "DeleteDomainErrorPageInfo": doDeleteDomainErrorPageInfo,
    "ModifyCertificate": doModifyCertificate,
    "DescribeProxyStatistics": doDescribeProxyStatistics,
    "CreateRule": doCreateRule,
    "DescribeDestRegions": doDescribeDestRegions,
    "CreateDomainErrorPageInfo": doCreateDomainErrorPageInfo,
    "DescribeProxyGroupStatistics": doDescribeProxyGroupStatistics,
    "DescribeSecurityPolicyDetail": doDescribeSecurityPolicyDetail,
    "ModifyDomain": doModifyDomain,
    "ModifyCertificateAttributes": doModifyCertificateAttributes,
    "CloseProxies": doCloseProxies,
    "OpenProxies": doOpenProxies,
    "ModifyRealServerName": doModifyRealServerName,
    "DescribeHTTPListeners": doDescribeHTTPListeners,
    "CheckProxyCreate": doCheckProxyCreate,
    "OpenProxyGroup": doOpenProxyGroup,
    "DescribeAccessRegions": doDescribeAccessRegions,
    "DeleteSecurityRules": doDeleteSecurityRules,
    "DescribeProxyGroupDetails": doDescribeProxyGroupDetails,
    "CreateProxy": doCreateProxy,
    "DeleteCertificate": doDeleteCertificate,
    "CreateSecurityPolicy": doCreateSecurityPolicy,
    "DescribeProxies": doDescribeProxies,
    "ModifyHTTPListenerAttribute": doModifyHTTPListenerAttribute,
    "ModifyProxiesProject": doModifyProxiesProject,
    "DescribeGroupDomainConfig": doDescribeGroupDomainConfig,
    "BindRuleRealServers": doBindRuleRealServers,
    "DeleteProxyGroup": doDeleteProxyGroup,
    "DescribeAccessRegionsByDestRegion": doDescribeAccessRegionsByDestRegion,
    "ModifyHTTPSListenerAttribute": doModifyHTTPSListenerAttribute,
    "CreateDomain": doCreateDomain,
    "ModifyRuleAttribute": doModifyRuleAttribute,
    "DescribeCertificateDetail": doDescribeCertificateDetail,
    "CloseSecurityPolicy": doCloseSecurityPolicy,
    "ModifyGroupDomainConfig": doModifyGroupDomainConfig,
    "DescribeTCPListeners": doDescribeTCPListeners,
    "DescribeDomainErrorPageInfo": doDescribeDomainErrorPageInfo,
    "DescribeRealServers": doDescribeRealServers,
    "DescribeDomainErrorPageInfoByIds": doDescribeDomainErrorPageInfoByIds,
    "ModifyUDPListenerAttribute": doModifyUDPListenerAttribute,
    "DescribeProxyAndStatisticsListeners": doDescribeProxyAndStatisticsListeners,
    "CreateHTTPListener": doCreateHTTPListener,
    "DescribeUDPListeners": doDescribeUDPListeners,
    "ModifyProxyConfiguration": doModifyProxyConfiguration,
    "CloseProxyGroup": doCloseProxyGroup,
    "ModifyTCPListenerAttribute": doModifyTCPListenerAttribute,
    "ModifyProxyGroupAttribute": doModifyProxyGroupAttribute,
    "DescribeGroupAndStatisticsProxy": doDescribeGroupAndStatisticsProxy,
    "DescribeRealServerStatistics": doDescribeRealServerStatistics,
    "BindListenerRealServers": doBindListenerRealServers,
    "CreateProxyGroupDomain": doCreateProxyGroupDomain,
    "CreateProxyGroup": doCreateProxyGroup,
    "CreateUDPListeners": doCreateUDPListeners,
    "DescribeRegionAndPrice": doDescribeRegionAndPrice,
    "ModifySecurityRule": doModifySecurityRule,
    "DescribeProxyDetail": doDescribeProxyDetail,
    "DescribeListenerStatistics": doDescribeListenerStatistics,
    "DescribeRuleRealServers": doDescribeRuleRealServers,
    "AddRealServers": doAddRealServers,
    "DescribeProxiesStatus": doDescribeProxiesStatus,
    "DescribeSecurityRules": doDescribeSecurityRules,
    "DeleteDomain": doDeleteDomain,
    "CreateCertificate": doCreateCertificate,
    "CreateTCPListeners": doCreateTCPListeners,
    "DescribeRules": doDescribeRules,
    "ModifyProxiesAttribute": doModifyProxiesAttribute,
    "DestroyProxies": doDestroyProxies,
    "DescribeResourcesByTag": doDescribeResourcesByTag,
    "DescribeCountryAreaMapping": doDescribeCountryAreaMapping,
    "DescribeListenerRealServers": doDescribeListenerRealServers,
    "DeleteListeners": doDeleteListeners,
    "DescribeRealServersStatus": doDescribeRealServersStatus,
    "DescribeRulesByRuleIds": doDescribeRulesByRuleIds,
    "InquiryPriceCreateProxy": doInquiryPriceCreateProxy,
}
# API versions of the GAAP service supported by this CLI module.
AVAILABLE_VERSION_LIST = [
    "v20180529",
]
def action_caller():
    """Return the mapping of API action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve the effective global CLI options for a command invocation.

    Merges the parsed command-line globals with the on-disk profile files
    (~/.tccli/<profile>.configure and .credential) and, when no profile was
    named on the command line, with credential/region environment variables.
    Also normalizes and validates the requested API version against
    AVAILABLE_VERSION_LIST.

    Returns the merged option dict (mutated in place).  Raises
    ConfigurationError when a required option cannot be resolved or a
    config file is not valid JSON.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables are only consulted when the user did not name a
    # profile explicitly on the command line.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Fill any still-unset global option from the credential/configure files.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # A version like "2018-05-29" becomes the module suffix "v20180529".
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["gaap"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["gaap"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
server/models/event_category.py | Synergize-Southwest-Detroit/api | 0 | 6624518 | <gh_stars>0
from server.models.db import db
class EventCategory(db.Model):
__tablename__ = 'event_categories'
category_id = db.Column('category_id', db.Integer, db.ForeignKey('categories.id'), primary_key=True)
event_id = db.Column('event_id', db.Integer, db.ForeignKey('events.id'), primary_key=True)
event = db.relationship('Event', backref=db.backref('event_category'))
category = db.relationship('Category', backref=db.backref('event_category'))
def __init__(self, event=None, category=None):
self.event = event
self.category = category | from server.models.db import db
class EventCategory(db.Model):
    """Association object linking events to categories (many-to-many)."""
    __tablename__ = 'event_categories'
    # Composite primary key: at most one row per (category, event) pair.
    category_id = db.Column('category_id', db.Integer, db.ForeignKey('categories.id'), primary_key=True)
    event_id = db.Column('event_id', db.Integer, db.ForeignKey('events.id'), primary_key=True)
    # Relationships with backrefs so both Event and Category expose their
    # association rows as 'event_category'.
    event = db.relationship('Event', backref=db.backref('event_category'))
    category = db.relationship('Category', backref=db.backref('event_category'))
    def __init__(self, event=None, category=None):
        self.event = event
        self.category = category
nova/tests/api/ec2/test_cloud.py | SnabbCo/nova | 2 | 6624519 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import copy
import datetime
import functools
import os
import string
import tempfile
import fixtures
import iso8601
import mock
from oslo.config import cfg
from nova.api.ec2 import cloud
from nova.api.ec2 import ec2utils
from nova.api.ec2 import inst_state
from nova.api.metadata import password
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova.image import s3
from nova.network import api as network_api
from nova.network import base_api as base_network_api
from nova.network import model
from nova.network import neutronv2
from nova import objects
from nova.objects import base as obj_base
from nova.openstack.common import log as logging
from nova.openstack.common import policy as common_policy
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova import policy
from nova import test
from nova.tests.api.openstack.compute.contrib import (
test_neutron_security_groups as test_neutron)
from nova.tests import cast_as_call
from nova.tests import fake_block_device
from nova.tests import fake_network
from nova.tests import fake_notifier
from nova.tests import fake_utils
from nova.tests.image import fake
from nova.tests import matchers
from nova import utils
from nova.virt import fake as fake_virt
from nova import volume
CONF = cfg.CONF
# Import option definitions registered by other nova modules so the tests
# can read/override them via self.flags().
CONF.import_opt('compute_driver', 'nova.virt.driver')
CONF.import_opt('default_flavor', 'nova.compute.flavors')
CONF.import_opt('use_ipv6', 'nova.netconf')
LOG = logging.getLogger(__name__)
# Hostname used for fake hosts / instance display names throughout the tests.
HOST = "testhost"
def get_fake_cache(get_floating):
    """Build a fake network-info cache with one NIC on 192.168.0.0/24.

    When *get_floating* is true, the first fixed IP also carries two
    floating IPs.  An IPv6 subnet is appended when CONF.use_ipv6 is set.
    """
    def _addr(address, fixed=True, floats=None):
        entry = {'address': address,
                 'type': 'fixed' if fixed else 'floating'}
        if fixed and floats:
            entry['floating_ips'] = [_addr(f, fixed=False) for f in floats]
        return entry

    first_ip = _addr('192.168.0.3',
                     floats=(['1.2.3.4', '5.6.7.8'] if get_floating
                             else None))
    subnets = [{'cidr': '192.168.0.0/24',
                'ips': [first_ip, _addr('192.168.0.4')]}]
    info = [{'address': 'aa:bb:cc:dd:ee:ff',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'private',
                         'subnets': subnets}}]
    if CONF.use_ipv6:
        subnets.append({'cidr': 'fe80:b33f::/64',
                        'ips': [_addr('fe80:b33f::a8bb:ccff:fedd:eeff')]})
    return model.NetworkInfo.hydrate(info)
def get_instances_with_cached_ips(orig_func, get_floating,
                                  *args, **kwargs):
    """Kludge the fake network cache into the instance(s) returned by
    *orig_func* without having to create DB entries.

    Handles both dict-style instances and instance objects; which one is
    produced is selected by the ``want_objects`` kwarg understood by the
    wrapped compute API call.
    """
    instances = orig_func(*args, **kwargs)
    if kwargs.get('want_objects', False):
        info_cache = objects.InstanceInfoCache()
        info_cache.network_info = get_fake_cache(get_floating)
        info_cache.obj_reset_changes()
    else:
        info_cache = {'network_info': get_fake_cache(get_floating)}
    # orig_func may return a single instance or a collection of them.
    if isinstance(instances, (list, obj_base.ObjectListBase)):
        for instance in instances:
            instance['info_cache'] = info_cache
    else:
        instances['info_cache'] = info_cache
    return instances
class CloudTestCase(test.TestCase):
    def setUp(self):
        """Stand up a fake cloud: stubbed image/network/volume services,
        in-process nova services, and an admin request context.
        """
        super(CloudTestCase, self).setUp()
        self.useFixture(test.SampleNetworks())
        ec2utils.reset_cache()
        self.flags(compute_driver='nova.virt.fake.FakeDriver',
                   volume_api_class='nova.tests.fake_volume.API')
        self.useFixture(fixtures.FakeLogger('boto'))
        fake_utils.stub_out_utils_spawn_n(self.stubs)
        # Fake glance 'show': minimal active 'ami' image with kernel/ramdisk.
        def fake_show(meh, context, id, **kwargs):
            return {'id': id,
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine',
                        'image_state': 'available'}}
        # Fake glance 'detail': one image named after the requested filter.
        def fake_detail(_self, context, **kwargs):
            image = fake_show(None, context, None)
            image['name'] = kwargs.get('filters', {}).get('name')
            return [image]
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        fake.stub_out_image_service(self.stubs)
        # Silence usage notifications emitted by the compute manager.
        def dumb(*args, **kwargs):
            pass
        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        fake_network.set_stub_network_methods(self.stubs)
        # set up our cloud
        self.cloud = cloud.CloudController()
        self.flags(scheduler_driver='nova.scheduler.chance.ChanceScheduler')
        # Short-circuit the conductor service
        self.flags(use_local=True, group='conductor')
        # Stub out the notification service so we use the no-op serializer
        # and avoid lazy-load traces with the wrap_exception decorator in
        # the compute service.
        fake_notifier.stub_notifier(self.stubs)
        self.addCleanup(fake_notifier.reset)
        # set up services
        self.conductor = self.start_service('conductor',
                                            manager=CONF.conductor.manager)
        self.compute = self.start_service('compute')
        self.scheduler = self.start_service('scheduler')
        self.network = self.start_service('network')
        self.consoleauth = self.start_service('consoleauth')
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)
        self.volume_api = volume.API()
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        # make sure we can map ami-00000001/2 to a uuid in FakeImageService
        db.s3_image_create(self.context,
                           'cedef40a-ed67-4d10-800e-17455edce175')
        db.s3_image_create(self.context,
                           '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
    def tearDown(self):
        """Reset the fake volume and image services between tests."""
        self.volume_api.reset_fake_api(self.context)
        super(CloudTestCase, self).tearDown()
        fake.FakeImageService_reset()
    def fake_get_target(obj, iqn):
        # Stub for iSCSI target lookup: pretend target id 1 always exists.
        return 1
    def fake_remove_iscsi_target(obj, tid, lun, vol_id, **kwargs):
        # Stub for iSCSI target removal: a no-op in the fake environment.
        pass
    def _stub_instance_get_with_fixed_ips(self,
                                          func_name, get_floating=True):
        # Wrap compute_api.<func_name> so every instance it returns carries
        # the fake network info cache (see get_instances_with_cached_ips).
        orig_func = getattr(self.cloud.compute_api, func_name)
        def fake_get(*args, **kwargs):
            return get_instances_with_cached_ips(orig_func, get_floating,
                                                 *args, **kwargs)
        self.stubs.Set(self.cloud.compute_api, func_name, fake_get)
    def _create_key(self, name):
        """Create and return a keypair named *name* for the test user."""
        # NOTE(vish): create depends on pool, so just call helper directly
        keypair_api = compute_api.KeypairAPI()
        return keypair_api.create_key_pair(self.context, self.context.user_id,
                                           name)
def test_describe_regions(self):
# Makes sure describe regions runs without raising an exception.
result = self.cloud.describe_regions(self.context)
self.assertEqual(len(result['regionInfo']), 1)
self.flags(region_list=["one=test_host1", "two=test_host2"])
result = self.cloud.describe_regions(self.context)
self.assertEqual(len(result['regionInfo']), 2)
    def test_describe_addresses(self):
        # Makes sure describe addresses runs without raising an exception.
        # Exercises the full allocate -> describe -> release cycle against
        # the nova-network API implementation.
        address = "10.10.10.10"
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.flags(network_api_class='nova.network.api.API')
        self.cloud.allocate_address(self.context)
        self.cloud.describe_addresses(self.context)
        self.cloud.release_address(self.context,
                                   public_ip=address)
        db.floating_ip_destroy(self.context, address)
    def test_describe_addresses_in_neutron(self):
        # Makes sure describe addresses runs without raising an exception.
        # Same allocate -> describe -> release cycle as
        # test_describe_addresses, but with the neutron network API class.
        address = "10.10.10.10"
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.cloud.allocate_address(self.context)
        self.cloud.describe_addresses(self.context)
        self.cloud.release_address(self.context,
                                   public_ip=address)
        db.floating_ip_destroy(self.context, address)
def test_describe_specific_address(self):
# Makes sure describe specific address works.
addresses = ["10.10.10.10", "10.10.10.11"]
for address in addresses:
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
result = self.cloud.describe_addresses(self.context)
self.assertEqual(len(result['addressesSet']), 2)
result = self.cloud.describe_addresses(self.context,
public_ip=['10.10.10.10'])
self.assertEqual(len(result['addressesSet']), 1)
for address in addresses:
self.cloud.release_address(self.context,
public_ip=address)
db.floating_ip_destroy(self.context, address)
    def test_allocate_address(self):
        # allocate_address hands out the only floating IP in the pool, and
        # raises NoMoreFloatingIps once the pool is exhausted.
        address = "10.10.10.10"
        allocate = self.cloud.allocate_address
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.assertEqual(allocate(self.context)['publicIp'], address)
        db.floating_ip_destroy(self.context, address)
        self.assertRaises(exception.NoMoreFloatingIps,
                          allocate,
                          self.context)
def test_release_address(self):
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova',
'project_id': self.project_id})
result = self.cloud.release_address(self.context, address)
self.assertEqual(result.get('return', None), 'true')
def test_associate_disassociate_address(self):
# Verifies associate runs cleanly without raising an exception.
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
# TODO(jkoelker) Probably need to query for instance_type_id and
# make sure we get a valid one
inst = db.instance_create(self.context, {'host': self.compute.host,
'display_name': HOST,
'instance_type_id': 1})
networks = db.network_get_all(self.context)
for network in networks:
db.network_update(self.context, network['id'],
{'host': self.network.host})
project_id = self.context.project_id
nw_info = self.network.allocate_for_instance(self.context,
instance_id=inst['id'],
instance_uuid=inst['uuid'],
host=inst['host'],
vpn=None,
rxtx_factor=3,
project_id=project_id,
macs=None)
fixed_ips = nw_info.fixed_ips()
ec2_id = ec2utils.id_to_ec2_inst_id(inst['uuid'])
self.stubs.Set(ec2utils, 'get_ip_info_for_instance',
lambda *args: {'fixed_ips': ['10.0.0.1'],
'fixed_ip6s': [],
'floating_ips': []})
self.stubs.Set(network_api.API, 'get_instance_id_by_floating_address',
lambda *args: 1)
def fake_update_instance_cache_with_nw_info(api, context, instance,
nw_info=None,
update_cells=True):
return
self.stubs.Set(base_network_api, "update_instance_cache_with_nw_info",
fake_update_instance_cache_with_nw_info)
self.cloud.associate_address(self.context,
instance_id=ec2_id,
public_ip=address)
self.cloud.disassociate_address(self.context,
public_ip=address)
self.cloud.release_address(self.context,
public_ip=address)
self.network.deallocate_fixed_ip(self.context, fixed_ips[0]['address'],
inst['host'])
db.instance_destroy(self.context, inst['uuid'])
db.floating_ip_destroy(self.context, address)
def test_disassociate_auto_assigned_address(self):
"""Verifies disassociating auto assigned floating IP
raises an exception
"""
address = "10.10.10.10"
def fake_get(*args, **kwargs):
pass
def fake_disassociate_floating_ip(*args, **kwargs):
raise exception.CannotDisassociateAutoAssignedFloatingIP()
self.stubs.Set(network_api.API, 'get_instance_id_by_floating_address',
lambda *args: 1)
self.stubs.Set(self.cloud.compute_api, 'get', fake_get)
self.stubs.Set(network_api.API, 'disassociate_floating_ip',
fake_disassociate_floating_ip)
self.assertRaises(exception.CannotDisassociateAutoAssignedFloatingIP,
self.cloud.disassociate_address,
self.context, public_ip=address)
def test_disassociate_unassociated_address(self):
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
self.cloud.describe_addresses(self.context)
result = self.cloud.disassociate_address(self.context,
public_ip=address)
self.assertEqual(result['return'], 'true')
db.floating_ip_destroy(self.context, address)
def test_describe_security_groups(self):
# Makes sure describe_security_groups works and filters results.
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
result = self.cloud.describe_security_groups(self.context)
# NOTE(vish): should have the default group as well
self.assertEqual(len(result['securityGroupInfo']), 2)
result = self.cloud.describe_security_groups(self.context,
group_name=[sec['name']])
self.assertEqual(len(result['securityGroupInfo']), 1)
self.assertEqual(
result['securityGroupInfo'][0]['groupName'],
sec['name'])
db.security_group_destroy(self.context, sec['id'])
def test_describe_security_groups_all_tenants(self):
# Makes sure describe_security_groups works and filters results.
sec = db.security_group_create(self.context,
{'project_id': 'foobar',
'name': 'test'})
def _check_name(result, i, expected):
self.assertEqual(result['securityGroupInfo'][i]['groupName'],
expected)
# include all tenants
filter = [{'name': 'all-tenants', 'value': {'1': 1}}]
result = self.cloud.describe_security_groups(self.context,
filter=filter)
self.assertEqual(len(result['securityGroupInfo']), 2)
_check_name(result, 0, 'default')
_check_name(result, 1, sec['name'])
# exclude all tenants
filter = [{'name': 'all-tenants', 'value': {'1': 0}}]
result = self.cloud.describe_security_groups(self.context,
filter=filter)
self.assertEqual(len(result['securityGroupInfo']), 1)
_check_name(result, 0, 'default')
# default all tenants
result = self.cloud.describe_security_groups(self.context)
self.assertEqual(len(result['securityGroupInfo']), 1)
_check_name(result, 0, 'default')
db.security_group_destroy(self.context, sec['id'])
def test_describe_security_groups_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
result = self.cloud.describe_security_groups(self.context,
group_id=[sec['id']])
self.assertEqual(len(result['securityGroupInfo']), 1)
self.assertEqual(
result['securityGroupInfo'][0]['groupName'],
sec['name'])
default = db.security_group_get_by_name(self.context,
self.context.project_id,
'default')
result = self.cloud.describe_security_groups(self.context,
group_id=[default['id']])
self.assertEqual(len(result['securityGroupInfo']), 1)
self.assertEqual(
result['securityGroupInfo'][0]['groupName'],
'default')
db.security_group_destroy(self.context, sec['id'])
def test_create_delete_security_group(self):
descript = 'test description'
create = self.cloud.create_security_group
result = create(self.context, 'testgrp', descript)
group_descript = result['securityGroupSet'][0]['groupDescription']
self.assertEqual(descript, group_descript)
delete = self.cloud.delete_security_group
self.assertTrue(delete(self.context, 'testgrp'))
    def test_security_group_quota_limit(self):
        # With quota_security_groups=10 and the project's implicit 'default'
        # group already counted, range(1, 10) creates the remaining nine.
        self.flags(quota_security_groups=10)
        for i in range(1, CONF.quota_security_groups):
            name = 'test name %i' % i
            descript = 'test description %i' % i
            create = self.cloud.create_security_group
            create(self.context, name, descript)
        # The next group would exceed the quota and must fail.
        self.assertRaises(exception.SecurityGroupLimitExceeded,
                          create, self.context, 'foo', 'bar')
def test_delete_security_group_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
delete = self.cloud.delete_security_group
self.assertTrue(delete(self.context, group_id=sec['id']))
def test_delete_security_group_with_bad_name(self):
delete = self.cloud.delete_security_group
notfound = exception.SecurityGroupNotFound
self.assertRaises(notfound, delete, self.context, 'badname')
def test_delete_security_group_with_bad_group_id(self):
delete = self.cloud.delete_security_group
notfound = exception.SecurityGroupNotFound
self.assertRaises(notfound, delete, self.context, group_id=999)
def test_delete_security_group_no_params(self):
delete = self.cloud.delete_security_group
self.assertRaises(exception.MissingParameter, delete, self.context)
def test_delete_security_group_policy_not_allowed(self):
rules = {'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')}
policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.delete_security_group, self.context,
'fake-name', 'fake-id')
def test_authorize_security_group_ingress_policy_not_allowed(self):
rules = {'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')}
policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.authorize_security_group_ingress, self.context,
'fake-name', 'fake-id')
def test_authorize_security_group_ingress(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
def test_authorize_security_group_ingress_ip_permissions_ip_ranges(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
'ip_ranges':
{'1': {'cidr_ip': u'0.0.0.0/0'},
'2': {'cidr_ip': u'10.10.10.10/32'}},
'ip_protocol': u'tcp'}]}
self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
def test_authorize_security_group_fail_missing_source_group(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
'ip_ranges': {'1': {'cidr_ip': u'0.0.0.0/0'},
'2': {'cidr_ip': u'10.10.10.10/32'}},
'groups': {'1': {'user_id': u'someuser',
'group_name': u'somegroup1'}},
'ip_protocol': u'tcp'}]}
self.assertRaises(exception.SecurityGroupNotFound, authz,
self.context, group_name=sec['name'], **kwargs)
def test_authorize_security_group_ingress_ip_permissions_groups(self):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'name': 'test'
}
sec = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'somegroup1'})
sec = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'othergroup2'})
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
'groups': {'1': {'user_id': u'someuser',
'group_name': u'somegroup1'},
'2': {'user_id': u'someuser',
'group_name': u'othergroup2'}},
'ip_protocol': u'tcp'}]}
self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
def test_describe_security_group_ingress_groups(self):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'name': 'test'
}
sec1 = db.security_group_create(self.context, kwargs)
sec2 = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'somegroup1'})
sec3 = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'othergroup2'})
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [
{'groups': {'1': {'user_id': u'someuser',
'group_name': u'somegroup1'}}},
{'ip_protocol': 'tcp',
'from_port': 80,
'to_port': 80,
'groups': {'1': {'user_id': u'someuser',
'group_name': u'othergroup2'}}}]}
self.assertTrue(authz(self.context, group_name=sec1['name'], **kwargs))
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
self.assertEqual(len(groups['securityGroupInfo']), 1)
actual_rules = groups['securityGroupInfo'][0]['ipPermissions']
self.assertEqual(len(actual_rules), 4)
expected_rules = [{'fromPort': -1,
'groups': [{'groupName': 'somegroup1',
'userId': 'someuser'}],
'ipProtocol': 'icmp',
'ipRanges': [],
'toPort': -1},
{'fromPort': 1,
'groups': [{'groupName': u'somegroup1',
'userId': u'someuser'}],
'ipProtocol': 'tcp',
'ipRanges': [],
'toPort': 65535},
{'fromPort': 1,
'groups': [{'groupName': u'somegroup1',
'userId': u'someuser'}],
'ipProtocol': 'udp',
'ipRanges': [],
'toPort': 65535},
{'fromPort': 80,
'groups': [{'groupName': u'othergroup2',
'userId': u'someuser'}],
'ipProtocol': u'tcp',
'ipRanges': [],
'toPort': 80}]
for rule in expected_rules:
self.assertIn(rule, actual_rules)
db.security_group_destroy(self.context, sec3['id'])
db.security_group_destroy(self.context, sec2['id'])
db.security_group_destroy(self.context, sec1['id'])
def test_revoke_security_group_ingress_policy_not_allowed(self):
rules = {'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')}
policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.revoke_security_group_ingress, self.context,
'fake-name', 'fake-id')
def test_revoke_security_group_ingress(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec['id'], **kwargs)
revoke = self.cloud.revoke_security_group_ingress
self.assertTrue(revoke(self.context, group_name=sec['name'], **kwargs))
def test_authorize_revoke_security_group_ingress_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec['id'], **kwargs)
revoke = self.cloud.revoke_security_group_ingress
self.assertTrue(revoke(self.context, group_id=sec['id'], **kwargs))
def test_authorize_security_group_ingress_missing_protocol_params(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
self.assertRaises(exception.MissingParameter, authz, self.context,
'test')
def test_authorize_security_group_ingress_missing_group_name_or_id(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
authz = self.cloud.authorize_security_group_ingress
self.assertRaises(exception.MissingParameter, authz, self.context,
**kwargs)
def test_authorize_security_group_ingress_already_exists(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_name=sec['name'], **kwargs)
self.assertRaises(exception.SecurityGroupRuleExists, authz,
self.context, group_name=sec['name'], **kwargs)
def test_security_group_ingress_quota_limit(self):
self.flags(quota_security_group_rules=20)
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec_group = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
for i in range(100, 120):
kwargs = {'to_port': i, 'from_port': i, 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec_group['id'], **kwargs)
kwargs = {'to_port': 121, 'from_port': 121, 'ip_protocol': 'tcp'}
self.assertRaises(exception.SecurityGroupLimitExceeded, authz,
self.context, group_id=sec_group['id'], **kwargs)
def _test_authorize_security_group_no_ports_with_source_group(self, proto):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'description': '',
'name': 'test'
}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
auth_kwargs = {'ip_protocol': proto,
'groups': {'1': {'user_id': self.context.user_id,
'group_name': u'test'}}}
self.assertTrue(authz(self.context, group_name=sec['name'],
**auth_kwargs))
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
self.assertEqual(len(groups['securityGroupInfo']), 1)
actual_rules = groups['securityGroupInfo'][0]['ipPermissions']
expected_rules = [{'groups': [{'groupName': 'test',
'userId': self.context.user_id}],
'ipProtocol': proto,
'ipRanges': []}]
if proto == 'icmp':
expected_rules[0]['fromPort'] = -1
expected_rules[0]['toPort'] = -1
else:
expected_rules[0]['fromPort'] = 1
expected_rules[0]['toPort'] = 65535
self.assertTrue(expected_rules == actual_rules)
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
db.security_group_destroy(self.context, sec['id'])
def _test_authorize_security_group_no_ports_no_source_group(self, proto):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'description': '',
'name': 'test'
}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
auth_kwargs = {'ip_protocol': proto}
self.assertRaises(exception.MissingParameter, authz, self.context,
group_name=sec['name'], **auth_kwargs)
db.security_group_destroy(self.context, sec['id'])
def test_authorize_security_group_no_ports_icmp(self):
self._test_authorize_security_group_no_ports_with_source_group('icmp')
self._test_authorize_security_group_no_ports_no_source_group('icmp')
def test_authorize_security_group_no_ports_tcp(self):
self._test_authorize_security_group_no_ports_with_source_group('tcp')
self._test_authorize_security_group_no_ports_no_source_group('tcp')
def test_authorize_security_group_no_ports_udp(self):
self._test_authorize_security_group_no_ports_with_source_group('udp')
self._test_authorize_security_group_no_ports_no_source_group('udp')
def test_revoke_security_group_ingress_missing_group_name_or_id(self):
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
revoke = self.cloud.revoke_security_group_ingress
self.assertRaises(exception.MissingParameter, revoke,
self.context, **kwargs)
def test_delete_security_group_in_use_by_group(self):
self.cloud.create_security_group(self.context, 'testgrp1',
"test group 1")
self.cloud.create_security_group(self.context, 'testgrp2',
"test group 2")
kwargs = {'groups': {'1': {'user_id': u'%s' % self.context.user_id,
'group_name': u'testgrp2'}},
}
self.cloud.authorize_security_group_ingress(self.context,
group_name='testgrp1', **kwargs)
group1 = db.security_group_get_by_name(self.context,
self.project_id, 'testgrp1')
get_rules = db.security_group_rule_get_by_security_group
self.assertTrue(get_rules(self.context, group1['id']))
self.cloud.delete_security_group(self.context, 'testgrp2')
self.assertFalse(get_rules(self.context, group1['id']))
def test_delete_security_group_in_use_by_instance(self):
# Ensure that a group can not be deleted if in use by an instance.
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
args = {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active'}
inst = db.instance_create(self.context, args)
args = {'user_id': self.context.user_id,
'project_id': self.context.project_id,
'name': 'testgrp',
'description': 'Test group'}
group = db.security_group_create(self.context, args)
db.instance_add_security_group(self.context, inst['uuid'], group['id'])
self.assertRaises(exception.InvalidGroup,
self.cloud.delete_security_group,
self.context, 'testgrp')
db.instance_destroy(self.context, inst['uuid'])
self.cloud.delete_security_group(self.context, 'testgrp')
    def test_describe_availability_zones(self):
        """describe_availability_zones reports aggregate-based zones.

        NOTE(review): the expected counts (3 non-verbose, 18 verbose)
        appear to include zones/services set up by the shared test
        fixtures -- confirm against setUp before changing them.
        """
        # Makes sure describe_availability_zones works and filters results.
        service1 = db.service_create(self.context, {'host': 'host1_zones',
                                         'binary': "nova-compute",
                                         'topic': 'compute',
                                         'report_count': 0})
        service2 = db.service_create(self.context, {'host': 'host2_zones',
                                         'binary': "nova-compute",
                                         'topic': 'compute',
                                         'report_count': 0})
        # Aggregate based zones: each host is placed in its own AZ via an
        # aggregate carrying 'availability_zone' metadata.
        agg = db.aggregate_create(self.context,
                {'name': 'agg1'}, {'availability_zone': 'zone1'})
        db.aggregate_host_add(self.context, agg['id'], 'host1_zones')
        agg = db.aggregate_create(self.context,
                {'name': 'agg2'}, {'availability_zone': 'zone2'})
        db.aggregate_host_add(self.context, agg['id'], 'host2_zones')
        result = self.cloud.describe_availability_zones(self.context)
        self.assertEqual(len(result['availabilityZoneInfo']), 3)
        # The verbose listing (admin only) adds per-host/service entries.
        admin_ctxt = context.get_admin_context(read_deleted="no")
        result = self.cloud.describe_availability_zones(admin_ctxt,
                                                        zone_name='verbose')
        self.assertEqual(len(result['availabilityZoneInfo']), 18)
        db.service_destroy(self.context, service1['id'])
        db.service_destroy(self.context, service2['id'])
    def test_describe_availability_zones_verbose(self):
        """Verbose zone listing includes per-host service detail.

        NOTE(review): the expected count (17) appears to depend on
        services created by the shared fixtures -- confirm before edit.
        """
        # Makes sure describe_availability_zones works and filters results.
        service1 = db.service_create(self.context, {'host': 'host1_zones',
                                         'binary': "nova-compute",
                                         'topic': 'compute',
                                         'report_count': 0})
        service2 = db.service_create(self.context, {'host': 'host2_zones',
                                         'binary': "nova-compute",
                                         'topic': 'compute',
                                         'report_count': 0})
        # Put host2_zones into a non-default AZ via aggregate metadata.
        agg = db.aggregate_create(self.context,
                {'name': 'agg1'}, {'availability_zone': 'second_zone'})
        db.aggregate_host_add(self.context, agg['id'], 'host2_zones')
        admin_ctxt = context.get_admin_context(read_deleted="no")
        result = self.cloud.describe_availability_zones(admin_ctxt,
                                                        zone_name='verbose')
        self.assertEqual(len(result['availabilityZoneInfo']), 17)
        db.service_destroy(self.context, service1['id'])
        db.service_destroy(self.context, service2['id'])
def assertEqualSorted(self, x, y):
self.assertEqual(sorted(x), sorted(y))
    def test_describe_instances(self):
        """describe_instances lists instances and filters by instance id.

        Builds two instances on hosts in different aggregate-based AZs,
        then checks the unfiltered listing, the per-id filtered result
        fields (addresses, AZ, hostnames, client token), and that one
        bad id in a filter list raises InstanceNotFound.
        """
        # Makes sure describe_instances works and filters results.
        self.flags(use_ipv6=True)
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        sys_meta['EC2_client_token'] = "<PASSWORD>"
        inst1 = db.instance_create(self.context, {'reservation_id': 'a',
                                                  'image_ref': image_uuid,
                                                  'instance_type_id': 1,
                                                  'host': 'host1',
                                                  'hostname': 'server-1234',
                                                  'vm_state': 'active',
                                                  'system_metadata': sys_meta})
        sys_meta['EC2_client_token'] = "<PASSWORD>"
        inst2 = db.instance_create(self.context, {'reservation_id': 'a',
                                                  'image_ref': image_uuid,
                                                  'instance_type_id': 1,
                                                  'host': 'host2',
                                                  'hostname': 'server-4321',
                                                  'vm_state': 'active',
                                                  'system_metadata': sys_meta})
        comp1 = db.service_create(self.context, {'host': 'host1',
                                                 'topic': "compute"})
        agg = db.aggregate_create(self.context,
                {'name': 'agg1'}, {'availability_zone': 'zone1'})
        db.aggregate_host_add(self.context, agg['id'], 'host1')
        comp2 = db.service_create(self.context, {'host': 'host2',
                                                 'topic': "compute"})
        agg2 = db.aggregate_create(self.context,
                {'name': 'agg2'}, {'availability_zone': 'zone2'})
        db.aggregate_host_add(self.context, agg2['id'], 'host2')
        # Unfiltered: both instances share reservation 'a'.
        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]
        self.assertEqual(len(result['instancesSet']), 2)
        # Now try filtering.
        instance_id = ec2utils.id_to_ec2_inst_id(inst2['uuid'])
        result = self.cloud.describe_instances(self.context,
                                               instance_id=[instance_id])
        result = result['reservationSet'][0]
        self.assertEqual(len(result['instancesSet']), 1)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], instance_id)
        self.assertEqual(instance['placement']['availabilityZone'], 'zone2')
        self.assertEqual(instance['ipAddress'], '172.16.58.3')
        self.assertEqual(instance['dnsName'], '172.16.58.3')
        self.assertEqual(instance['tagSet'], [])
        self.assertEqual(instance['privateDnsName'], 'server-4321')
        self.assertEqual(instance['privateIpAddress'], '192.168.0.3')
        self.assertEqual(instance['dnsNameV6'],
                         'fe80:b33f::a8bb:ccff:fedd:eeff')
        self.assertEqual(instance['clientToken'], '<PASSWORD>')
        # A filter with even one invalid id should cause an exception to be
        # raised
        self.assertRaises(exception.InstanceNotFound,
                          self.cloud.describe_instances, self.context,
                          instance_id=[instance_id, '435679'])
        db.instance_destroy(self.context, inst1['uuid'])
        db.instance_destroy(self.context, inst2['uuid'])
        db.service_destroy(self.context, comp1['id'])
        db.service_destroy(self.context, comp2['id'])
def test_describe_instances_all_invalid(self):
# Makes sure describe_instances works and filters results.
self.flags(use_ipv6=True)
self._stub_instance_get_with_fixed_ips('get_all')
self._stub_instance_get_with_fixed_ips('get')
instance_id = ec2utils.id_to_ec2_inst_id('435679')
self.assertRaises(exception.InstanceNotFound,
self.cloud.describe_instances, self.context,
instance_id=[instance_id])
def test_describe_instances_with_filters(self):
# Makes sure describe_instances works and filters results.
filters = {'filter': [{'name': 'test',
'value': ['a', 'b']},
{'name': 'another_test',
'value': 'a string'}]}
self._stub_instance_get_with_fixed_ips('get_all')
self._stub_instance_get_with_fixed_ips('get')
result = self.cloud.describe_instances(self.context, **filters)
self.assertEqual(result, {'reservationSet': []})
    def test_describe_instances_with_filters_tags(self):
        """describe_instances honours tag:<key>, tag-key and tag-value filters.

        Creates two instances with partially overlapping tags, compares
        the full formatted responses against hand-built expectations for
        each filter combination, then confirms that deleted tags stop
        matching.
        """
        # Makes sure describe_instances works and filters tag results.

        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')

        # We need to stub out the MQ call - it won't succeed.  We do want
        # to check that the method is called, though
        meta_changes = [None]

        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            # Record the metadata diff instead of doing the RPC.
            meta_changes[0] = diff

        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)

        utc = iso8601.iso8601.Utc()

        # Create some test images
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'host': 'host1',
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1111',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                tzinfo=utc),
                'system_metadata': sys_meta
        }

        inst2_kwargs = {
                'reservation_id': 'b',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'host': 'host2',
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1112',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 2,
                                                tzinfo=utc),
                'system_metadata': sys_meta
        }

        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id1 = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        inst2 = db.instance_create(self.context, inst2_kwargs)
        ec2_id2 = ec2utils.id_to_ec2_inst_id(inst2['uuid'])

        # Create some tags
        # We get one overlapping pair, one overlapping key, and a
        # disparate pair
        # inst1 : {'foo': 'bar', 'baz': 'wibble', 'bax': 'wobble'}
        # inst2 : {'foo': 'bar', 'baz': 'quux', 'zog': 'bobble'}
        md = {'key': 'foo', 'value': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1, ec2_id2],
                tag=[md])
        md2 = {'key': 'baz', 'value': 'wibble'}
        md3 = {'key': 'bax', 'value': 'wobble'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1],
                tag=[md2, md3])
        md4 = {'key': 'baz', 'value': 'quux'}
        md5 = {'key': 'zog', 'value': 'bobble'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id2],
                tag=[md4, md5])
        # We should be able to search by:

        # Expected full formatted responses for each instance; compared
        # wholesale below so every field of the formatter is pinned.
        inst1_ret = {
            'groupSet': None,
            'instancesSet': [{'amiLaunchIndex': None,
                              'dnsName': '172.16.58.3',
                              'dnsNameV6': 'fe80:b33f::a8bb:ccff:fedd:eeff',
                              'imageId': 'ami-00000001',
                              'instanceId': 'i-00000001',
                              'instanceState': {'code': 16,
                                                'name': 'running'},
                              'instanceType': u'm1.medium',
                              'ipAddress': '172.16.58.3',
                              'keyName': 'None (None, host1)',
                              'launchTime':
                                  datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                    tzinfo=utc),
                              'placement': {
                                  'availabilityZone': 'nova'},
                              'privateDnsName': u'server-1111',
                              'privateIpAddress': '192.168.0.3',
                              'productCodesSet': None,
                              'rootDeviceName': '/dev/sda1',
                              'rootDeviceType': 'instance-store',
                              'tagSet': [{'key': u'foo',
                                          'value': u'bar'},
                                         {'key': u'baz',
                                          'value': u'wibble'},
                                         {'key': u'bax',
                                          'value': u'wobble'}]}],
            'ownerId': None,
            'reservationId': u'a'}

        inst2_ret = {
            'groupSet': None,
            'instancesSet': [{'amiLaunchIndex': None,
                              'dnsName': '172.16.58.3',
                              'dnsNameV6': 'fe80:b33f::a8bb:ccff:fedd:eeff',
                              'imageId': 'ami-00000001',
                              'instanceId': 'i-00000002',
                              'instanceState': {'code': 16,
                                                'name': 'running'},
                              'instanceType': u'm1.medium',
                              'ipAddress': '172.16.58.3',
                              'keyName': u'None (None, host2)',
                              'launchTime':
                                  datetime.datetime(2012, 5, 1, 1, 1, 2,
                                                    tzinfo=utc),
                              'placement': {
                                  'availabilityZone': 'nova'},
                              'privateDnsName': u'server-1112',
                              'privateIpAddress': '192.168.0.3',
                              'productCodesSet': None,
                              'rootDeviceName': '/dev/sda1',
                              'rootDeviceType': 'instance-store',
                              'tagSet': [{'key': u'foo',
                                          'value': u'bar'},
                                         {'key': u'baz',
                                          'value': u'quux'},
                                         {'key': u'zog',
                                          'value': u'bobble'}]}],
            'ownerId': None,
            'reservationId': u'b'}

        # No filter
        result = self.cloud.describe_instances(self.context)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Key search
        # Both should have tags with key 'foo' and value 'bar'
        filters = {'filter': [{'name': 'tag:foo',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Both should have tags with key 'foo'
        filters = {'filter': [{'name': 'tag-key',
                               'value': ['foo']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Value search
        # Only inst2 should have tags with key 'baz' and value 'quux'
        filters = {'filter': [{'name': 'tag:baz',
                               'value': ['quux']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # Only inst2 should have tags with value 'quux'
        filters = {'filter': [{'name': 'tag-value',
                               'value': ['quux']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # Multiple values
        # Both should have tags with key 'baz' and values in the set
        # ['quux', 'wibble']
        filters = {'filter': [{'name': 'tag:baz',
                               'value': ['quux', 'wibble']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Both should have tags with key 'baz' or tags with value 'bar'
        filters = {'filter': [{'name': 'tag-key',
                               'value': ['baz']},
                              {'name': 'tag-value',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Confirm deletion of tags
        # Check for format 'tag:'
        self.cloud.delete_tags(self.context, resource_id=[ec2_id1], tag=[md])
        filters = {'filter': [{'name': 'tag:foo',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # Check for format 'tag-'
        filters = {'filter': [{'name': 'tag-key',
                               'value': ['foo']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})
        filters = {'filter': [{'name': 'tag-value',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # destroy the test instances
        db.instance_destroy(self.context, inst1['uuid'])
        db.instance_destroy(self.context, inst2['uuid'])
    def test_describe_instances_sorting(self):
        """Instances are returned ordered by created_at, oldest first.

        inst2 (Feb 1) < inst3 (Feb 5) < inst1 (May 1) -- the assertions
        below check exactly that order.
        """
        # Makes sure describe_instances works and is sorted as expected.
        self.flags(use_ipv6=True)
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        # Common fields shared by all three instances.
        inst_base = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'system_metadata': sys_meta,
        }
        utc = iso8601.iso8601.Utc()
        inst1_kwargs = {}
        inst1_kwargs.update(inst_base)
        inst1_kwargs['host'] = 'host1'
        inst1_kwargs['hostname'] = 'server-1111'
        inst1_kwargs['created_at'] = datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                       tzinfo=utc)
        inst1 = db.instance_create(self.context, inst1_kwargs)
        inst2_kwargs = {}
        inst2_kwargs.update(inst_base)
        inst2_kwargs['host'] = 'host2'
        inst2_kwargs['hostname'] = 'server-2222'
        inst2_kwargs['created_at'] = datetime.datetime(2012, 2, 1, 1, 1, 1,
                                                       tzinfo=utc)
        inst2 = db.instance_create(self.context, inst2_kwargs)
        inst3_kwargs = {}
        inst3_kwargs.update(inst_base)
        inst3_kwargs['host'] = 'host3'
        inst3_kwargs['hostname'] = 'server-3333'
        inst3_kwargs['created_at'] = datetime.datetime(2012, 2, 5, 1, 1, 1,
                                                       tzinfo=utc)
        inst3 = db.instance_create(self.context, inst3_kwargs)
        comp1 = db.service_create(self.context, {'host': 'host1',
                                                 'topic': "compute"})
        comp2 = db.service_create(self.context, {'host': 'host2',
                                                 'topic': "compute"})
        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]['instancesSet']
        # Oldest created_at comes first.
        self.assertEqual(result[0]['launchTime'], inst2_kwargs['created_at'])
        self.assertEqual(result[1]['launchTime'], inst3_kwargs['created_at'])
        self.assertEqual(result[2]['launchTime'], inst1_kwargs['created_at'])
        db.instance_destroy(self.context, inst1['uuid'])
        db.instance_destroy(self.context, inst2['uuid'])
        db.instance_destroy(self.context, inst3['uuid'])
        db.service_destroy(self.context, comp1['id'])
        db.service_destroy(self.context, comp2['id'])
    def test_describe_instance_state(self):
        """instanceState in describe_instances reflects power/vm state.

        The local helper creates an instance with the given power and vm
        state, reads back the formatted instanceState, and checks both
        the numeric code and the name.
        """
        # Makes sure describe_instances for instanceState works.

        def test_instance_state(expected_code, expected_name,
                                power_state_, vm_state_, values=None):
            image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
            sys_meta = flavors.save_flavor_info(
                {}, flavors.get_flavor(1))
            values = values or {}
            values.update({'image_ref': image_uuid, 'instance_type_id': 1,
                           'power_state': power_state_, 'vm_state': vm_state_,
                           'system_metadata': sys_meta})
            inst = db.instance_create(self.context, values)
            instance_id = ec2utils.id_to_ec2_inst_id(inst['uuid'])
            result = self.cloud.describe_instances(self.context,
                                                 instance_id=[instance_id])
            result = result['reservationSet'][0]
            result = result['instancesSet'][0]['instanceState']
            name = result['name']
            code = result['code']
            self.assertEqual(code, expected_code)
            self.assertEqual(name, expected_name)
            db.instance_destroy(self.context, inst['uuid'])

        test_instance_state(inst_state.RUNNING_CODE, inst_state.RUNNING,
                            power_state.RUNNING, vm_states.ACTIVE)
        test_instance_state(inst_state.STOPPED_CODE, inst_state.STOPPED,
                            power_state.NOSTATE, vm_states.STOPPED,
                            {'shutdown_terminate': False})
    def test_describe_instances_no_ipv6(self):
        """With use_ipv6 disabled the response omits dnsNameV6 entirely."""
        # Makes sure describe_instances w/ no ipv6 works.
        self.flags(use_ipv6=False)
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        inst1 = db.instance_create(self.context, {'reservation_id': 'a',
                                                  'image_ref': image_uuid,
                                                  'instance_type_id': 1,
                                                  'hostname': 'server-1234',
                                                  'vm_state': 'active',
                                                  'system_metadata': sys_meta})
        comp1 = db.service_create(self.context, {'host': 'host1',
                                                 'topic': "compute"})
        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]
        self.assertEqual(len(result['instancesSet']), 1)
        instance = result['instancesSet'][0]
        instance_id = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        self.assertEqual(instance['instanceId'], instance_id)
        self.assertEqual(instance['ipAddress'], '172.16.58.3')
        self.assertEqual(instance['dnsName'], '172.16.58.3')
        self.assertEqual(instance['privateDnsName'], 'server-1234')
        self.assertEqual(instance['privateIpAddress'], '192.168.0.3')
        # The v6 field must be absent, not merely None.
        self.assertNotIn('dnsNameV6', instance)
        db.instance_destroy(self.context, inst1['uuid'])
        db.service_destroy(self.context, comp1['id'])
def test_describe_instances_deleted(self):
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
args1 = {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
inst1 = db.instance_create(self.context, args1)
args2 = {'reservation_id': 'b',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
inst2 = db.instance_create(self.context, args2)
db.instance_destroy(self.context, inst1['uuid'])
result = self.cloud.describe_instances(self.context)
self.assertEqual(len(result['reservationSet']), 1)
result1 = result['reservationSet'][0]['instancesSet']
self.assertEqual(result1[0]['instanceId'],
ec2utils.id_to_ec2_inst_id(inst2['uuid']))
def test_describe_instances_with_image_deleted(self):
image_uuid = 'aebef54a-ed67-4d10-912f-14455edce176'
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
args1 = {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
db.instance_create(self.context, args1)
args2 = {'reservation_id': 'b',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
db.instance_create(self.context, args2)
result = self.cloud.describe_instances(self.context)
self.assertEqual(len(result['reservationSet']), 2)
    def test_describe_instances_dnsName_set(self):
        """dnsName is None when the instance has no floating IP."""
        # NOTE(review): the stubs below use get_floating=False, i.e. NO
        # floating IP is associated -- the original comment ("if floating
        # IP is set") described the opposite of what is exercised here.
        self._stub_instance_get_with_fixed_ips('get_all', get_floating=False)
        self._stub_instance_get_with_fixed_ips('get', get_floating=False)
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        db.instance_create(self.context, {'reservation_id': 'a',
                                          'image_ref': image_uuid,
                                          'instance_type_id': 1,
                                          'host': 'host1',
                                          'hostname': 'server-1234',
                                          'vm_state': 'active',
                                          'system_metadata': sys_meta})
        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]
        instance = result['instancesSet'][0]
        self.assertIsNone(instance['dnsName'])
def test_describe_instances_booting_from_a_volume(self):
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
inst = objects.Instance(self.context)
inst.reservation_id = 'a'
inst.image_ref = ''
inst.root_device_name = '/dev/sdh'
inst.instance_type_id = 1
inst.vm_state = vm_states.ACTIVE
inst.host = 'host1'
inst.system_metadata = sys_meta
inst.create()
result = self.cloud.describe_instances(self.context)
result = result['reservationSet'][0]
instance = result['instancesSet'][0]
self.assertIsNone(instance['imageId'])
    def test_describe_images(self):
        """describe_images lists all images, filters by id, and 404s on bad ids."""
        describe_images = self.cloud.describe_images

        # Fake image-service detail() returning a single machine image.
        def fake_detail(meh, context, **kwargs):
            return [{'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                     'name': 'fake_name',
                     'container_format': 'ami',
                     'status': 'active',
                     'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'}}]

        def fake_show_none(meh, context, id):
            raise exception.ImageNotFound(image_id='bad_image_id')

        def fake_detail_none(self, context, **kwargs):
            return []

        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        # list all
        result1 = describe_images(self.context)
        result1 = result1['imagesSet'][0]
        self.assertEqual(result1['imageId'], 'ami-00000001')
        # provided a valid image_id
        result2 = describe_images(self.context, ['ami-00000001'])
        self.assertEqual(1, len(result2['imagesSet']))
        # provide more than 1 valid image_id
        result3 = describe_images(self.context, ['ami-00000001',
                                                 'ami-00000002'])
        self.assertEqual(2, len(result3['imagesSet']))
        # provide a non-existing image_id
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_none)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail_none)
        self.assertRaises(exception.ImageNotFound, describe_images,
                          self.context, ['ami-fake'])
def assertDictListUnorderedMatch(self, L1, L2, key):
self.assertEqual(len(L1), len(L2))
for d1 in L1:
self.assertIn(key, d1)
for d2 in L2:
self.assertIn(key, d2)
if d1[key] == d2[key]:
self.assertThat(d1, matchers.DictMatches(d2))
    def _setUpImageSet(self, create_volumes_and_snapshots=False):
        """Stub the fake image service with two images used by image tests.

        Image 1 is instance-store backed with an extensive set of
        ephemeral/swap mappings and an explicit block_device_mapping;
        image 2 is EBS backed with its root on /dev/sdb1.

        :param create_volumes_and_snapshots: when True, also create real
            volumes/snapshots for every volume_id/snapshot_id referenced
            by image 1's block_device_mapping.
        :returns: tuple of (volume ids, snapshot ids) created -- both
            empty unless create_volumes_and_snapshots is True.
        """
        self.flags(max_local_block_devices=-1)
        mappings1 = [
            {'device': '/dev/sda1', 'virtual': 'root'},

            {'device': 'sdb0', 'virtual': 'ephemeral0'},
            {'device': 'sdb1', 'virtual': 'ephemeral1'},
            {'device': 'sdb2', 'virtual': 'ephemeral2'},
            {'device': 'sdb3', 'virtual': 'ephemeral3'},
            {'device': 'sdb4', 'virtual': 'ephemeral4'},

            {'device': 'sdc0', 'virtual': 'swap'},
            {'device': 'sdc1', 'virtual': 'swap'},
            {'device': 'sdc2', 'virtual': 'swap'},
            {'device': 'sdc3', 'virtual': 'swap'},
            {'device': 'sdc4', 'virtual': 'swap'}]
        block_device_mapping1 = [
            {'device_name': '/dev/sdb1',
             'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e3'},
            {'device_name': '/dev/sdb2',
             'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e4'},
            {'device_name': '/dev/sdb3', 'virtual_name': 'ephemeral5'},
            {'device_name': '/dev/sdb4', 'no_device': True},

            {'device_name': '/dev/sdc1',
             'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e5'},
            {'device_name': '/dev/sdc2',
             'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e6'},
            {'device_name': '/dev/sdc3', 'virtual_name': 'ephemeral6'},
            {'device_name': '/dev/sdc4', 'no_device': True}]
        image1 = {
            'id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'name': 'fake_name',
            'status': 'active',
            'properties': {
                'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'type': 'machine',
                'image_state': 'available',
                'mappings': mappings1,
                'block_device_mapping': block_device_mapping1,
            }
        }

        mappings2 = [{'device': '/dev/sda1', 'virtual': 'root'}]
        block_device_mapping2 = [{'device_name': '/dev/sdb1',
                                  'snapshot_id':
                                      'ccec42a2-c220-4806-b762-6b12fbb592e7',
                                  'volume_id': None}]
        image2 = {
            'id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'name': 'fake_name',
            'status': 'active',
            'properties': {
                'kernel_id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                'type': 'machine',
                'root_device_name': '/dev/sdb1',
                'mappings': mappings2,
                'block_device_mapping': block_device_mapping2}}

        def fake_show(meh, context, image_id, **kwargs):
            # Deep-copy so callers mutating the result don't corrupt the
            # shared fixtures.
            _images = [copy.deepcopy(image1), copy.deepcopy(image2)]
            for i in _images:
                if str(i['id']) == str(image_id):
                    return i
            raise exception.ImageNotFound(image_id=image_id)

        def fake_detail(meh, context, **kwargs):
            return [copy.deepcopy(image1), copy.deepcopy(image2)]

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)

        volumes = []
        snapshots = []
        if create_volumes_and_snapshots:
            for bdm in block_device_mapping1:
                if 'volume_id' in bdm:
                    vol = self._volume_create(bdm['volume_id'])
                    volumes.append(vol['id'])
                if 'snapshot_id' in bdm:
                    snap = self._snapshot_create(bdm['snapshot_id'])
                    snapshots.append(snap['id'])
        return (volumes, snapshots)
def _assertImageSet(self, result, root_device_type, root_device_name):
self.assertEqual(1, len(result['imagesSet']))
result = result['imagesSet'][0]
self.assertIn('rootDeviceType', result)
self.assertEqual(result['rootDeviceType'], root_device_type)
self.assertIn('rootDeviceName', result)
self.assertEqual(result['rootDeviceName'], root_device_name)
self.assertIn('blockDeviceMapping', result)
return result
_expected_root_device_name1 = '/dev/sda1'
# NOTE(yamahata): noDevice doesn't make sense when returning mapping
# It makes sense only when user overriding existing
# mapping.
_expected_bdms1 = [
{'deviceName': '/dev/sdb0', 'virtualName': 'ephemeral0'},
{'deviceName': '/dev/sdb1', 'ebs': {'snapshotId':
'snap-00000001'}},
{'deviceName': '/dev/sdb2', 'ebs': {'snapshotId':
'vol-00000001'}},
{'deviceName': '/dev/sdb3', 'virtualName': 'ephemeral5'},
# {'deviceName': '/dev/sdb4', 'noDevice': True},
{'deviceName': '/dev/sdc0', 'virtualName': 'swap'},
{'deviceName': '/dev/sdc1', 'ebs': {'snapshotId':
'snap-00000002'}},
{'deviceName': '/dev/sdc2', 'ebs': {'snapshotId':
'vol-00000002'}},
{'deviceName': '/dev/sdc3', 'virtualName': 'ephemeral6'},
# {'deviceName': '/dev/sdc4', 'noDevice': True}
]
_expected_root_device_name2 = '/dev/sdb1'
_expected_bdms2 = [{'deviceName': '/dev/sdb1',
'ebs': {'snapshotId': 'snap-00000003'}}]
# NOTE(yamahata):
# InstanceBlockDeviceMappingItemType
# rootDeviceType
# rootDeviceName
# blockDeviceMapping
# deviceName
# virtualName
# ebs
# snapshotId
# volumeSize
# deleteOnTermination
# noDevice
def test_describe_image_mapping(self):
# test for rootDeviceName and blockDeviceMapping.
describe_images = self.cloud.describe_images
self._setUpImageSet()
result = describe_images(self.context, ['ami-00000001'])
result = self._assertImageSet(result, 'instance-store',
self._expected_root_device_name1)
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms1, 'deviceName')
result = describe_images(self.context, ['ami-00000002'])
result = self._assertImageSet(result, 'ebs',
self._expected_root_device_name2)
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms2, 'deviceName')
    def test_describe_image_attribute(self):
        """describe_image_attribute returns launchPermission/kernel/ramdisk."""
        describe_image_attribute = self.cloud.describe_image_attribute

        # Fake image-service show() for a public machine image.
        def fake_show(meh, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'is_public': True}

        def fake_detail(self, context, **kwargs):
            image = fake_show(None, context, None)
            image['name'] = kwargs.get('filters', {}).get('name')
            return [image]

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        # is_public=True maps to the 'all' launch-permission group.
        result = describe_image_attribute(self.context, 'ami-00000001',
                                          'launchPermission')
        self.assertEqual([{'group': 'all'}], result['launchPermission'])
        result = describe_image_attribute(self.context, 'ami-00000001',
                                          'kernel')
        self.assertEqual('aki-00000001', result['kernel']['value'])
        result = describe_image_attribute(self.context, 'ami-00000001',
                                          'ramdisk')
        self.assertEqual('ari-00000001', result['ramdisk']['value'])
def test_describe_image_attribute_root_device_name(self):
describe_image_attribute = self.cloud.describe_image_attribute
self._setUpImageSet()
result = describe_image_attribute(self.context, 'ami-00000001',
'rootDeviceName')
self.assertEqual(result['rootDeviceName'],
self._expected_root_device_name1)
result = describe_image_attribute(self.context, 'ami-00000002',
'rootDeviceName')
self.assertEqual(result['rootDeviceName'],
self._expected_root_device_name2)
def test_describe_image_attribute_block_device_mapping(self):
describe_image_attribute = self.cloud.describe_image_attribute
self._setUpImageSet()
result = describe_image_attribute(self.context, 'ami-00000001',
'blockDeviceMapping')
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms1, 'deviceName')
result = describe_image_attribute(self.context, 'ami-00000002',
'blockDeviceMapping')
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms2, 'deviceName')
    def test_modify_image_attribute(self):
        """Adding launchPermission for 'all' flips the image to public."""
        modify_image_attribute = self.cloud.modify_image_attribute
        # Private machine image served by the stubbed image service.
        fake_metadata = {
            'id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'name': 'fake_name',
            'container_format': 'ami',
            'status': 'active',
            'properties': {
                'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'type': 'machine'},
            'is_public': False}

        def fake_show(meh, context, id, **kwargs):
            return copy.deepcopy(fake_metadata)

        def fake_detail(self, context, **kwargs):
            image = fake_show(None, context, None)
            image['name'] = kwargs.get('filters', {}).get('name')
            return [image]

        def fake_update(meh, context, image_id, metadata, data=None):
            # Kernel/ramdisk must be preserved and is_public must be set
            # by the modify call before update() is invoked.
            self.assertEqual(metadata['properties']['kernel_id'],
                             fake_metadata['properties']['kernel_id'])
            self.assertEqual(metadata['properties']['ramdisk_id'],
                             fake_metadata['properties']['ramdisk_id'])
            self.assertTrue(metadata['is_public'])
            image = copy.deepcopy(fake_metadata)
            image.update(metadata)
            return image

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        self.stubs.Set(fake._FakeImageService, 'update', fake_update)
        result = modify_image_attribute(self.context, 'ami-00000001',
                                        'launchPermission', 'add',
                                        user_group=['all'])
        self.assertTrue(result['is_public'])
    def test_register_image(self):
        """register_image registers an S3 manifest and returns its ami id."""
        register_image = self.cloud.register_image

        def fake_create(*args, **kwargs):
            # NOTE(vish): We are mocking s3 so make sure we have converted
            #             to ids instead of uuids.
            return {'id': 1,
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {'kernel_id': 1,
                                   'ramdisk_id': 1,
                                   'type': 'machine'
                                   },
                    'is_public': False
                    }

        self.stubs.Set(s3.S3ImageService, 'create', fake_create)
        image_location = 'fake_bucket/fake.img.manifest.xml'
        result = register_image(self.context, image_location)
        self.assertEqual(result['imageId'], 'ami-00000001')
def test_register_image_empty(self):
register_image = self.cloud.register_image
self.assertRaises(exception.MissingParameter, register_image,
self.context, image_location=None)
    def test_register_image_name(self):
        """The image name defaults to image_location when name is omitted.

        self.expected_name is mutated between the two register calls so
        the fake_create stub can assert the name the API passed down:
        first the fallback (the location itself), then an explicit name.
        """
        register_image = self.cloud.register_image

        def fake_create(_self, context, metadata, data=None):
            self.assertEqual(metadata['name'], self.expected_name)
            metadata['id'] = 1
            metadata['container_format'] = 'ami'
            metadata['is_public'] = False
            return metadata

        self.stubs.Set(s3.S3ImageService, 'create', fake_create)
        self.expected_name = 'fake_bucket/fake.img.manifest.xml'
        register_image(self.context,
                       image_location=self.expected_name,
                       name=None)
        self.expected_name = 'an image name'
        register_image(self.context,
                       image_location='some_location',
                       name=self.expected_name)
    def test_format_image(self):
        """_format_image maps internal image metadata to EC2 response fields."""
        image = {
            'id': 1,
            'container_format': 'ami',
            'name': 'name',
            'owner': 'someone',
            'properties': {
                'image_location': 'location',
                'kernel_id': 1,
                'ramdisk_id': 1,
                'type': 'machine'},
            'is_public': False}
        expected = {'name': 'name',
                    'imageOwnerId': 'someone',
                    'isPublic': False,
                    'imageId': 'ami-00000001',
                    'imageState': None,
                    'rootDeviceType': 'instance-store',
                    'architecture': None,
                    'imageLocation': 'location',
                    'kernelId': 'aki-00000001',
                    'ramdiskId': 'ari-00000001',
                    'rootDeviceName': '/dev/sda1',
                    'imageType': 'machine',
                    'description': None}
        result = self.cloud._format_image(image)
        self.assertThat(result, matchers.DictMatches(expected))
        # Missing location: synthesized as 'None (<name>)'.
        image['properties']['image_location'] = None
        expected['imageLocation'] = 'None (name)'
        result = self.cloud._format_image(image)
        self.assertThat(result, matchers.DictMatches(expected))
        # Missing name: the location doubles as the name.
        image['name'] = None
        image['properties']['image_location'] = 'location'
        expected['imageLocation'] = 'location'
        expected['name'] = 'location'
        result = self.cloud._format_image(image)
        self.assertThat(result, matchers.DictMatches(expected))
    def test_deregister_image(self):
        """deregister_image succeeds for a known AMI and 404s otherwise."""
        deregister_image = self.cloud.deregister_image

        def fake_delete(self, context, id):
            return None

        self.stubs.Set(fake._FakeImageService, 'delete', fake_delete)
        # valid image
        result = deregister_image(self.context, 'ami-00000001')
        self.assertTrue(result)
        # invalid image
        self.stubs.UnsetAll()

        def fake_detail_empty(self, context, **kwargs):
            # pretend the image service knows about no images at all
            return []

        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail_empty)
        self.assertRaises(exception.ImageNotFound, deregister_image,
                          self.context, 'ami-bad001')
def test_deregister_image_wrong_container_type(self):
deregister_image = self.cloud.deregister_image
def fake_delete(self, context, id):
return None
self.stubs.Set(fake._FakeImageService, 'delete', fake_delete)
self.assertRaises(exception.NotFound, deregister_image, self.context,
'aki-00000001')
def _run_instance(self, **kwargs):
rv = self.cloud.run_instances(self.context, **kwargs)
instance_id = rv['instancesSet'][0]['instanceId']
return instance_id
    def test_get_password_data(self):
        """get_password_data returns the password extracted for an instance."""
        instance_id = self._run_instance(
            image_id='ami-1',
            instance_type=CONF.default_flavor,
            max_count=1)
        # stub the password extraction so the API has something to return
        self.stubs.Set(password, 'extract_password', lambda i: '<PASSWORD>')
        output = self.cloud.get_password_data(context=self.context,
                                              instance_id=[instance_id])
        self.assertEqual(output['passwordData'], '<PASSWORD>')
        self.cloud.terminate_instances(self.context, [instance_id])
    def test_console_output(self):
        """get_console_output returns the base64-encoded console log."""
        instance_id = self._run_instance(
            image_id='ami-1',
            instance_type=CONF.default_flavor,
            max_count=1)
        output = self.cloud.get_console_output(context=self.context,
                                               instance_id=[instance_id])
        # the fake virt driver always produces this fixed console text
        self.assertEqual(base64.b64decode(output['output']),
                         'FAKE CONSOLE OUTPUT\nANOTHER\nLAST LINE')
        # TODO(soren): We need this until we can stop polling in the rpc code
        #              for unit tests.
        self.cloud.terminate_instances(self.context, [instance_id])
def test_key_generation(self):
result, private_key = self._create_key('test')
expected = db.key_pair_get(self.context,
self.context.user_id,
'test')['public_key']
(fd, fname) = tempfile.mkstemp()
os.write(fd, private_key)
public_key, err = utils.execute('ssh-keygen', '-e', '-f', fname)
os.unlink(fname)
# assert key fields are equal
self.assertEqual(''.join(public_key.split("\n")[2:-2]),
expected.split(" ")[1].strip())
def test_describe_key_pairs(self):
self._create_key('test1')
self._create_key('test2')
result = self.cloud.describe_key_pairs(self.context)
keys = result["keySet"]
self.assertTrue(filter(lambda k: k['keyName'] == 'test1', keys))
self.assertTrue(filter(lambda k: k['keyName'] == 'test2', keys))
def test_describe_bad_key_pairs(self):
self.assertRaises(exception.KeypairNotFound,
self.cloud.describe_key_pairs, self.context,
key_name=['DoesNotExist'])
def test_import_key_pair(self):
pubkey_path = os.path.join(os.path.dirname(__file__), 'public_key')
with open(pubkey_path + '/dummy.pub') as f:
dummypub = f.readline().rstrip()
with open(pubkey_path + '/dummy.fingerprint') as f:
dummyfprint = f.readline().rstrip()
key_name = 'testimportkey'
public_key_material = base64.b64encode(dummypub)
result = self.cloud.import_key_pair(self.context,
key_name,
public_key_material)
self.assertEqual(result['keyName'], key_name)
self.assertEqual(result['keyFingerprint'], dummyfprint)
keydata = db.key_pair_get(self.context,
self.context.user_id,
key_name)
self.assertEqual(dummypub, keydata['public_key'])
self.assertEqual(dummyfprint, keydata['fingerprint'])
def test_import_key_pair_quota_limit(self):
self.flags(quota_key_pairs=0)
pubkey_path = os.path.join(os.path.dirname(__file__), 'public_key')
f = open(pubkey_path + '/dummy.pub', 'r')
dummypub = f.readline().rstrip()
f.close
f = open(pubkey_path + '/dummy.fingerprint', 'r')
f.readline().rstrip()
f.close
key_name = 'testimportkey'
public_key_material = base64.b64encode(dummypub)
self.assertRaises(exception.KeypairLimitExceeded,
self.cloud.import_key_pair, self.context, key_name,
public_key_material)
def test_create_key_pair(self):
good_names = ('a', 'a' * 255, string.ascii_letters + ' -_')
bad_names = ('', 'a' * 256, '*', '/')
for key_name in good_names:
result = self.cloud.create_key_pair(self.context,
key_name)
self.assertEqual(result['keyName'], key_name)
for key_name in bad_names:
self.assertRaises(exception.InvalidKeypair,
self.cloud.create_key_pair,
self.context,
key_name)
def test_create_key_pair_quota_limit(self):
self.flags(quota_key_pairs=10)
for i in range(0, 10):
key_name = 'key_%i' % i
result = self.cloud.create_key_pair(self.context,
key_name)
self.assertEqual(result['keyName'], key_name)
# 11'th group should fail
self.assertRaises(exception.KeypairLimitExceeded,
self.cloud.create_key_pair,
self.context,
'foo')
def test_delete_key_pair(self):
self._create_key('test')
self.cloud.delete_key_pair(self.context, 'test')
    def test_run_instances(self):
        """run_instances boots an instance and reports EC2-style fields."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances

        def fake_show(self, context, id, **kwargs):
            # minimal active AMI so the boot path succeeds
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)

        def dumb(*args, **kwargs):
            pass

        # silence usage notifications; they are irrelevant here
        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        # make RPC casts synchronous so the instance is running on return
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['imageId'], 'ami-00000001')
        self.assertEqual(instance['instanceId'], 'i-00000001')
        self.assertEqual(instance['instanceState']['name'], 'running')
        self.assertEqual(instance['instanceType'], 'm1.small')
    def test_run_instances_invalid_maxcount(self):
        """max_count of 0 is rejected with InvalidInput."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 0}
        run_instances = self.cloud.run_instances

        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}

        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.assertRaises(exception.InvalidInput, run_instances,
                          self.context, **kwargs)
    def test_run_instances_invalid_mincount(self):
        """min_count of 0 is rejected with InvalidInput."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'min_count': 0}
        run_instances = self.cloud.run_instances

        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}

        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.assertRaises(exception.InvalidInput, run_instances,
                          self.context, **kwargs)
    def test_run_instances_invalid_count(self):
        """max_count below min_count is rejected with InvalidInput."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1,
                  'min_count': 2}
        run_instances = self.cloud.run_instances

        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}

        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.assertRaises(exception.InvalidInput, run_instances,
                          self.context, **kwargs)
    def test_run_instances_availability_zone(self):
        """The EC2 placement availability_zone is forwarded to compute_api."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1,
                  'placement': {'availability_zone': 'fake'},
                  }
        run_instances = self.cloud.run_instances

        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.useFixture(cast_as_call.CastAsCall(self.stubs))

        def fake_format(*args, **kwargs):
            pass

        # skip response formatting; only the create() kwargs matter here
        self.stubs.Set(self.cloud, '_format_run_instances', fake_format)

        def fake_create(*args, **kwargs):
            # the real assertion of this test lives in the stub
            self.assertEqual(kwargs['availability_zone'], 'fake')
            return ({'id': 'fake-instance'}, 'fake-res-id')

        self.stubs.Set(self.cloud.compute_api, 'create', fake_create)
        # NOTE(vish) the assert for this call is in the fake_create method.
        run_instances(self.context, **kwargs)
    def test_empty_reservation_id_from_token(self):
        """_resv_id_from_token returns None for an unknown client token."""
        client_token = '<PASSWORD>'

        def fake_get_all_system_metadata(context, search_filts):
            # verify the lookup filters on the EC2_client_token key
            reference = [{'key': ['EC2_client_token']},
                         {'value': ['client-token-1']}]
            self.assertEqual(search_filts, reference)
            return []

        self.stubs.Set(self.cloud.compute_api, 'get_all_system_metadata',
                       fake_get_all_system_metadata)
        resv_id = self.cloud._resv_id_from_token(self.context, client_token)
        self.assertIsNone(resv_id)
    def test_run_instances_idempotent(self):
        """Repeated run_instances with the same client token is idempotent.

        A repeated token returns the instance already launched for it; a
        new token launches a new instance; a terminated instance's token
        becomes reusable.
        """
        # Ensure subsequent run_instances calls with same client token
        # are idempotent and that ones with different client_token are not
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances

        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)

        def dumb(*args, **kwargs):
            pass

        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000001')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000002')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000002')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000001')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000003')
        # make sure terminated instances lose their client tokens
        self.cloud.stop_instances(self.context,
                                  instance_id=[instance['instanceId']])
        self.cloud.terminate_instances(self.context,
                                       instance_id=[instance['instanceId']])
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000004')
    def test_run_instances_image_state_none(self):
        """An image without a status is rejected with ImageNotActive."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances

        def fake_show_no_state(self, context, id):
            # deliberately omits the 'status' key
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'}, 'container_format': 'ami'}

        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_no_state)
        self.assertRaises(exception.ImageNotActive, run_instances,
                          self.context, **kwargs)
    def test_run_instances_image_state_invalid(self):
        """An image still decrypting is rejected with ImageNotActive."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances

        def fake_show_decrypt(self, context, id):
            # 'active' glance status, but the EC2 image_state property
            # says the image is still being decrypted
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine', 'image_state': 'decrypting'}}

        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_decrypt)
        self.assertRaises(exception.ImageNotActive, run_instances,
                          self.context, **kwargs)
    def test_run_instances_image_status_active(self):
        """An active image boots successfully."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances

        def fake_show_stat_active(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}

        def fake_id_to_glance_id(context, id):
            return 'cedef40a-ed67-4d10-800e-17455edce175'

        self.stubs.Set(fake._FakeImageService, 'show', fake_show_stat_active)
        self.stubs.Set(ec2utils, 'id_to_glance_id', fake_id_to_glance_id)
        result = run_instances(self.context, **kwargs)
        self.assertEqual(len(result['instancesSet']), 1)
def _restart_compute_service(self, periodic_interval_max=None):
"""restart compute service. NOTE: fake driver forgets all instances."""
self.compute.kill()
if periodic_interval_max:
self.compute = self.start_service(
'compute', periodic_interval_max=periodic_interval_max)
else:
self.compute = self.start_service('compute')
    def test_stop_start_instance(self):
        """stop/start/stop/terminate cycle reports correct state codes."""
        # Makes sure stop/start instance works.
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # a running instance can't be started.
        self.assertRaises(exception.InstanceInvalidState,
                          self.cloud.start_instances,
                          self.context, [instance_id])
        # running (16) -> stopped (80)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # stopped (80) -> running (16)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 16,
                                          'name': 'running'}}]}
        result = self.cloud.start_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # stopped (80) -> terminated (48)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
    def test_start_instances(self):
        """start_instances transitions a stopped instance back to running."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertTrue(result)
        # stopped (80) -> running (16)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 16,
                                          'name': 'running'}}]}
        result = self.cloud.start_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # running (16) -> terminated (48)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
    def test_start_instances_policy_failed(self):
        """A restrictive compute:start policy blocks start_instances."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # deny compute:start to the test project
        rules = {
            "compute:start":
                common_policy.parse_rule("project_id:non_fake"),
        }
        policy.set_rules(rules)
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.cloud.start_instances,
                                self.context, [instance_id])
        self.assertIn("compute:start", exc.format_message())
        self._restart_compute_service()
    def test_stop_instances(self):
        """stop_instances stops a running instance; terminate then works."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # running (16) -> stopped (80)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # stopped (80) -> terminated (48)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
    def test_stop_instances_policy_failed(self):
        """A restrictive compute:stop policy blocks stop_instances."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # deny compute:stop to the test project
        rules = {
            "compute:stop":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.cloud.stop_instances,
                                self.context, [instance_id])
        self.assertIn("compute:stop", exc.format_message())
        self._restart_compute_service()
    def test_terminate_instances(self):
        """terminate_instances moves a running instance to terminated."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # a running instance can't be started.
        self.assertRaises(exception.InstanceInvalidState,
                          self.cloud.start_instances,
                          self.context, [instance_id])
        # running (16) -> terminated (48)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_terminate_instances_invalid_instance_id(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
self._run_instance(**kwargs)
self.assertRaises(exception.InstanceNotFound,
self.cloud.terminate_instances,
self.context, ['i-2'])
self._restart_compute_service()
    def test_terminate_instances_disable_terminate(self):
        """disable_terminate keeps an instance running until cleared."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        internal_uuid = db.get_instance_uuid_by_ec2_id(self.context,
                                    ec2utils.ec2_id_to_id(instance_id))
        db.instance_update(self.context, internal_uuid,
                           {'disable_terminate': True})
        # terminate is a no-op: state stays running (16)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 16,
                                          'name': 'running'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        db.instance_update(self.context, internal_uuid,
                           {'disable_terminate': False})
        # once cleared, terminate proceeds: running (16) -> terminated (48)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
    def test_terminate_instances_two_instances(self):
        """Terminating two instances reports per-instance prior states."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        inst1 = self._run_instance(**kwargs)
        inst2 = self._run_instance(**kwargs)
        # stop the first instance so the two start from different states
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [inst1])
        self.assertEqual(result, expected)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}},
                        {'instanceId': 'i-00000002',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [inst1, inst2])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_reboot_instances(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
# a running instance can't be started.
self.assertRaises(exception.InstanceInvalidState,
self.cloud.start_instances,
self.context, [instance_id])
result = self.cloud.reboot_instances(self.context, [instance_id])
self.assertTrue(result)
def _volume_create(self, volume_id=None):
kwargs = {'name': 'test-volume',
'description': 'test volume description',
'status': 'available',
'host': 'fake',
'size': 1,
'attach_status': 'detached'}
if volume_id:
kwargs['volume_id'] = volume_id
return self.volume_api.create_with_kwargs(self.context, **kwargs)
def _snapshot_create(self, snapshot_id=None):
kwargs = {'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e4',
'status': "available",
'volume_size': 1}
if snapshot_id:
kwargs['snap_id'] = snapshot_id
return self.volume_api.create_snapshot_with_kwargs(self.context,
**kwargs)
def _create_snapshot(self, ec2_volume_id):
result = self.cloud.create_snapshot(self.context,
volume_id=ec2_volume_id)
return result['snapshotId']
    def _do_test_create_image(self, no_reboot):
        """Make sure that CreateImage works.

        Boots an EBS-backed instance, invokes create_image with the given
        ``no_reboot`` flag, and verifies the created image's block device
        mapping, kernel/ramdisk ids, root device type, and that the
        instance was powered off/on exactly when a reboot was requested.
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        (volumes, snapshots) = self._setUpImageSet(
            create_volumes_and_snapshots=True)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        ec2_instance_id = self._run_instance(**kwargs)

        def fake_show(meh, context, id, **kwargs):
            # image with an EBS root device mapped to our test snapshot
            bdm = [dict(snapshot_id=snapshots[0],
                        volume_size=1,
                        device_name='sda1',
                        delete_on_termination=False)]
            props = dict(kernel_id='cedef40a-ed67-4d10-800e-17455edce175',
                         ramdisk_id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                         root_device_name='/dev/sda1',
                         block_device_mapping=bdm)
            return dict(id=id,
                        properties=props,
                        container_format='ami',
                        status='active',
                        is_public=True)

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)

        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': volumes[0],
                         'snapshot_id': snapshots[0],
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'sda1',
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'connection_info': '{"foo":"bar"}',
                         'no_device': None})]

        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)

        # record power transitions so we can check the no_reboot behavior
        virt_driver = {}

        def fake_power_on(self, context, instance, network_info,
                          block_device_info):
            virt_driver['powered_on'] = True

        self.stubs.Set(fake_virt.FakeDriver, 'power_on', fake_power_on)

        def fake_power_off(self, instance):
            virt_driver['powered_off'] = True

        self.stubs.Set(fake_virt.FakeDriver, 'power_off', fake_power_off)

        result = self.cloud.create_image(self.context, ec2_instance_id,
                                         no_reboot=no_reboot)
        ec2_ids = [result['imageId']]
        created_image = self.cloud.describe_images(self.context,
                                                   ec2_ids)['imagesSet'][0]
        self.assertIn('blockDeviceMapping', created_image)
        bdm = created_image['blockDeviceMapping'][0]
        self.assertEqual(bdm.get('deviceName'), 'sda1')
        self.assertIn('ebs', bdm)
        self.assertEqual(bdm['ebs'].get('snapshotId'),
                         ec2utils.id_to_ec2_snap_id(snapshots[0]))
        self.assertEqual(created_image.get('kernelId'), 'aki-00000001')
        self.assertEqual(created_image.get('ramdiskId'), 'ari-00000002')
        self.assertEqual(created_image.get('rootDeviceType'), 'ebs')
        # power cycling happened iff a reboot was requested
        self.assertNotEqual(virt_driver.get('powered_on'), no_reboot)
        self.assertNotEqual(virt_driver.get('powered_off'), no_reboot)
        self.cloud.terminate_instances(self.context, [ec2_instance_id])
        self._restart_compute_service()
def test_create_image_no_reboot(self):
# Make sure that CreateImage works.
self._do_test_create_image(True)
def test_create_image_with_reboot(self):
# Make sure that CreateImage works.
self._do_test_create_image(False)
    def test_create_image_instance_store(self):
        """Ensure CreateImage fails as expected for an instance-store-backed
        instance
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        (volumes, snapshots) = self._setUpImageSet(
            create_volumes_and_snapshots=True)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        ec2_instance_id = self._run_instance(**kwargs)

        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            # BDM on /dev/vda only: no mapping for the root device, so the
            # instance counts as instance-store-backed
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': volumes[0],
                         'snapshot_id': snapshots[0],
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'delete_on_termination': False,
                         'no_device': None})]

        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)

        self.assertRaises(exception.InvalidParameterValue,
                          self.cloud.create_image,
                          self.context,
                          ec2_instance_id,
                          no_reboot=True)
    @staticmethod
    def _fake_bdm_get(ctxt, id, use_slave=False):
        """Return a fixed set of block device mappings for stubbing.

        Covers the interesting shapes: an attached volume, a
        snapshot-backed volume, a suppressed device (no_device), and
        several local blank disks (plain ephemeral, swap, and extras).
        """
        blockdms = [{'volume_id': 87654321,
                     'source_type': 'volume',
                     'destination_type': 'volume',
                     'snapshot_id': None,
                     'no_device': None,
                     'delete_on_termination': True,
                     'device_name': '/dev/sdh'},
                    {'volume_id': None,
                     'snapshot_id': 98765432,
                     'source_type': 'snapshot',
                     'destination_type': 'volume',
                     'no_device': None,
                     'delete_on_termination': True,
                     'device_name': '/dev/sdi'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': True,
                     'source_type': 'blank',
                     'destination_type': None,
                     'delete_on_termination': None,
                     'device_name': None},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sdb'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': 'swap',
                     'delete_on_termination': None,
                     'device_name': '/dev/sdc'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sdd'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sd3'},
                    ]
        # fill in the DB columns every BDM row is expected to carry
        extra = {
            'created_at': None,
            'updated_at': None,
            'deleted_at': None,
            'deleted': 0,
            'id': 0,
            'device_type': None,
            'disk_bus': None,
            'instance_uuid': '',
            'image_id': None,
            'volume_size': None,
            'connection_info': None,
            'boot_index': None,
            'guest_format': None,
        }
        for bdm in blockdms:
            bdm.update(extra)
        return blockdms
    def test_describe_instance_attribute(self):
        # Make sure that describe_instance_attribute works.
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       self._fake_bdm_get)

        def fake_get(ctxt, instance_id, want_objects=False):
            # build a fully-populated stopped instance to describe
            self.assertTrue(want_objects)
            inst_type = flavors.get_default_flavor()
            inst_type['name'] = 'fake_type'
            sys_meta = flavors.save_flavor_info({}, inst_type)
            secgroups = objects.SecurityGroupList()
            secgroups.objects.append(
                objects.SecurityGroup(name='fake0'))
            secgroups.objects.append(
                objects.SecurityGroup(name='fake1'))
            instance = objects.Instance(ctxt)
            instance.id = 0
            instance.uuid = 'e5fe5518-0288-4fa3-b0c4-c79764101b85'
            instance.root_device_name = '/dev/sdh'
            instance.security_groups = secgroups
            instance.vm_state = vm_states.STOPPED
            instance.kernel_id = 'cedef40a-ed67-4d10-800e-17455edce175'
            instance.ramdisk_id = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            instance.user_data = 'fake-user data'
            instance.shutdown_terminate = False
            instance.disable_terminate = False
            instance.system_metadata = sys_meta
            return instance

        self.stubs.Set(self.cloud.compute_api, 'get', fake_get)

        def fake_ec2_instance_get_by_id(ctxt, int_id):
            if int_id == 305419896:
                # 305419896 == 0x12345678, matching 'i-12345678' below
                fake_map = {
                    'created_at': None,
                    'updated_at': None,
                    'deleted_at': None,
                    'deleted': 0,
                    'id': 305419896,
                    'uuid': 'e5fe5518-0288-4fa3-b0c4-c79764101b85',
                }
                return fake_map
            raise exception.InstanceNotFound(instance_id=int_id)

        self.stubs.Set(db, 'ec2_instance_get_by_id',
                       fake_ec2_instance_get_by_id)

        get_attribute = functools.partial(
            self.cloud.describe_instance_attribute,
            self.context, 'i-12345678')

        bdm = get_attribute('blockDeviceMapping')
        # NOTE(review): sorting lists of dicts only works on Python 2;
        # this would need a key= on Python 3.
        bdm['blockDeviceMapping'].sort()

        expected_bdm = {'instance_id': 'i-12345678',
                        'rootDeviceType': 'ebs',
                        'blockDeviceMapping': [
                            {'deviceName': '/dev/sdh',
                             'ebs': {'status': 'attached',
                                     'deleteOnTermination': True,
                                     'volumeId': 'vol-05397fb1',
                                     'attachTime': '13:56:24'}}]}
        expected_bdm['blockDeviceMapping'].sort()
        self.assertEqual(bdm, expected_bdm)
        groupSet = get_attribute('groupSet')
        groupSet['groupSet'].sort()
        expected_groupSet = {'instance_id': 'i-12345678',
                             'groupSet': [{'groupId': 'fake0'},
                                          {'groupId': 'fake1'}]}
        expected_groupSet['groupSet'].sort()
        self.assertEqual(groupSet, expected_groupSet)
        self.assertEqual(get_attribute('instanceInitiatedShutdownBehavior'),
                         {'instance_id': 'i-12345678',
                          'instanceInitiatedShutdownBehavior': 'stop'})
        self.assertEqual(get_attribute('disableApiTermination'),
                         {'instance_id': 'i-12345678',
                          'disableApiTermination': False})
        self.assertEqual(get_attribute('instanceType'),
                         {'instance_id': 'i-12345678',
                          'instanceType': 'fake_type'})
        self.assertEqual(get_attribute('kernel'),
                         {'instance_id': 'i-12345678',
                          'kernel': 'aki-00000001'})
        self.assertEqual(get_attribute('ramdisk'),
                         {'instance_id': 'i-12345678',
                          'ramdisk': 'ari-00000002'})
        self.assertEqual(get_attribute('rootDeviceName'),
                         {'instance_id': 'i-12345678',
                          'rootDeviceName': '/dev/sdh'})
        # NOTE(yamahata): this isn't supported
        # get_attribute('sourceDestCheck')
        self.assertEqual(get_attribute('userData'),
                         {'instance_id': 'i-12345678',
                          'userData': '}\xa9\x1e\xba\xc7\xabu\xabZ'})
    def test_instance_initiated_shutdown_behavior(self):
        """Shutdown behavior defaults to 'stop' for all image BDM shapes."""
        def test_dia_iisb(expected_result, **kwargs):
            """test describe_instance_attribute
            attribute instance_initiated_shutdown_behavior
            """
            kwargs.update({'instance_type': CONF.default_flavor,
                           'max_count': 1})
            instance_id = self._run_instance(**kwargs)

            result = self.cloud.describe_instance_attribute(self.context,
                            instance_id, 'instanceInitiatedShutdownBehavior')
            self.assertEqual(result['instanceInitiatedShutdownBehavior'],
                             expected_result)

            expected = {'instancesSet': [
                            {'instanceId': instance_id,
                             'previousState': {'code': 16,
                                               'name': 'running'},
                             'currentState': {'code': 48,
                                              'name': 'terminated'}}]}
            result = self.cloud.terminate_instances(self.context,
                                                    [instance_id])
            self.assertEqual(result, expected)
            self._restart_compute_service()

        test_dia_iisb('stop', image_id='ami-1')

        block_device_mapping = [{'device_name': '/dev/vdb',
                                 'virtual_name': 'ephemeral0'}]
        test_dia_iisb('stop', image_id='ami-2',
                      block_device_mapping=block_device_mapping)

        def fake_show(self, context, id_, **kwargs):
            # ami-3..6 vary in how they declare ephemeral mappings
            LOG.debug("id_ %s", id_)

            prop = {}
            if id_ == 'ami-3':
                pass
            elif id_ == 'ami-4':
                prop = {'mappings': [{'device': 'sdb0',
                                          'virtual': 'ephemeral0'}]}
            elif id_ == 'ami-5':
                prop = {'block_device_mapping':
                        [{'device_name': '/dev/sdb0',
                          'virtual_name': 'ephemeral0'}]}
            elif id_ == 'ami-6':
                prop = {'mappings': [{'device': 'sdb0',
                                          'virtual': 'ephemeral0'}],
                        'block_device_mapping':
                        [{'device_name': '/dev/sdb0',
                          'virtual_name': 'ephemeral0'}]}

            prop_base = {'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                         'type': 'machine'}
            prop_base.update(prop)

            return {
                'id': id_,
                'name': 'fake_name',
                'properties': prop_base,
                'container_format': 'ami',
                'status': 'active'}

        # NOTE(yamahata): create ami-3 ... ami-7
        # ami-1 and ami-2 is already created by setUp()
        for i in range(3, 8):
            db.s3_image_create(self.context, 'ami-%d' % i)

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)

        test_dia_iisb('stop', image_id='ami-3')
        test_dia_iisb('stop', image_id='ami-4')
        test_dia_iisb('stop', image_id='ami-5')
        test_dia_iisb('stop', image_id='ami-6')
        # an explicit request overrides the default
        test_dia_iisb('terminate', image_id='ami-7',
                      instance_initiated_shutdown_behavior='terminate')
    def test_create_delete_tags(self):
        """Create EC2 tags on an instance, verify metadata, then delete."""
        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        # We need to stub out the MQ call - it won't succeed.  We do want
        # to check that the method is called, though
        meta_changes = [None]

        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            # Capture the metadata diff that would have been cast over RPC.
            meta_changes[0] = diff

        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)
        # Create a test image
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1111',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1)
        }
        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        # Create some tags
        md = {'key': 'foo', 'value': 'bar'}
        md_result = {'foo': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id],
                tag=[md])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                inst1)
        self.assertEqual(metadata, md_result)
        # '+' marks an added key in the captured diff.
        self.assertEqual(meta_changes, [{'foo': ['+', 'bar']}])
        # Delete them
        self.cloud.delete_tags(self.context, resource_id=[ec2_id],
                tag=[{'key': 'foo', 'value': 'bar'}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                inst1)
        self.assertEqual(metadata, {})
        # '-' marks a removed key in the captured diff.
        self.assertEqual(meta_changes, [{'foo': ['-']}])
    def test_describe_tags(self):
        """Exercise describe_tags filtering: by resource, key, value,
        wildcards, multiple values, and ANDed filters.
        """
        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        # We need to stub out the MQ call - it won't succeed.  We do want
        # to check that the method is called, though
        meta_changes = [None]

        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            # Capture the metadata diff that would have been cast over RPC.
            meta_changes[0] = diff

        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)
        # Create some test images
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1111',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1)
        }
        inst2_kwargs = {
                'reservation_id': 'b',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1112',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 2)
        }
        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id1 = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        inst2 = db.instance_create(self.context, inst2_kwargs)
        ec2_id2 = ec2utils.id_to_ec2_inst_id(inst2['uuid'])
        # Create some tags
        # We get one overlapping pair, and each has a different key value pair
        # inst1 : {'foo': 'bar', 'bax': 'wibble'}
        # inst2 : {'foo': 'bar', 'baz': 'quux'}
        md = {'key': 'foo', 'value': 'bar'}
        md_result = {'foo': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1, ec2_id2],
                tag=[md])
        self.assertEqual(meta_changes, [{'foo': ['+', 'bar']}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                inst1)
        self.assertEqual(metadata, md_result)
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                inst2)
        self.assertEqual(metadata, md_result)
        # Second key only on inst2.
        md2 = {'key': 'baz', 'value': 'quux'}
        md2_result = {'baz': 'quux'}
        md2_result.update(md_result)
        self.cloud.create_tags(self.context, resource_id=[ec2_id2],
                tag=[md2])
        self.assertEqual(meta_changes, [{'baz': ['+', 'quux']}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                inst2)
        self.assertEqual(metadata, md2_result)
        # Third key only on inst1.
        md3 = {'key': 'bax', 'value': 'wibble'}
        md3_result = {'bax': 'wibble'}
        md3_result.update(md_result)
        self.cloud.create_tags(self.context, resource_id=[ec2_id1],
                tag=[md3])
        self.assertEqual(meta_changes, [{'bax': ['+', 'wibble']}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                inst1)
        self.assertEqual(metadata, md3_result)
        # Expected tagSet entries used by the assertions below.
        inst1_key_foo = {'key': u'foo', 'resource_id': 'i-00000001',
                         'resource_type': 'instance', 'value': u'bar'}
        inst1_key_bax = {'key': u'bax', 'resource_id': 'i-00000001',
                         'resource_type': 'instance', 'value': u'wibble'}
        inst2_key_foo = {'key': u'foo', 'resource_id': 'i-00000002',
                         'resource_type': 'instance', 'value': u'bar'}
        inst2_key_baz = {'key': u'baz', 'resource_id': 'i-00000002',
                         'resource_type': 'instance', 'value': u'quux'}
        # We should be able to search by:
        # No filter
        tags = self.cloud.describe_tags(self.context)['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo,
                                inst2_key_baz, inst1_key_bax])
        # Resource ID
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'resource-id',
                         'value': [ec2_id1]}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst1_key_bax])
        # Resource Type
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'resource-type',
                         'value': ['instance']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo,
                                inst2_key_baz, inst1_key_bax])
        # Key, either bare or with wildcards
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['foo']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo])
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz])
        # '?' matches a single character, '*' any run of characters.
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['ba?']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_bax, inst2_key_baz])
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['b*']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_bax, inst2_key_baz])
        # Value, either bare or with wildcards
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'value',
                         'value': ['bar']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo])
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'value',
                         'value': ['wi*']}])['tagSet']
        self.assertEqual(tags, [inst1_key_bax])
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'value',
                         'value': ['quu?']}])['tagSet']
        self.assertEqual(tags, [inst2_key_baz])
        # Multiple values
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz', 'bax']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz, inst1_key_bax])
        # Multiple filters (AND): no match
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz']},
                        {'name': 'value',
                         'value': ['wibble']}])['tagSet']
        self.assertEqual(tags, [])
        # Multiple filters (AND): match
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz']},
                        {'name': 'value',
                         'value': ['quux']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz])
        # And we should fail on supported resource types
        self.assertRaises(exception.InvalidParameterValue,
                          self.cloud.describe_tags,
                          self.context,
                          filter=[{'name': 'resource-type',
                                   'value': ['instance', 'volume']}])
def test_resource_type_from_id(self):
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'i-12345'),
'instance')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'r-12345'),
'reservation')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'vol-12345'),
'volume')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'snap-12345'),
'snapshot')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'ami-12345'),
'image')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'ari-12345'),
'image')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'aki-12345'),
'image')
self.assertIsNone(
ec2utils.resource_type_from_id(self.context, 'x-12345'))
    @mock.patch.object(ec2utils, 'ec2_vol_id_to_uuid',
                       side_effect=lambda
                       ec2_volume_id: uuidutils.generate_uuid())
    def test_detach_volume_unattched_error(self, mock_ec2_vol_id_to_uuid):
        """Detaching a volume with no instance_uuid raises VolumeUnattached."""
        # Validates that VolumeUnattached is raised if the volume doesn't
        # have an instance_uuid value.
        ec2_volume_id = 'vol-987654321'

        # The volume API returns a record without 'instance_uuid', which is
        # the condition under test.
        with mock.patch.object(self.cloud.volume_api, 'get',
                               side_effect=lambda context, volume_id:
                                   {'id': volume_id}) as mock_get:
            self.assertRaises(exception.VolumeUnattached,
                              self.cloud.detach_volume,
                              self.context,
                              ec2_volume_id)
            mock_get.assert_called_once_with(self.context, mock.ANY)
            mock_ec2_vol_id_to_uuid.assert_called_once_with(ec2_volume_id)
class CloudTestCaseNeutronProxy(test.NoDBTestCase):
    """Security-group tests for the EC2 API backed by the Neutron proxy
    (``security_group_api=neutron``) instead of nova-network.
    """

    def setUp(self):
        super(CloudTestCaseNeutronProxy, self).setUp()
        cfg.CONF.set_override('security_group_api', 'neutron')
        self.cloud = cloud.CloudController()
        # Swap in the fake neutron client; restored in tearDown.
        self.original_client = neutronv2.get_client
        neutronv2.get_client = test_neutron.get_client
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)

    def tearDown(self):
        # Restore the real client factory and wipe fake-client state.
        neutronv2.get_client = self.original_client
        test_neutron.get_client()._reset()
        super(CloudTestCaseNeutronProxy, self).tearDown()

    def test_describe_security_groups(self):
        # Makes sure describe_security_groups works and filters results.
        group_name = 'test'
        description = 'test'
        self.cloud.create_security_group(self.context, group_name,
                                         description)
        result = self.cloud.describe_security_groups(self.context)
        # NOTE(vish): should have the default group as well
        self.assertEqual(len(result['securityGroupInfo']), 2)
        result = self.cloud.describe_security_groups(self.context,
                      group_name=[group_name])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(result['securityGroupInfo'][0]['groupName'],
                         group_name)
        self.cloud.delete_security_group(self.context, group_name)

    def test_describe_security_groups_by_id(self):
        """Filter describe_security_groups by neutron-assigned group id."""
        group_name = 'test'
        description = 'test'
        self.cloud.create_security_group(self.context, group_name,
                                         description)
        neutron = test_neutron.get_client()
        # Get id from neutron since cloud.create_security_group
        # does not expose it.
        search_opts = {'name': group_name}
        groups = neutron.list_security_groups(
            **search_opts)['security_groups']
        result = self.cloud.describe_security_groups(self.context,
                      group_id=[groups[0]['id']])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(
                result['securityGroupInfo'][0]['groupName'],
                group_name)
        self.cloud.delete_security_group(self.context, group_name)

    def test_create_delete_security_group(self):
        """Round-trip create/delete of a security group via the proxy."""
        descript = 'test description'
        create = self.cloud.create_security_group
        result = create(self.context, 'testgrp', descript)
        group_descript = result['securityGroupSet'][0]['groupDescription']
        self.assertEqual(descript, group_descript)
        delete = self.cloud.delete_security_group
        self.assertTrue(delete(self.context, 'testgrp'))
| # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import copy
import datetime
import functools
import os
import string
import tempfile
import fixtures
import iso8601
import mock
from oslo.config import cfg
from nova.api.ec2 import cloud
from nova.api.ec2 import ec2utils
from nova.api.ec2 import inst_state
from nova.api.metadata import password
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova.image import s3
from nova.network import api as network_api
from nova.network import base_api as base_network_api
from nova.network import model
from nova.network import neutronv2
from nova import objects
from nova.objects import base as obj_base
from nova.openstack.common import log as logging
from nova.openstack.common import policy as common_policy
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova import policy
from nova import test
from nova.tests.api.openstack.compute.contrib import (
test_neutron_security_groups as test_neutron)
from nova.tests import cast_as_call
from nova.tests import fake_block_device
from nova.tests import fake_network
from nova.tests import fake_notifier
from nova.tests import fake_utils
from nova.tests.image import fake
from nova.tests import matchers
from nova import utils
from nova.virt import fake as fake_virt
from nova import volume
# Module-level configuration handle; the import_opt calls make the referenced
# options (compute_driver, default_flavor, use_ipv6) available on CONF.
CONF = cfg.CONF
CONF.import_opt('compute_driver', 'nova.virt.driver')
CONF.import_opt('default_flavor', 'nova.compute.flavors')
CONF.import_opt('use_ipv6', 'nova.netconf')
LOG = logging.getLogger(__name__)
# Fake hostname used for instances/services created by these tests.
HOST = "testhost"
def get_fake_cache(get_floating):
    """Build a canned network-info cache for tests.

    When *get_floating* is true, the first fixed IP carries two floating
    IPs; an IPv6 subnet is appended when CONF.use_ipv6 is set.  Returns a
    hydrated model.NetworkInfo.
    """
    def _make_ip(address, fixed=True, floats=None):
        entry = {'address': address,
                 'type': 'fixed' if fixed else 'floating'}
        if fixed and floats:
            entry['floating_ips'] = [_make_ip(float_addr, fixed=False)
                                     for float_addr in floats]
        return entry

    first_ip_floats = ['1.2.3.4', '5.6.7.8'] if get_floating else None
    ip_info = [_make_ip('192.168.0.3', floats=first_ip_floats),
               _make_ip('192.168.0.4')]
    subnets = [{'cidr': '192.168.0.0/24', 'ips': ip_info}]
    if CONF.use_ipv6:
        ipv6_addr = 'fe80:b33f::a8bb:ccff:fedd:eeff'
        subnets.append({'cidr': 'fe80:b33f::/64',
                        'ips': [_make_ip(ipv6_addr)]})
    info = [{'address': 'aa:bb:cc:dd:ee:ff',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'private',
                         'subnets': subnets}}]
    return model.NetworkInfo.hydrate(info)
def get_instances_with_cached_ips(orig_func, get_floating,
                                  *args, **kwargs):
    """Kludge the cache into instance(s) without having to create DB
    entries
    """
    instances = orig_func(*args, **kwargs)
    # Callers may request objects or plain dicts; build the matching
    # info_cache representation for whichever was returned.
    if kwargs.get('want_objects', False):
        info_cache = objects.InstanceInfoCache()
        info_cache.network_info = get_fake_cache(get_floating)
        info_cache.obj_reset_changes()
    else:
        info_cache = {'network_info': get_fake_cache(get_floating)}
    # orig_func may return one instance or a collection of them.
    if isinstance(instances, (list, obj_base.ObjectListBase)):
        for instance in instances:
            instance['info_cache'] = info_cache
    else:
        instances['info_cache'] = info_cache
    return instances
class CloudTestCase(test.TestCase):
    """Functional tests for the EC2 CloudController using nova-network."""
    def setUp(self):
        """Wire up fake drivers, image service, networking, and services."""
        super(CloudTestCase, self).setUp()
        self.useFixture(test.SampleNetworks())
        ec2utils.reset_cache()
        self.flags(compute_driver='nova.virt.fake.FakeDriver',
                   volume_api_class='nova.tests.fake_volume.API')
        self.useFixture(fixtures.FakeLogger('boto'))
        fake_utils.stub_out_utils_spawn_n(self.stubs)

        def fake_show(meh, context, id, **kwargs):
            # Canned 'ami' image record returned for any image id.
            return {'id': id,
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine',
                        'image_state': 'available'}}

        def fake_detail(_self, context, **kwargs):
            # Echo back a single image whose name matches the 'name' filter.
            image = fake_show(None, context, None)
            image['name'] = kwargs.get('filters', {}).get('name')
            return [image]

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        fake.stub_out_image_service(self.stubs)

        def dumb(*args, **kwargs):
            pass

        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        fake_network.set_stub_network_methods(self.stubs)
        # set up our cloud
        self.cloud = cloud.CloudController()
        self.flags(scheduler_driver='nova.scheduler.chance.ChanceScheduler')
        # Short-circuit the conductor service
        self.flags(use_local=True, group='conductor')
        # Stub out the notification service so we use the no-op serializer
        # and avoid lazy-load traces with the wrap_exception decorator in
        # the compute service.
        fake_notifier.stub_notifier(self.stubs)
        self.addCleanup(fake_notifier.reset)
        # set up services
        self.conductor = self.start_service('conductor',
                manager=CONF.conductor.manager)
        self.compute = self.start_service('compute')
        self.scheduler = self.start_service('scheduler')
        self.network = self.start_service('network')
        self.consoleauth = self.start_service('consoleauth')
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)
        self.volume_api = volume.API()
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        # make sure we can map ami-00000001/2 to a uuid in FakeImageService
        db.s3_image_create(self.context,
                           'cedef40a-ed67-4d10-800e-17455edce175')
        db.s3_image_create(self.context,
                           '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
    def tearDown(self):
        """Reset the fake volume API and image service state."""
        self.volume_api.reset_fake_api(self.context)
        super(CloudTestCase, self).tearDown()
        fake.FakeImageService_reset()
    # Stub target for stubs.Set: always reports iSCSI target id 1.
    def fake_get_target(obj, iqn):
        return 1
    # Stub target for stubs.Set: makes iSCSI target removal a no-op.
    def fake_remove_iscsi_target(obj, tid, lun, vol_id, **kwargs):
        pass
    def _stub_instance_get_with_fixed_ips(self,
                                          func_name, get_floating=True):
        """Wrap compute_api.<func_name> so results carry a fake IP cache."""
        # Bind the real implementation first so the stub can delegate to it.
        orig_func = getattr(self.cloud.compute_api, func_name)

        def fake_get(*args, **kwargs):
            return get_instances_with_cached_ips(orig_func, get_floating,
                                                 *args, **kwargs)
        self.stubs.Set(self.cloud.compute_api, func_name, fake_get)
    def _create_key(self, name):
        """Create and return a keypair for the test user."""
        # NOTE(vish): create depends on pool, so just call helper directly
        keypair_api = compute_api.KeypairAPI()
        return keypair_api.create_key_pair(self.context, self.context.user_id,
                                           name)
    def test_describe_regions(self):
        # Makes sure describe regions runs without raising an exception.
        result = self.cloud.describe_regions(self.context)
        self.assertEqual(len(result['regionInfo']), 1)
        # With region_list configured, each entry becomes a region.
        self.flags(region_list=["one=test_host1", "two=test_host2"])
        result = self.cloud.describe_regions(self.context)
        self.assertEqual(len(result['regionInfo']), 2)
    def test_describe_addresses(self):
        # Makes sure describe addresses runs without raising an exception.
        address = "10.10.10.10"
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        # Exercise the nova-network path explicitly.
        self.flags(network_api_class='nova.network.api.API')
        self.cloud.allocate_address(self.context)
        self.cloud.describe_addresses(self.context)
        self.cloud.release_address(self.context,
                                  public_ip=address)
        db.floating_ip_destroy(self.context, address)
    def test_describe_addresses_in_neutron(self):
        # Makes sure describe addresses runs without raising an exception.
        address = "10.10.10.10"
        # Same flow as test_describe_addresses but through the neutron API.
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.cloud.allocate_address(self.context)
        self.cloud.describe_addresses(self.context)
        self.cloud.release_address(self.context,
                                  public_ip=address)
        db.floating_ip_destroy(self.context, address)
    def test_describe_specific_address(self):
        # Makes sure describe specific address works.
        addresses = ["10.10.10.10", "10.10.10.11"]
        for address in addresses:
            db.floating_ip_create(self.context,
                                  {'address': address,
                                   'pool': 'nova'})
            self.cloud.allocate_address(self.context)
        # Unfiltered call returns both addresses ...
        result = self.cloud.describe_addresses(self.context)
        self.assertEqual(len(result['addressesSet']), 2)
        # ... filtering by public_ip narrows it down to one.
        result = self.cloud.describe_addresses(self.context,
                                               public_ip=['10.10.10.10'])
        self.assertEqual(len(result['addressesSet']), 1)
        for address in addresses:
            self.cloud.release_address(self.context,
                                       public_ip=address)
            db.floating_ip_destroy(self.context, address)
    def test_allocate_address(self):
        """Allocate returns the free floating IP; an empty pool raises."""
        address = "10.10.10.10"
        allocate = self.cloud.allocate_address
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.assertEqual(allocate(self.context)['publicIp'], address)
        # With the only floating IP gone, allocation must fail.
        db.floating_ip_destroy(self.context, address)
        self.assertRaises(exception.NoMoreFloatingIps,
                          allocate,
                          self.context)
    def test_release_address(self):
        """Releasing an owned floating IP reports success."""
        address = "10.10.10.10"
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova',
                               'project_id': self.project_id})
        result = self.cloud.release_address(self.context, address)
        self.assertEqual(result.get('return', None), 'true')
    def test_associate_disassociate_address(self):
        # Verifies associate runs cleanly without raising an exception.
        address = "10.10.10.10"
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.cloud.allocate_address(self.context)
        # TODO(jkoelker) Probably need to query for instance_type_id and
        #                make sure we get a valid one
        inst = db.instance_create(self.context, {'host': self.compute.host,
                                                 'display_name': HOST,
                                                 'instance_type_id': 1})
        # Point all networks at the test network host before allocating.
        networks = db.network_get_all(self.context)
        for network in networks:
            db.network_update(self.context, network['id'],
                              {'host': self.network.host})
        project_id = self.context.project_id
        nw_info = self.network.allocate_for_instance(self.context,
                                                 instance_id=inst['id'],
                                                 instance_uuid=inst['uuid'],
                                                 host=inst['host'],
                                                 vpn=None,
                                                 rxtx_factor=3,
                                                 project_id=project_id,
                                                 macs=None)
        fixed_ips = nw_info.fixed_ips()
        ec2_id = ec2utils.id_to_ec2_inst_id(inst['uuid'])
        # Short-circuit IP lookups and cache refreshes with canned data.
        self.stubs.Set(ec2utils, 'get_ip_info_for_instance',
                       lambda *args: {'fixed_ips': ['10.0.0.1'],
                                      'fixed_ip6s': [],
                                      'floating_ips': []})
        self.stubs.Set(network_api.API, 'get_instance_id_by_floating_address',
                       lambda *args: 1)

        def fake_update_instance_cache_with_nw_info(api, context, instance,
                                                    nw_info=None,
                                                    update_cells=True):
            return

        self.stubs.Set(base_network_api, "update_instance_cache_with_nw_info",
                       fake_update_instance_cache_with_nw_info)
        self.cloud.associate_address(self.context,
                                     instance_id=ec2_id,
                                     public_ip=address)
        self.cloud.disassociate_address(self.context,
                                        public_ip=address)
        self.cloud.release_address(self.context,
                                  public_ip=address)
        # Clean up the fixed IP, instance, and floating IP we created.
        self.network.deallocate_fixed_ip(self.context, fixed_ips[0]['address'],
                                         inst['host'])
        db.instance_destroy(self.context, inst['uuid'])
        db.floating_ip_destroy(self.context, address)
    def test_disassociate_auto_assigned_address(self):
        """Verifies disassociating auto assigned floating IP
        raises an exception
        """
        address = "10.10.10.10"

        def fake_get(*args, **kwargs):
            pass

        def fake_disassociate_floating_ip(*args, **kwargs):
            # Simulate the network API rejecting an auto-assigned IP.
            raise exception.CannotDisassociateAutoAssignedFloatingIP()

        self.stubs.Set(network_api.API, 'get_instance_id_by_floating_address',
                       lambda *args: 1)
        self.stubs.Set(self.cloud.compute_api, 'get', fake_get)
        self.stubs.Set(network_api.API, 'disassociate_floating_ip',
                       fake_disassociate_floating_ip)
        self.assertRaises(exception.CannotDisassociateAutoAssignedFloatingIP,
                          self.cloud.disassociate_address,
                          self.context, public_ip=address)
    def test_disassociate_unassociated_address(self):
        """Disassociating a floating IP that is not attached still succeeds."""
        address = "10.10.10.10"
        db.floating_ip_create(self.context,
                              {'address': address,
                               'pool': 'nova'})
        self.cloud.allocate_address(self.context)
        self.cloud.describe_addresses(self.context)
        result = self.cloud.disassociate_address(self.context,
                                                 public_ip=address)
        self.assertEqual(result['return'], 'true')
        db.floating_ip_destroy(self.context, address)
    def test_describe_security_groups(self):
        # Makes sure describe_security_groups works and filters results.
        sec = db.security_group_create(self.context,
                                       {'project_id': self.context.project_id,
                                        'name': 'test'})
        result = self.cloud.describe_security_groups(self.context)
        # NOTE(vish): should have the default group as well
        self.assertEqual(len(result['securityGroupInfo']), 2)
        # Filtering by name returns only the created group.
        result = self.cloud.describe_security_groups(self.context,
                      group_name=[sec['name']])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(
                result['securityGroupInfo'][0]['groupName'],
                sec['name'])
        db.security_group_destroy(self.context, sec['id'])
    def test_describe_security_groups_all_tenants(self):
        # Makes sure describe_security_groups works and filters results.
        # The group belongs to a different project than self.context's.
        sec = db.security_group_create(self.context,
                                       {'project_id': 'foobar',
                                        'name': 'test'})

        def _check_name(result, i, expected):
            self.assertEqual(result['securityGroupInfo'][i]['groupName'],
                             expected)

        # include all tenants
        filter = [{'name': 'all-tenants', 'value': {'1': 1}}]
        result = self.cloud.describe_security_groups(self.context,
                                                     filter=filter)
        self.assertEqual(len(result['securityGroupInfo']), 2)
        _check_name(result, 0, 'default')
        _check_name(result, 1, sec['name'])
        # exclude all tenants
        filter = [{'name': 'all-tenants', 'value': {'1': 0}}]
        result = self.cloud.describe_security_groups(self.context,
                                                     filter=filter)
        self.assertEqual(len(result['securityGroupInfo']), 1)
        _check_name(result, 0, 'default')
        # default all tenants
        result = self.cloud.describe_security_groups(self.context)
        self.assertEqual(len(result['securityGroupInfo']), 1)
        _check_name(result, 0, 'default')
        db.security_group_destroy(self.context, sec['id'])
    def test_describe_security_groups_by_id(self):
        """Filter describe_security_groups by numeric group id."""
        sec = db.security_group_create(self.context,
                                       {'project_id': self.context.project_id,
                                        'name': 'test'})
        result = self.cloud.describe_security_groups(self.context,
                      group_id=[sec['id']])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(
                result['securityGroupInfo'][0]['groupName'],
                sec['name'])
        # The implicit 'default' group can be looked up by id too.
        default = db.security_group_get_by_name(self.context,
                                                self.context.project_id,
                                                'default')
        result = self.cloud.describe_security_groups(self.context,
                      group_id=[default['id']])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(
                result['securityGroupInfo'][0]['groupName'],
                'default')
        db.security_group_destroy(self.context, sec['id'])
    def test_create_delete_security_group(self):
        """Round-trip create/delete of a security group."""
        descript = 'test description'
        create = self.cloud.create_security_group
        result = create(self.context, 'testgrp', descript)
        group_descript = result['securityGroupSet'][0]['groupDescription']
        self.assertEqual(descript, group_descript)
        delete = self.cloud.delete_security_group
        self.assertTrue(delete(self.context, 'testgrp'))
    def test_security_group_quota_limit(self):
        """Creating beyond quota_security_groups raises."""
        self.flags(quota_security_groups=10)
        # Creates 9 groups; with the implicit 'default' group that fills
        # the quota of 10.
        for i in range(1, CONF.quota_security_groups):
            name = 'test name %i' % i
            descript = 'test description %i' % i
            create = self.cloud.create_security_group
            create(self.context, name, descript)

        # 11'th group should fail
        self.assertRaises(exception.SecurityGroupLimitExceeded,
                          create, self.context, 'foo', 'bar')
    def test_delete_security_group_by_id(self):
        """Delete accepts group_id instead of a name."""
        sec = db.security_group_create(self.context,
                                       {'project_id': self.context.project_id,
                                        'name': 'test'})
        delete = self.cloud.delete_security_group
        self.assertTrue(delete(self.context, group_id=sec['id']))
    def test_delete_security_group_with_bad_name(self):
        """Deleting a nonexistent group name raises SecurityGroupNotFound."""
        delete = self.cloud.delete_security_group
        notfound = exception.SecurityGroupNotFound
        self.assertRaises(notfound, delete, self.context, 'badname')
    def test_delete_security_group_with_bad_group_id(self):
        """Deleting a nonexistent group id raises SecurityGroupNotFound."""
        delete = self.cloud.delete_security_group
        notfound = exception.SecurityGroupNotFound
        self.assertRaises(notfound, delete, self.context, group_id=999)
    def test_delete_security_group_no_params(self):
        """Delete with neither name nor id raises MissingParameter."""
        delete = self.cloud.delete_security_group
        self.assertRaises(exception.MissingParameter, delete, self.context)
    def test_delete_security_group_policy_not_allowed(self):
        """Policy restricting to the owner project blocks cross-project
        deletes with PolicyNotAuthorized.
        """
        rules = {'compute_extension:security_groups':
                 common_policy.parse_rule('project_id:%(project_id)s')}
        policy.set_rules(rules)
        with mock.patch.object(self.cloud.security_group_api,
                               'get') as get:
            # The fetched group belongs to a different project.
            get.return_value = {'project_id': 'invalid'}
            self.assertRaises(exception.PolicyNotAuthorized,
                              self.cloud.delete_security_group, self.context,
                              'fake-name', 'fake-id')
    def test_authorize_security_group_ingress_policy_not_allowed(self):
        """Policy restricting to the owner project blocks cross-project
        rule authorization with PolicyNotAuthorized.
        """
        rules = {'compute_extension:security_groups':
                 common_policy.parse_rule('project_id:%(project_id)s')}
        policy.set_rules(rules)
        with mock.patch.object(self.cloud.security_group_api,
                               'get') as get:
            # The fetched group belongs to a different project.
            get.return_value = {'project_id': 'invalid'}
            self.assertRaises(exception.PolicyNotAuthorized,
                              self.cloud.authorize_security_group_ingress,
                              self.context,
                              'fake-name', 'fake-id')
    def test_authorize_security_group_ingress(self):
        """Authorize a simple TCP port rule by group name."""
        kwargs = {'project_id': self.context.project_id, 'name': 'test'}
        sec = db.security_group_create(self.context, kwargs)
        authz = self.cloud.authorize_security_group_ingress
        kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
        self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
    def test_authorize_security_group_ingress_ip_permissions_ip_ranges(self):
        """Authorize via the ip_permissions structure with CIDR ranges."""
        kwargs = {'project_id': self.context.project_id, 'name': 'test'}
        sec = db.security_group_create(self.context, kwargs)
        authz = self.cloud.authorize_security_group_ingress
        kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
                                      'ip_ranges':
                                         {'1': {'cidr_ip': u'0.0.0.0/0'},
                                          '2': {'cidr_ip': u'10.10.10.10/32'}},
                                      'ip_protocol': u'tcp'}]}
        self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
    def test_authorize_security_group_fail_missing_source_group(self):
        """Referencing a nonexistent source group raises
        SecurityGroupNotFound.
        """
        kwargs = {'project_id': self.context.project_id, 'name': 'test'}
        sec = db.security_group_create(self.context, kwargs)
        authz = self.cloud.authorize_security_group_ingress
        # 'somegroup1' was never created, so authorization must fail.
        kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
                  'ip_ranges': {'1': {'cidr_ip': u'0.0.0.0/0'},
                                '2': {'cidr_ip': u'10.10.10.10/32'}},
                  'groups': {'1': {'user_id': u'someuser',
                                   'group_name': u'somegroup1'}},
                  'ip_protocol': u'tcp'}]}
        self.assertRaises(exception.SecurityGroupNotFound, authz,
                          self.context, group_name=sec['name'], **kwargs)
    def test_authorize_security_group_ingress_ip_permissions_groups(self):
        """Authorize ingress from two existing source groups of another
        user/project.
        """
        kwargs = {
            'project_id': self.context.project_id,
            'user_id': self.context.user_id,
            'name': 'test'
        }
        # Create the source groups that the rule will reference.
        sec = db.security_group_create(self.context,
                                       {'project_id': 'someuser',
                                        'user_id': 'someuser',
                                        'description': '',
                                        'name': 'somegroup1'})
        sec = db.security_group_create(self.context,
                                       {'project_id': 'someuser',
                                        'user_id': 'someuser',
                                        'description': '',
                                        'name': 'othergroup2'})
        # The target group being authorized.
        sec = db.security_group_create(self.context, kwargs)
        authz = self.cloud.authorize_security_group_ingress
        kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
                  'groups': {'1': {'user_id': u'someuser',
                                   'group_name': u'somegroup1'},
                             '2': {'user_id': u'someuser',
                                   'group_name': u'othergroup2'}},
                  'ip_protocol': u'tcp'}]}
        self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
    def test_describe_security_group_ingress_groups(self):
        """A group-sourced rule with no protocol expands into icmp/tcp/udp
        rules; describe_security_groups must report all of them.
        """
        kwargs = {
            'project_id': self.context.project_id,
            'user_id': self.context.user_id,
            'name': 'test'
        }
        sec1 = db.security_group_create(self.context, kwargs)
        sec2 = db.security_group_create(self.context,
                                       {'project_id': 'someuser',
                                        'user_id': 'someuser',
                                        'description': '',
                                        'name': 'somegroup1'})
        sec3 = db.security_group_create(self.context,
                                       {'project_id': 'someuser',
                                        'user_id': 'someuser',
                                        'description': '',
                                        'name': 'othergroup2'})
        authz = self.cloud.authorize_security_group_ingress
        # First permission omits protocol/ports; second is a plain tcp/80.
        kwargs = {'ip_permissions': [
                  {'groups': {'1': {'user_id': u'someuser',
                                    'group_name': u'somegroup1'}}},
                  {'ip_protocol': 'tcp',
                   'from_port': 80,
                   'to_port': 80,
                   'groups': {'1': {'user_id': u'someuser',
                                    'group_name': u'othergroup2'}}}]}
        self.assertTrue(authz(self.context, group_name=sec1['name'], **kwargs))
        describe = self.cloud.describe_security_groups
        groups = describe(self.context, group_name=['test'])
        self.assertEqual(len(groups['securityGroupInfo']), 1)
        actual_rules = groups['securityGroupInfo'][0]['ipPermissions']
        self.assertEqual(len(actual_rules), 4)
        # The protocol-less permission expands to icmp (-1/-1), tcp and udp
        # over the full port range; the tcp/80 rule passes through as-is.
        expected_rules = [{'fromPort': -1,
                           'groups': [{'groupName': 'somegroup1',
                                       'userId': 'someuser'}],
                           'ipProtocol': 'icmp',
                           'ipRanges': [],
                           'toPort': -1},
                          {'fromPort': 1,
                           'groups': [{'groupName': u'somegroup1',
                                       'userId': u'someuser'}],
                           'ipProtocol': 'tcp',
                           'ipRanges': [],
                           'toPort': 65535},
                          {'fromPort': 1,
                           'groups': [{'groupName': u'somegroup1',
                                       'userId': u'someuser'}],
                           'ipProtocol': 'udp',
                           'ipRanges': [],
                           'toPort': 65535},
                          {'fromPort': 80,
                           'groups': [{'groupName': u'othergroup2',
                                       'userId': u'someuser'}],
                           'ipProtocol': u'tcp',
                           'ipRanges': [],
                           'toPort': 80}]
        for rule in expected_rules:
            self.assertIn(rule, actual_rules)
        db.security_group_destroy(self.context, sec3['id'])
        db.security_group_destroy(self.context, sec2['id'])
        db.security_group_destroy(self.context, sec1['id'])
def test_revoke_security_group_ingress_policy_not_allowed(self):
rules = {'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')}
policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.revoke_security_group_ingress, self.context,
'fake-name', 'fake-id')
def test_revoke_security_group_ingress(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec['id'], **kwargs)
revoke = self.cloud.revoke_security_group_ingress
self.assertTrue(revoke(self.context, group_name=sec['name'], **kwargs))
def test_authorize_revoke_security_group_ingress_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec['id'], **kwargs)
revoke = self.cloud.revoke_security_group_ingress
self.assertTrue(revoke(self.context, group_id=sec['id'], **kwargs))
def test_authorize_security_group_ingress_missing_protocol_params(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
self.assertRaises(exception.MissingParameter, authz, self.context,
'test')
def test_authorize_security_group_ingress_missing_group_name_or_id(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
authz = self.cloud.authorize_security_group_ingress
self.assertRaises(exception.MissingParameter, authz, self.context,
**kwargs)
def test_authorize_security_group_ingress_already_exists(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_name=sec['name'], **kwargs)
self.assertRaises(exception.SecurityGroupRuleExists, authz,
self.context, group_name=sec['name'], **kwargs)
def test_security_group_ingress_quota_limit(self):
self.flags(quota_security_group_rules=20)
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec_group = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
for i in range(100, 120):
kwargs = {'to_port': i, 'from_port': i, 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec_group['id'], **kwargs)
kwargs = {'to_port': 121, 'from_port': 121, 'ip_protocol': 'tcp'}
self.assertRaises(exception.SecurityGroupLimitExceeded, authz,
self.context, group_id=sec_group['id'], **kwargs)
def _test_authorize_security_group_no_ports_with_source_group(self, proto):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'description': '',
'name': 'test'
}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
auth_kwargs = {'ip_protocol': proto,
'groups': {'1': {'user_id': self.context.user_id,
'group_name': u'test'}}}
self.assertTrue(authz(self.context, group_name=sec['name'],
**auth_kwargs))
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
self.assertEqual(len(groups['securityGroupInfo']), 1)
actual_rules = groups['securityGroupInfo'][0]['ipPermissions']
expected_rules = [{'groups': [{'groupName': 'test',
'userId': self.context.user_id}],
'ipProtocol': proto,
'ipRanges': []}]
if proto == 'icmp':
expected_rules[0]['fromPort'] = -1
expected_rules[0]['toPort'] = -1
else:
expected_rules[0]['fromPort'] = 1
expected_rules[0]['toPort'] = 65535
self.assertTrue(expected_rules == actual_rules)
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
db.security_group_destroy(self.context, sec['id'])
def _test_authorize_security_group_no_ports_no_source_group(self, proto):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'description': '',
'name': 'test'
}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
auth_kwargs = {'ip_protocol': proto}
self.assertRaises(exception.MissingParameter, authz, self.context,
group_name=sec['name'], **auth_kwargs)
db.security_group_destroy(self.context, sec['id'])
def test_authorize_security_group_no_ports_icmp(self):
self._test_authorize_security_group_no_ports_with_source_group('icmp')
self._test_authorize_security_group_no_ports_no_source_group('icmp')
def test_authorize_security_group_no_ports_tcp(self):
self._test_authorize_security_group_no_ports_with_source_group('tcp')
self._test_authorize_security_group_no_ports_no_source_group('tcp')
def test_authorize_security_group_no_ports_udp(self):
self._test_authorize_security_group_no_ports_with_source_group('udp')
self._test_authorize_security_group_no_ports_no_source_group('udp')
def test_revoke_security_group_ingress_missing_group_name_or_id(self):
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
revoke = self.cloud.revoke_security_group_ingress
self.assertRaises(exception.MissingParameter, revoke,
self.context, **kwargs)
def test_delete_security_group_in_use_by_group(self):
self.cloud.create_security_group(self.context, 'testgrp1',
"test group 1")
self.cloud.create_security_group(self.context, 'testgrp2',
"test group 2")
kwargs = {'groups': {'1': {'user_id': u'%s' % self.context.user_id,
'group_name': u'testgrp2'}},
}
self.cloud.authorize_security_group_ingress(self.context,
group_name='testgrp1', **kwargs)
group1 = db.security_group_get_by_name(self.context,
self.project_id, 'testgrp1')
get_rules = db.security_group_rule_get_by_security_group
self.assertTrue(get_rules(self.context, group1['id']))
self.cloud.delete_security_group(self.context, 'testgrp2')
self.assertFalse(get_rules(self.context, group1['id']))
def test_delete_security_group_in_use_by_instance(self):
# Ensure that a group can not be deleted if in use by an instance.
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
args = {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active'}
inst = db.instance_create(self.context, args)
args = {'user_id': self.context.user_id,
'project_id': self.context.project_id,
'name': 'testgrp',
'description': 'Test group'}
group = db.security_group_create(self.context, args)
db.instance_add_security_group(self.context, inst['uuid'], group['id'])
self.assertRaises(exception.InvalidGroup,
self.cloud.delete_security_group,
self.context, 'testgrp')
db.instance_destroy(self.context, inst['uuid'])
self.cloud.delete_security_group(self.context, 'testgrp')
    def test_describe_availability_zones(self):
        # Makes sure describe_availability_zones works and filters results.
        service1 = db.service_create(self.context, {'host': 'host1_zones',
                                                    'binary': "nova-compute",
                                                    'topic': 'compute',
                                                    'report_count': 0})
        service2 = db.service_create(self.context, {'host': 'host2_zones',
                                                    'binary': "nova-compute",
                                                    'topic': 'compute',
                                                    'report_count': 0})
        # Aggregate based zones
        agg = db.aggregate_create(self.context,
                {'name': 'agg1'}, {'availability_zone': 'zone1'})
        db.aggregate_host_add(self.context, agg['id'], 'host1_zones')
        agg = db.aggregate_create(self.context,
                {'name': 'agg2'}, {'availability_zone': 'zone2'})
        db.aggregate_host_add(self.context, agg['id'], 'host2_zones')
        result = self.cloud.describe_availability_zones(self.context)
        # NOTE(review): 3 entries -- presumably zone1, zone2 plus one zone
        # from pre-existing fixture services; confirm against setUp.
        self.assertEqual(len(result['availabilityZoneInfo']), 3)
        admin_ctxt = context.get_admin_context(read_deleted="no")
        # zone_name='verbose' expands the listing with per-host/per-service
        # rows, hence the much larger count.
        result = self.cloud.describe_availability_zones(admin_ctxt,
                                                        zone_name='verbose')
        self.assertEqual(len(result['availabilityZoneInfo']), 18)
        db.service_destroy(self.context, service1['id'])
        db.service_destroy(self.context, service2['id'])
    def test_describe_availability_zones_verbose(self):
        # Makes sure describe_availability_zones works and filters results.
        service1 = db.service_create(self.context, {'host': 'host1_zones',
                                                    'binary': "nova-compute",
                                                    'topic': 'compute',
                                                    'report_count': 0})
        service2 = db.service_create(self.context, {'host': 'host2_zones',
                                                    'binary': "nova-compute",
                                                    'topic': 'compute',
                                                    'report_count': 0})
        # Only host2_zones is placed in an aggregate-backed zone here.
        agg = db.aggregate_create(self.context,
                {'name': 'agg1'}, {'availability_zone': 'second_zone'})
        db.aggregate_host_add(self.context, agg['id'], 'host2_zones')
        admin_ctxt = context.get_admin_context(read_deleted="no")
        # NOTE(review): 17 rows is the verbose per-host/per-service listing
        # for this fixture state -- confirm if setUp fixtures change.
        result = self.cloud.describe_availability_zones(admin_ctxt,
                                                        zone_name='verbose')
        self.assertEqual(len(result['availabilityZoneInfo']), 17)
        db.service_destroy(self.context, service1['id'])
        db.service_destroy(self.context, service2['id'])
def assertEqualSorted(self, x, y):
self.assertEqual(sorted(x), sorted(y))
    def test_describe_instances(self):
        # Makes sure describe_instances works and filters results.
        self.flags(use_ipv6=True)
        # Stub networking so fixed/floating IPs come from canned data.
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        sys_meta['EC2_client_token'] = "<PASSWORD>"
        inst1 = db.instance_create(self.context, {'reservation_id': 'a',
                                                  'image_ref': image_uuid,
                                                  'instance_type_id': 1,
                                                  'host': 'host1',
                                                  'hostname': 'server-1234',
                                                  'vm_state': 'active',
                                                  'system_metadata': sys_meta})
        sys_meta['EC2_client_token'] = "<PASSWORD>"
        inst2 = db.instance_create(self.context, {'reservation_id': 'a',
                                                  'image_ref': image_uuid,
                                                  'instance_type_id': 1,
                                                  'host': 'host2',
                                                  'hostname': 'server-4321',
                                                  'vm_state': 'active',
                                                  'system_metadata': sys_meta})
        # Place each host in its own aggregate-backed availability zone so
        # the returned 'placement' field can be checked.
        comp1 = db.service_create(self.context, {'host': 'host1',
                                                 'topic': "compute"})
        agg = db.aggregate_create(self.context,
                {'name': 'agg1'}, {'availability_zone': 'zone1'})
        db.aggregate_host_add(self.context, agg['id'], 'host1')
        comp2 = db.service_create(self.context, {'host': 'host2',
                                                 'topic': "compute"})
        agg2 = db.aggregate_create(self.context,
                {'name': 'agg2'}, {'availability_zone': 'zone2'})
        db.aggregate_host_add(self.context, agg2['id'], 'host2')
        # Unfiltered: both instances share reservation 'a'.
        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]
        self.assertEqual(len(result['instancesSet']), 2)

        # Now try filtering.
        instance_id = ec2utils.id_to_ec2_inst_id(inst2['uuid'])
        result = self.cloud.describe_instances(self.context,
                                             instance_id=[instance_id])
        result = result['reservationSet'][0]
        self.assertEqual(len(result['instancesSet']), 1)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], instance_id)
        self.assertEqual(instance['placement']['availabilityZone'], 'zone2')
        # IP/DNS values come from the fixed-IP stub above.
        self.assertEqual(instance['ipAddress'], '172.16.58.3')
        self.assertEqual(instance['dnsName'], '172.16.58.3')
        self.assertEqual(instance['tagSet'], [])
        self.assertEqual(instance['privateDnsName'], 'server-4321')
        self.assertEqual(instance['privateIpAddress'], '192.168.0.3')
        self.assertEqual(instance['dnsNameV6'],
                'fe80:b33f::a8bb:ccff:fedd:eeff')
        self.assertEqual(instance['clientToken'], '<PASSWORD>')

        # A filter with even one invalid id should cause an exception to be
        # raised
        self.assertRaises(exception.InstanceNotFound,
                          self.cloud.describe_instances, self.context,
                          instance_id=[instance_id, '435679'])

        db.instance_destroy(self.context, inst1['uuid'])
        db.instance_destroy(self.context, inst2['uuid'])
        db.service_destroy(self.context, comp1['id'])
        db.service_destroy(self.context, comp2['id'])
def test_describe_instances_all_invalid(self):
# Makes sure describe_instances works and filters results.
self.flags(use_ipv6=True)
self._stub_instance_get_with_fixed_ips('get_all')
self._stub_instance_get_with_fixed_ips('get')
instance_id = ec2utils.id_to_ec2_inst_id('435679')
self.assertRaises(exception.InstanceNotFound,
self.cloud.describe_instances, self.context,
instance_id=[instance_id])
def test_describe_instances_with_filters(self):
# Makes sure describe_instances works and filters results.
filters = {'filter': [{'name': 'test',
'value': ['a', 'b']},
{'name': 'another_test',
'value': 'a string'}]}
self._stub_instance_get_with_fixed_ips('get_all')
self._stub_instance_get_with_fixed_ips('get')
result = self.cloud.describe_instances(self.context, **filters)
self.assertEqual(result, {'reservationSet': []})
    def test_describe_instances_with_filters_tags(self):
        # Makes sure describe_instances works and filters tag results.

        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')

        # We need to stub out the MQ call - it won't succeed.  We do want
        # to check that the method is called, though
        meta_changes = [None]

        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            # Record the metadata diff so create_tags/delete_tags calls
            # can be observed without a real RPC round-trip.
            meta_changes[0] = diff

        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)

        utc = iso8601.iso8601.Utc()

        # Create some test images
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'host': 'host1',
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1111',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                tzinfo=utc),
                'system_metadata': sys_meta
        }

        inst2_kwargs = {
                'reservation_id': 'b',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'host': 'host2',
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1112',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 2,
                                                tzinfo=utc),
                'system_metadata': sys_meta
        }

        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id1 = ec2utils.id_to_ec2_inst_id(inst1['uuid'])

        inst2 = db.instance_create(self.context, inst2_kwargs)
        ec2_id2 = ec2utils.id_to_ec2_inst_id(inst2['uuid'])

        # Create some tags
        # We get one overlapping pair, one overlapping key, and a
        # disparate pair
        # inst1 : {'foo': 'bar', 'baz': 'wibble', 'bax': 'wobble'}
        # inst2 : {'foo': 'bar', 'baz': 'quux', 'zog': 'bobble'}
        md = {'key': 'foo', 'value': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1, ec2_id2],
                tag=[md])

        md2 = {'key': 'baz', 'value': 'wibble'}
        md3 = {'key': 'bax', 'value': 'wobble'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1],
                tag=[md2, md3])

        md4 = {'key': 'baz', 'value': 'quux'}
        md5 = {'key': 'zog', 'value': 'bobble'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id2],
                tag=[md4, md5])

        # We should be able to search by:
        # Full expected describe_instances() payloads for each instance;
        # IP/DNS values come from the fixed-IP stubs above.
        inst1_ret = {
            'groupSet': None,
            'instancesSet': [{'amiLaunchIndex': None,
                              'dnsName': '172.16.58.3',
                              'dnsNameV6': 'fe80:b33f::a8bb:ccff:fedd:eeff',
                              'imageId': 'ami-00000001',
                              'instanceId': 'i-00000001',
                              'instanceState': {'code': 16,
                                                'name': 'running'},
                              'instanceType': u'm1.medium',
                              'ipAddress': '172.16.58.3',
                              'keyName': 'None (None, host1)',
                              'launchTime':
                                  datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                    tzinfo=utc),
                              'placement': {
                                  'availabilityZone': 'nova'},
                              'privateDnsName': u'server-1111',
                              'privateIpAddress': '192.168.0.3',
                              'productCodesSet': None,
                              'rootDeviceName': '/dev/sda1',
                              'rootDeviceType': 'instance-store',
                              'tagSet': [{'key': u'foo',
                                          'value': u'bar'},
                                         {'key': u'baz',
                                          'value': u'wibble'},
                                         {'key': u'bax',
                                          'value': u'wobble'}]}],
            'ownerId': None,
            'reservationId': u'a'}

        inst2_ret = {
            'groupSet': None,
            'instancesSet': [{'amiLaunchIndex': None,
                              'dnsName': '172.16.58.3',
                              'dnsNameV6': 'fe80:b33f::a8bb:ccff:fedd:eeff',
                              'imageId': 'ami-00000001',
                              'instanceId': 'i-00000002',
                              'instanceState': {'code': 16,
                                                'name': 'running'},
                              'instanceType': u'm1.medium',
                              'ipAddress': '172.16.58.3',
                              'keyName': u'None (None, host2)',
                              'launchTime':
                                  datetime.datetime(2012, 5, 1, 1, 1, 2,
                                                    tzinfo=utc),
                              'placement': {
                                  'availabilityZone': 'nova'},
                              'privateDnsName': u'server-1112',
                              'privateIpAddress': '192.168.0.3',
                              'productCodesSet': None,
                              'rootDeviceName': '/dev/sda1',
                              'rootDeviceType': 'instance-store',
                              'tagSet': [{'key': u'foo',
                                          'value': u'bar'},
                                         {'key': u'baz',
                                          'value': u'quux'},
                                         {'key': u'zog',
                                          'value': u'bobble'}]}],
            'ownerId': None,
            'reservationId': u'b'}

        # No filter
        result = self.cloud.describe_instances(self.context)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Key search
        # Both should have tags with key 'foo' and value 'bar'
        filters = {'filter': [{'name': 'tag:foo',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Both should have tags with key 'foo'
        filters = {'filter': [{'name': 'tag-key',
                               'value': ['foo']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Value search
        # Only inst2 should have tags with key 'baz' and value 'quux'
        filters = {'filter': [{'name': 'tag:baz',
                               'value': ['quux']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # Only inst2 should have tags with value 'quux'
        filters = {'filter': [{'name': 'tag-value',
                               'value': ['quux']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # Multiple values
        # Both should have tags with key 'baz' and values in the set
        # ['quux', 'wibble']
        filters = {'filter': [{'name': 'tag:baz',
                               'value': ['quux', 'wibble']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Both should have tags with key 'baz' or tags with value 'bar'
        filters = {'filter': [{'name': 'tag-key',
                               'value': ['baz']},
                              {'name': 'tag-value',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})

        # Confirm deletion of tags
        # Check for format 'tag:'
        self.cloud.delete_tags(self.context, resource_id=[ec2_id1], tag=[md])
        filters = {'filter': [{'name': 'tag:foo',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # Check for format 'tag-'
        filters = {'filter': [{'name': 'tag-key',
                               'value': ['foo']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})
        filters = {'filter': [{'name': 'tag-value',
                               'value': ['bar']}]}
        result = self.cloud.describe_instances(self.context, **filters)
        self.assertEqual(result, {'reservationSet': [inst2_ret]})

        # destroy the test instances
        db.instance_destroy(self.context, inst1['uuid'])
        db.instance_destroy(self.context, inst2['uuid'])
    def test_describe_instances_sorting(self):
        # Makes sure describe_instances works and is sorted as expected.
        self.flags(use_ipv6=True)
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        inst_base = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'system_metadata': sys_meta,
        }

        utc = iso8601.iso8601.Utc()

        # Three instances created out of chronological order on purpose:
        # inst1 is newest (2012-05-01), inst2 oldest (2012-02-01),
        # inst3 in between (2012-02-05).
        inst1_kwargs = {}
        inst1_kwargs.update(inst_base)
        inst1_kwargs['host'] = 'host1'
        inst1_kwargs['hostname'] = 'server-1111'
        inst1_kwargs['created_at'] = datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                       tzinfo=utc)
        inst1 = db.instance_create(self.context, inst1_kwargs)

        inst2_kwargs = {}
        inst2_kwargs.update(inst_base)
        inst2_kwargs['host'] = 'host2'
        inst2_kwargs['hostname'] = 'server-2222'
        inst2_kwargs['created_at'] = datetime.datetime(2012, 2, 1, 1, 1, 1,
                                                       tzinfo=utc)
        inst2 = db.instance_create(self.context, inst2_kwargs)

        inst3_kwargs = {}
        inst3_kwargs.update(inst_base)
        inst3_kwargs['host'] = 'host3'
        inst3_kwargs['hostname'] = 'server-3333'
        inst3_kwargs['created_at'] = datetime.datetime(2012, 2, 5, 1, 1, 1,
                                                       tzinfo=utc)
        inst3 = db.instance_create(self.context, inst3_kwargs)

        comp1 = db.service_create(self.context, {'host': 'host1',
                                                 'topic': "compute"})

        comp2 = db.service_create(self.context, {'host': 'host2',
                                                 'topic': "compute"})

        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]['instancesSet']
        # Output must be ordered by creation time, oldest first:
        # inst2 (Feb 1), inst3 (Feb 5), inst1 (May 1).
        self.assertEqual(result[0]['launchTime'], inst2_kwargs['created_at'])
        self.assertEqual(result[1]['launchTime'], inst3_kwargs['created_at'])
        self.assertEqual(result[2]['launchTime'], inst1_kwargs['created_at'])

        db.instance_destroy(self.context, inst1['uuid'])
        db.instance_destroy(self.context, inst2['uuid'])
        db.instance_destroy(self.context, inst3['uuid'])
        db.service_destroy(self.context, comp1['id'])
        db.service_destroy(self.context, comp2['id'])
    def test_describe_instance_state(self):
        # Makes sure describe_instances for instanceState works.

        def test_instance_state(expected_code, expected_name,
                                power_state_, vm_state_, values=None):
            # Create an instance with the given power/vm state, describe
            # it, and check the EC2 instanceState code/name mapping.
            image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
            sys_meta = flavors.save_flavor_info(
                {}, flavors.get_flavor(1))
            values = values or {}
            values.update({'image_ref': image_uuid, 'instance_type_id': 1,
                           'power_state': power_state_, 'vm_state': vm_state_,
                           'system_metadata': sys_meta})
            inst = db.instance_create(self.context, values)

            instance_id = ec2utils.id_to_ec2_inst_id(inst['uuid'])
            result = self.cloud.describe_instances(self.context,
                                                 instance_id=[instance_id])
            result = result['reservationSet'][0]
            result = result['instancesSet'][0]['instanceState']

            name = result['name']
            code = result['code']
            self.assertEqual(code, expected_code)
            self.assertEqual(name, expected_name)

            db.instance_destroy(self.context, inst['uuid'])

        test_instance_state(inst_state.RUNNING_CODE, inst_state.RUNNING,
                            power_state.RUNNING, vm_states.ACTIVE)
        # STOPPED maps from a powered-off, non-terminating instance.
        test_instance_state(inst_state.STOPPED_CODE, inst_state.STOPPED,
                            power_state.NOSTATE, vm_states.STOPPED,
                            {'shutdown_terminate': False})
    def test_describe_instances_no_ipv6(self):
        # Makes sure describe_instances w/ no ipv6 works.
        self.flags(use_ipv6=False)
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        inst1 = db.instance_create(self.context, {'reservation_id': 'a',
                                                  'image_ref': image_uuid,
                                                  'instance_type_id': 1,
                                                  'hostname': 'server-1234',
                                                  'vm_state': 'active',
                                                  'system_metadata': sys_meta})
        comp1 = db.service_create(self.context, {'host': 'host1',
                                                 'topic': "compute"})
        result = self.cloud.describe_instances(self.context)
        result = result['reservationSet'][0]
        self.assertEqual(len(result['instancesSet']), 1)
        instance = result['instancesSet'][0]
        instance_id = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        self.assertEqual(instance['instanceId'], instance_id)
        self.assertEqual(instance['ipAddress'], '172.16.58.3')
        self.assertEqual(instance['dnsName'], '172.16.58.3')
        self.assertEqual(instance['privateDnsName'], 'server-1234')
        self.assertEqual(instance['privateIpAddress'], '192.168.0.3')
        # With use_ipv6=False the v6 DNS field must be absent entirely,
        # not just empty.
        self.assertNotIn('dnsNameV6', instance)
        db.instance_destroy(self.context, inst1['uuid'])
        db.service_destroy(self.context, comp1['id'])
def test_describe_instances_deleted(self):
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
args1 = {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
inst1 = db.instance_create(self.context, args1)
args2 = {'reservation_id': 'b',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
inst2 = db.instance_create(self.context, args2)
db.instance_destroy(self.context, inst1['uuid'])
result = self.cloud.describe_instances(self.context)
self.assertEqual(len(result['reservationSet']), 1)
result1 = result['reservationSet'][0]['instancesSet']
self.assertEqual(result1[0]['instanceId'],
ec2utils.id_to_ec2_inst_id(inst2['uuid']))
def test_describe_instances_with_image_deleted(self):
image_uuid = 'aebef54a-ed67-4d10-912f-14455edce176'
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
args1 = {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
db.instance_create(self.context, args1)
args2 = {'reservation_id': 'b',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'vm_state': 'active',
'system_metadata': sys_meta}
db.instance_create(self.context, args2)
result = self.cloud.describe_instances(self.context)
self.assertEqual(len(result['reservationSet']), 2)
def test_describe_instances_dnsName_set(self):
# Verifies dnsName doesn't get set if floating IP is set.
self._stub_instance_get_with_fixed_ips('get_all', get_floating=False)
self._stub_instance_get_with_fixed_ips('get', get_floating=False)
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
db.instance_create(self.context, {'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'host': 'host1',
'hostname': 'server-1234',
'vm_state': 'active',
'system_metadata': sys_meta})
result = self.cloud.describe_instances(self.context)
result = result['reservationSet'][0]
instance = result['instancesSet'][0]
self.assertIsNone(instance['dnsName'])
def test_describe_instances_booting_from_a_volume(self):
sys_meta = flavors.save_flavor_info(
{}, flavors.get_flavor(1))
inst = objects.Instance(self.context)
inst.reservation_id = 'a'
inst.image_ref = ''
inst.root_device_name = '/dev/sdh'
inst.instance_type_id = 1
inst.vm_state = vm_states.ACTIVE
inst.host = 'host1'
inst.system_metadata = sys_meta
inst.create()
result = self.cloud.describe_instances(self.context)
result = result['reservationSet'][0]
instance = result['instancesSet'][0]
self.assertIsNone(instance['imageId'])
    def test_describe_images(self):
        describe_images = self.cloud.describe_images

        def fake_detail(meh, context, **kwargs):
            # One machine image with kernel/ramdisk properties.
            return [{'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                     'name': 'fake_name',
                     'container_format': 'ami',
                     'status': 'active',
                     'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'}}]

        def fake_show_none(meh, context, id):
            raise exception.ImageNotFound(image_id='bad_image_id')

        def fake_detail_none(self, context, **kwargs):
            return []

        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        # list all
        result1 = describe_images(self.context)
        result1 = result1['imagesSet'][0]
        self.assertEqual(result1['imageId'], 'ami-00000001')
        # provided a valid image_id
        result2 = describe_images(self.context, ['ami-00000001'])
        self.assertEqual(1, len(result2['imagesSet']))
        # provide more than 1 valid image_id
        result3 = describe_images(self.context, ['ami-00000001',
                                                 'ami-00000002'])
        self.assertEqual(2, len(result3['imagesSet']))
        # provide a non-existing image_id
        # Swap the stubs so both show() and detail() report "not found".
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_none)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail_none)
        self.assertRaises(exception.ImageNotFound, describe_images,
                          self.context, ['ami-fake'])
def assertDictListUnorderedMatch(self, L1, L2, key):
self.assertEqual(len(L1), len(L2))
for d1 in L1:
self.assertIn(key, d1)
for d2 in L2:
self.assertIn(key, d2)
if d1[key] == d2[key]:
self.assertThat(d1, matchers.DictMatches(d2))
    def _setUpImageSet(self, create_volumes_and_snapshots=False):
        """Stub the image service with two images carrying block-device
        mappings; optionally create matching volumes/snapshots.

        Returns (volume_ids, snapshot_ids) -- both empty unless
        create_volumes_and_snapshots is True.
        """
        self.flags(max_local_block_devices=-1)
        # Image 1: root on sda1 plus a spread of ephemeral/swap devices.
        mappings1 = [
            {'device': '/dev/sda1', 'virtual': 'root'},

            {'device': 'sdb0', 'virtual': 'ephemeral0'},
            {'device': 'sdb1', 'virtual': 'ephemeral1'},
            {'device': 'sdb2', 'virtual': 'ephemeral2'},
            {'device': 'sdb3', 'virtual': 'ephemeral3'},
            {'device': 'sdb4', 'virtual': 'ephemeral4'},

            {'device': 'sdc0', 'virtual': 'swap'},
            {'device': 'sdc1', 'virtual': 'swap'},
            {'device': 'sdc2', 'virtual': 'swap'},
            {'device': 'sdc3', 'virtual': 'swap'},
            {'device': 'sdc4', 'virtual': 'swap'}]
        # Overrides: snapshot-, volume-, ephemeral- and no-device entries.
        block_device_mapping1 = [
            {'device_name': '/dev/sdb1',
             'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e3'},
            {'device_name': '/dev/sdb2',
             'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e4'},
            {'device_name': '/dev/sdb3', 'virtual_name': 'ephemeral5'},
            {'device_name': '/dev/sdb4', 'no_device': True},

            {'device_name': '/dev/sdc1',
             'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e5'},
            {'device_name': '/dev/sdc2',
             'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e6'},
            {'device_name': '/dev/sdc3', 'virtual_name': 'ephemeral6'},
            {'device_name': '/dev/sdc4', 'no_device': True}]
        image1 = {
            'id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'name': 'fake_name',
            'status': 'active',
            'properties': {
                'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'type': 'machine',
                'image_state': 'available',
                'mappings': mappings1,
                'block_device_mapping': block_device_mapping1,
                }
            }

        # Image 2: EBS-style image whose root device is a snapshot on sdb1.
        mappings2 = [{'device': '/dev/sda1', 'virtual': 'root'}]
        block_device_mapping2 = [{'device_name': '/dev/sdb1',
                'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e7',
                'volume_id': None}]
        image2 = {
            'id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'name': 'fake_name',
            'status': 'active',
            'properties': {
                'kernel_id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                'type': 'machine',
                'root_device_name': '/dev/sdb1',
                'mappings': mappings2,
                'block_device_mapping': block_device_mapping2}}

        def fake_show(meh, context, image_id, **kwargs):
            _images = [copy.deepcopy(image1), copy.deepcopy(image2)]
            for i in _images:
                if str(i['id']) == str(image_id):
                    return i
            raise exception.ImageNotFound(image_id=image_id)

        def fake_detail(meh, context, **kwargs):
            return [copy.deepcopy(image1), copy.deepcopy(image2)]

        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)

        volumes = []
        snapshots = []
        if create_volumes_and_snapshots:
            for bdm in block_device_mapping1:
                if 'volume_id' in bdm:
                    vol = self._volume_create(bdm['volume_id'])
                    volumes.append(vol['id'])
                if 'snapshot_id' in bdm:
                    snap = self._snapshot_create(bdm['snapshot_id'])
                    snapshots.append(snap['id'])
        return (volumes, snapshots)
def _assertImageSet(self, result, root_device_type, root_device_name):
self.assertEqual(1, len(result['imagesSet']))
result = result['imagesSet'][0]
self.assertIn('rootDeviceType', result)
self.assertEqual(result['rootDeviceType'], root_device_type)
self.assertIn('rootDeviceName', result)
self.assertEqual(result['rootDeviceName'], root_device_name)
self.assertIn('blockDeviceMapping', result)
return result
_expected_root_device_name1 = '/dev/sda1'
# NOTE(yamahata): noDevice doesn't make sense when returning mapping
# It makes sense only when user overriding existing
# mapping.
_expected_bdms1 = [
{'deviceName': '/dev/sdb0', 'virtualName': 'ephemeral0'},
{'deviceName': '/dev/sdb1', 'ebs': {'snapshotId':
'snap-00000001'}},
{'deviceName': '/dev/sdb2', 'ebs': {'snapshotId':
'vol-00000001'}},
{'deviceName': '/dev/sdb3', 'virtualName': 'ephemeral5'},
# {'deviceName': '/dev/sdb4', 'noDevice': True},
{'deviceName': '/dev/sdc0', 'virtualName': 'swap'},
{'deviceName': '/dev/sdc1', 'ebs': {'snapshotId':
'snap-00000002'}},
{'deviceName': '/dev/sdc2', 'ebs': {'snapshotId':
'vol-00000002'}},
{'deviceName': '/dev/sdc3', 'virtualName': 'ephemeral6'},
# {'deviceName': '/dev/sdc4', 'noDevice': True}
]
_expected_root_device_name2 = '/dev/sdb1'
_expected_bdms2 = [{'deviceName': '/dev/sdb1',
'ebs': {'snapshotId': 'snap-00000003'}}]
# NOTE(yamahata):
# InstanceBlockDeviceMappingItemType
# rootDeviceType
# rootDeviceName
# blockDeviceMapping
# deviceName
# virtualName
# ebs
# snapshotId
# volumeSize
# deleteOnTermination
# noDevice
def test_describe_image_mapping(self):
# test for rootDeviceName and blockDeviceMapping.
describe_images = self.cloud.describe_images
self._setUpImageSet()
result = describe_images(self.context, ['ami-00000001'])
result = self._assertImageSet(result, 'instance-store',
self._expected_root_device_name1)
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms1, 'deviceName')
result = describe_images(self.context, ['ami-00000002'])
result = self._assertImageSet(result, 'ebs',
self._expected_root_device_name2)
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms2, 'deviceName')
    def test_describe_image_attribute(self):
        """launchPermission, kernel and ramdisk attributes are reported."""
        describe_image_attribute = self.cloud.describe_image_attribute
        def fake_show(meh, context, id, **kwargs):
            # Minimal public machine image with kernel/ramdisk properties.
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'is_public': True}
        def fake_detail(self, context, **kwargs):
            image = fake_show(None, context, None)
            image['name'] = kwargs.get('filters', {}).get('name')
            return [image]
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        # is_public=True maps to the 'all' launch permission group
        result = describe_image_attribute(self.context, 'ami-00000001',
                                          'launchPermission')
        self.assertEqual([{'group': 'all'}], result['launchPermission'])
        result = describe_image_attribute(self.context, 'ami-00000001',
                                          'kernel')
        self.assertEqual('aki-00000001', result['kernel']['value'])
        result = describe_image_attribute(self.context, 'ami-00000001',
                                          'ramdisk')
        self.assertEqual('ari-00000001', result['ramdisk']['value'])
def test_describe_image_attribute_root_device_name(self):
describe_image_attribute = self.cloud.describe_image_attribute
self._setUpImageSet()
result = describe_image_attribute(self.context, 'ami-00000001',
'rootDeviceName')
self.assertEqual(result['rootDeviceName'],
self._expected_root_device_name1)
result = describe_image_attribute(self.context, 'ami-00000002',
'rootDeviceName')
self.assertEqual(result['rootDeviceName'],
self._expected_root_device_name2)
def test_describe_image_attribute_block_device_mapping(self):
describe_image_attribute = self.cloud.describe_image_attribute
self._setUpImageSet()
result = describe_image_attribute(self.context, 'ami-00000001',
'blockDeviceMapping')
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms1, 'deviceName')
result = describe_image_attribute(self.context, 'ami-00000002',
'blockDeviceMapping')
self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
self._expected_bdms2, 'deviceName')
    def test_modify_image_attribute(self):
        """Granting launchPermission to 'all' makes the image public."""
        modify_image_attribute = self.cloud.modify_image_attribute
        fake_metadata = {
            'id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'name': 'fake_name',
            'container_format': 'ami',
            'status': 'active',
            'properties': {
                'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'type': 'machine'},
            'is_public': False}
        def fake_show(meh, context, id, **kwargs):
            # deep copy so the update below cannot mutate the fixture
            return copy.deepcopy(fake_metadata)
        def fake_detail(self, context, **kwargs):
            image = fake_show(None, context, None)
            image['name'] = kwargs.get('filters', {}).get('name')
            return [image]
        def fake_update(meh, context, image_id, metadata, data=None):
            # the update must preserve kernel/ramdisk ids and set is_public
            self.assertEqual(metadata['properties']['kernel_id'],
                             fake_metadata['properties']['kernel_id'])
            self.assertEqual(metadata['properties']['ramdisk_id'],
                             fake_metadata['properties']['ramdisk_id'])
            self.assertTrue(metadata['is_public'])
            image = copy.deepcopy(fake_metadata)
            image.update(metadata)
            return image
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
        self.stubs.Set(fake._FakeImageService, 'update', fake_update)
        result = modify_image_attribute(self.context, 'ami-00000001',
                                         'launchPermission', 'add',
                                         user_group=['all'])
        self.assertTrue(result['is_public'])
    def test_register_image(self):
        """RegisterImage returns the EC2 id of the newly created image."""
        register_image = self.cloud.register_image
        def fake_create(*args, **kwargs):
            # NOTE(vish): We are mocking s3 so make sure we have converted
            #             to ids instead of uuids.
            return {'id': 1,
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {'kernel_id': 1,
                                   'ramdisk_id': 1,
                                   'type': 'machine'
                                   },
                    'is_public': False
                    }
        self.stubs.Set(s3.S3ImageService, 'create', fake_create)
        image_location = 'fake_bucket/fake.img.manifest.xml'
        result = register_image(self.context, image_location)
        # glance id 1 maps to EC2 id ami-00000001
        self.assertEqual(result['imageId'], 'ami-00000001')
def test_register_image_empty(self):
register_image = self.cloud.register_image
self.assertRaises(exception.MissingParameter, register_image,
self.context, image_location=None)
    def test_register_image_name(self):
        """The image name defaults to image_location when not supplied."""
        register_image = self.cloud.register_image
        def fake_create(_self, context, metadata, data=None):
            self.assertEqual(metadata['name'], self.expected_name)
            metadata['id'] = 1
            metadata['container_format'] = 'ami'
            metadata['is_public'] = False
            return metadata
        self.stubs.Set(s3.S3ImageService, 'create', fake_create)
        # name=None falls back to the location string
        self.expected_name = 'fake_bucket/fake.img.manifest.xml'
        register_image(self.context,
                       image_location=self.expected_name,
                       name=None)
        # an explicit name takes precedence over the location
        self.expected_name = 'an image name'
        register_image(self.context,
                       image_location='some_location',
                       name=self.expected_name)
    def test_format_image(self):
        """_format_image maps glance metadata to EC2 fields, falling back
        between name and image_location when one of them is missing."""
        image = {
            'id': 1,
            'container_format': 'ami',
            'name': 'name',
            'owner': 'someone',
            'properties': {
                'image_location': 'location',
                'kernel_id': 1,
                'ramdisk_id': 1,
                'type': 'machine'},
            'is_public': False}
        expected = {'name': 'name',
                    'imageOwnerId': 'someone',
                    'isPublic': False,
                    'imageId': 'ami-00000001',
                    'imageState': None,
                    'rootDeviceType': 'instance-store',
                    'architecture': None,
                    'imageLocation': 'location',
                    'kernelId': 'aki-00000001',
                    'ramdiskId': 'ari-00000001',
                    'rootDeviceName': '/dev/sda1',
                    'imageType': 'machine',
                    'description': None}
        result = self.cloud._format_image(image)
        self.assertThat(result, matchers.DictMatches(expected))
        # a missing image_location is rendered as "None (<name>)"
        image['properties']['image_location'] = None
        expected['imageLocation'] = 'None (name)'
        result = self.cloud._format_image(image)
        self.assertThat(result, matchers.DictMatches(expected))
        # a missing name falls back to the image_location
        image['name'] = None
        image['properties']['image_location'] = 'location'
        expected['imageLocation'] = 'location'
        expected['name'] = 'location'
        result = self.cloud._format_image(image)
        self.assertThat(result, matchers.DictMatches(expected))
    def test_deregister_image(self):
        """Deregistering succeeds for an existing image and raises
        ImageNotFound for an unknown one."""
        deregister_image = self.cloud.deregister_image
        def fake_delete(self, context, id):
            return None
        self.stubs.Set(fake._FakeImageService, 'delete', fake_delete)
        # valid image
        result = deregister_image(self.context, 'ami-00000001')
        self.assertTrue(result)
        # invalid image: make the image service return no images at all
        self.stubs.UnsetAll()
        def fake_detail_empty(self, context, **kwargs):
            return []
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail_empty)
        self.assertRaises(exception.ImageNotFound, deregister_image,
                          self.context, 'ami-bad001')
def test_deregister_image_wrong_container_type(self):
deregister_image = self.cloud.deregister_image
def fake_delete(self, context, id):
return None
self.stubs.Set(fake._FakeImageService, 'delete', fake_delete)
self.assertRaises(exception.NotFound, deregister_image, self.context,
'aki-00000001')
def _run_instance(self, **kwargs):
rv = self.cloud.run_instances(self.context, **kwargs)
instance_id = rv['instancesSet'][0]['instanceId']
return instance_id
def test_get_password_data(self):
instance_id = self._run_instance(
image_id='ami-1',
instance_type=CONF.default_flavor,
max_count=1)
self.stubs.Set(password, 'extract_password', lambda i: '<PASSWORD>')
output = self.cloud.get_password_data(context=self.context,
instance_id=[instance_id])
self.assertEqual(output['passwordData'], '<PASSWORD>')
self.cloud.terminate_instances(self.context, [instance_id])
def test_console_output(self):
instance_id = self._run_instance(
image_id='ami-1',
instance_type=CONF.default_flavor,
max_count=1)
output = self.cloud.get_console_output(context=self.context,
instance_id=[instance_id])
self.assertEqual(base64.b64decode(output['output']),
'FAKE CONSOLE OUTPUT\nANOTHER\nLAST LINE')
# TODO(soren): We need this until we can stop polling in the rpc code
# for unit tests.
self.cloud.terminate_instances(self.context, [instance_id])
def test_key_generation(self):
result, private_key = self._create_key('test')
expected = db.key_pair_get(self.context,
self.context.user_id,
'test')['public_key']
(fd, fname) = tempfile.mkstemp()
os.write(fd, private_key)
public_key, err = utils.execute('ssh-keygen', '-e', '-f', fname)
os.unlink(fname)
# assert key fields are equal
self.assertEqual(''.join(public_key.split("\n")[2:-2]),
expected.split(" ")[1].strip())
def test_describe_key_pairs(self):
self._create_key('test1')
self._create_key('test2')
result = self.cloud.describe_key_pairs(self.context)
keys = result["keySet"]
self.assertTrue(filter(lambda k: k['keyName'] == 'test1', keys))
self.assertTrue(filter(lambda k: k['keyName'] == 'test2', keys))
def test_describe_bad_key_pairs(self):
self.assertRaises(exception.KeypairNotFound,
self.cloud.describe_key_pairs, self.context,
key_name=['DoesNotExist'])
def test_import_key_pair(self):
pubkey_path = os.path.join(os.path.dirname(__file__), 'public_key')
with open(pubkey_path + '/dummy.pub') as f:
dummypub = f.readline().rstrip()
with open(pubkey_path + '/dummy.fingerprint') as f:
dummyfprint = f.readline().rstrip()
key_name = 'testimportkey'
public_key_material = base64.b64encode(dummypub)
result = self.cloud.import_key_pair(self.context,
key_name,
public_key_material)
self.assertEqual(result['keyName'], key_name)
self.assertEqual(result['keyFingerprint'], dummyfprint)
keydata = db.key_pair_get(self.context,
self.context.user_id,
key_name)
self.assertEqual(dummypub, keydata['public_key'])
self.assertEqual(dummyfprint, keydata['fingerprint'])
def test_import_key_pair_quota_limit(self):
self.flags(quota_key_pairs=0)
pubkey_path = os.path.join(os.path.dirname(__file__), 'public_key')
f = open(pubkey_path + '/dummy.pub', 'r')
dummypub = f.readline().rstrip()
f.close
f = open(pubkey_path + '/dummy.fingerprint', 'r')
f.readline().rstrip()
f.close
key_name = 'testimportkey'
public_key_material = base64.b64encode(dummypub)
self.assertRaises(exception.KeypairLimitExceeded,
self.cloud.import_key_pair, self.context, key_name,
public_key_material)
def test_create_key_pair(self):
good_names = ('a', 'a' * 255, string.ascii_letters + ' -_')
bad_names = ('', 'a' * 256, '*', '/')
for key_name in good_names:
result = self.cloud.create_key_pair(self.context,
key_name)
self.assertEqual(result['keyName'], key_name)
for key_name in bad_names:
self.assertRaises(exception.InvalidKeypair,
self.cloud.create_key_pair,
self.context,
key_name)
def test_create_key_pair_quota_limit(self):
self.flags(quota_key_pairs=10)
for i in range(0, 10):
key_name = 'key_%i' % i
result = self.cloud.create_key_pair(self.context,
key_name)
self.assertEqual(result['keyName'], key_name)
# 11'th group should fail
self.assertRaises(exception.KeypairLimitExceeded,
self.cloud.create_key_pair,
self.context,
'foo')
def test_delete_key_pair(self):
self._create_key('test')
self.cloud.delete_key_pair(self.context, 'test')
    def test_run_instances(self):
        """A basic RunInstances call yields a running m1.small instance."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id, **kwargs):
            # minimal active machine image
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        def dumb(*args, **kwargs):
            pass
        # silence usage notifications
        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        # make RPC casts synchronous so the instance is built inline
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['imageId'], 'ami-00000001')
        self.assertEqual(instance['instanceId'], 'i-00000001')
        self.assertEqual(instance['instanceState']['name'], 'running')
        self.assertEqual(instance['instanceType'], 'm1.small')
    def test_run_instances_invalid_maxcount(self):
        """RunInstances with max_count=0 is rejected with InvalidInput."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 0}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}
        # replace any previously installed image stubs before re-stubbing
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.assertRaises(exception.InvalidInput, run_instances,
                          self.context, **kwargs)
    def test_run_instances_invalid_mincount(self):
        """RunInstances with min_count=0 is rejected with InvalidInput."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'min_count': 0}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}
        # replace any previously installed image stubs before re-stubbing
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.assertRaises(exception.InvalidInput, run_instances,
                          self.context, **kwargs)
    def test_run_instances_invalid_count(self):
        """RunInstances with max_count < min_count raises InvalidInput."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1,
                  'min_count': 2}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}
        # replace any previously installed image stubs before re-stubbing
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.assertRaises(exception.InvalidInput, run_instances,
                          self.context, **kwargs)
    def test_run_instances_availability_zone(self):
        """The placement availability_zone is passed through to create()."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1,
                  'placement': {'availability_zone': 'fake'},
                  }
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        def fake_format(*args, **kwargs):
            # skip response formatting; only the create() call matters here
            pass
        self.stubs.Set(self.cloud, '_format_run_instances', fake_format)
        def fake_create(*args, **kwargs):
            self.assertEqual(kwargs['availability_zone'], 'fake')
            return ({'id': 'fake-instance'}, 'fake-res-id')
        self.stubs.Set(self.cloud.compute_api, 'create', fake_create)
        # NOTE(vish) the assert for this call is in the fake_create method.
        run_instances(self.context, **kwargs)
    def test_empty_reservation_id_from_token(self):
        """_resv_id_from_token returns None when no instance matches."""
        # NOTE(review): this literal looks sanitized in this copy of the
        # file; the filter below expects 'client-token-1' — confirm against
        # upstream.
        client_token = '<PASSWORD>'
        def fake_get_all_system_metadata(context, search_filts):
            # verify the lookup filters on the EC2 client-token metadata key
            reference = [{'key': ['EC2_client_token']},
                         {'value': ['client-token-1']}]
            self.assertEqual(search_filts, reference)
            return []
        self.stubs.Set(self.cloud.compute_api, 'get_all_system_metadata',
                       fake_get_all_system_metadata)
        resv_id = self.cloud._resv_id_from_token(self.context, client_token)
        self.assertIsNone(resv_id)
    def test_run_instances_idempotent(self):
        # Ensure subsequent run_instances calls with same client token
        # are idempotent and that ones with different client_token are not
        # NOTE(review): the client_token literals below appear sanitized
        # (all identical); upstream uses distinct tokens to drive the
        # idempotency checks — confirm before relying on this copy.
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        def dumb(*args, **kwargs):
            pass
        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000001')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000002')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000002')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000001')
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000003')
        # make sure terminated instances lose their client tokens
        self.cloud.stop_instances(self.context,
                                  instance_id=[instance['instanceId']])
        self.cloud.terminate_instances(self.context,
                                       instance_id=[instance['instanceId']])
        kwargs['client_token'] = '<PASSWORD>'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000004')
    def test_run_instances_image_state_none(self):
        """An image with no status field is rejected as not active."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show_no_state(self, context, id):
            # deliberately omits the 'status' key
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'}, 'container_format': 'ami'}
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_no_state)
        self.assertRaises(exception.ImageNotActive, run_instances,
                          self.context, **kwargs)
    def test_run_instances_image_state_invalid(self):
        """An image still in the 'decrypting' state is rejected."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show_decrypt(self, context, id):
            # status is active but the EC2 image_state is still decrypting
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine', 'image_state': 'decrypting'}}
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_decrypt)
        self.assertRaises(exception.ImageNotActive, run_instances,
                          self.context, **kwargs)
    def test_run_instances_image_status_active(self):
        """An image whose glance status is 'active' is runnable."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show_stat_active(self, context, id, **kwargs):
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'status': 'active'}
        def fake_id_to_glance_id(context, id):
            return 'cedef40a-ed67-4d10-800e-17455edce175'
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_stat_active)
        self.stubs.Set(ec2utils, 'id_to_glance_id', fake_id_to_glance_id)
        result = run_instances(self.context, **kwargs)
        self.assertEqual(len(result['instancesSet']), 1)
def _restart_compute_service(self, periodic_interval_max=None):
"""restart compute service. NOTE: fake driver forgets all instances."""
self.compute.kill()
if periodic_interval_max:
self.compute = self.start_service(
'compute', periodic_interval_max=periodic_interval_max)
else:
self.compute = self.start_service('compute')
    def test_stop_start_instance(self):
        """Stop/start cycles an instance through running and stopped.

        EC2 state codes: 16 = running, 80 = stopped, 48 = terminated.
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # a running instance can't be started.
        self.assertRaises(exception.InstanceInvalidState,
                          self.cloud.start_instances,
                          self.context, [instance_id])
        # running -> stopped
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # stopped -> running
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 16,
                                          'name': 'running'}}]}
        result = self.cloud.start_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # running -> stopped again
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        # stopped -> terminated
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
    def test_start_instances(self):
        """Starting a stopped instance reports the stopped->running
        transition; termination then reports running->terminated."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertTrue(result)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 16,
                                          'name': 'running'}}]}
        result = self.cloud.start_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_start_instances_policy_failed(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
rules = {
"compute:start":
common_policy.parse_rule("project_id:non_fake"),
}
policy.set_rules(rules)
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.start_instances,
self.context, [instance_id])
self.assertIn("compute:start", exc.format_message())
self._restart_compute_service()
    def test_stop_instances(self):
        """Stopping a running instance reports running->stopped; terminating
        it afterwards reports stopped->terminated."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_stop_instances_policy_failed(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
rules = {
"compute:stop":
common_policy.parse_rule("project_id:non_fake")
}
policy.set_rules(rules)
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.stop_instances,
self.context, [instance_id])
self.assertIn("compute:stop", exc.format_message())
self._restart_compute_service()
    def test_terminate_instances(self):
        """Terminating a running instance reports running->terminated."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # a running instance can't be started.
        self.assertRaises(exception.InstanceInvalidState,
                          self.cloud.start_instances,
                          self.context, [instance_id])
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_terminate_instances_invalid_instance_id(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
self._run_instance(**kwargs)
self.assertRaises(exception.InstanceNotFound,
self.cloud.terminate_instances,
self.context, ['i-2'])
self._restart_compute_service()
    def test_terminate_instances_disable_terminate(self):
        """An instance flagged disable_terminate stays running on a
        terminate request; clearing the flag allows termination."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        internal_uuid = db.get_instance_uuid_by_ec2_id(self.context,
                                    ec2utils.ec2_id_to_id(instance_id))
        db.instance_update(self.context, internal_uuid,
                           {'disable_terminate': True})
        # terminate is a no-op: state stays running/running
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 16,
                                          'name': 'running'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        db.instance_update(self.context, internal_uuid,
                           {'disable_terminate': False})
        # with the flag cleared the instance terminates normally
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
    def test_terminate_instances_two_instances(self):
        """Terminating a stopped and a running instance in one call reports
        the correct previous state for each."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        inst1 = self._run_instance(**kwargs)
        inst2 = self._run_instance(**kwargs)
        # stop only the first instance
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 80,
                                          'name': 'stopped'}}]}
        result = self.cloud.stop_instances(self.context, [inst1])
        self.assertEqual(result, expected)
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}},
                        {'instanceId': 'i-00000002',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [inst1, inst2])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_reboot_instances(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
# a running instance can't be started.
self.assertRaises(exception.InstanceInvalidState,
self.cloud.start_instances,
self.context, [instance_id])
result = self.cloud.reboot_instances(self.context, [instance_id])
self.assertTrue(result)
def _volume_create(self, volume_id=None):
kwargs = {'name': 'test-volume',
'description': 'test volume description',
'status': 'available',
'host': 'fake',
'size': 1,
'attach_status': 'detached'}
if volume_id:
kwargs['volume_id'] = volume_id
return self.volume_api.create_with_kwargs(self.context, **kwargs)
def _snapshot_create(self, snapshot_id=None):
kwargs = {'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e4',
'status': "available",
'volume_size': 1}
if snapshot_id:
kwargs['snap_id'] = snapshot_id
return self.volume_api.create_snapshot_with_kwargs(self.context,
**kwargs)
def _create_snapshot(self, ec2_volume_id):
result = self.cloud.create_snapshot(self.context,
volume_id=ec2_volume_id)
return result['snapshotId']
    def _do_test_create_image(self, no_reboot):
        """Make sure that CreateImage works.

        Runs an EBS-backed instance, snapshots it via CreateImage with the
        given *no_reboot* flag, then checks the registered image metadata
        and whether the instance was power cycled.
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        (volumes, snapshots) = self._setUpImageSet(
            create_volumes_and_snapshots=True)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        ec2_instance_id = self._run_instance(**kwargs)
        def fake_show(meh, context, id, **kwargs):
            # EBS-backed image metadata: root device mapped to a snapshot
            bdm = [dict(snapshot_id=snapshots[0],
                        volume_size=1,
                        device_name='sda1',
                        delete_on_termination=False)]
            props = dict(kernel_id='cedef40a-ed67-4d10-800e-17455edce175',
                         ramdisk_id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                         root_device_name='/dev/sda1',
                         block_device_mapping=bdm)
            return dict(id=id,
                        properties=props,
                        container_format='ami',
                        status='active',
                        is_public=True)
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': volumes[0],
                         'snapshot_id': snapshots[0],
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'sda1',
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'connection_info': '{"foo":"bar"}',
                         'no_device': None})]
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)
        # record power transitions so the reboot behaviour can be verified
        virt_driver = {}
        def fake_power_on(self, context, instance, network_info,
                          block_device_info):
            virt_driver['powered_on'] = True
        self.stubs.Set(fake_virt.FakeDriver, 'power_on', fake_power_on)
        def fake_power_off(self, instance):
            virt_driver['powered_off'] = True
        self.stubs.Set(fake_virt.FakeDriver, 'power_off', fake_power_off)
        result = self.cloud.create_image(self.context, ec2_instance_id,
                                         no_reboot=no_reboot)
        ec2_ids = [result['imageId']]
        created_image = self.cloud.describe_images(self.context,
                                                   ec2_ids)['imagesSet'][0]
        self.assertIn('blockDeviceMapping', created_image)
        bdm = created_image['blockDeviceMapping'][0]
        self.assertEqual(bdm.get('deviceName'), 'sda1')
        self.assertIn('ebs', bdm)
        self.assertEqual(bdm['ebs'].get('snapshotId'),
                         ec2utils.id_to_ec2_snap_id(snapshots[0]))
        self.assertEqual(created_image.get('kernelId'), 'aki-00000001')
        self.assertEqual(created_image.get('ramdiskId'), 'ari-00000002')
        self.assertEqual(created_image.get('rootDeviceType'), 'ebs')
        # with no_reboot the instance must not have been power cycled
        self.assertNotEqual(virt_driver.get('powered_on'), no_reboot)
        self.assertNotEqual(virt_driver.get('powered_off'), no_reboot)
        self.cloud.terminate_instances(self.context, [ec2_instance_id])
        self._restart_compute_service()
def test_create_image_no_reboot(self):
# Make sure that CreateImage works.
self._do_test_create_image(True)
def test_create_image_with_reboot(self):
# Make sure that CreateImage works.
self._do_test_create_image(False)
    def test_create_image_instance_store(self):
        """Ensure CreateImage fails as expected for an instance-store-backed
        instance
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        (volumes, snapshots) = self._setUpImageSet(
            create_volumes_and_snapshots=True)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        ec2_instance_id = self._run_instance(**kwargs)
        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            # mapping on 'vda' with no boot_index: not an EBS root device
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': volumes[0],
                         'snapshot_id': snapshots[0],
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'delete_on_termination': False,
                         'no_device': None})]
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)
        self.assertRaises(exception.InvalidParameterValue,
                          self.cloud.create_image,
                          self.context,
                          ec2_instance_id,
                          no_reboot=True)
    @staticmethod
    def _fake_bdm_get(ctxt, id, use_slave=False):
        """Return canned block-device-mapping DB rows.

        Covers the interesting cases: an attached volume, a snapshot-backed
        volume, a suppressed (no_device) mapping, swap, and several
        ephemeral/local blank disks.
        """
        blockdms = [{'volume_id': 87654321,
                     'source_type': 'volume',
                     'destination_type': 'volume',
                     'snapshot_id': None,
                     'no_device': None,
                     'delete_on_termination': True,
                     'device_name': '/dev/sdh'},
                    {'volume_id': None,
                     'snapshot_id': 98765432,
                     'source_type': 'snapshot',
                     'destination_type': 'volume',
                     'no_device': None,
                     'delete_on_termination': True,
                     'device_name': '/dev/sdi'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': True,
                     'source_type': 'blank',
                     'destination_type': None,
                     'delete_on_termination': None,
                     'device_name': None},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sdb'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': 'swap',
                     'delete_on_termination': None,
                     'device_name': '/dev/sdc'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sdd'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sd3'},
                    ]
        # DB columns common to every row above
        extra = {
            'created_at': None,
            'updated_at': None,
            'deleted_at': None,
            'deleted': 0,
            'id': 0,
            'device_type': None,
            'disk_bus': None,
            'instance_uuid': '',
            'image_id': None,
            'volume_size': None,
            'connection_info': None,
            'boot_index': None,
            'guest_format': None,
        }
        for bdm in blockdms:
            bdm.update(extra)
        return blockdms
    def test_describe_instance_attribute(self):
        # Make sure that describe_instance_attribute works.
        # Block device mappings come from the canned _fake_bdm_get fixture.
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       self._fake_bdm_get)

        def fake_get(ctxt, instance_id, want_objects=False):
            # Replacement for compute_api.get returning a fully populated
            # Instance object (flavor, security groups, kernel/ramdisk, ...).
            self.assertTrue(want_objects)
            inst_type = flavors.get_default_flavor()
            inst_type['name'] = 'fake_type'
            sys_meta = flavors.save_flavor_info({}, inst_type)
            secgroups = objects.SecurityGroupList()
            secgroups.objects.append(
                objects.SecurityGroup(name='fake0'))
            secgroups.objects.append(
                objects.SecurityGroup(name='fake1'))
            instance = objects.Instance(ctxt)
            instance.id = 0
            instance.uuid = 'e5fe5518-0288-4fa3-b0c4-c79764101b85'
            instance.root_device_name = '/dev/sdh'
            instance.security_groups = secgroups
            instance.vm_state = vm_states.STOPPED
            instance.kernel_id = 'cedef40a-ed67-4d10-800e-17455edce175'
            instance.ramdisk_id = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            instance.user_data = 'fake-user data'
            instance.shutdown_terminate = False
            instance.disable_terminate = False
            instance.system_metadata = sys_meta
            return instance

        self.stubs.Set(self.cloud.compute_api, 'get', fake_get)

        def fake_ec2_instance_get_by_id(ctxt, int_id):
            # 305419896 == 0x12345678, i.e. the 'i-12345678' used below.
            if int_id == 305419896:
                fake_map = {
                    'created_at': None,
                    'updated_at': None,
                    'deleted_at': None,
                    'deleted': 0,
                    'id': 305419896,
                    'uuid': 'e5fe5518-0288-4fa3-b0c4-c79764101b85',
                }
                return fake_map
            raise exception.InstanceNotFound(instance_id=int_id)
        self.stubs.Set(db, 'ec2_instance_get_by_id',
                       fake_ec2_instance_get_by_id)

        # Bind context and instance id once; each call below only varies
        # the attribute name.
        get_attribute = functools.partial(
            self.cloud.describe_instance_attribute,
            self.context, 'i-12345678')

        bdm = get_attribute('blockDeviceMapping')
        bdm['blockDeviceMapping'].sort()

        expected_bdm = {'instance_id': 'i-12345678',
                        'rootDeviceType': 'ebs',
                        'blockDeviceMapping': [
                            {'deviceName': '/dev/sdh',
                             'ebs': {'status': 'attached',
                                     'deleteOnTermination': True,
                                     'volumeId': 'vol-05397fb1',
                                     'attachTime': '13:56:24'}}]}
        expected_bdm['blockDeviceMapping'].sort()
        self.assertEqual(bdm, expected_bdm)
        groupSet = get_attribute('groupSet')
        groupSet['groupSet'].sort()
        expected_groupSet = {'instance_id': 'i-12345678',
                             'groupSet': [{'groupId': 'fake0'},
                                          {'groupId': 'fake1'}]}
        expected_groupSet['groupSet'].sort()
        self.assertEqual(groupSet, expected_groupSet)
        self.assertEqual(get_attribute('instanceInitiatedShutdownBehavior'),
                         {'instance_id': 'i-12345678',
                          'instanceInitiatedShutdownBehavior': 'stop'})
        self.assertEqual(get_attribute('disableApiTermination'),
                         {'instance_id': 'i-12345678',
                          'disableApiTermination': False})
        self.assertEqual(get_attribute('instanceType'),
                         {'instance_id': 'i-12345678',
                          'instanceType': 'fake_type'})
        self.assertEqual(get_attribute('kernel'),
                         {'instance_id': 'i-12345678',
                          'kernel': 'aki-00000001'})
        self.assertEqual(get_attribute('ramdisk'),
                         {'instance_id': 'i-12345678',
                          'ramdisk': 'ari-00000002'})
        self.assertEqual(get_attribute('rootDeviceName'),
                         {'instance_id': 'i-12345678',
                          'rootDeviceName': '/dev/sdh'})
        # NOTE(yamahata): this isn't supported
        # get_attribute('sourceDestCheck')
        # NOTE(review): the expected bytes presumably come from decoding the
        # stored 'fake-user data' string -- confirm against the API's
        # userData handling before changing.
        self.assertEqual(get_attribute('userData'),
                         {'instance_id': 'i-12345678',
                          'userData': '}\xa9\x1e\xba\xc7\xabu\xabZ'})
    def test_instance_initiated_shutdown_behavior(self):
        # Verify instanceInitiatedShutdownBehavior for a range of image
        # block-device configurations: everything defaults to 'stop' unless
        # explicitly requested as 'terminate'.
        def test_dia_iisb(expected_result, **kwargs):
            """test describe_instance_attribute
            attribute instance_initiated_shutdown_behavior
            """
            kwargs.update({'instance_type': CONF.default_flavor,
                           'max_count': 1})
            instance_id = self._run_instance(**kwargs)

            result = self.cloud.describe_instance_attribute(self.context,
                            instance_id, 'instanceInitiatedShutdownBehavior')
            self.assertEqual(result['instanceInitiatedShutdownBehavior'],
                             expected_result)

            # Clean up: terminating must report running -> terminated.
            expected = {'instancesSet': [
                            {'instanceId': instance_id,
                             'previousState': {'code': 16,
                                               'name': 'running'},
                             'currentState': {'code': 48,
                                              'name': 'terminated'}}]}
            result = self.cloud.terminate_instances(self.context,
                                                    [instance_id])
            self.assertEqual(result, expected)
            self._restart_compute_service()

        test_dia_iisb('stop', image_id='ami-1')

        block_device_mapping = [{'device_name': '/dev/vdb',
                                 'virtual_name': 'ephemeral0'}]
        test_dia_iisb('stop', image_id='ami-2',
                      block_device_mapping=block_device_mapping)

        def fake_show(self, context, id_, **kwargs):
            # Fake image service: ami-3..6 exercise the combinations of
            # 'mappings' and 'block_device_mapping' image properties.
            LOG.debug("id_ %s", id_)

            prop = {}
            if id_ == 'ami-3':
                pass
            elif id_ == 'ami-4':
                prop = {'mappings': [{'device': 'sdb0',
                                      'virtual': 'ephemeral0'}]}
            elif id_ == 'ami-5':
                prop = {'block_device_mapping':
                        [{'device_name': '/dev/sdb0',
                          'virtual_name': 'ephemeral0'}]}
            elif id_ == 'ami-6':
                prop = {'mappings': [{'device': 'sdb0',
                                      'virtual': 'ephemeral0'}],
                        'block_device_mapping':
                        [{'device_name': '/dev/sdb0',
                          'virtual_name': 'ephemeral0'}]}

            prop_base = {'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                         'type': 'machine'}
            prop_base.update(prop)

            return {
                'id': id_,
                'name': 'fake_name',
                'properties': prop_base,
                'container_format': 'ami',
                'status': 'active'}

        # NOTE(yamahata): create ami-3 ... ami-7
        #                 ami-1 and ami-2 is already created by setUp()
        for i in range(3, 8):
            db.s3_image_create(self.context, 'ami-%d' % i)
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)

        test_dia_iisb('stop', image_id='ami-3')
        test_dia_iisb('stop', image_id='ami-4')
        test_dia_iisb('stop', image_id='ami-5')
        test_dia_iisb('stop', image_id='ami-6')
        test_dia_iisb('terminate', image_id='ami-7',
                      instance_initiated_shutdown_behavior='terminate')
def test_create_delete_tags(self):
# We need to stub network calls
self._stub_instance_get_with_fixed_ips('get_all')
self._stub_instance_get_with_fixed_ips('get')
# We need to stub out the MQ call - it won't succeed. We do want
# to check that the method is called, though
meta_changes = [None]
def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
instance_uuid=None):
meta_changes[0] = diff
self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
fake_change_instance_metadata)
# Create a test image
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
inst1_kwargs = {
'reservation_id': 'a',
'image_ref': image_uuid,
'instance_type_id': 1,
'vm_state': 'active',
'launched_at': timeutils.utcnow(),
'hostname': 'server-1111',
'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1)
}
inst1 = db.instance_create(self.context, inst1_kwargs)
ec2_id = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
# Create some tags
md = {'key': 'foo', 'value': 'bar'}
md_result = {'foo': 'bar'}
self.cloud.create_tags(self.context, resource_id=[ec2_id],
tag=[md])
metadata = self.cloud.compute_api.get_instance_metadata(self.context,
inst1)
self.assertEqual(metadata, md_result)
self.assertEqual(meta_changes, [{'foo': ['+', 'bar']}])
# Delete them
self.cloud.delete_tags(self.context, resource_id=[ec2_id],
tag=[{'key': 'foo', 'value': 'bar'}])
metadata = self.cloud.compute_api.get_instance_metadata(self.context,
inst1)
self.assertEqual(metadata, {})
self.assertEqual(meta_changes, [{'foo': ['-']}])
    def test_describe_tags(self):
        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')

        # We need to stub out the MQ call - it won't succeed. We do want
        # to check that the method is called, though
        meta_changes = [None]

        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            meta_changes[0] = diff

        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)

        # Create some test images
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
            'reservation_id': 'a',
            'image_ref': image_uuid,
            'instance_type_id': 1,
            'vm_state': 'active',
            'launched_at': timeutils.utcnow(),
            'hostname': 'server-1111',
            'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1)
        }

        inst2_kwargs = {
            'reservation_id': 'b',
            'image_ref': image_uuid,
            'instance_type_id': 1,
            'vm_state': 'active',
            'launched_at': timeutils.utcnow(),
            'hostname': 'server-1112',
            'created_at': datetime.datetime(2012, 5, 1, 1, 1, 2)
        }

        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id1 = ec2utils.id_to_ec2_inst_id(inst1['uuid'])

        inst2 = db.instance_create(self.context, inst2_kwargs)
        ec2_id2 = ec2utils.id_to_ec2_inst_id(inst2['uuid'])

        # Create some tags
        # We get one overlapping pair, and each has a different key value pair
        # inst1 : {'foo': 'bar', 'bax': 'wibble'}
        # inst2 : {'foo': 'bar', 'baz': 'quux'}

        md = {'key': 'foo', 'value': 'bar'}
        md_result = {'foo': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1, ec2_id2],
                               tag=[md])
        self.assertEqual(meta_changes, [{'foo': ['+', 'bar']}])

        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst1)
        self.assertEqual(metadata, md_result)

        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst2)
        self.assertEqual(metadata, md_result)

        md2 = {'key': 'baz', 'value': 'quux'}
        md2_result = {'baz': 'quux'}
        md2_result.update(md_result)
        self.cloud.create_tags(self.context, resource_id=[ec2_id2],
                               tag=[md2])
        self.assertEqual(meta_changes, [{'baz': ['+', 'quux']}])

        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst2)
        self.assertEqual(metadata, md2_result)

        md3 = {'key': 'bax', 'value': 'wibble'}
        md3_result = {'bax': 'wibble'}
        md3_result.update(md_result)
        self.cloud.create_tags(self.context, resource_id=[ec2_id1],
                               tag=[md3])
        self.assertEqual(meta_changes, [{'bax': ['+', 'wibble']}])

        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst1)
        self.assertEqual(metadata, md3_result)

        # Expected tagSet entries (EC2 ids map to the two created instances).
        inst1_key_foo = {'key': u'foo', 'resource_id': 'i-00000001',
                         'resource_type': 'instance', 'value': u'bar'}
        inst1_key_bax = {'key': u'bax', 'resource_id': 'i-00000001',
                         'resource_type': 'instance', 'value': u'wibble'}
        inst2_key_foo = {'key': u'foo', 'resource_id': 'i-00000002',
                         'resource_type': 'instance', 'value': u'bar'}
        inst2_key_baz = {'key': u'baz', 'resource_id': 'i-00000002',
                         'resource_type': 'instance', 'value': u'quux'}

        # We should be able to search by:
        # No filter
        tags = self.cloud.describe_tags(self.context)['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo,
                                      inst2_key_baz, inst1_key_bax])

        # Resource ID
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'resource-id',
                         'value': [ec2_id1]}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst1_key_bax])

        # Resource Type
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'resource-type',
                         'value': ['instance']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo,
                                      inst2_key_baz, inst1_key_bax])

        # Key, either bare or with wildcards
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['foo']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo])

        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz])

        # '?' matches a single character, '*' any run of characters.
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['ba?']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_bax, inst2_key_baz])

        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['b*']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_bax, inst2_key_baz])

        # Value, either bare or with wildcards
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'value',
                         'value': ['bar']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo])

        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'value',
                         'value': ['wi*']}])['tagSet']
        self.assertEqual(tags, [inst1_key_bax])

        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'value',
                         'value': ['quu?']}])['tagSet']
        self.assertEqual(tags, [inst2_key_baz])

        # Multiple values
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz', 'bax']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz, inst1_key_bax])

        # Multiple filters (AND): no match
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz']},
                        {'name': 'value',
                         'value': ['wibble']}])['tagSet']
        self.assertEqual(tags, [])

        # Multiple filters (AND): match
        tags = self.cloud.describe_tags(self.context,
                filter=[{'name': 'key',
                         'value': ['baz']},
                        {'name': 'value',
                         'value': ['quux']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz])

        # And we should fail on unsupported resource types
        self.assertRaises(exception.InvalidParameterValue,
                          self.cloud.describe_tags,
                          self.context,
                          filter=[{'name': 'resource-type',
                                   'value': ['instance', 'volume']}])
def test_resource_type_from_id(self):
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'i-12345'),
'instance')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'r-12345'),
'reservation')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'vol-12345'),
'volume')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'snap-12345'),
'snapshot')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'ami-12345'),
'image')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'ari-12345'),
'image')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'aki-12345'),
'image')
self.assertIsNone(
ec2utils.resource_type_from_id(self.context, 'x-12345'))
    @mock.patch.object(ec2utils, 'ec2_vol_id_to_uuid',
                       side_effect=lambda
                       ec2_volume_id: uuidutils.generate_uuid())
    def test_detach_volume_unattched_error(self, mock_ec2_vol_id_to_uuid):
        # NOTE(review): 'unattched' typo is part of the existing test name;
        # renaming would change test discovery, so it is kept as-is.
        # Validates that VolumeUnattached is raised if the volume doesn't
        # have an instance_uuid value.
        ec2_volume_id = 'vol-987654321'

        with mock.patch.object(self.cloud.volume_api, 'get',
                               side_effect=lambda context, volume_id:
                               {'id': volume_id}) as mock_get:
            # The stubbed volume dict carries no 'instance_uuid', which is
            # what should trigger VolumeUnattached inside detach_volume.
            self.assertRaises(exception.VolumeUnattached,
                              self.cloud.detach_volume,
                              self.context,
                              ec2_volume_id)
            mock_get.assert_called_once_with(self.context, mock.ANY)
            mock_ec2_vol_id_to_uuid.assert_called_once_with(ec2_volume_id)
class CloudTestCaseNeutronProxy(test.NoDBTestCase):
    """EC2 security-group API tests run against the neutron security group
    driver, using the fake neutron client.
    """

    def setUp(self):
        super(CloudTestCaseNeutronProxy, self).setUp()
        cfg.CONF.set_override('security_group_api', 'neutron')
        self.cloud = cloud.CloudController()
        # Swap in the fake neutron client for the duration of each test;
        # restored in tearDown.
        self.original_client = neutronv2.get_client
        neutronv2.get_client = test_neutron.get_client
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)

    def tearDown(self):
        # Restore the real client and wipe the fake's state.
        neutronv2.get_client = self.original_client
        test_neutron.get_client()._reset()
        super(CloudTestCaseNeutronProxy, self).tearDown()

    def test_describe_security_groups(self):
        # Makes sure describe_security_groups works and filters results.
        group_name = 'test'
        description = 'test'
        self.cloud.create_security_group(self.context, group_name,
                                         description)
        result = self.cloud.describe_security_groups(self.context)
        # NOTE(vish): should have the default group as well
        self.assertEqual(len(result['securityGroupInfo']), 2)
        result = self.cloud.describe_security_groups(self.context,
                      group_name=[group_name])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(result['securityGroupInfo'][0]['groupName'],
                         group_name)
        self.cloud.delete_security_group(self.context, group_name)

    def test_describe_security_groups_by_id(self):
        group_name = 'test'
        description = 'test'
        self.cloud.create_security_group(self.context, group_name,
                                         description)
        neutron = test_neutron.get_client()
        # Get id from neutron since cloud.create_security_group
        # does not expose it.
        search_opts = {'name': group_name}
        groups = neutron.list_security_groups(
            **search_opts)['security_groups']
        result = self.cloud.describe_security_groups(self.context,
                      group_id=[groups[0]['id']])
        self.assertEqual(len(result['securityGroupInfo']), 1)
        self.assertEqual(
            result['securityGroupInfo'][0]['groupName'],
            group_name)
        self.cloud.delete_security_group(self.context, group_name)

    def test_create_delete_security_group(self):
        descript = 'test description'
        create = self.cloud.create_security_group
        result = create(self.context, 'testgrp', descript)
        group_descript = result['securityGroupSet'][0]['groupDescription']
        self.assertEqual(descript, group_descript)
        delete = self.cloud.delete_security_group
        self.assertTrue(delete(self.context, 'testgrp'))
| en | 0.842501 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # Copyright 2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. Kludge the cache into instance(s) without having to create DB entries # set up our cloud # Short-circuit the conductor service # Stub out the notification service so we use the no-op serializer # and avoid lazy-load traces with the wrap_exception decorator in # the compute service. # set up services # make sure we can map ami-00000001/2 to a uuid in FakeImageService # NOTE(vish): create depends on pool, so just call helper directly # Makes sure describe regions runs without raising an exception. # Makes sure describe addresses runs without raising an exception. # Makes sure describe addresses runs without raising an exception. # Makes sure describe specific address works. # Verifies associate runs cleanly without raising an exception. # TODO(jkoelker) Probably need to query for instance_type_id and # make sure we get a valid one Verifies disassociating auto assigned floating IP raises an exception # Makes sure describe_security_groups works and filters results. # NOTE(vish): should have the default group as well # Makes sure describe_security_groups works and filters results. 
# include all tenants # exclude all tenants # default all tenants # 11'th group should fail # Ensure that a group can not be deleted if in use by an instance. # Makes sure describe_availability_zones works and filters results. # Aggregate based zones # Makes sure describe_availability_zones works and filters results. # Makes sure describe_instances works and filters results. # Now try filtering. # A filter with even one invalid id should cause an exception to be # raised # Makes sure describe_instances works and filters results. # Makes sure describe_instances works and filters results. # Makes sure describe_instances works and filters tag results. # We need to stub network calls # We need to stub out the MQ call - it won't succeed. We do want # to check that the method is called, though # Create some test images # Create some tags # We get one overlapping pair, one overlapping key, and a # disparate pair # inst1 : {'foo': 'bar', 'baz': 'wibble', 'bax': 'wobble'} # inst2 : {'foo': 'bar', 'baz': 'quux', 'zog': 'bobble'} # We should be able to search by: # No filter # Key search # Both should have tags with key 'foo' and value 'bar' # Both should have tags with key 'foo' # Value search # Only inst2 should have tags with key 'baz' and value 'quux' # Only inst2 should have tags with value 'quux' # Multiple values # Both should have tags with key 'baz' and values in the set # ['quux', 'wibble'] # Both should have tags with key 'baz' or tags with value 'bar' # Confirm deletion of tags # Check for format 'tag:' # Check for format 'tag-' # destroy the test instances # Makes sure describe_instances works and is sorted as expected. # Makes sure describe_instances for instanceState works. # Makes sure describe_instances w/ no ipv6 works. # Verifies dnsName doesn't get set if floating IP is set. 
# list all # provided a valid image_id # provide more than 1 valid image_id # provide a non-existing image_id # NOTE(yamahata): noDevice doesn't make sense when returning mapping # It makes sense only when user overriding existing # mapping. # {'deviceName': '/dev/sdb4', 'noDevice': True}, # {'deviceName': '/dev/sdc4', 'noDevice': True} # NOTE(yamahata): # InstanceBlockDeviceMappingItemType # rootDeviceType # rootDeviceName # blockDeviceMapping # deviceName # virtualName # ebs # snapshotId # volumeSize # deleteOnTermination # noDevice # test for rootDeviceName and blockDeviceMapping. # NOTE(vish): We are mocking s3 so make sure we have converted # to ids instead of uuids. # valid image # invalid image # TODO(soren): We need this until we can stop polling in the rpc code # for unit tests. # assert key fields are equal # 11'th group should fail # NOTE(vish) the assert for this call is in the fake_create method. # Ensure subsequent run_instances calls with same client token # are idempotent and that ones with different client_token are not # make sure terminated instances lose their client tokens restart compute service. NOTE: fake driver forgets all instances. # Makes sure stop/start instance works. # enforce periodic tasks run in short time to avoid wait for 60s. # a running instance can't be started. # a running instance can't be started. # a running instance can't be started. Make sure that CreateImage works. # enforce periodic tasks run in short time to avoid wait for 60s. # Make sure that CreateImage works. # Make sure that CreateImage works. Ensure CreateImage fails as expected for an instance-store-backed instance # enforce periodic tasks run in short time to avoid wait for 60s. # Make sure that describe_instance_attribute works. # NOTE(yamahata): this isn't supported # get_attribute('sourceDestCheck') test describe_instance_attribute attribute instance_initiated_shutdown_behavior # NOTE(yamahata): create ami-3 ... 
ami-7 # ami-1 and ami-2 is already created by setUp() # We need to stub network calls # We need to stub out the MQ call - it won't succeed. We do want # to check that the method is called, though # Create a test image # Create some tags # Delete them # We need to stub network calls # We need to stub out the MQ call - it won't succeed. We do want # to check that the method is called, though # Create some test images # Create some tags # We get one overlapping pair, and each has a different key value pair # inst1 : {'foo': 'bar', 'bax': 'wibble'} # inst1 : {'foo': 'bar', 'baz': 'quux'} # We should be able to search by: # No filter # Resource ID # Resource Type # Key, either bare or with wildcards # Value, either bare or with wildcards # Multiple values # Multiple filters (AND): no match # Multiple filters (AND): match # And we should fail on supported resource types # Validates that VolumeUnattached is raised if the volume doesn't # have an instance_uuid value. # Makes sure describe_security_groups works and filters results. # NOTE(vish): should have the default group as well # Get id from neutron since cloud.create_security_group # does not expose it. | 1.083352 | 1 |
uamqp/async/mgmt_operation_async.py | gdooper/azure-uamqp-python | 0 | 6624520 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import logging
import asyncio
import functools
import uuid
#from uamqp.session import Session
from uamqp.mgmt_operation import MgmtOperation
from uamqp import Message
from uamqp import constants
_logger = logging.getLogger(__name__)
class MgmtOperationAsync(MgmtOperation):
    """An asynchronous AMQP request/response operation. These are frequently used
    for management tasks against a $management node, however any node name can be
    specified and the available options will depend on the target service.

    :param session: The AMQP session to use for the operation. New send and
     receive links will be created in this Session.
    :type session: ~uamqp.SessionAsync
    :param target: The AMQP node to send the request to.
     The default is `b"$management"`
    :type target: bytes or str
    :param status_code_field: Provide an alternate name for the status code in the
     response body which can vary between services due to the spec still being in draft.
     The default is `b"statusCode"`.
    :type status_code_field: bytes or str
    :param description_fields: Provide an alternate name for the description in the
     response body which can vary between services due to the spec still being in draft.
     The default is `b"statusDescription"`.
    :type description_fields: bytes or str
    :param encoding: The encoding to use for parameters supplied as strings.
     Default is 'UTF-8'
    :type encoding: str
    :param loop: A user specified event loop.
    :type loop: ~asyncio.AbstractEventLoop
    """

    def __init__(self,
                 session,
                 target=None,
                 status_code_field=b'statusCode',
                 description_fields=b'statusDescription',
                 encoding='UTF-8',
                 loop=None):
        self.loop = loop or asyncio.get_event_loop()
        super(MgmtOperationAsync, self).__init__(
            session,
            target=target,
            status_code_field=status_code_field,
            description_fields=description_fields,
            encoding=encoding)

    async def execute_async(self, operation, op_type, message, timeout=0):
        """Execute a request and wait on a response asynchronously.

        :param operation: The type of operation to be performed. This value will
         be service-specific, but common values include READ, CREATE and UPDATE.
         This value will be added as an application property on the message.
        :type operation: bytes
        :param op_type: The type on which to carry out the operation. This will
         be specific to the entities of the service. This value will be added as
         an application property on the message.
        :type op_type: bytes
        :param message: The message to send in the management request.
        :type message: ~uamqp.Message
        :param timeout: Provide an optional timeout in milliseconds within which a response
         to the management request must be received.
        :type timeout: int
        :raises: TimeoutError if `timeout` is set and no response arrives in time.
        :returns: ~uamqp.Message
        """
        start_time = self._counter.get_current_ms()
        operation_id = str(uuid.uuid4())
        # Placeholder entry; on_complete replaces it with the response Message.
        self._responses[operation_id] = None

        def on_complete(operation_result, status_code, description, wrapped_message):
            result = constants.MgmtExecuteResult(operation_result)
            if result != constants.MgmtExecuteResult.Ok:
                _logger.error("Failed to complete mgmt operation.\nStatus code: {}\nMessage: {}".format(
                    status_code, description))
            self._responses[operation_id] = Message(message=wrapped_message)

        self._mgmt_op.execute(operation, op_type, None, message.get_message(), on_complete)
        try:
            # Pump the connection until the callback fires or an error is set.
            while not self._responses[operation_id] and not self.mgmt_error:
                if timeout > 0:
                    now = self._counter.get_current_ms()
                    if (now - start_time) >= timeout:
                        raise TimeoutError("Failed to receive mgmt response in {}ms".format(timeout))
                await self.connection.work_async()
            if self.mgmt_error:
                raise self.mgmt_error
            return self._responses[operation_id]
        finally:
            # Always remove the pending entry so timed-out or failed
            # operations do not leak into self._responses.
            self._responses.pop(operation_id, None)

    async def destroy_async(self):
        """Close the send/receive links for this node asynchronously."""
        # The underlying destroy call is blocking, so run it in an executor.
        await self.loop.run_in_executor(None, functools.partial(self._mgmt_op.destroy))
| #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import logging
import asyncio
import functools
import uuid
#from uamqp.session import Session
from uamqp.mgmt_operation import MgmtOperation
from uamqp import Message
from uamqp import constants
_logger = logging.getLogger(__name__)
class MgmtOperationAsync(MgmtOperation):
    """An asynchronous AMQP request/response operation. These are frequently used
    for management tasks against a $management node, however any node name can be
    specified and the available options will depend on the target service.

    :param session: The AMQP session to use for the operation. New send and
     receive links will be created in this Session.
    :type session: ~uamqp.SessionAsync
    :param target: The AMQP node to send the request to.
     The default is `b"$management"`
    :type target: bytes or str
    :param status_code_field: Provide an alternate name for the status code in the
     response body which can vary between services due to the spec still being in draft.
     The default is `b"statusCode"`.
    :type status_code_field: bytes or str
    :param description_fields: Provide an alternate name for the description in the
     response body which can vary between services due to the spec still being in draft.
     The default is `b"statusDescription"`.
    :type description_fields: bytes or str
    :param encoding: The encoding to use for parameters supplied as strings.
     Default is 'UTF-8'
    :type encoding: str
    :param loop: A user specified event loop.
    :type loop: ~asyncio.AbstractEventLoop
    """

    def __init__(self,
                 session,
                 target=None,
                 status_code_field=b'statusCode',
                 description_fields=b'statusDescription',
                 encoding='UTF-8',
                 loop=None):
        self.loop = loop or asyncio.get_event_loop()
        super(MgmtOperationAsync, self).__init__(
            session,
            target=target,
            status_code_field=status_code_field,
            description_fields=description_fields,
            encoding=encoding)

    async def execute_async(self, operation, op_type, message, timeout=0):
        """Execute a request and wait on a response asynchronously.

        :param operation: The type of operation to be performed. This value will
         be service-specific, but common values include READ, CREATE and UPDATE.
         This value will be added as an application property on the message.
        :type operation: bytes
        :param op_type: The type on which to carry out the operation. This will
         be specific to the entities of the service. This value will be added as
         an application property on the message.
        :type op_type: bytes
        :param message: The message to send in the management request.
        :type message: ~uamqp.Message
        :param timeout: Provide an optional timeout in milliseconds within which a response
         to the management request must be received.
        :type timeout: int
        :returns: ~uamqp.Message
        """
        start_time = self._counter.get_current_ms()
        operation_id = str(uuid.uuid4())
        # Placeholder entry; on_complete replaces it with the response Message.
        self._responses[operation_id] = None

        def on_complete(operation_result, status_code, description, wrapped_message):
            result = constants.MgmtExecuteResult(operation_result)
            if result != constants.MgmtExecuteResult.Ok:
                _logger.error("Failed to complete mgmt operation.\nStatus code: {}\nMessage: {}".format(
                    status_code, description))
            self._responses[operation_id] = Message(message=wrapped_message)

        self._mgmt_op.execute(operation, op_type, None, message.get_message(), on_complete)
        # NOTE(review): if the timeout fires or mgmt_error is raised below,
        # the operation_id entry is never popped from self._responses and
        # accumulates — consider cleaning up in a finally block.
        while not self._responses[operation_id] and not self.mgmt_error:
            if timeout > 0:
                now = self._counter.get_current_ms()
                if (now - start_time) >= timeout:
                    raise TimeoutError("Failed to receive mgmt response in {}ms".format(timeout))
            await self.connection.work_async()
        if self.mgmt_error:
            raise self.mgmt_error
        response = self._responses.pop(operation_id)
        return response

    async def destroy_async(self):
        """Close the send/receive links for this node asynchronously."""
        # The underlying destroy call is blocking, so run it in an executor.
        await self.loop.run_in_executor(None, functools.partial(self._mgmt_op.destroy))
| en | 0.788862 | #------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. #-------------------------------------------------------------------------- #from uamqp.session import Session An asynchronous AMQP request/response operation. These are frequently used for management tasks against a $management node, however any node name can be specified and the available options will depend on the target service. :param session: The AMQP session to use for the operation. Nee send and receive links will be created in this Session. :type session: ~uamqp.SessionAsync :param target: The AMQP node to send the request to. The default is `b"$management"` :type target: bytes or str :param status_code_field: Provide an alternate name for the status code in the response body which can vary between services due to the spec still being in draft. The default is `b"statusCode"`. :type status_code_field: bytes or str :param description_fields: Provide an alternate name for the description in the response body which can vary between services due to the spec still being in draft. The default is `b"statusDescription"`. :type description_fields: bytes or str :param encoding: The encoding to use for parameters supplied as strings. Default is 'UTF-8' :type encoding: str :param loop: A user specified event loop. :type loop: ~asycnio.AbstractEventLoop Execute a request and wait on a response asynchronously. :param operation: The type of operation to be performed. This value will be service-specific, but common values incluse READ, CREATE and UPDATE. This value will be added as an application property on the message. :type operation: bytes :param op_type: The type on which to carry out the operation. This will be specific to the entities of the service. This value will be added as an application property on the message. 
:type op_type: bytes :param message: The message to send in the management request. :type message: ~uamqp.Message :param timeout: Provide an optional timeout in milliseconds within which a response to the management request must be received. :type timeout: int :returns: ~uamqp.Message Close the send/receive links for this node asynchronously. | 2.007431 | 2 |
django_swiss_knife/buttons.py | sthima/django-swiss-knife | 2 | 6624521 | from django.forms.utils import flatatt
from django.utils.safestring import mark_safe
from django.template import loader
class AbstractButton(object):
    """Base class for renderable button-like widgets.

    Subclasses must implement :meth:`render`; calling the instance is a
    shortcut for rendering it.
    """

    def __init__(self, url, label, attrs=None):
        self.url = url
        self.label = label
        # Avoid the shared-mutable-default pitfall: each instance gets its
        # own attribute dict.
        self.attrs = {} if attrs is None else attrs

    def __call__(self):
        return self.render()

    def render(self, *args, **kwargs):
        # ``NotImplemented`` is a value, not an exception type: calling it
        # used to raise a confusing TypeError.  Raise the proper exception.
        raise NotImplementedError("You need to overwrite this method")
class Button(AbstractButton):
    """A plain ``<button>`` element with optional HTML attributes."""

    def __init__(self, label, attrs=None):
        self.label = label
        # Avoid a shared mutable default argument.
        self.attrs = {} if attrs is None else attrs

    def render(self, *args, **kwargs):
        # ``flatatt`` serialises the attribute dict; ``mark_safe`` keeps the
        # generated markup from being autoescaped by the template engine.
        return mark_safe("<button %s>%s</button>" % (flatatt(self.attrs), self.label,))
class Anchor(AbstractButton):
    """An ``<a>`` element pointing at ``url``, styled as a button."""

    def __init__(self, url, label, attrs=None):
        self.url = url
        self.label = label
        # Avoid a shared mutable default argument.
        self.attrs = {} if attrs is None else attrs

    def render(self, *args, **kwargs):
        return mark_safe("<a href='%s' %s>%s</a>" % (self.url, flatatt(self.attrs), self.label))
class MultiActionsButton(AbstractButton):
    """A dropdown button that displays all the buttons passed as actions."""

    template_name = "django_swiss_knife/bootstrap/multi-actions-dropdown-button.html"

    def __init__(self, label, actions=None):
        self.label = label
        # Avoid a shared mutable default argument.
        self.actions = [] if actions is None else actions

    def get_context_data(self, *args, **kwargs):
        context = {
            'label': self.label,
            'actions': self.actions
        }
        return context

    def render(self, *args, **kwargs):
        template = loader.get_template(self.template_name)
        context = self.get_context_data(*args, **kwargs)
        return template.render(context)
class SplitButtonsDropdown(AbstractButton):
    """A button split into a main action plus a dropdown of extra actions."""

    template_name = "django_swiss_knife/bootstrap/split-button-dropdown.html"

    def __init__(self, main_action, actions=None, split_button_classes="btn btn-default"):
        self.main_action = main_action
        # Avoid a shared mutable default argument.
        self.actions = [] if actions is None else actions
        self.split_button_classes = split_button_classes

    def get_context_data(self, *args, **kwargs):
        context = {
            'main_action': self.main_action,
            'actions': self.actions,
            'split_button_classes': self.split_button_classes
        }
        return context

    def render(self, *args, **kwargs):
        template = loader.get_template(self.template_name)
        context = self.get_context_data(*args, **kwargs)
        return template.render(context)
| from django.forms.utils import flatatt
from django.utils.safestring import mark_safe
from django.template import loader
class AbstractButton(object):
    """Base class for renderable button-like widgets.

    Subclasses must implement :meth:`render`; calling the instance is a
    shortcut for rendering it.
    """

    def __init__(self, url, label, attrs=None):
        self.url = url
        self.label = label
        # Avoid the shared-mutable-default pitfall: each instance gets its
        # own attribute dict.
        self.attrs = {} if attrs is None else attrs

    def __call__(self):
        return self.render()

    def render(self, *args, **kwargs):
        # ``NotImplemented`` is a value, not an exception type: calling it
        # used to raise a confusing TypeError.  Raise the proper exception.
        raise NotImplementedError("You need to overwrite this method")
class Button(AbstractButton):
    """A plain ``<button>`` element with optional HTML attributes."""

    def __init__(self, label, attrs=None):
        self.label = label
        # Avoid a shared mutable default argument.
        self.attrs = {} if attrs is None else attrs

    def render(self, *args, **kwargs):
        # ``flatatt`` serialises the attribute dict; ``mark_safe`` keeps the
        # generated markup from being autoescaped by the template engine.
        return mark_safe("<button %s>%s</button>" % (flatatt(self.attrs), self.label,))
class Anchor(AbstractButton):
    """An ``<a>`` element pointing at ``url``, styled as a button."""

    def __init__(self, url, label, attrs=None):
        self.url = url
        self.label = label
        # Avoid a shared mutable default argument.
        self.attrs = {} if attrs is None else attrs

    def render(self, *args, **kwargs):
        return mark_safe("<a href='%s' %s>%s</a>" % (self.url, flatatt(self.attrs), self.label))
class MultiActionsButton(AbstractButton):
    """A dropdown button that displays all the buttons passed as actions."""

    template_name = "django_swiss_knife/bootstrap/multi-actions-dropdown-button.html"

    def __init__(self, label, actions=None):
        self.label = label
        # Avoid a shared mutable default argument.
        self.actions = [] if actions is None else actions

    def get_context_data(self, *args, **kwargs):
        context = {
            'label': self.label,
            'actions': self.actions
        }
        return context

    def render(self, *args, **kwargs):
        template = loader.get_template(self.template_name)
        context = self.get_context_data(*args, **kwargs)
        return template.render(context)
class SplitButtonsDropdown(AbstractButton):
    """A button split into a main action plus a dropdown of extra actions."""

    template_name = "django_swiss_knife/bootstrap/split-button-dropdown.html"

    def __init__(self, main_action, actions=None, split_button_classes="btn btn-default"):
        self.main_action = main_action
        # Avoid a shared mutable default argument.
        self.actions = [] if actions is None else actions
        self.split_button_classes = split_button_classes

    def get_context_data(self, *args, **kwargs):
        context = {
            'main_action': self.main_action,
            'actions': self.actions,
            'split_button_classes': self.split_button_classes
        }
        return context

    def render(self, *args, **kwargs):
        template = loader.get_template(self.template_name)
        context = self.get_context_data(*args, **kwargs)
        return template.render(context)
| en | 0.674283 | A dropdown button that display a all buttons passed on actions A button splitted into action and dropdown | 2.344777 | 2 |
tensorflow/examples/saved_model/integration_tests/use_rnn_cell.py | abhaikollara/tensorflow | 848 | 6624522 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Load and use an RNN cell stored as a SavedModel."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
from absl import app
from absl import flags
import numpy as np
import tensorflow.compat.v2 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string("model_dir", None, "Directory to load SavedModel from.")
def main(argv):
  """Load the saved RNN cell, drive it once, and re-export it."""
  del argv  # Unused.

  def random_batch(width):
    # A float32 tensor of shape [3, width] filled with uniform noise.
    return tf.constant(np.random.uniform(size=[3, width]).astype(np.float32))

  cell = tf.saved_model.load(FLAGS.model_dir)
  state = cell.get_initial_state(random_batch(10))
  cell.next_state(random_batch(19), state)
  # Re-export the restored model to verify that a model built from a
  # SavedModel can itself be saved again (regression check for b/142231881).
  tf.saved_model.save(cell, tempfile.mkdtemp())
if __name__ == "__main__":
app.run(main)
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Load and use an RNN cell stored as a SavedModel."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
from absl import app
from absl import flags
import numpy as np
import tensorflow.compat.v2 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string("model_dir", None, "Directory to load SavedModel from.")
def main(argv):
  """Load the saved RNN cell, drive it once, and re-export it."""
  del argv  # Unused.

  def random_batch(width):
    # A float32 tensor of shape [3, width] filled with uniform noise.
    return tf.constant(np.random.uniform(size=[3, width]).astype(np.float32))

  cell = tf.saved_model.load(FLAGS.model_dir)
  state = cell.get_initial_state(random_batch(10))
  cell.next_state(random_batch(19), state)
  # Re-export the restored model to verify that a model built from a
  # SavedModel can itself be saved again (regression check for b/142231881).
  tf.saved_model.save(cell, tempfile.mkdtemp())
if __name__ == "__main__":
app.run(main)
| en | 0.847951 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== Load and use an RNN cell stored as a SavedModel. # This is testing that a model using a SavedModel can be re-exported again, # e.g. to catch issues such as b/142231881. | 2.073374 | 2 |
AlienIvansion/bullet.py | FranciscoCabrita1/Cabrita | 5 | 6624523 | import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
    """A class that manages projectiles fired from the ship."""
    def __init__(self, ai_settings, screen, ship):
        """Create a bullet object at the ship's current position."""
        super(Bullet, self).__init__()
        self.screen = screen
        # Create a rect for the bullet at (0, 0), then set the correct position
        self.rect = pygame.Rect(0, 0, ai_settings.bullet_width, ai_settings.bullet_height)
        self.rect.centerx = ship.rect.centerx
        self.rect.top = ship.rect.top
        # Store the bullet's position as a decimal value (allows sub-pixel speeds)
        self.y = float(self.rect.y)
        self.color = ai_settings.bullet_color
        self.speed_factor = ai_settings.bullet_speed_factor
    def update(self):
        """Move the bullet up the screen."""
        # Update the decimal position of the bullet
        self.y -= self.speed_factor
        # Update the rect position
        self.rect.y = self.y
    def draw_bullet(self):
        """Draw the bullet to the screen."""
pygame.draw.rect(self.screen, self.color, self.rect) | import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
    """A class that manages projectiles fired from the ship."""
    def __init__(self, ai_settings, screen, ship):
        """Create a bullet object at the ship's current position."""
        super(Bullet, self).__init__()
        self.screen = screen
        # Create a rect for the bullet at (0, 0), then set the correct position
        self.rect = pygame.Rect(0, 0, ai_settings.bullet_width, ai_settings.bullet_height)
        self.rect.centerx = ship.rect.centerx
        self.rect.top = ship.rect.top
        # Store the bullet's position as a decimal value (allows sub-pixel speeds)
        self.y = float(self.rect.y)
        self.color = ai_settings.bullet_color
        self.speed_factor = ai_settings.bullet_speed_factor
    def update(self):
        """Move the bullet up the screen."""
        # Update the decimal position of the bullet
        self.y -= self.speed_factor
        # Update the rect position
        self.rect.y = self.y
    def draw_bullet(self):
        """Draw the bullet to the screen."""
pygame.draw.rect(self.screen, self.color, self.rect) | pt | 0.988748 | Uma classe que administra projéteis disparados pela espaçonave. Cria um objeto para o projétil na posição atual da espaçonave. # Cria um retângulo para o projétil em (0,0) e, em seguida, define a posição correta # Armazena a posição do projétil como um valor decimal Move o projétil para cima na tela. # Atualiza a posição decima do projétil # Atualiza a posição do rect Desenha o projétil na tela. | 3.926651 | 4 |
examples/__init__.py | musicinmybrain/ppft | 53 | 6624524 | #!/usr/bin/env python
#
# Author: <NAME> (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2018-2021 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/ppft/blob/master/LICENSE
"""
to run this test suite, first build and install `ppft`.
$ python setup.py build
$ python setup.py install
then run the tests with:
$ python -m ppft.tests
or, if `nose` is installed:
$ nosetests
"""
| #!/usr/bin/env python
#
# Author: <NAME> (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2018-2021 The Uncertainty Quantification Foundation.
# License: 3-clause BSD. The full license text is available at:
# - https://github.com/uqfoundation/ppft/blob/master/LICENSE
"""
to run this test suite, first build and install `ppft`.
$ python setup.py build
$ python setup.py install
then run the tests with:
$ python -m ppft.tests
or, if `nose` is installed:
$ nosetests
"""
| en | 0.607003 | #!/usr/bin/env python # # Author: <NAME> (mmckerns @caltech and @uqfoundation) # Copyright (c) 2018-2021 The Uncertainty Quantification Foundation. # License: 3-clause BSD. The full license text is available at: # - https://github.com/uqfoundation/ppft/blob/master/LICENSE to run this test suite, first build and install `ppft`. $ python setup.py build $ python setup.py install then run the tests with: $ python -m ppft.tests or, if `nose` is installed: $ nosetests | 1.379166 | 1 |
skfem/element/discrete_field.py | ahhuhtal/scikit-fem | 1 | 6624525 | from typing import NamedTuple, Optional
import numpy as np
from numpy import ndarray
class DiscreteField(NamedTuple):
    """A function defined at the global quadrature points.

    Fields default to ``None``; only the derivatives meaningful for the
    element type are filled in.
    """

    value: Optional[ndarray] = None
    grad: Optional[ndarray] = None
    div: Optional[ndarray] = None
    curl: Optional[ndarray] = None
    hess: Optional[ndarray] = None
    hod: Optional[ndarray] = None

    @property
    def f(self):
        """For backwards compatibility; used by old style form decorators."""
        return self.value

    @property
    def df(self):
        """For backwards compatibility; used by old style form decorators."""
        # Return the first derivative-like field that is present.
        if self.grad is not None:
            return self.grad
        elif self.div is not None:
            return self.div
        elif self.curl is not None:
            return self.curl
        return None

    @property
    def ddf(self):
        """For backwards compatibility; used by old style form decorators."""
        return self.hess

    def __array__(self):
        return self.f

    def __mul__(self, other):
        if isinstance(other, DiscreteField):
            return self.f * other.f
        return self.f * other

    def _split(self):
        """Split all components based on their first dimension.

        ``None`` fields stay ``None`` in every part.  Previously they were
        dropped from the argument list, which shifted any later non-None
        fields into the wrong positional slots of the new tuples.
        """
        return [DiscreteField(*[f[i] if f is not None else None
                                for f in self])
                for i in range(self.f.shape[0])]

    def zeros_like(self):
        """Return zero :class:`~skfem.element.DiscreteField` with same size."""

        def zero_or_none(x):
            if x is None:
                return None
            return np.zeros_like(x)

        return DiscreteField(*[zero_or_none(field) for field in self])

    __rmul__ = __mul__
| from typing import NamedTuple, Optional
import numpy as np
from numpy import ndarray
class DiscreteField(NamedTuple):
    """A function defined at the global quadrature points.

    Fields default to ``None``; only the derivatives meaningful for the
    element type are filled in.
    """

    value: Optional[ndarray] = None
    grad: Optional[ndarray] = None
    div: Optional[ndarray] = None
    curl: Optional[ndarray] = None
    hess: Optional[ndarray] = None
    hod: Optional[ndarray] = None

    @property
    def f(self):
        """For backwards compatibility; used by old style form decorators."""
        return self.value

    @property
    def df(self):
        """For backwards compatibility; used by old style form decorators."""
        # Return the first derivative-like field that is present.
        if self.grad is not None:
            return self.grad
        elif self.div is not None:
            return self.div
        elif self.curl is not None:
            return self.curl
        return None

    @property
    def ddf(self):
        """For backwards compatibility; used by old style form decorators."""
        return self.hess

    def __array__(self):
        return self.f

    def __mul__(self, other):
        if isinstance(other, DiscreteField):
            return self.f * other.f
        return self.f * other

    def _split(self):
        """Split all components based on their first dimension.

        ``None`` fields stay ``None`` in every part.  Previously they were
        dropped from the argument list, which shifted any later non-None
        fields into the wrong positional slots of the new tuples.
        """
        return [DiscreteField(*[f[i] if f is not None else None
                                for f in self])
                for i in range(self.f.shape[0])]

    def zeros_like(self):
        """Return zero :class:`~skfem.element.DiscreteField` with same size."""

        def zero_or_none(x):
            if x is None:
                return None
            return np.zeros_like(x)

        return DiscreteField(*[zero_or_none(field) for field in self])

    __rmul__ = __mul__
| en | 0.842514 | A function defined at the global quadrature points. For backwards compatibility; used by old style form decorators. For backwards compatibility; used by old style form decorators. For backwards compatibility; used by old style form decorators. Split all components based on their first dimension. Return zero :class:`~skfem.element.DiscreteField` with same size. | 3.291492 | 3 |
modified_porechop/nanopore_read.py | shelkmike/Modified_porechop | 1 | 6624526 | #!/usr/bin/env python3
"""
Copyright 2017 <NAME> (<EMAIL>)
https://github.com/rrwick/Porechop
This module contains the class for a Nanopore read.
This file is part of Porechop. Porechop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version. Porechop is distributed in
the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along with Porechop. If
not, see <http://www.gnu.org/licenses/>.
"""
from .cpp_function_wrappers import adapter_alignment
from .misc import yellow, red, add_line_breaks_to_sequence, END_FORMATTING, RED, YELLOW
class NanoporeRead(object):
def __init__(self, name, seq, quals):
self.name = name
self.seq = seq.upper()
if self.seq.count('U') > self.seq.count('T'):
self.rna = True
self.seq = self.seq.replace('U', 'T')
else:
self.rna = False
self.quals = quals
if len(quals) < len(seq):
self.quals += '+' * (len(seq) - len(quals))
self.start_trim_amount = 0
self.end_trim_amount = 0
self.start_adapter_alignments = []
self.end_adapter_alignments = []
self.middle_adapter_positions = set()
self.middle_trim_positions = set()
self.middle_hit_str = ''
self.start_barcode_scores = {}
self.end_barcode_scores = {}
self.best_start_barcode = ('none', 0.0)
self.best_end_barcode = ('none', 0.0)
self.second_best_start_barcode = ('none', 0.0)
self.second_best_end_barcode = ('none', 0.0)
self.barcode_call = 'none'
self.albacore_barcode_call = None
def get_seq_with_start_end_adapters_trimmed(self):
if not self.start_trim_amount and not self.end_trim_amount:
return self.seq
start_pos = self.start_trim_amount
end_pos = len(self.seq) - self.end_trim_amount
trimmed_seq = self.seq[start_pos:end_pos]
return trimmed_seq
def seq_length_with_start_end_adapters_trimmed(self):
return len(self.get_seq_with_start_end_adapters_trimmed())
def get_quals_with_start_end_adapters_trimmed(self):
if not self.start_trim_amount and not self.end_trim_amount:
return self.quals
start_pos = self.start_trim_amount
end_pos = len(self.quals) - self.end_trim_amount
trimmed_quals = self.quals[start_pos:end_pos]
return trimmed_quals
def get_split_read_parts(self, min_split_read_size):
"""
Returns the read split into parts as determined by the middle_trim_positions set.
"""
trimmed_seq = self.get_seq_with_start_end_adapters_trimmed()
trimmed_quals = self.get_quals_with_start_end_adapters_trimmed()
split_read_parts = []
part_seq, part_quals = [], []
for i in range(len(trimmed_seq)):
if i in self.middle_trim_positions:
if part_seq:
split_read_parts.append((''.join(part_seq), ''.join(part_quals)))
part_seq, part_quals = [], []
else:
part_seq.append(trimmed_seq[i])
part_quals.append(trimmed_quals[i])
if part_seq:
split_read_parts.append((''.join(part_seq), ''.join(part_quals)))
split_read_parts = [x for x in split_read_parts if len(x[0]) >= min_split_read_size]
return split_read_parts
    def get_fasta(self, min_split_read_size, discard_middle, untrimmed=False):
        """Return this read as a FASTA record (or records, if it was split).

        When the read contains no middle adapters, one record is emitted
        (trimmed unless ``untrimmed``).  Otherwise either nothing is emitted
        (``discard_middle``) or one record per split part.
        """
        if not self.middle_trim_positions:
            if untrimmed:
                seq = self.seq
            else:
                seq = self.get_seq_with_start_end_adapters_trimmed()
            if not seq:  # Don't return empty sequences
                return ''
            if self.rna:
                # Reads stored as RNA were converted to DNA on input; convert back.
                seq = seq.replace('T', 'U')
            return ''.join(['>', self.name, '\n', add_line_breaks_to_sequence(seq, 70)])
        elif discard_middle:
            return ''
        else:
            fasta_str = ''
            for i, split_read_part in enumerate(self.get_split_read_parts(min_split_read_size)):
                # Each part gets a numbered name so the records stay unique.
                read_name = add_number_to_read_name(self.name, i + 1)
                if not split_read_part[0]:  # Don't return empty sequences
                    return ''
                seq = add_line_breaks_to_sequence(split_read_part[0], 70)
                if self.rna:
                    seq = seq.replace('T', 'U')
                fasta_str += ''.join(['>', read_name, '\n', seq])
            return fasta_str
    def get_fastq(self, min_split_read_size, discard_middle, untrimmed=False):
        """Return this read as a FASTQ record (or records, if it was split).

        Same branching logic as :meth:`get_fasta`, but also carries the
        quality string for each part.
        """
        if not self.middle_trim_positions:
            if untrimmed:
                seq = self.seq
                quals = self.quals
            else:
                seq = self.get_seq_with_start_end_adapters_trimmed()
                quals = self.get_quals_with_start_end_adapters_trimmed()
            if not seq:  # Don't return empty sequences
                return ''
            if self.rna:
                # Convert the DNA-normalised sequence back to RNA for output.
                seq = seq.replace('T', 'U')
            return ''.join(['@', self.name, '\n', seq, '\n+\n', quals, '\n'])
        elif discard_middle:
            return ''
        else:
            fastq_str = ''
            for i, split_read_part in enumerate(self.get_split_read_parts(min_split_read_size)):
                read_name = add_number_to_read_name(self.name, i + 1)
                seq, qual = split_read_part[0], split_read_part[1]
                if not seq:  # Don't return empty sequences
                    return ''
                if self.rna:
                    seq = seq.replace('T', 'U')
                fastq_str += ''.join(['@', read_name, '\n', seq, '\n+\n', qual, '\n'])
            return fastq_str
    def align_adapter_set(self, adapter_set, end_size, scoring_scheme_vals):
        """
        This function aligns the adapter to the reads and updates the best score for the adapter.
        This is not to determine where to trim the reads, but rather to figure out which adapter
        sets are present in the data.
        """
        if adapter_set.start_sequence:
            # Only the first end_size bases matter for a start adapter.
            read_seq_start = self.seq[:end_size]
            score, _, _, _ = align_adapter(read_seq_start, adapter_set.start_sequence[1],
                                           scoring_scheme_vals)
            adapter_set.best_start_score = max(adapter_set.best_start_score, score)
        if adapter_set.end_sequence:
            # Likewise, only the last end_size bases for an end adapter.
            read_seq_end = self.seq[-end_size:]
            score, _, _, _ = align_adapter(read_seq_end, adapter_set.end_sequence[1],
                                           scoring_scheme_vals)
            adapter_set.best_end_score = max(adapter_set.best_end_score, score)
    def find_start_trim(self, adapters, end_size, extra_trim_size, end_threshold,
                        scoring_scheme_vals, min_trim_size, check_barcodes, forward_or_reverse):
        """
        Aligns one or more adapter sequences and possibly adjusts the read's start trim amount based
        on the result.
        """
        read_seq_start = self.seq[:end_size]
        for adapter in adapters:
            if not adapter.start_sequence:
                continue
            full_score, partial_score, read_start, read_end = \
                align_adapter(read_seq_start, adapter.start_sequence[1], scoring_scheme_vals)
            # Accept the hit only if it is strong enough, does not run off the
            # end of the examined window, and is long enough to be trustworthy.
            if partial_score > end_threshold and read_end != end_size and \
                    read_end - read_start >= min_trim_size:
                # Trim through the adapter plus a safety margin; keep the
                # largest trim seen across all adapters.
                trim_amount = read_end + extra_trim_size
                self.start_trim_amount = max(self.start_trim_amount, trim_amount)
                self.start_adapter_alignments.append((adapter, full_score, partial_score,
                                                      read_start, read_end))
                if check_barcodes and adapter.is_barcode() and \
                        adapter.barcode_direction() == forward_or_reverse:
                    self.start_barcode_scores[adapter.get_barcode_name()] = full_score
    def find_end_trim(self, adapters, end_size, extra_trim_size, end_threshold,
                      scoring_scheme_vals, min_trim_size, check_barcodes, forward_or_reverse):
        """
        Aligns one or more adapter sequences and possibly adjusts the read's end trim amount based
        on the result.
        """
        read_seq_end = self.seq[-end_size:]
        for adapter in adapters:
            if not adapter.end_sequence:
                continue
            full_score, partial_score, read_start, read_end = \
                align_adapter(read_seq_end, adapter.end_sequence[1], scoring_scheme_vals)
            # Mirror of find_start_trim: the hit must not start at the very
            # beginning of the window (read_start != 0) or it may extend
            # beyond the examined region.
            if partial_score > end_threshold and read_start != 0 and \
                    read_end - read_start >= min_trim_size:
                trim_amount = (end_size - read_start) + extra_trim_size
                self.end_trim_amount = max(self.end_trim_amount, trim_amount)
                self.end_adapter_alignments.append((adapter, full_score, partial_score,
                                                    read_start, read_end))
                if check_barcodes and adapter.is_barcode() and \
                        adapter.barcode_direction() == forward_or_reverse:
                    self.end_barcode_scores[adapter.get_barcode_name()] = full_score
    def find_middle_adapters(self, adapters, middle_threshold, extra_middle_trim_good_side,
                             extra_middle_trim_bad_side, scoring_scheme_vals,
                             start_sequence_names, end_sequence_names):
        """
        Aligns an adapter sequence to the whole read to find places where the read should be split.

        Found adapter regions are masked out with '-' so repeated alignments
        can locate further occurrences of the same adapter.
        """
        masked_seq = self.get_seq_with_start_end_adapters_trimmed()
        for adapter_name, adapter_seq in adapters:
            # We keep aligning adapters as long we get strong hits, so we can find multiple
            # occurrences in a single read.
            while True:
                full_score, _, read_start, read_end = align_adapter(masked_seq, adapter_seq,
                                                                    scoring_scheme_vals)
                if full_score >= middle_threshold:
                    # Mask the hit so the next iteration finds a different one.
                    masked_seq = masked_seq[:read_start] + '-' * (read_end - read_start) + \
                                 masked_seq[read_end:]
                    self.middle_adapter_positions.update(range(read_start, read_end))
                    self.middle_hit_str += ' ' + adapter_name + ' (read coords: ' + \
                                           str(read_start) + '-' + str(read_end) + ', ' + \
                                           'identity: ' + '%.1f' % full_score + '%)\n'
                    # Trim extra bases around the hit: the "bad side" (the side a
                    # start/end adapter faces) gets a larger margin.
                    trim_start = read_start - extra_middle_trim_good_side
                    if adapter_name in start_sequence_names:
                        trim_start = read_start - extra_middle_trim_bad_side
                    trim_end = read_end + extra_middle_trim_good_side
                    if adapter_name in end_sequence_names:
                        trim_end = read_end + extra_middle_trim_bad_side
                    self.middle_trim_positions.update(range(trim_start, trim_end))
                else:
                    break
    def formatted_start_seq(self, end_size, extra_trim_size):
        """
        Returns the start of the read sequence, with any found adapters highlighted in red.
        """
        start_seq = self.seq[:end_size]
        if not self.start_trim_amount:
            return start_seq
        # Bases removed for the adapter hit itself are red; the extra safety
        # margin also trimmed is yellow.
        red_bases = self.start_trim_amount - extra_trim_size
        formatted_str = ''
        if red_bases:
            formatted_str = red(start_seq[:red_bases])
        formatted_str += yellow(start_seq[red_bases:red_bases+extra_trim_size])
        formatted_str += start_seq[red_bases+extra_trim_size:]
        return formatted_str
    def formatted_end_seq(self, end_size, extra_trim_size):
        """
        Returns the end of the read sequence, with any found adapters highlighted in red.
        """
        end_seq = self.seq[-end_size:]
        if not self.end_trim_amount:
            return end_seq
        # NOTE(review): assumes red_bases > 0 whenever end_trim_amount is set
        # (true for trims produced by find_end_trim, which always add at least
        # one base beyond extra_trim_size); red_bases == 0 would make the
        # negative slices below misbehave -- confirm if other callers set it.
        red_bases = self.end_trim_amount - extra_trim_size
        formatted_str = ''
        if red_bases:
            formatted_str = red(end_seq[-red_bases:])
        formatted_str = yellow(end_seq[-(red_bases+extra_trim_size):-red_bases]) + formatted_str
        formatted_str = end_seq[:-(red_bases+extra_trim_size)] + formatted_str
        return formatted_str
    def formatted_whole_seq(self, extra_trim_size):
        """
        Returns the entire read sequence, with any found adapters highlighted in red.

        Red marks the adapter hit itself, yellow the extra safety margin.
        """
        if not self.start_trim_amount and not self.end_trim_amount:
            return self.seq
        red_start_bases, red_end_bases = 0, 0
        if self.start_trim_amount:
            red_start_bases = self.start_trim_amount - extra_trim_size
        if self.end_trim_amount:
            red_end_bases = self.end_trim_amount - extra_trim_size
        if red_start_bases + red_end_bases >= len(self.seq):
            # The trims cover the whole read: everything is red.
            return red(self.seq)
        formatted_start, formatted_end = '', ''
        if self.start_trim_amount:
            formatted_start = red(self.seq[:red_start_bases])
        if self.end_trim_amount:
            formatted_end = red(self.seq[-red_end_bases:])
        middle = self.seq[red_start_bases:len(self.seq)-red_end_bases]
        if len(middle) <= extra_trim_size * 2:
            # Margins from both ends overlap: the whole middle is yellow.
            middle = yellow(middle)
        else:
            if self.start_trim_amount:
                middle = yellow(middle[:extra_trim_size]) + middle[extra_trim_size:]
            if self.end_trim_amount:
                middle = middle[:-extra_trim_size] + yellow(middle[-extra_trim_size:])
        return formatted_start + middle + formatted_end
    def formatted_start_and_end_seq(self, end_size, extra_trim_size, check_barcodes):
        """One-line summary of the read's ends (and barcodes, if requested),
        with trimmed regions colour-highlighted."""
        read_seq = ''
        if check_barcodes:
            start_name, start_id = self.best_start_barcode
            end_name, end_id = self.best_end_barcode
            read_seq += 'start: ' + start_name + ' (' + '%.1f' % start_id + '%), '
            read_seq += 'end: ' + end_name + ' (' + '%.1f' % end_id + '%), '
            read_seq += 'barcode call: ' + self.barcode_call + '    '
        if len(self.seq) <= 2 * end_size:
            # Short read: the two windows overlap, so show it whole.
            read_seq += self.formatted_whole_seq(extra_trim_size)
        else:
            read_seq += (self.formatted_start_seq(end_size, extra_trim_size) + '...' +
                         self.formatted_end_seq(end_size, extra_trim_size))
        return read_seq
    def full_start_end_output(self, end_size, extra_trim_size, check_barcodes):
        """Multi-line verbose report of the read's ends: coloured sequence,
        every adapter alignment and (optionally) all barcode scores."""
        def get_alignment_string(aln):
            # aln is (adapter, full_score, partial_score, read_start, read_end).
            return aln[0].name + ', full score=' + str(aln[1]) + ', partial score=' + \
                str(aln[2]) + ', read position: ' + str(aln[3]) + '-' + str(aln[4])
        output = self.name + '\n'
        output += ' start: ' + self.formatted_start_seq(end_size, extra_trim_size) + '...\n'
        if self.start_adapter_alignments:
            output += ' start alignments:\n'
            for a in self.start_adapter_alignments:
                output += ' ' + get_alignment_string(a) + '\n'
        output += ' end: ...' + self.formatted_end_seq(end_size, extra_trim_size) + '\n'
        if self.end_adapter_alignments:
            output += ' end alignments:\n'
            for a in self.end_adapter_alignments:
                output += ' ' + get_alignment_string(a) + '\n'
        if check_barcodes:
            start_name, start_id = self.best_start_barcode
            end_name, end_id = self.best_end_barcode
            output += ' Barcodes:\n'
            all_start_barcodes_str = ', '.join([b[0] + ' (' + '%.1f' % b[1] + '%)'
                                                for b in self.start_barcode_scores.items()])
            all_end_barcodes_str = ', '.join([b[0] + ' (' + '%.1f' % b[1] + '%)'
                                              for b in self.end_barcode_scores.items()])
            output += ' start barcodes: ' + all_start_barcodes_str + '\n'
            output += ' end barcodes: ' + all_end_barcodes_str + '\n'
            output += ' best start barcode: ' + start_name + ' (' + '%.1f' % start_id + '%)\n'
            output += ' best end barcode: ' + end_name + ' (' + '%.1f' % end_id + '%)\n'
            if self.albacore_barcode_call is not None:
                output += ' albacore barcode call: ' + self.albacore_barcode_call + '\n'
            output += ' final barcode call: ' + self.barcode_call + '\n'
        return output
    def formatted_middle_seq(self):
        """
        If a middle adapter was found, this returns the relevant part of the read sequence, with
        the adapter highlighted in red.
        """
        if not self.middle_adapter_positions:
            return
        trimmed_seq = self.get_seq_with_start_end_adapters_trimmed()
        # Show up to 100 bp of context on either side of the trimmed region.
        range_start = max(0, min(self.middle_trim_positions) - 100)
        range_end = min(len(trimmed_seq),
                        max(self.middle_trim_positions) + 100)
        formatted_str = '' if range_start == 0 else '(' + str(range_start) + ' bp)...'
        last_colour = None
        for i in range(range_start, range_end):
            # Red marks adapter bases; yellow marks the extra bases trimmed
            # around them (red takes precedence when both apply).
            char_colour = None
            if i in self.middle_trim_positions:
                char_colour = 'yellow'
            if i in self.middle_adapter_positions:
                char_colour = 'red'
            # Only emit ANSI escape codes when the colour actually changes.
            if char_colour != last_colour:
                formatted_str += END_FORMATTING
                if char_colour == 'yellow':
                    formatted_str += YELLOW
                if char_colour == 'red':
                    formatted_str += RED
            formatted_str += trimmed_seq[i]
            last_colour = char_colour
        if last_colour is not None:
            formatted_str += END_FORMATTING
        formatted_str += '' if range_end == len(trimmed_seq) \
            else '...(' + str(len(trimmed_seq) - range_end) + ' bp)'
        return formatted_str
    def middle_adapter_results(self, verbosity):
        """Return a printable summary of middle-adapter hits ('' if none found)."""
        if not self.middle_adapter_positions:
            return ''
        results = self.name + '\n' + self.middle_hit_str
        if verbosity > 1:
            # High verbosity also includes the colour-highlighted sequence context.
            results += self.formatted_middle_seq() + '\n'
        return results
    def determine_barcode(self, barcode_threshold, barcode_diff, require_two_barcodes):
        """
        This function works through the logic of choosing a barcode for the read based on the
        settings and the read's barcode alignments. It stores its result in self.barcode_call.
        """
        # Rank the recorded (name, percent identity) pairs, best first.
        start_barcode_scores = sorted(self.start_barcode_scores.items(), reverse=True,
                                      key=lambda x: x[1])
        end_barcode_scores = sorted(self.end_barcode_scores.items(), reverse=True,
                                    key=lambda x: x[1])
        if len(start_barcode_scores) >= 1:
            self.best_start_barcode = start_barcode_scores[0]
        if len(start_barcode_scores) >= 2:
            self.second_best_start_barcode = start_barcode_scores[1]
        if len(end_barcode_scores) >= 1:
            self.best_end_barcode = end_barcode_scores[0]
        if len(end_barcode_scores) >= 2:
            self.second_best_end_barcode = end_barcode_scores[1]
        # The asserts below are deliberate control flow: the first criterion
        # that fails jumps to the except handler, which calls the read 'none'.
        try:
            # If the user set --require_two_barcodes, then the criteria are much more stringent.
            # Both the start and end barcodes need to be over the threshold, they both need to be
            # sufficiently better than their second-best barcode hit, and they need to match.
            if require_two_barcodes:
                start_over_threshold = (self.best_start_barcode[1] >= barcode_threshold)
                end_over_threshold = (self.best_end_barcode[1] >= barcode_threshold)
                start_good_diff = (self.best_start_barcode[1] >=
                                   self.second_best_start_barcode[1] + barcode_diff)
                end_good_diff = (self.best_end_barcode[1] >=
                                 self.second_best_end_barcode[1] + barcode_diff)
                start_end_match = (self.best_start_barcode[0] == self.best_end_barcode[0])
                assert (start_over_threshold and end_over_threshold and
                        start_good_diff and end_good_diff and start_end_match)
                self.barcode_call = self.best_start_barcode[0]
            # If the user didn't set --require_two_barcodes, then the criteria aren't so strict.
            # The start/end barcodes are analysed all together.
            else:
                # Combine the start and end barcodes into a single list (i.e. we no longer care
                # whether the hit was at the start or end of the read), only keeping the best score
                # for each barcode.
                all_barcode_scores = []
                included_barcodes = set()
                for name, score in sorted(start_barcode_scores + end_barcode_scores, reverse=True,
                                          key=lambda x: x[1]):
                    if name not in included_barcodes:
                        all_barcode_scores.append((name, score))
                        included_barcodes.add(name)
                if len(all_barcode_scores) >= 1:
                    best_overall_barcode = all_barcode_scores[0]
                else:
                    best_overall_barcode = ('none', 0.0)
                if len(all_barcode_scores) >= 2:
                    second_best_overall_barcode = all_barcode_scores[1]
                else:
                    second_best_overall_barcode = ('none', 0.0)
                over_threshold = (best_overall_barcode[1] >= barcode_threshold)
                good_diff = (best_overall_barcode[1] >=
                             second_best_overall_barcode[1] + barcode_diff)
                assert over_threshold
                assert good_diff
                self.barcode_call = best_overall_barcode[0]
        except AssertionError:
            self.barcode_call = 'none'
        # If the read has been binned by Albacore, then Porechop and Albacore must agree on the
        # barcode. If they don't, the read is unclassified.
        if self.albacore_barcode_call is not None and \
                self.barcode_call != self.albacore_barcode_call:
            self.barcode_call = 'none'
def align_adapter(read_seq, adapter_seq, scoring_scheme_vals):
    """Align an adapter against a read region via the C++ aligner.

    Returns (full adapter identity %, aligned-region identity %, read start,
    read end), where read_end is an exclusive index.
    """
    alignment_result = adapter_alignment(read_seq, adapter_seq, scoring_scheme_vals)
    # The aligner returns a comma-separated result string.
    result_parts = alignment_result.split(',')
    read_start = int(result_parts[0])
    # If the read start is -1, that indicates that the alignment failed completely.
    if read_start == -1:
        read_end = 0
        aligned_region_percent_identity = 0.0
        full_adapter_percent_identity = 0.0
    else:
        read_end = int(result_parts[1]) + 1
        aligned_region_percent_identity = float(result_parts[5])
        full_adapter_percent_identity = float(result_parts[6])
    return full_adapter_percent_identity, aligned_region_percent_identity, read_start, read_end
def add_number_to_read_name(read_name, number):
    """Append a part number to a read name, before the first space if any."""
    suffix = '_' + str(number)
    if ' ' in read_name:
        # Insert the suffix into the ID token, keeping the description intact.
        return read_name.replace(' ', suffix + ' ', 1)
    return read_name + suffix
| #!/usr/bin/env python3
"""
Copyright 2017 <NAME> (<EMAIL>)
https://github.com/rrwick/Porechop
This module contains the class for a Nanopore read.
This file is part of Porechop. Porechop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version. Porechop is distributed in
the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along with Porechop. If
not, see <http://www.gnu.org/licenses/>.
"""
from .cpp_function_wrappers import adapter_alignment
from .misc import yellow, red, add_line_breaks_to_sequence, END_FORMATTING, RED, YELLOW
class NanoporeRead(object):
    """A single Nanopore read, with state for adapter trimming, splitting at
    middle adapters, and barcode demultiplexing."""

    def __init__(self, name, seq, quals):
        self.name = name
        self.seq = seq.upper()
        # Heuristic RNA detection: more U than T means an RNA read.  The
        # sequence is normalised to DNA (U -> T) for alignment and converted
        # back to U on output.
        if self.seq.count('U') > self.seq.count('T'):
            self.rna = True
            self.seq = self.seq.replace('U', 'T')
        else:
            self.rna = False
        self.quals = quals
        # Pad a short quality string with '+' so len(quals) == len(seq).
        if len(quals) < len(seq):
            self.quals += '+' * (len(seq) - len(quals))
        # Number of bases to trim off each end (set by find_start/end_trim).
        self.start_trim_amount = 0
        self.end_trim_amount = 0
        # (adapter, full score, partial score, read start, read end) tuples.
        self.start_adapter_alignments = []
        self.end_adapter_alignments = []
        # Positions (in start/end-trimmed coordinates) of middle adapters and
        # of the bases to remove around them; plus a printable hit summary.
        self.middle_adapter_positions = set()
        self.middle_trim_positions = set()
        self.middle_hit_str = ''
        # Barcode state: name -> best percent identity seen at each end.
        self.start_barcode_scores = {}
        self.end_barcode_scores = {}
        self.best_start_barcode = ('none', 0.0)
        self.best_end_barcode = ('none', 0.0)
        self.second_best_start_barcode = ('none', 0.0)
        self.second_best_end_barcode = ('none', 0.0)
        self.barcode_call = 'none'
        self.albacore_barcode_call = None
def get_seq_with_start_end_adapters_trimmed(self):
if not self.start_trim_amount and not self.end_trim_amount:
return self.seq
start_pos = self.start_trim_amount
end_pos = len(self.seq) - self.end_trim_amount
trimmed_seq = self.seq[start_pos:end_pos]
return trimmed_seq
def seq_length_with_start_end_adapters_trimmed(self):
return len(self.get_seq_with_start_end_adapters_trimmed())
def get_quals_with_start_end_adapters_trimmed(self):
if not self.start_trim_amount and not self.end_trim_amount:
return self.quals
start_pos = self.start_trim_amount
end_pos = len(self.quals) - self.end_trim_amount
trimmed_quals = self.quals[start_pos:end_pos]
return trimmed_quals
def get_split_read_parts(self, min_split_read_size):
"""
Returns the read split into parts as determined by the middle_trim_positions set.
"""
trimmed_seq = self.get_seq_with_start_end_adapters_trimmed()
trimmed_quals = self.get_quals_with_start_end_adapters_trimmed()
split_read_parts = []
part_seq, part_quals = [], []
for i in range(len(trimmed_seq)):
if i in self.middle_trim_positions:
if part_seq:
split_read_parts.append((''.join(part_seq), ''.join(part_quals)))
part_seq, part_quals = [], []
else:
part_seq.append(trimmed_seq[i])
part_quals.append(trimmed_quals[i])
if part_seq:
split_read_parts.append((''.join(part_seq), ''.join(part_quals)))
split_read_parts = [x for x in split_read_parts if len(x[0]) >= min_split_read_size]
return split_read_parts
    def get_fasta(self, min_split_read_size, discard_middle, untrimmed=False):
        """Return the read in FASTA format (split at middle adapters if any).

        Returns an empty string when the read is discarded or empty.
        """
        if not self.middle_trim_positions:
            if untrimmed:
                seq = self.seq
            else:
                seq = self.get_seq_with_start_end_adapters_trimmed()
            if not seq:  # Don't return empty sequences
                return ''
            if self.rna:
                seq = seq.replace('T', 'U')  # restore RNA bases for output
            return ''.join(['>', self.name, '\n', add_line_breaks_to_sequence(seq, 70)])
        elif discard_middle:
            return ''
        else:
            fasta_str = ''
            for i, split_read_part in enumerate(self.get_split_read_parts(min_split_read_size)):
                read_name = add_number_to_read_name(self.name, i + 1)
                # NOTE(review): this returns '' for the whole read if any one
                # part is empty, discarding all other parts -- looks like it
                # may have been meant as 'continue'; confirm before changing.
                if not split_read_part[0]:  # Don't return empty sequences
                    return ''
                seq = add_line_breaks_to_sequence(split_read_part[0], 70)
                if self.rna:
                    seq = seq.replace('T', 'U')
                fasta_str += ''.join(['>', read_name, '\n', seq])
            return fasta_str
    def get_fastq(self, min_split_read_size, discard_middle, untrimmed=False):
        """Return the read in FASTQ format (split at middle adapters if any).

        Returns an empty string when the read is discarded or empty.
        """
        if not self.middle_trim_positions:
            if untrimmed:
                seq = self.seq
                quals = self.quals
            else:
                seq = self.get_seq_with_start_end_adapters_trimmed()
                quals = self.get_quals_with_start_end_adapters_trimmed()
            if not seq:  # Don't return empty sequences
                return ''
            if self.rna:
                seq = seq.replace('T', 'U')  # restore RNA bases for output
            return ''.join(['@', self.name, '\n', seq, '\n+\n', quals, '\n'])
        elif discard_middle:
            return ''
        else:
            fastq_str = ''
            for i, split_read_part in enumerate(self.get_split_read_parts(min_split_read_size)):
                read_name = add_number_to_read_name(self.name, i + 1)
                seq, qual = split_read_part[0], split_read_part[1]
                # NOTE(review): returning '' here discards all parts when one
                # part is empty -- possibly meant to be 'continue'; confirm.
                if not seq:  # Don't return empty sequences
                    return ''
                if self.rna:
                    seq = seq.replace('T', 'U')
                fastq_str += ''.join(['@', read_name, '\n', seq, '\n+\n', qual, '\n'])
            return fastq_str
    def align_adapter_set(self, adapter_set, end_size, scoring_scheme_vals):
        """
        This function aligns the adapter to the reads and updates the best score for the adapter.
        This is not to determine where to trim the reads, but rather to figure out which adapter
        sets are present in the data.
        """
        if adapter_set.start_sequence:
            read_seq_start = self.seq[:end_size]
            score, _, _, _ = align_adapter(read_seq_start, adapter_set.start_sequence[1],
                                           scoring_scheme_vals)
            # Keep the best score seen over all reads for this adapter set.
            adapter_set.best_start_score = max(adapter_set.best_start_score, score)
        if adapter_set.end_sequence:
            read_seq_end = self.seq[-end_size:]
            score, _, _, _ = align_adapter(read_seq_end, adapter_set.end_sequence[1],
                                           scoring_scheme_vals)
            adapter_set.best_end_score = max(adapter_set.best_end_score, score)
    def find_start_trim(self, adapters, end_size, extra_trim_size, end_threshold,
                        scoring_scheme_vals, min_trim_size, check_barcodes, forward_or_reverse):
        """
        Aligns one or more adapter sequences and possibly adjusts the read's start trim amount based
        on the result.
        """
        read_seq_start = self.seq[:end_size]
        for adapter in adapters:
            if not adapter.start_sequence:
                continue
            full_score, partial_score, read_start, read_end = \
                align_adapter(read_seq_start, adapter.start_sequence[1], scoring_scheme_vals)
            # Accept the hit only if it is strong enough, does not run all the
            # way to the end of the window, and is not too short.
            if partial_score > end_threshold and read_end != end_size and \
                    read_end - read_start >= min_trim_size:
                trim_amount = read_end + extra_trim_size
                self.start_trim_amount = max(self.start_trim_amount, trim_amount)
                self.start_adapter_alignments.append((adapter, full_score, partial_score,
                                                      read_start, read_end))
            # Barcode identities are recorded for every barcode adapter of the
            # matching direction (used later by determine_barcode).
            if check_barcodes and adapter.is_barcode() and \
                    adapter.barcode_direction() == forward_or_reverse:
                self.start_barcode_scores[adapter.get_barcode_name()] = full_score
    def find_end_trim(self, adapters, end_size, extra_trim_size, end_threshold,
                      scoring_scheme_vals, min_trim_size, check_barcodes, forward_or_reverse):
        """
        Aligns one or more adapter sequences and possibly adjusts the read's end trim amount based
        on the result.
        """
        read_seq_end = self.seq[-end_size:]
        for adapter in adapters:
            if not adapter.end_sequence:
                continue
            full_score, partial_score, read_start, read_end = \
                align_adapter(read_seq_end, adapter.end_sequence[1], scoring_scheme_vals)
            # Accept the hit only if it is strong enough, does not start at the
            # very beginning of the window, and is not too short.
            if partial_score > end_threshold and read_start != 0 and \
                    read_end - read_start >= min_trim_size:
                trim_amount = (end_size - read_start) + extra_trim_size
                self.end_trim_amount = max(self.end_trim_amount, trim_amount)
                self.end_adapter_alignments.append((adapter, full_score, partial_score,
                                                    read_start, read_end))
            # Barcode identities are recorded for every barcode adapter of the
            # matching direction (used later by determine_barcode).
            if check_barcodes and adapter.is_barcode() and \
                    adapter.barcode_direction() == forward_or_reverse:
                self.end_barcode_scores[adapter.get_barcode_name()] = full_score
    def find_middle_adapters(self, adapters, middle_threshold, extra_middle_trim_good_side,
                             extra_middle_trim_bad_side, scoring_scheme_vals,
                             start_sequence_names, end_sequence_names):
        """
        Aligns an adapter sequence to the whole read to find places where the read should be split.
        """
        masked_seq = self.get_seq_with_start_end_adapters_trimmed()
        for adapter_name, adapter_seq in adapters:
            # We keep aligning adapters as long we get strong hits, so we can find multiple
            # occurrences in a single read.
            while True:
                full_score, _, read_start, read_end = align_adapter(masked_seq, adapter_seq,
                                                                    scoring_scheme_vals)
                if full_score >= middle_threshold:
                    # Mask the hit with '-' so the next iteration finds a
                    # different occurrence of the same adapter.
                    masked_seq = masked_seq[:read_start] + '-' * (read_end - read_start) + \
                        masked_seq[read_end:]
                    self.middle_adapter_positions.update(range(read_start, read_end))
                    self.middle_hit_str += ' ' + adapter_name + ' (read coords: ' + \
                        str(read_start) + '-' + str(read_end) + ', ' + \
                        'identity: ' + '%.1f' % full_score + '%)\n'
                    # The 'bad side' of a start/end adapter gets the larger
                    # trim margin -- presumably the side where the rest of the
                    # chimeric molecule was; TODO confirm orientation.
                    trim_start = read_start - extra_middle_trim_good_side
                    if adapter_name in start_sequence_names:
                        trim_start = read_start - extra_middle_trim_bad_side
                    trim_end = read_end + extra_middle_trim_good_side
                    if adapter_name in end_sequence_names:
                        trim_end = read_end + extra_middle_trim_bad_side
                    self.middle_trim_positions.update(range(trim_start, trim_end))
                else:
                    break
    def formatted_start_seq(self, end_size, extra_trim_size):
        """
        Returns the start of the read sequence, with any found adapters highlighted in red.
        """
        start_seq = self.seq[:end_size]
        if not self.start_trim_amount:
            return start_seq
        # red = the adapter itself, yellow = the extra trimmed margin.
        red_bases = self.start_trim_amount - extra_trim_size
        formatted_str = ''
        if red_bases:
            formatted_str = red(start_seq[:red_bases])
        formatted_str += yellow(start_seq[red_bases:red_bases+extra_trim_size])
        formatted_str += start_seq[red_bases+extra_trim_size:]
        return formatted_str
def formatted_end_seq(self, end_size, extra_trim_size):
"""
Returns the end of the read sequence, with any found adapters highlighted in red.
"""
end_seq = self.seq[-end_size:]
if not self.end_trim_amount:
return end_seq
red_bases = self.end_trim_amount - extra_trim_size
formatted_str = ''
if red_bases:
formatted_str = red(end_seq[-red_bases:])
formatted_str = yellow(end_seq[-(red_bases+extra_trim_size):-red_bases]) + formatted_str
formatted_str = end_seq[:-(red_bases+extra_trim_size)] + formatted_str
return formatted_str
    def formatted_whole_seq(self, extra_trim_size):
        """
        Returns the entire read sequence, with any found adapters highlighted in red.
        """
        if not self.start_trim_amount and not self.end_trim_amount:
            return self.seq
        red_start_bases, red_end_bases = 0, 0
        if self.start_trim_amount:
            red_start_bases = self.start_trim_amount - extra_trim_size
        if self.end_trim_amount:
            red_end_bases = self.end_trim_amount - extra_trim_size
        # If the adapter (red) regions cover the whole read, colour it all red.
        if red_start_bases + red_end_bases >= len(self.seq):
            return red(self.seq)
        formatted_start, formatted_end = '', ''
        if self.start_trim_amount:
            formatted_start = red(self.seq[:red_start_bases])
        if self.end_trim_amount:
            formatted_end = red(self.seq[-red_end_bases:])
        middle = self.seq[red_start_bases:len(self.seq)-red_end_bases]
        # The extra-trim margins adjacent to each adapter are shown in yellow.
        if len(middle) <= extra_trim_size * 2:
            middle = yellow(middle)
        else:
            if self.start_trim_amount:
                middle = yellow(middle[:extra_trim_size]) + middle[extra_trim_size:]
            if self.end_trim_amount:
                middle = middle[:-extra_trim_size] + yellow(middle[-extra_trim_size:])
        return formatted_start + middle + formatted_end
    def formatted_start_and_end_seq(self, end_size, extra_trim_size, check_barcodes):
        """Return a one-line coloured summary of the read's start/end regions,
        optionally prefixed with its barcode calls."""
        read_seq = ''
        if check_barcodes:
            start_name, start_id = self.best_start_barcode
            end_name, end_id = self.best_end_barcode
            read_seq += 'start: ' + start_name + ' (' + '%.1f' % start_id + '%), '
            read_seq += 'end: ' + end_name + ' (' + '%.1f' % end_id + '%), '
            read_seq += 'barcode call: ' + self.barcode_call + ' '
        # Short reads are shown whole; long reads show only the two ends.
        if len(self.seq) <= 2 * end_size:
            read_seq += self.formatted_whole_seq(extra_trim_size)
        else:
            read_seq += (self.formatted_start_seq(end_size, extra_trim_size) + '...' +
                         self.formatted_end_seq(end_size, extra_trim_size))
        return read_seq
    def full_start_end_output(self, end_size, extra_trim_size, check_barcodes):
        """Return a verbose multi-line report of this read's start/end adapter
        alignments and (optionally) its barcode identities."""
        def get_alignment_string(aln):
            # aln is (adapter, full score, partial score, read start, read end).
            return aln[0].name + ', full score=' + str(aln[1]) + ', partial score=' + \
                str(aln[2]) + ', read position: ' + str(aln[3]) + '-' + str(aln[4])
        output = self.name + '\n'
        output += ' start: ' + self.formatted_start_seq(end_size, extra_trim_size) + '...\n'
        if self.start_adapter_alignments:
            output += ' start alignments:\n'
            for a in self.start_adapter_alignments:
                output += ' ' + get_alignment_string(a) + '\n'
        output += ' end: ...' + self.formatted_end_seq(end_size, extra_trim_size) + '\n'
        if self.end_adapter_alignments:
            output += ' end alignments:\n'
            for a in self.end_adapter_alignments:
                output += ' ' + get_alignment_string(a) + '\n'
        if check_barcodes:
            start_name, start_id = self.best_start_barcode
            end_name, end_id = self.best_end_barcode
            output += ' Barcodes:\n'
            # Each barcode is reported as "name (percent identity)".
            all_start_barcodes_str = ', '.join([b[0] + ' (' + '%.1f' % b[1] + '%)'
                                                for b in self.start_barcode_scores.items()])
            all_end_barcodes_str = ', '.join([b[0] + ' (' + '%.1f' % b[1] + '%)'
                                              for b in self.end_barcode_scores.items()])
            output += ' start barcodes: ' + all_start_barcodes_str + '\n'
            output += ' end barcodes: ' + all_end_barcodes_str + '\n'
            output += ' best start barcode: ' + start_name + ' (' + '%.1f' % start_id + '%)\n'
            output += ' best end barcode: ' + end_name + ' (' + '%.1f' % end_id + '%)\n'
            if self.albacore_barcode_call is not None:
                output += ' albacore barcode call: ' + self.albacore_barcode_call + '\n'
            output += ' final barcode call: ' + self.barcode_call + '\n'
        return output
    def formatted_middle_seq(self):
        """
        If a middle adapter was found, this returns the relevant part of the read sequence, with
        the adapter highlighted in red.
        """
        if not self.middle_adapter_positions:
            return
        trimmed_seq = self.get_seq_with_start_end_adapters_trimmed()
        # Show up to 100 bp of context on either side of the trimmed region.
        range_start = max(0, min(self.middle_trim_positions) - 100)
        range_end = min(len(trimmed_seq),
                        max(self.middle_trim_positions) + 100)
        formatted_str = '' if range_start == 0 else '(' + str(range_start) + ' bp)...'
        last_colour = None
        for i in range(range_start, range_end):
            # Red marks adapter bases; yellow marks the extra bases trimmed
            # around them (red takes precedence when both apply).
            char_colour = None
            if i in self.middle_trim_positions:
                char_colour = 'yellow'
            if i in self.middle_adapter_positions:
                char_colour = 'red'
            # Only emit ANSI escape codes when the colour actually changes.
            if char_colour != last_colour:
                formatted_str += END_FORMATTING
                if char_colour == 'yellow':
                    formatted_str += YELLOW
                if char_colour == 'red':
                    formatted_str += RED
            formatted_str += trimmed_seq[i]
            last_colour = char_colour
        if last_colour is not None:
            formatted_str += END_FORMATTING
        formatted_str += '' if range_end == len(trimmed_seq) \
            else '...(' + str(len(trimmed_seq) - range_end) + ' bp)'
        return formatted_str
def middle_adapter_results(self, verbosity):
if not self.middle_adapter_positions:
return ''
results = self.name + '\n' + self.middle_hit_str
if verbosity > 1:
results += self.formatted_middle_seq() + '\n'
return results
    def determine_barcode(self, barcode_threshold, barcode_diff, require_two_barcodes):
        """
        This function works through the logic of choosing a barcode for the read based on the
        settings and the read's barcode alignments. It stores its result in self.barcode_call.
        """
        # Rank the recorded (name, percent identity) pairs, best first.
        start_barcode_scores = sorted(self.start_barcode_scores.items(), reverse=True,
                                      key=lambda x: x[1])
        end_barcode_scores = sorted(self.end_barcode_scores.items(), reverse=True,
                                    key=lambda x: x[1])
        if len(start_barcode_scores) >= 1:
            self.best_start_barcode = start_barcode_scores[0]
        if len(start_barcode_scores) >= 2:
            self.second_best_start_barcode = start_barcode_scores[1]
        if len(end_barcode_scores) >= 1:
            self.best_end_barcode = end_barcode_scores[0]
        if len(end_barcode_scores) >= 2:
            self.second_best_end_barcode = end_barcode_scores[1]
        # The asserts below are deliberate control flow: the first criterion
        # that fails jumps to the except handler, which calls the read 'none'.
        try:
            # If the user set --require_two_barcodes, then the criteria are much more stringent.
            # Both the start and end barcodes need to be over the threshold, they both need to be
            # sufficiently better than their second-best barcode hit, and they need to match.
            if require_two_barcodes:
                start_over_threshold = (self.best_start_barcode[1] >= barcode_threshold)
                end_over_threshold = (self.best_end_barcode[1] >= barcode_threshold)
                start_good_diff = (self.best_start_barcode[1] >=
                                   self.second_best_start_barcode[1] + barcode_diff)
                end_good_diff = (self.best_end_barcode[1] >=
                                 self.second_best_end_barcode[1] + barcode_diff)
                start_end_match = (self.best_start_barcode[0] == self.best_end_barcode[0])
                assert (start_over_threshold and end_over_threshold and
                        start_good_diff and end_good_diff and start_end_match)
                self.barcode_call = self.best_start_barcode[0]
            # If the user didn't set --require_two_barcodes, then the criteria aren't so strict.
            # The start/end barcodes are analysed all together.
            else:
                # Combine the start and end barcodes into a single list (i.e. we no longer care
                # whether the hit was at the start or end of the read), only keeping the best score
                # for each barcode.
                all_barcode_scores = []
                included_barcodes = set()
                for name, score in sorted(start_barcode_scores + end_barcode_scores, reverse=True,
                                          key=lambda x: x[1]):
                    if name not in included_barcodes:
                        all_barcode_scores.append((name, score))
                        included_barcodes.add(name)
                if len(all_barcode_scores) >= 1:
                    best_overall_barcode = all_barcode_scores[0]
                else:
                    best_overall_barcode = ('none', 0.0)
                if len(all_barcode_scores) >= 2:
                    second_best_overall_barcode = all_barcode_scores[1]
                else:
                    second_best_overall_barcode = ('none', 0.0)
                over_threshold = (best_overall_barcode[1] >= barcode_threshold)
                good_diff = (best_overall_barcode[1] >=
                             second_best_overall_barcode[1] + barcode_diff)
                assert over_threshold
                assert good_diff
                self.barcode_call = best_overall_barcode[0]
        except AssertionError:
            self.barcode_call = 'none'
        # If the read has been binned by Albacore, then Porechop and Albacore must agree on the
        # barcode. If they don't, the read is unclassified.
        if self.albacore_barcode_call is not None and \
                self.barcode_call != self.albacore_barcode_call:
            self.barcode_call = 'none'
def align_adapter(read_seq, adapter_seq, scoring_scheme_vals):
    """Align an adapter against a read region via the C++ aligner.

    Returns (full adapter identity %, aligned-region identity %, read start,
    read end), where read_end is an exclusive index.  A failed alignment is
    reported by the aligner as a read start of -1 and yields zero identities.
    """
    fields = adapter_alignment(read_seq, adapter_seq, scoring_scheme_vals).split(',')
    start = int(fields[0])
    if start == -1:  # complete alignment failure
        return 0.0, 0.0, start, 0
    end = int(fields[1]) + 1  # make the end coordinate exclusive
    return float(fields[6]), float(fields[5]), start, end
def add_number_to_read_name(read_name, number):
    """Append a part number to a read name, before the first space if any."""
    suffix = '_' + str(number)
    if ' ' in read_name:
        # Insert the suffix into the ID token, keeping the description intact.
        return read_name.replace(' ', suffix + ' ', 1)
    return read_name + suffix
| en | 0.905585 | #!/usr/bin/env python3 Copyright 2017 <NAME> (<EMAIL>) https://github.com/rrwick/Porechop This module contains the class for a Nanopore read. This file is part of Porechop. Porechop is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Porechop is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Porechop. If not, see <http://www.gnu.org/licenses/>. Returns the read split into parts as determined by the middle_trim_positions set. # Don't return empty sequences # Don't return empty sequences # Don't return empty sequences # Don't return empty sequences This function aligns the adapter to the reads and updates the best score for the adapter. This is not to determine where to trim the reads, but rather to figure out which adapter sets are present in the data. Aligns one or more adapter sequences and possibly adjusts the read's start trim amount based on the result. Aligns one or more adapter sequences and possibly adjusts the read's end trim amount based on the result. Aligns an adapter sequence to the whole read to find places where the read should be split. # We keep aligning adapters as long we get strong hits, so we can find multiple # occurrences in a single read. Returns the start of the read sequence, with any found adapters highlighted in red. Returns the end of the read sequence, with any found adapters highlighted in red. Returns the entire read sequence, with any found adapters highlighted in red. If a middle adapter was found, this returns the relevant part of the read sequence, with the adapter highlighted in red. 
This function works through the logic of choosing a barcode for the read based on the settings and the read's barcode alignments. It stores its result in self.barcode_call. # If the user set --require_two_barcodes, then the criteria are much more stringent. # Both the start and end barcodes need to be over the threshold, they both need to be # sufficiently better than their second-best barcode hit, and they need to match. # If the user didn't set --require_two_barcodes, then the criteria aren't so strict. # The start/end barcodes are analysed all together. # Combine the start and end barcodes into a single list (i.e. we no longer care # whether the hit was at the start or end of the read), only keeping the best score # for each barcode. # If the read has been binned by Albacore, then Porechop and Albacore must agree on the # barcode. If they don't, the read is unclassified. # If the read start is -1, that indicates that the alignment failed completely. | 2.859536 | 3 |
resolwe_bio/processes/import_data/methylation_array_idat.py | romunov/resolwe-bio | 12 | 6624527 | """Upload methylation array data (IDAT)."""
from resolwe.process import FileField, Process, SchedulingClass, StringField
def validate_filename_suffix(filename, suffix, resolwe_process=Process):
    """Report a process error if ``filename`` does not end with ``suffix``.

    Uses an explicit condition instead of ``assert`` so the check is not
    stripped when Python runs with the ``-O`` flag.
    """
    if not filename.endswith(suffix):
        resolwe_process.error(
            f"Unsupported file name extension. A file (unknown) "
            f"should end with {suffix}."
        )
class UploadIdatData(Process):
    """Upload Illumina methylation array raw IDAT data.

    This import process accepts Illumina methylation array BeadChip raw
    files in IDAT format. Two input files, one for each of the Green and
    Red signal channels, are expected. The uploads of human (HM27, HM450,
    EPIC) and mouse (MM285) array types are supported.
    """

    slug = "upload-idat"
    name = "IDAT file"
    process_type = "data:methylationarray:idat"
    version = "1.0.0"
    category = "Import"
    scheduling_class = SchedulingClass.BATCH
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/common:2.7.0"}
        },
        "resources": {"cores": 1, "memory": 2048},
    }
    entity = {
        "type": "sample",
        "descriptor_schema": "sample",
    }
    # The data object is named after the uploaded red-channel file.
    data_name = "{{ red_channel.file|default('?') }}"

    class Input:
        """Input fields to process UploadIdatData."""

        red_channel = FileField(label="Red channel IDAT file (*_Red.idat)")
        green_channel = FileField(label="Green channel IDAT file (*_Grn.idat)")
        species = StringField(
            label="Species",
            description="Select a species name from the dropdown menu.",
            default="Homo sapiens",
            choices=[
                ("Homo sapiens", "Homo sapiens"),
                ("Mus musculus", "Mus musculus"),
            ],
        )
        platform = StringField(
            # The label previously read "Protein ID database source", a
            # copy/paste leftover from a proteomics process.
            label="Platform",
            description="Select a methylation array platform for human "
            "(HM450, HM27, EPIC) or mouse (MM285) samples.",
            default="HM450",
            choices=[
                ("HM450", "HM450"),
                ("HM27", "HM27"),
                ("EPIC", "EPIC"),
                ("MM285", "MM285"),
            ],
        )

    class Output:
        """Output fields of the process UploadIdatData."""

        red_channel = FileField(label="Red channel IDAT file")
        green_channel = FileField(label="Green channel IDAT file")
        species = StringField(label="Species")
        platform = StringField(label="Platform")

    def run(self, inputs, outputs):
        """Import the IDAT file pair, validate it and populate the outputs."""
        # MM285 is the only mouse platform, so species and platform must agree
        # in both directions (the original check missed human + MM285).
        if (inputs.species == "Mus musculus") != (inputs.platform == "MM285"):
            self.error(
                f"Platform type {inputs.platform} does not match the selected species {inputs.species}."
            )

        red = inputs.red_channel.import_file(imported_format="compressed")
        grn = inputs.green_channel.import_file(imported_format="compressed")

        # Route suffix errors through this process instance so the message is
        # attached to the data object being created.
        validate_filename_suffix(red, "_Red.idat.gz", resolwe_process=self)
        validate_filename_suffix(grn, "_Grn.idat.gz", resolwe_process=self)

        # Drop the 12-character "_Red.idat.gz" / "_Grn.idat.gz" suffix and
        # compare the remaining sample-name prefixes of the two files.
        sample_name_red = red[:-12]
        sample_name_grn = grn[:-12]
        if sample_name_red != sample_name_grn:
            self.error(
                "The input IDAT files don't have a matching filename prefix. "
                "The sample data might be mismatched."
            )

        outputs.red_channel = red
        outputs.green_channel = grn
        outputs.species = inputs.species
        outputs.platform = inputs.platform
| """Upload methylation array data (IDAT)."""
from resolwe.process import FileField, Process, SchedulingClass, StringField
def validate_filename_suffix(filename, suffix, resolwe_process=Process):
    """Report a process error if ``filename`` does not end with ``suffix``.

    Uses an explicit condition instead of ``assert`` so the check is not
    stripped when Python runs with the ``-O`` flag.
    """
    if not filename.endswith(suffix):
        resolwe_process.error(
            f"Unsupported file name extension. A file (unknown) "
            f"should end with {suffix}."
        )
class UploadIdatData(Process):
    """Upload Illumina methylation array raw IDAT data.

    This import process accepts Illumina methylation array BeadChip raw
    files in IDAT format. Two input files, one for each of the Green and
    Red signal channels, are expected. The uploads of human (HM27, HM450,
    EPIC) and mouse (MM285) array types are supported.
    """

    slug = "upload-idat"
    name = "IDAT file"
    process_type = "data:methylationarray:idat"
    version = "1.0.0"
    category = "Import"
    scheduling_class = SchedulingClass.BATCH
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/common:2.7.0"}
        },
        "resources": {"cores": 1, "memory": 2048},
    }
    entity = {
        "type": "sample",
        "descriptor_schema": "sample",
    }
    # The data object is named after the uploaded red-channel file.
    data_name = "{{ red_channel.file|default('?') }}"

    class Input:
        """Input fields to process UploadIdatData."""

        red_channel = FileField(label="Red channel IDAT file (*_Red.idat)")
        green_channel = FileField(label="Green channel IDAT file (*_Grn.idat)")
        species = StringField(
            label="Species",
            description="Select a species name from the dropdown menu.",
            default="Homo sapiens",
            choices=[
                ("Homo sapiens", "Homo sapiens"),
                ("Mus musculus", "Mus musculus"),
            ],
        )
        platform = StringField(
            # The label previously read "Protein ID database source", a
            # copy/paste leftover from a proteomics process.
            label="Platform",
            description="Select a methylation array platform for human "
            "(HM450, HM27, EPIC) or mouse (MM285) samples.",
            default="HM450",
            choices=[
                ("HM450", "HM450"),
                ("HM27", "HM27"),
                ("EPIC", "EPIC"),
                ("MM285", "MM285"),
            ],
        )

    class Output:
        """Output fields of the process UploadIdatData."""

        red_channel = FileField(label="Red channel IDAT file")
        green_channel = FileField(label="Green channel IDAT file")
        species = StringField(label="Species")
        platform = StringField(label="Platform")

    def run(self, inputs, outputs):
        """Import the IDAT file pair, validate it and populate the outputs."""
        # MM285 is the only mouse platform, so species and platform must agree
        # in both directions (the original check missed human + MM285).
        if (inputs.species == "Mus musculus") != (inputs.platform == "MM285"):
            self.error(
                f"Platform type {inputs.platform} does not match the selected species {inputs.species}."
            )

        red = inputs.red_channel.import_file(imported_format="compressed")
        grn = inputs.green_channel.import_file(imported_format="compressed")

        # Route suffix errors through this process instance so the message is
        # attached to the data object being created.
        validate_filename_suffix(red, "_Red.idat.gz", resolwe_process=self)
        validate_filename_suffix(grn, "_Grn.idat.gz", resolwe_process=self)

        # Drop the 12-character "_Red.idat.gz" / "_Grn.idat.gz" suffix and
        # compare the remaining sample-name prefixes of the two files.
        sample_name_red = red[:-12]
        sample_name_grn = grn[:-12]
        if sample_name_red != sample_name_grn:
            self.error(
                "The input IDAT files don't have a matching filename prefix. "
                "The sample data might be mismatched."
            )

        outputs.red_channel = red
        outputs.green_channel = grn
        outputs.species = inputs.species
        outputs.platform = inputs.platform
| en | 0.706634 | Upload methylation array data (IDAT). Raise an error if unexpected file name suffix is encountered. Upload Illumina methylation array raw IDAT data. This import process accepts Illumina methylation array BeadChip raw files in IDAT format. Two input files, one for each of the Green and Red signal channels, are expected. The uploads of human (HM27, HM450, EPIC) and mouse (MM285) array types are supported. Input field to process UploadIdatData. Output field of the process UploadProteomicsData. Run the analysis. | 2.493422 | 2 |
precompiler/_version/pc_version.py | raxvan/precompiler | 6 | 6624528 | <filename>precompiler/_version/pc_version.py
_PCVER_HIGH_ = 0
_PCVER_LOW0_ = 1
_PCVER_LOW1_ = 6
| <filename>precompiler/_version/pc_version.py
_PCVER_HIGH_ = 0
_PCVER_LOW0_ = 1
_PCVER_LOW1_ = 6
| none | 1 | 1.194167 | 1 | |
trove/db/sqlalchemy/migrate_repo/versions/012_backup.py | zhujzhuo/openstack-trove | 1 | 6624529 | #Copyright [2013] Hewlett-Packard Development Company, L.P.
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from sqlalchemy.schema import Column
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import create_tables
from trove.db.sqlalchemy.migrate_repo.schema import DateTime
from trove.db.sqlalchemy.migrate_repo.schema import drop_tables
from trove.db.sqlalchemy.migrate_repo.schema import Float
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy.migrate_repo.schema import Boolean
meta = MetaData()
backups = Table('backups', meta,
Column('id', String(36), primary_key=True, nullable=False),
Column('name', String(255), nullable=False),
Column('description', String(512)),
Column('location', String(1024)),
Column('backup_type', String(32)),
Column('size', Float()),
Column('tenant_id', String(36)),
Column('state', String(32), nullable=False),
Column('instance_id', String(36)),
Column('checksum', String(32)),
Column('backup_timestamp', DateTime()),
Column('deleted', Boolean()),
Column('created', DateTime()),
Column('updated', DateTime()),
Column('deleted_at', DateTime()))
def upgrade(migrate_engine):
    """Create the 'backups' table when migrating the schema forward."""
    meta.bind = migrate_engine
    create_tables([backups, ])
def downgrade(migrate_engine):
    """Drop the 'backups' table when rolling the schema back."""
    meta.bind = migrate_engine
    drop_tables([backups, ])
| #Copyright [2013] Hewlett-Packard Development Company, L.P.
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from sqlalchemy.schema import Column
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import create_tables
from trove.db.sqlalchemy.migrate_repo.schema import DateTime
from trove.db.sqlalchemy.migrate_repo.schema import drop_tables
from trove.db.sqlalchemy.migrate_repo.schema import Float
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy.migrate_repo.schema import Boolean
meta = MetaData()
backups = Table('backups', meta,
Column('id', String(36), primary_key=True, nullable=False),
Column('name', String(255), nullable=False),
Column('description', String(512)),
Column('location', String(1024)),
Column('backup_type', String(32)),
Column('size', Float()),
Column('tenant_id', String(36)),
Column('state', String(32), nullable=False),
Column('instance_id', String(36)),
Column('checksum', String(32)),
Column('backup_timestamp', DateTime()),
Column('deleted', Boolean()),
Column('created', DateTime()),
Column('updated', DateTime()),
Column('deleted_at', DateTime()))
def upgrade(migrate_engine):
meta.bind = migrate_engine
create_tables([backups, ])
def downgrade(migrate_engine):
meta.bind = migrate_engine
drop_tables([backups, ])
| en | 0.834911 | #Copyright [2013] Hewlett-Packard Development Company, L.P. #Licensed under the Apache License, Version 2.0 (the "License"); #you may not use this file except in compliance with the License. #You may obtain a copy of the License at # #http://www.apache.org/licenses/LICENSE-2.0 # #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. | 1.763495 | 2 |
core/NetworkFactory.py | CVLAB-Unibo/ATDT | 16 | 6624530 | <reponame>CVLAB-Unibo/ATDT
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from core.ops import *
from core.models import *
from core.input import *
from core.losses import *
from utils.utils import *
def factory(inputs, params, reuse_variables=False):
    """Instantiate the task network selected by ``params.task``.

    Raises NotImplementedError for any task without a registered network.
    """
    task_to_network = {
        'semantic': SemanticNetwork,
        'depth': DepthNetwork,
        'normals': NormalNetwork,
    }
    network_cls = task_to_network.get(params.task)
    if network_cls is None:
        raise NotImplementedError("Please Implement this Task Network")
    return network_cls(inputs, params, reuse_variables)
class Network(object):
    """Abstract base class for single-task networks.

    Subclasses implement ``build`` (graph construction) and
    ``build_summary`` (TensorBoard summaries, training mode only).
    """
    def __init__(self,inputs, params, reuse_variables=False):
        self.inputs=inputs
        self.params = params
        self.reuse_variables = reuse_variables
        # In training mode the input pipeline yields (images, labels);
        # otherwise only images are provided.
        if self.params.mode == 'train':
            self.images, self.labels = self.inputs
            training=True
        else:
            self.images = self.inputs
            training=False
        # Select the normalization layer; only batch norm distinguishes
        # between training and inference mode.
        if self.params.normalizer_fn == 'batch_norm':
            self.normalizer_fn = lambda x : tf.layers.batch_normalization(x,training=training)
        elif self.params.normalizer_fn == 'group_norm':
            self.normalizer_fn = lambda x : group_norm(x)
        elif self.params.normalizer_fn == 'instance_norm':
            self.normalizer_fn = lambda x : instance_norm(x)
        else:
            self.normalizer_fn=None
        self.summary_images=[tf.summary.image("image",self.images)]
        self.summary_scalar=[]
        self.build()
        if self.params.mode == 'train':
            self.build_summary()
    def build(self):
        """Construct the prediction graph; must be implemented by subclasses."""
        raise NotImplementedError("Please Implement this method")
    def build_summary(self):
        """Create TensorBoard summaries; must be implemented by subclasses."""
        raise NotImplementedError("Please Implement this method")
class SemanticNetwork(Network):
    """Semantic segmentation network producing per-pixel class logits."""
    def __init__(self,inputs, params, reuse_variables=False):
        super().__init__(inputs, params,reuse_variables)
    def build(self):
        self.logits, self.features = build_model(self.images, self.params.num_classes, use_skips=self.params.use_skips, encoder=self.params.encoder,normalizer_fn=self.normalizer_fn)
        if self.params.mode == 'train':
            self.loss = cross_entropy_loss(self.logits,self.labels,self.params.num_classes)
        # Per-pixel argmax gives the predicted class map; colorize for display.
        self.pred_map = tf.expand_dims(tf.argmax(self.logits, axis=-1),axis=-1)
        self.pred = color_tensorflow(self.pred_map)
    def build_summary(self):
        gts_sum = color_tensorflow(self.labels)
        self.summary_images.append(tf.summary.image("pred",self.pred))
        self.summary_images.append(tf.summary.image("gt",gts_sum))
        self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
class DepthNetwork(Network):
    """Monocular depth regression network (single-channel output, L1 loss)."""
    def __init__(self,inputs, params, reuse_variables=False):
        super().__init__(inputs, params,reuse_variables)
    def build(self):
        self.pred_map, self.features = build_model(self.images, 1, use_skips=self.params.use_skips, encoder=self.params.encoder,normalizer_fn=self.normalizer_fn)
        if self.params.mode == 'train':
            self.loss = l1_loss(self.pred_map,self.labels)
        # Clip to [0, 1] before colorizing for visualization only;
        # the loss is computed on the raw prediction above.
        self.pred = colormap_depth(tf.clip_by_value(self.pred_map,0,1), cmap='jet')
    def build_summary(self):
        gts_sum = colormap_depth(tf.clip_by_value(self.labels,0,100),cmap='jet')
        self.summary_images.append(tf.summary.image("pred",self.pred))
        self.summary_images.append(tf.summary.image("gt",gts_sum))
        self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
class NormalNetwork(Network):
    """Surface-normal estimation network (3-channel output, cosine loss)."""
    def __init__(self,inputs, params, reuse_variables=False):
        super().__init__(inputs, params,reuse_variables)
    def build(self):
        self.pred_map, self.features = build_model(self.images, 3, use_skips=self.params.use_skips, encoder=self.params.encoder,normalizer_fn=self.normalizer_fn)
        # tanh squashes the prediction into [-1, 1] (unit-ish normal components).
        self.pred_map = tf.nn.tanh(self.pred_map)
        if self.params.mode == 'train':
            self.loss = cos_loss(self.pred_map,self.labels)
        # Rescale [-1, 1] -> [0, 255] for visualization.
        self.pred = (self.pred_map+1)/2*255
    def build_summary(self):
        gts_sum = tf.cast(tf.clip_by_value((self.labels+1)/2*255,0,255),tf.uint8)
        self.summary_images.append(tf.summary.image("pred",self.pred))
        self.summary_images.append(tf.summary.image("gt",gts_sum))
        self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
class TransferNetwork(Network):
    """Cross-task feature transfer network.

    Encodes the input with (optionally) a source-task and a target-task
    encoder whose normalization layers are frozen, learns a 'transfer'
    sub-network mapping source features into the target feature space, and
    (optionally) decodes the adapted features with the target task's decoder.
    Training minimizes the squared error between adapted and target features.
    """
    def __init__(self,inputs, params, model='dilated-resnet', encoder_source=True, encoder_target=True, decoder_target=True, reuse_variables=False, feature_level=-1):
        # Flags selecting which sub-networks get instantiated in build().
        self.encoder_source = encoder_source
        self.encoder_target = encoder_target
        self.decoder_target = decoder_target
        self.target_task = params.task
        self.model=model
        # Index into the encoder's feature list picking the level to transfer.
        self.feature_level=feature_level
        # Normalization is built in inference mode ("frozen") since the
        # encoders/decoders are pre-trained and not updated here.
        if params.normalizer_fn == 'batch_norm':
            self.normalizer_fn_frozen = lambda x : tf.layers.batch_normalization(x,training=False)
        elif params.normalizer_fn == 'group_norm':
            self.normalizer_fn_frozen = lambda x : group_norm(x)
        elif params.normalizer_fn == 'instance_norm':
            self.normalizer_fn_frozen = lambda x : instance_norm(x)
        super().__init__(inputs, params,reuse_variables)
    def build_encoder(self):
        """Build the selected backbone encoder; returns (features, skips)."""
        with tf.variable_scope('model'):
            if self.model == 'vgg':
                print("Building VGG Encoder")
                features, skips = build_vgg(self.images, self.params.use_skips, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
            elif self.model == 'resnet':
                print("Building ResNet50 Encoder")
                features, skips = build_resnet50(self.images, self.params.use_skips, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
            elif self.model == 'dilated-resnet':
                print("Building Dilated-Resnet Encoder")
                features, skips = build_dilated_resnet50(self.images, self.params.use_skips, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
        return features, skips
    def build_decoder(self,features):
        """Build the target-task decoder over `features`; returns its output map."""
        # Output channel count depends on the target task:
        # num_classes for semantics, 1 for depth, 3 for normals.
        if self.target_task == 'semantic':
            ch=self.params.num_classes
        elif self.target_task == 'depth':
            ch=1
        else:
            ch=3
        with tf.variable_scope('model'):
            if self.model == 'vgg':
                print("Building VGG Decoder")
                output = build_decoder_vgg(features, ch, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
            elif self.model == 'resnet':
                print("Building ResNet50 Decoder")
                output = build_decoder_resnet(features, ch, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
            elif self.model == 'dilated-resnet':
                print("Building Dilated-Resnet Decoder")
                output = build_decoder_dilated_resnet(features, ch, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
        return output
    def build(self):
        #### ENCODERS ####
        if self.encoder_source:
            with tf.variable_scope('source'):
                self.features_source, skips = self.build_encoder()
                self.features_source = self.features_source[self.feature_level]
        if self.encoder_target:
            with tf.variable_scope('target'):
                self.features_target, skips = self.build_encoder()
                self.features_target = self.features_target[self.feature_level]
        #### TRANSFER ####
        with tf.variable_scope('transfer',reuse=self.reuse_variables):
            print("Building Transfer Network")
            self.adapted_features=transfer_network(self.features_source)
        #### DECODERS ####
        if self.decoder_target:
            self.pred_map = self.build_decoder(self.adapted_features)
            if self.target_task == 'semantic':
                self.pred = tf.expand_dims(tf.cast(tf.argmax(self.pred_map,axis=-1),tf.uint8),axis=-1)
            elif self.target_task == 'depth':
                self.pred = tf.clip_by_value(self.pred_map,0,1)
            elif self.target_task == 'normals':
                # NOTE(review): here tanh is applied to (pred_map + 1), whereas
                # NormalNetwork applies tanh first and then rescales —
                # confirm this parenthesization is intended.
                self.pred = tf.nn.tanh((self.pred_map)+1)/2*255
        else:
            # Without a decoder, expose the adapted features directly.
            self.pred_map = self.adapted_features
            self.pred = self.adapted_features
        if self.params.mode == 'train':
            # Mean squared error between adapted and target-domain features.
            self.loss=tf.reduce_mean(tf.pow(self.features_target-self.adapted_features,2))
    def build_summary(self):
        if self.decoder_target:
            if self.target_task == 'semantic':
                output_sum = color_tensorflow(self.pred)
            elif self.target_task == 'depth':
                output_sum = colormap_depth(self.pred, cmap='jet')
            elif self.target_task == 'normals':
                output_sum = (self.pred)
            self.summary_images.append(tf.summary.image("pred", output_sum ,max_outputs=1))
        self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
| import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from core.ops import *
from core.models import *
from core.input import *
from core.losses import *
from utils.utils import *
def factory(inputs, params,reuse_variables=False):
if params.task=='semantic': return SemanticNetwork(inputs, params,reuse_variables)
elif params.task == 'depth': return DepthNetwork(inputs,params,reuse_variables)
elif params.task == 'normals': return NormalNetwork(inputs,params,reuse_variables)
else: raise NotImplementedError("Please Implement this Task Network")
class Network(object):
def __init__(self,inputs, params, reuse_variables=False):
self.inputs=inputs
self.params = params
self.reuse_variables = reuse_variables
if self.params.mode == 'train':
self.images, self.labels = self.inputs
training=True
else:
self.images = self.inputs
training=False
if self.params.normalizer_fn == 'batch_norm':
self.normalizer_fn = lambda x : tf.layers.batch_normalization(x,training=training)
elif self.params.normalizer_fn == 'group_norm':
self.normalizer_fn = lambda x : group_norm(x)
elif self.params.normalizer_fn == 'instance_norm':
self.normalizer_fn = lambda x : instance_norm(x)
else:
self.normalizer_fn=None
self.summary_images=[tf.summary.image("image",self.images)]
self.summary_scalar=[]
self.build()
if self.params.mode == 'train':
self.build_summary()
def build(self):
raise NotImplementedError("Please Implement this method")
def build_summary(self):
raise NotImplementedError("Please Implement this method")
class SemanticNetwork(Network):
def __init__(self,inputs, params, reuse_variables=False):
super().__init__(inputs, params,reuse_variables)
def build(self):
self.logits, self.features = build_model(self.images, self.params.num_classes, use_skips=self.params.use_skips, encoder=self.params.encoder,normalizer_fn=self.normalizer_fn)
if self.params.mode == 'train':
self.loss = cross_entropy_loss(self.logits,self.labels,self.params.num_classes)
self.pred_map = tf.expand_dims(tf.argmax(self.logits, axis=-1),axis=-1)
self.pred = color_tensorflow(self.pred_map)
def build_summary(self):
gts_sum = color_tensorflow(self.labels)
self.summary_images.append(tf.summary.image("pred",self.pred))
self.summary_images.append(tf.summary.image("gt",gts_sum))
self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
class DepthNetwork(Network):
def __init__(self,inputs, params, reuse_variables=False):
super().__init__(inputs, params,reuse_variables)
def build(self):
self.pred_map, self.features = build_model(self.images, 1, use_skips=self.params.use_skips, encoder=self.params.encoder,normalizer_fn=self.normalizer_fn)
if self.params.mode == 'train':
self.loss = l1_loss(self.pred_map,self.labels)
self.pred = colormap_depth(tf.clip_by_value(self.pred_map,0,1), cmap='jet')
def build_summary(self):
gts_sum = colormap_depth(tf.clip_by_value(self.labels,0,100),cmap='jet')
self.summary_images.append(tf.summary.image("pred",self.pred))
self.summary_images.append(tf.summary.image("gt",gts_sum))
self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
class NormalNetwork(Network):
def __init__(self,inputs, params, reuse_variables=False):
super().__init__(inputs, params,reuse_variables)
def build(self):
self.pred_map, self.features = build_model(self.images, 3, use_skips=self.params.use_skips, encoder=self.params.encoder,normalizer_fn=self.normalizer_fn)
self.pred_map = tf.nn.tanh(self.pred_map)
if self.params.mode == 'train':
self.loss = cos_loss(self.pred_map,self.labels)
self.pred = (self.pred_map+1)/2*255
def build_summary(self):
gts_sum = tf.cast(tf.clip_by_value((self.labels+1)/2*255,0,255),tf.uint8)
self.summary_images.append(tf.summary.image("pred",self.pred))
self.summary_images.append(tf.summary.image("gt",gts_sum))
self.summary_scalar.append(tf.summary.scalar("loss", self.loss))
class TransferNetwork(Network):
def __init__(self,inputs, params, model='dilated-resnet', encoder_source=True, encoder_target=True, decoder_target=True, reuse_variables=False, feature_level=-1):
self.encoder_source = encoder_source
self.encoder_target = encoder_target
self.decoder_target = decoder_target
self.target_task = params.task
self.model=model
self.feature_level=feature_level
if params.normalizer_fn == 'batch_norm':
self.normalizer_fn_frozen = lambda x : tf.layers.batch_normalization(x,training=False)
elif params.normalizer_fn == 'group_norm':
self.normalizer_fn_frozen = lambda x : group_norm(x)
elif params.normalizer_fn == 'instance_norm':
self.normalizer_fn_frozen = lambda x : instance_norm(x)
super().__init__(inputs, params,reuse_variables)
def build_encoder(self):
with tf.variable_scope('model'):
if self.model == 'vgg':
print("Building VGG Encoder")
features, skips = build_vgg(self.images, self.params.use_skips, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
elif self.model == 'resnet':
print("Building ResNet50 Encoder")
features, skips = build_resnet50(self.images, self.params.use_skips, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
elif self.model == 'dilated-resnet':
print("Building Dilated-Resnet Encoder")
features, skips = build_dilated_resnet50(self.images, self.params.use_skips, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
return features, skips
def build_decoder(self,features):
if self.target_task == 'semantic':
ch=self.params.num_classes
elif self.target_task == 'depth':
ch=1
else:
ch=3
with tf.variable_scope('model'):
if self.model == 'vgg':
print("Building VGG Decoder")
output = build_decoder_vgg(features, ch, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
elif self.model == 'resnet':
print("Building ResNet50 Decoder")
output = build_decoder_resnet(features, ch, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
elif self.model == 'dilated-resnet':
print("Building Dilated-Resnet Decoder")
output = build_decoder_dilated_resnet(features, ch, normalizer_fn=self.normalizer_fn_frozen, reuse_variables=self.reuse_variables)
return output
def build(self):
#### ENCODERS ####
if self.encoder_source:
with tf.variable_scope('source'):
self.features_source, skips = self.build_encoder()
self.features_source = self.features_source[self.feature_level]
if self.encoder_target:
with tf.variable_scope('target'):
self.features_target, skips = self.build_encoder()
self.features_target = self.features_target[self.feature_level]
#### TRANSFER ####
with tf.variable_scope('transfer',reuse=self.reuse_variables):
print("Building Transfer Network")
self.adapted_features=transfer_network(self.features_source)
#### DECODERS ####
if self.decoder_target:
self.pred_map = self.build_decoder(self.adapted_features)
if self.target_task == 'semantic':
self.pred = tf.expand_dims(tf.cast(tf.argmax(self.pred_map,axis=-1),tf.uint8),axis=-1)
elif self.target_task == 'depth':
self.pred = tf.clip_by_value(self.pred_map,0,1)
elif self.target_task == 'normals':
self.pred = tf.nn.tanh((self.pred_map)+1)/2*255
else:
self.pred_map = self.adapted_features
self.pred = self.adapted_features
if self.params.mode == 'train':
self.loss=tf.reduce_mean(tf.pow(self.features_target-self.adapted_features,2))
def build_summary(self):
if self.decoder_target:
if self.target_task == 'semantic':
output_sum = color_tensorflow(self.pred)
elif self.target_task == 'depth':
output_sum = colormap_depth(self.pred, cmap='jet')
elif self.target_task == 'normals':
output_sum = (self.pred)
self.summary_images.append(tf.summary.image("pred", output_sum ,max_outputs=1))
self.summary_scalar.append(tf.summary.scalar("loss", self.loss)) | de | 0.659736 | #### ENCODERS #### #### TRANSFER #### #### DECODERS #### | 2.398142 | 2 |
examples/training/avg_word_embeddings/training_stsbenchmark_bow.py | dd-dos/sentence-transformers | 1 | 6624531 | """
This example uses a simple bag-of-words (BoW) approach. A sentence is mapped
to a sparse vector with e.g. 25,000 dimensions. Optionally, you can also use tf-idf.
To make the model trainable, we add multiple dense layers to create a Deep Averaging Network (DAN).
"""
import torch
from torch.utils.data import DataLoader
import math
from sentence_transformers import models, losses
from sentence_transformers import SentencesDataset, LoggingHandler, SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator
from sentence_transformers.readers import *
from sentence_transformers.models.tokenizer.WordTokenizer import ENGLISH_STOP_WORDS
import logging
from datetime import datetime
#### Just some code to print debug information to stdout
logging.basicConfig(format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
handlers=[LoggingHandler()])
#### /print debug information to stdout
# Read the dataset
batch_size = 32
sts_reader = STSBenchmarkDataReader('../datasets/stsbenchmark')
model_save_path = 'output/training_tf-idf_word_embeddings-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
# Create the vocab for the BoW model
stop_words = ENGLISH_STOP_WORDS
max_vocab_size = 25000  # This is also the size of the BoW sentence vector.
# Read the most common max_vocab_size words, skipping stop-words.  The file's
# first line holds the total document count; each following line is
# "<word>\t<document frequency>".
vocab = set()
weights = {}
# Use a context manager so the file handle is closed deterministically,
# and stream line-by-line instead of materializing all lines in memory.
with open('wikipedia_doc_frequencies.txt', encoding='utf8') as freq_file:
    num_docs = int(freq_file.readline())
    for line in freq_file:
        word, freq = line.lower().strip().split("\t")
        if word in stop_words:
            continue
        vocab.add(word)
        # Inverse document frequency: log(N / df)
        weights[word] = math.log(num_docs/int(freq))
        if len(vocab) >= max_vocab_size:
            break
#Create the BoW model. Because we set word_weights to the IDF values and cumulative_term_frequency=True, we
#get tf-idf vectors. Set word_weights to an empty dict and cumulative_term_frequency=False to get a 1-hot sentence encoding
bow = models.BoW(vocab=vocab, word_weights=weights, cumulative_term_frequency=True)
# Add two trainable feed-forward networks (DAN) with max_vocab_size -> 768 -> 512 dimensions.
sent_embeddings_dimension = max_vocab_size
dan1 = models.Dense(in_features=sent_embeddings_dimension, out_features=768)
dan2 = models.Dense(in_features=768, out_features=512)
model = SentenceTransformer(modules=[bow, dan1, dan2])
# Convert the dataset to a DataLoader ready for training
logging.info("Read STSbenchmark train dataset")
train_data = SentencesDataset(sts_reader.get_examples('sts-train.csv'), model=model)
train_dataloader = DataLoader(train_data, shuffle=True, batch_size=batch_size)
train_loss = losses.CosineSimilarityLoss(model=model)
logging.info("Read STSbenchmark dev dataset")
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(sts_reader.get_examples('sts-dev.csv'))
# Configure the training
num_epochs = 10
warmup_steps = math.ceil(len(train_data) * num_epochs / batch_size * 0.1) #10% of train data for warm-up
logging.info("Warmup-steps: {}".format(warmup_steps))
# Train the model
model.fit(train_objectives=[(train_dataloader, train_loss)],
evaluator=evaluator,
epochs=num_epochs,
warmup_steps=warmup_steps,
output_path=model_save_path
)
##############################################################################
#
# Load the stored model and evaluate its performance on STS benchmark dataset
#
##############################################################################
model = SentenceTransformer(model_save_path)
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(sts_reader.get_examples("sts-test.csv"))
model.evaluate(evaluator) | """
This example uses a simple bag-of-words (BoW) approach. A sentence is mapped
to a sparse vector with e.g. 25,000 dimensions. Optionally, you can also use tf-idf.
To make the model trainable, we add multiple dense layers to create a Deep Averaging Network (DAN).
"""
import torch
from torch.utils.data import DataLoader
import math
from sentence_transformers import models, losses
from sentence_transformers import SentencesDataset, LoggingHandler, SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator
from sentence_transformers.readers import *
from sentence_transformers.models.tokenizer.WordTokenizer import ENGLISH_STOP_WORDS
import logging
from datetime import datetime
#### Just some code to print debug information to stdout
logging.basicConfig(format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
handlers=[LoggingHandler()])
#### /print debug information to stdout
# Read the dataset
batch_size = 32
sts_reader = STSBenchmarkDataReader('../datasets/stsbenchmark')
model_save_path = 'output/training_tf-idf_word_embeddings-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
# Create the vocab for the BoW model
stop_words = ENGLISH_STOP_WORDS
max_vocab_size = 25000 #This is also the size of the BoW sentence vector.
#Read the most common max_vocab_size words. Skip stop-words
vocab = set()
weights = {}
lines = open('wikipedia_doc_frequencies.txt', encoding='utf8').readlines()
num_docs = int(lines[0])
for line in lines[1:]:
word, freq = line.lower().strip().split("\t")
if word in stop_words:
continue
vocab.add(word)
weights[word] = math.log(num_docs/int(freq))
if len(vocab) >= max_vocab_size:
break
#Create the BoW model. Because we set word_weights to the IDF values and cumulative_term_frequency=True, we
#get tf-idf vectors. Set word_weights to an empty dict and cumulative_term_frequency=False to get a 1-hot sentence encoding
bow = models.BoW(vocab=vocab, word_weights=weights, cumulative_term_frequency=True)
# Add two trainable feed-forward networks (DAN) with max_vocab_size -> 768 -> 512 dimensions.
sent_embeddings_dimension = max_vocab_size
dan1 = models.Dense(in_features=sent_embeddings_dimension, out_features=768)
dan2 = models.Dense(in_features=768, out_features=512)
model = SentenceTransformer(modules=[bow, dan1, dan2])
# Convert the dataset to a DataLoader ready for training
logging.info("Read STSbenchmark train dataset")
train_data = SentencesDataset(sts_reader.get_examples('sts-train.csv'), model=model)
train_dataloader = DataLoader(train_data, shuffle=True, batch_size=batch_size)
train_loss = losses.CosineSimilarityLoss(model=model)
logging.info("Read STSbenchmark dev dataset")
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(sts_reader.get_examples('sts-dev.csv'))
# Configure the training
num_epochs = 10
warmup_steps = math.ceil(len(train_data) * num_epochs / batch_size * 0.1) #10% of train data for warm-up
logging.info("Warmup-steps: {}".format(warmup_steps))
# Train the model
model.fit(train_objectives=[(train_dataloader, train_loss)],
evaluator=evaluator,
epochs=num_epochs,
warmup_steps=warmup_steps,
output_path=model_save_path
)
##############################################################################
#
# Load the stored model and evaluate its performance on STS benchmark dataset
#
##############################################################################
model = SentenceTransformer(model_save_path)
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(sts_reader.get_examples("sts-test.csv"))
model.evaluate(evaluator) | en | 0.677895 | This example uses a simple bag-of-words (BoW) approach. A sentence is mapped to a sparse vector with e.g. 25,000 dimensions. Optionally, you can also use tf-idf. To make the model trainable, we add multiple dense layers to create a Deep Averaging Network (DAN). #### Just some code to print debug information to stdout #### /print debug information to stdout # Read the dataset # Create the vocab for the BoW model #This is also the size of the BoW sentence vector. #Read the most common max_vocab_size words. Skip stop-words #Create the BoW model. Because we set word_weights to the IDF values and cumulative_term_frequency=True, we #get tf-idf vectors. Set word_weights to an empty dict and cumulative_term_frequency=False to get a 1-hot sentence encoding # Add two trainable feed-forward networks (DAN) with max_vocab_size -> 768 -> 512 dimensions. # Convert the dataset to a DataLoader ready for training # Configure the training #10% of train data for warm-up # Train the model ############################################################################## # # Load the stored model and evaluate its performance on STS benchmark dataset # ############################################################################## | 2.937237 | 3 |
util/chplenv/chpl_llvm.py | cassella/chapel | 0 | 6624532 | <gh_stars>0
#!/usr/bin/env python3
import optparse
import os
import sys
import chpl_bin_subdir, chpl_arch, chpl_compiler, chpl_platform, overrides
from chpl_home_utils import get_chpl_third_party
from utils import memoize, error, run_command, try_run_command, warning
# returns a tuple of supported major LLVM versions as strings
def llvm_versions():
    """Supported major LLVM versions, as strings, in the order they are tried."""
    # Since LLVM 4.0 a single major number identifies a release.
    supported_majors = ('11',)
    return supported_majors
@memoize
def get_uniq_cfg_path_for(llvm_val):
    """Return the per-configuration subdirectory name for the given LLVM choice."""
    if llvm_val != "bundled":
        # A system LLVM is not built per-configuration; 'system' (or whatever
        # llvm_val holds) is used verbatim.
        return llvm_val
    # The bundled LLVM is built once per platform/arch/compiler combination.
    host_bin_subdir = chpl_bin_subdir.get('host')
    host_compiler = chpl_compiler.get('host')
    return '{0}-{1}'.format(host_bin_subdir, host_compiler)
@memoize
def get_uniq_cfg_path():
    """Unique configuration path for the currently selected CHPL_LLVM value."""
    llvm_val = get()
    return get_uniq_cfg_path_for(llvm_val)
def get_bundled_llvm_dir():
    """Return the install prefix of the bundled (third-party) LLVM build."""
    target_dir = get_uniq_cfg_path_for('bundled')
    third_party = get_chpl_third_party()
    return os.path.join(third_party, 'llvm', 'install', target_dir)
def is_included_llvm_built():
    """Return True when the bundled LLVM appears to have been built/installed.

    The presence of one of the installed LLVM headers is used as the marker.
    """
    llvm_subdir = get_bundled_llvm_dir()
    llvm_header = os.path.join(llvm_subdir, 'include', 'llvm',
                               'PassSupport.h')
    # os.path.exists already yields the boolean we need; no if/else required.
    return os.path.exists(llvm_header)
def compatible_platform_for_llvm():
    """Return True if the target platform/arch can use the LLVM backend.

    32-bit x86 targets are not supported.
    """
    target_arch = chpl_arch.get('target')
    target_platform = chpl_platform.get('target')
    # "i386" fixes an apparent typo ("i368") so 32-bit x86 is actually excluded.
    return (target_arch != "i386" and target_platform != "linux32")
# returns a string of the supported llvm versions suitable for error msgs
def llvm_versions_string():
    """Supported LLVM versions as a comma-separated string for error messages."""
    separator = ', '
    return separator.join(llvm_versions())
# llvm_config is the llvm-config command we want to check out.
# returns (version_number, config_error_message)
@memoize
def check_llvm_config(llvm_config):
    """Check that `llvm_config` names a usable llvm-config command.

    Returns a tuple (version_number, config_error_message); the error
    message is the empty string when the command works, reports a supported
    version, and the llvm/clang development headers are present.
    """
    if llvm_config == 'none':
        return (0, "no llvm-config detected")
    got_version = 0
    version_ok = False
    llvm_header = ''
    llvm_include_ok = False
    clang_header = ''
    clang_include_ok = False
    # Probe the command; a missing or failing binary is reported, not raised.
    exists, returncode, my_stdout, my_stderr = try_run_command([llvm_config,
                                                               '--version'])
    if exists and returncode == 0:
        version_string = my_stdout.strip()
        # Only the major component matters (e.g. "11" from "11.0.1").
        got_version = version_string.split('.')[0]
        version_ok = got_version in llvm_versions()
    else:
        s = "could not run llvm-config at {0}".format(llvm_config)
        return (0, s)
    # Verify that the dev headers for both LLVM and clang are installed.
    include_dir = run_command([llvm_config, '--includedir']).strip()
    if os.path.isdir(include_dir):
        llvm_header = os.path.join(include_dir,
                                   'llvm', 'Config', 'llvm-config.h')
        llvm_include_ok = os.path.exists(llvm_header)
        clang_header = os.path.join(include_dir, 'clang', 'Basic', 'Version.h')
        clang_include_ok = os.path.exists(clang_header)
    s = ''
    if not version_ok:
        s = ("LLVM version {0} is not one of the supported versions: {1}"
             .format(got_version, llvm_versions_string()))
        return (got_version, s)
    if not llvm_include_ok:
        s = "Could not find the LLVM header {0}".format(llvm_header)
        s += "\nPerhaps you need to install clang and llvm dev packages"
        return (got_version, s)
    elif not clang_include_ok:
        s = "Could not find the clang header {0}".format(clang_header)
        s += "\nPerhaps you need to install clang and llvm dev packages"
        return (got_version, s)
    return (got_version, '')
@memoize
def find_system_llvm_config():
paths = [ ]
for vers in llvm_versions():
paths.append("llvm-config-" + vers + ".0")
paths.append("llvm-config-" + vers)
# next ones are for Homebrew
paths.append("/usr/local/opt/llvm@" + vers + ".0/bin/llvm-config")
paths.append("/usr/local/opt/llvm@" + vers + "/bin/llvm-config")
# check also unversioned commands
paths.append("llvm-config")
# next for Homebrew
paths.append("/usr/local/opt/llvm/bin/llvm-config")
all_found = [ ]
for command in paths:
found_version, found_config_err = check_llvm_config(command)
all_found.append( (command, found_version, found_config_err) )
found = ('', '', '')
for vers in llvm_versions():
for entry in all_found:
if entry[1] == vers:
found = entry
break
# command set, version > 0, no error
command = found[0]
version = found[1]
config_err = found[2]
if command and version and not config_err:
return found[0]
return 'none'
@memoize
def get_llvm_config():
llvm_val = get()
llvm_config = overrides.get('CHPL_LLVM_CONFIG', 'none')
if llvm_val == 'bundled':
llvm_subdir = get_bundled_llvm_dir()
bundled_config = os.path.join(llvm_subdir, 'bin', 'llvm-config')
if llvm_config != 'none' and llvm_config != bundled_config:
warning("CHPL_LLVM_CONFIG is ignored for CHPL_LLVM=bundled");
llvm_config = bundled_config
elif llvm_config == 'none' and llvm_val == 'system':
llvm_config = find_system_llvm_config()
return llvm_config
@memoize
def validate_llvm_config(llvm_config=None):
llvm_val = get()
# We pass in llvm_config if has already been computed (so we don't
# end up in an infinite loop).
if llvm_config is None:
llvm_config = get_llvm_config()
if llvm_val == 'system':
if llvm_config == 'none':
error("CHPL_LLVM=system but could not find an installed LLVM"
" with one of the supported versions: {0}".format(
llvm_versions_string()))
if (llvm_val == 'system' or
(llvm_val == 'bundled' and os.path.exists(llvm_config))):
version, config_error = check_llvm_config(llvm_config)
if config_error:
error("Problem with llvm-config at {0} -- {1}"
.format(llvm_config, config_error))
@memoize
def get_system_llvm_config_bindir():
llvm_config = get_llvm_config()
validate_llvm_config(llvm_config)
bindir = run_command([llvm_config, '--bindir']).strip()
if os.path.isdir(bindir):
pass
else:
error("llvm-config command {0} provides missing bin directory {0}"
.format(llvm_config, bindir))
return bindir
def get_llvm_clang_command_name(lang):
lang_upper = lang.upper()
if lang_upper == 'C++':
lang_upper = 'CXX'
if lang_upper == 'CXX':
return 'clang++'
else:
return 'clang'
# lang should be C or CXX
@memoize
def get_llvm_clang(lang):
clang_name = get_llvm_clang_command_name(lang)
llvm_val = get()
if llvm_val == 'system':
bindir = get_system_llvm_config_bindir()
return os.path.join(bindir, clang_name)
elif llvm_val == 'bundled':
llvm_subdir = get_bundled_llvm_dir()
return os.path.join(llvm_subdir, 'bin', clang_name)
else:
return ''
def has_compatible_installed_llvm():
llvm_config = find_system_llvm_config()
if llvm_config:
return True
else:
return False
@memoize
def get():
llvm_val = overrides.get('CHPL_LLVM')
if not llvm_val:
llvm_val = 'unset'
if compatible_platform_for_llvm():
if is_included_llvm_built():
llvm_val = 'bundled'
elif has_compatible_installed_llvm():
llvm_val = 'system'
else:
# This platform doesn't work with the LLVM backend
# for one reason or another. So default to CHPL_LLVM=none.
llvm_val = 'none'
if llvm_val == 'llvm':
warning("CHPL_LLVM=llvm is deprecated. Use CHPL_LLVM=bundled instead")
llvm_val = 'bundled'
if not compatible_platform_for_llvm():
if llvm_val != 'none' and llvm_val != 'unset':
warning("CHPL_LLVM={0} is not compatible with this "
"platform".format(llvm_val))
return llvm_val
def llvm_enabled():
llvm_val = get()
if llvm_val == 'bundled' or llvm_val == 'system':
return True
return False
def _main():
llvm_val = get()
llvm_config = get_llvm_config()
parser = optparse.OptionParser(usage='usage: %prog [--needs-llvm-runtime]')
parser.add_option('--needs-llvm-runtime', dest='action',
action='store_const',
const='needsllvm', default='')
parser.add_option('--llvm-config', dest='action',
action='store_const',
const='llvmconfig', default='')
(options, args) = parser.parse_args()
#if --needs-llvm-runtime is set, print out llvm if runtime is needed,
# and print out nothing if it is not.
if options.action == 'needsllvm':
if llvm_val == 'system' or llvm_val == 'bundled':
sys.stdout.write("llvm\n");
elif options.action == 'llvmconfig':
sys.stdout.write("{0}\n".format(llvm_config))
validate_llvm_config()
else:
sys.stdout.write("{0}\n".format(llvm_val))
if __name__ == '__main__':
_main()
| #!/usr/bin/env python3
import optparse
import os
import sys
import chpl_bin_subdir, chpl_arch, chpl_compiler, chpl_platform, overrides
from chpl_home_utils import get_chpl_third_party
from utils import memoize, error, run_command, try_run_command, warning
# returns a tuple of supported major LLVM versions as strings
def llvm_versions():
# Which major release - only need one number for that with current
# llvm (since LLVM 4.0).
# These will be tried in order.
return ('11',)
@memoize
def get_uniq_cfg_path_for(llvm_val):
if llvm_val == "bundled":
# put platform-arch-compiler for included llvm
host_bin_subdir = chpl_bin_subdir.get('host')
host_compiler = chpl_compiler.get('host')
llvm_target_dir = '{0}-{1}'.format(host_bin_subdir, host_compiler)
else:
# just put 'system' for system llvm
llvm_target_dir = llvm_val
return llvm_target_dir
@memoize
def get_uniq_cfg_path():
llvm_val = get()
return get_uniq_cfg_path_for(llvm_val)
def get_bundled_llvm_dir():
chpl_third_party = get_chpl_third_party()
llvm_target_dir = get_uniq_cfg_path_for('bundled')
llvm_subdir = os.path.join(chpl_third_party, 'llvm', 'install',
llvm_target_dir)
return llvm_subdir
def is_included_llvm_built():
llvm_subdir = get_bundled_llvm_dir()
llvm_header = os.path.join(llvm_subdir, 'include', 'llvm',
'PassSupport.h')
if os.path.exists(llvm_header):
return True
else:
return False
def compatible_platform_for_llvm():
target_arch = chpl_arch.get('target')
target_platform = chpl_platform.get('target')
return (target_arch != "i368" and target_platform != "linux32")
# returns a string of the supported llvm versions suitable for error msgs
def llvm_versions_string():
return ', '.join(llvm_versions())
# llvm_config is the llvm-config command we want to check out.
# returns (version_number, config_error_message)
@memoize
def check_llvm_config(llvm_config):
if llvm_config == 'none':
return (0, "no llvm-config detected")
got_version = 0
version_ok = False
llvm_header = ''
llvm_include_ok = False
clang_header = ''
clang_include_ok = False
exists, returncode, my_stdout, my_stderr = try_run_command([llvm_config,
'--version'])
if exists and returncode == 0:
version_string = my_stdout.strip()
got_version = version_string.split('.')[0]
version_ok = got_version in llvm_versions()
else:
s = "could not run llvm-config at {0}".format(llvm_config)
return (0, s)
include_dir = run_command([llvm_config, '--includedir']).strip()
if os.path.isdir(include_dir):
llvm_header = os.path.join(include_dir,
'llvm', 'Config', 'llvm-config.h')
llvm_include_ok = os.path.exists(llvm_header)
clang_header = os.path.join(include_dir, 'clang', 'Basic', 'Version.h')
clang_include_ok = os.path.exists(clang_header)
s = ''
if not version_ok:
s = ("LLVM version {0} is not one of the supported versions: {1}"
.format(got_version, llvm_versions_string()))
return (got_version, s)
if not llvm_include_ok:
s = "Could not find the LLVM header {0}".format(llvm_header)
s += "\nPerhaps you need to install clang and llvm dev packages"
return (got_version, s)
elif not clang_include_ok:
s = "Could not find the clang header {0}".format(clang_header)
s += "\nPerhaps you need to install clang and llvm dev packages"
return (got_version, s)
return (got_version, '')
@memoize
def find_system_llvm_config():
paths = [ ]
for vers in llvm_versions():
paths.append("llvm-config-" + vers + ".0")
paths.append("llvm-config-" + vers)
# next ones are for Homebrew
paths.append("/usr/local/opt/llvm@" + vers + ".0/bin/llvm-config")
paths.append("/usr/local/opt/llvm@" + vers + "/bin/llvm-config")
# check also unversioned commands
paths.append("llvm-config")
# next for Homebrew
paths.append("/usr/local/opt/llvm/bin/llvm-config")
all_found = [ ]
for command in paths:
found_version, found_config_err = check_llvm_config(command)
all_found.append( (command, found_version, found_config_err) )
found = ('', '', '')
for vers in llvm_versions():
for entry in all_found:
if entry[1] == vers:
found = entry
break
# command set, version > 0, no error
command = found[0]
version = found[1]
config_err = found[2]
if command and version and not config_err:
return found[0]
return 'none'
@memoize
def get_llvm_config():
llvm_val = get()
llvm_config = overrides.get('CHPL_LLVM_CONFIG', 'none')
if llvm_val == 'bundled':
llvm_subdir = get_bundled_llvm_dir()
bundled_config = os.path.join(llvm_subdir, 'bin', 'llvm-config')
if llvm_config != 'none' and llvm_config != bundled_config:
warning("CHPL_LLVM_CONFIG is ignored for CHPL_LLVM=bundled");
llvm_config = bundled_config
elif llvm_config == 'none' and llvm_val == 'system':
llvm_config = find_system_llvm_config()
return llvm_config
@memoize
def validate_llvm_config(llvm_config=None):
llvm_val = get()
# We pass in llvm_config if has already been computed (so we don't
# end up in an infinite loop).
if llvm_config is None:
llvm_config = get_llvm_config()
if llvm_val == 'system':
if llvm_config == 'none':
error("CHPL_LLVM=system but could not find an installed LLVM"
" with one of the supported versions: {0}".format(
llvm_versions_string()))
if (llvm_val == 'system' or
(llvm_val == 'bundled' and os.path.exists(llvm_config))):
version, config_error = check_llvm_config(llvm_config)
if config_error:
error("Problem with llvm-config at {0} -- {1}"
.format(llvm_config, config_error))
@memoize
def get_system_llvm_config_bindir():
llvm_config = get_llvm_config()
validate_llvm_config(llvm_config)
bindir = run_command([llvm_config, '--bindir']).strip()
if os.path.isdir(bindir):
pass
else:
error("llvm-config command {0} provides missing bin directory {0}"
.format(llvm_config, bindir))
return bindir
def get_llvm_clang_command_name(lang):
lang_upper = lang.upper()
if lang_upper == 'C++':
lang_upper = 'CXX'
if lang_upper == 'CXX':
return 'clang++'
else:
return 'clang'
# lang should be C or CXX
@memoize
def get_llvm_clang(lang):
clang_name = get_llvm_clang_command_name(lang)
llvm_val = get()
if llvm_val == 'system':
bindir = get_system_llvm_config_bindir()
return os.path.join(bindir, clang_name)
elif llvm_val == 'bundled':
llvm_subdir = get_bundled_llvm_dir()
return os.path.join(llvm_subdir, 'bin', clang_name)
else:
return ''
def has_compatible_installed_llvm():
llvm_config = find_system_llvm_config()
if llvm_config:
return True
else:
return False
@memoize
def get():
llvm_val = overrides.get('CHPL_LLVM')
if not llvm_val:
llvm_val = 'unset'
if compatible_platform_for_llvm():
if is_included_llvm_built():
llvm_val = 'bundled'
elif has_compatible_installed_llvm():
llvm_val = 'system'
else:
# This platform doesn't work with the LLVM backend
# for one reason or another. So default to CHPL_LLVM=none.
llvm_val = 'none'
if llvm_val == 'llvm':
warning("CHPL_LLVM=llvm is deprecated. Use CHPL_LLVM=bundled instead")
llvm_val = 'bundled'
if not compatible_platform_for_llvm():
if llvm_val != 'none' and llvm_val != 'unset':
warning("CHPL_LLVM={0} is not compatible with this "
"platform".format(llvm_val))
return llvm_val
def llvm_enabled():
llvm_val = get()
if llvm_val == 'bundled' or llvm_val == 'system':
return True
return False
def _main():
llvm_val = get()
llvm_config = get_llvm_config()
parser = optparse.OptionParser(usage='usage: %prog [--needs-llvm-runtime]')
parser.add_option('--needs-llvm-runtime', dest='action',
action='store_const',
const='needsllvm', default='')
parser.add_option('--llvm-config', dest='action',
action='store_const',
const='llvmconfig', default='')
(options, args) = parser.parse_args()
#if --needs-llvm-runtime is set, print out llvm if runtime is needed,
# and print out nothing if it is not.
if options.action == 'needsllvm':
if llvm_val == 'system' or llvm_val == 'bundled':
sys.stdout.write("llvm\n");
elif options.action == 'llvmconfig':
sys.stdout.write("{0}\n".format(llvm_config))
validate_llvm_config()
else:
sys.stdout.write("{0}\n".format(llvm_val))
if __name__ == '__main__':
_main() | en | 0.823108 | #!/usr/bin/env python3 # returns a tuple of supported major LLVM versions as strings # Which major release - only need one number for that with current # llvm (since LLVM 4.0). # These will be tried in order. # put platform-arch-compiler for included llvm # just put 'system' for system llvm # returns a string of the supported llvm versions suitable for error msgs # llvm_config is the llvm-config command we want to check out. # returns (version_number, config_error_message) # next ones are for Homebrew # check also unversioned commands # next for Homebrew # command set, version > 0, no error # We pass in llvm_config if has already been computed (so we don't # end up in an infinite loop). # lang should be C or CXX # This platform doesn't work with the LLVM backend # for one reason or another. So default to CHPL_LLVM=none. #if --needs-llvm-runtime is set, print out llvm if runtime is needed, # and print out nothing if it is not. | 2.130681 | 2 |
scrape_test2.py | peterhogan/python | 0 | 6624533 | from bs4 import BeautifulSoup
from urllib2 import urlopen
from time import sleep # be nice
BASE_URL = "http://www.chicagoreader.com"
def make_soup(url):
html = urlopen(url).read()
return BeautifulSoup(html, "lxml")
def get_category_links(section_url):
soup = make_soup(section_url)
boccat = soup.find("dl", "boccat")
category_links = [BASE_URL + dd.a["href"] for dd in boccat.findAll("dd")]
return category_links
def get_category_winner(category_url):
soup = make_soup(category_url)
category = soup.find("h1", "headline").string
winner = [h2.string for h2 in soup.findAll("h2", "boc1")]
runners_up = [h2.string for h2 in soup.findAll("h2", "boc2")]
return {"category": category,
"category_url": category_url,
"winner": winner,
"runners_up": runners_up}
if __name__ == '__main__':
food_n_drink = ("http://www.chicagoreader.com/chicago/"
"best-of-chicago-2011-food-drink/BestOf?oid=4106228")
categories = get_category_links(food_n_drink)
data = [] # a list to store our dictionaries
for category in categories:
winner = get_category_winner(category)
data.append(winner)
sleep(1) # be nice
print data
| from bs4 import BeautifulSoup
from urllib2 import urlopen
from time import sleep # be nice
BASE_URL = "http://www.chicagoreader.com"
def make_soup(url):
html = urlopen(url).read()
return BeautifulSoup(html, "lxml")
def get_category_links(section_url):
soup = make_soup(section_url)
boccat = soup.find("dl", "boccat")
category_links = [BASE_URL + dd.a["href"] for dd in boccat.findAll("dd")]
return category_links
def get_category_winner(category_url):
soup = make_soup(category_url)
category = soup.find("h1", "headline").string
winner = [h2.string for h2 in soup.findAll("h2", "boc1")]
runners_up = [h2.string for h2 in soup.findAll("h2", "boc2")]
return {"category": category,
"category_url": category_url,
"winner": winner,
"runners_up": runners_up}
if __name__ == '__main__':
food_n_drink = ("http://www.chicagoreader.com/chicago/"
"best-of-chicago-2011-food-drink/BestOf?oid=4106228")
categories = get_category_links(food_n_drink)
data = [] # a list to store our dictionaries
for category in categories:
winner = get_category_winner(category)
data.append(winner)
sleep(1) # be nice
print data
| en | 0.714382 | # be nice # a list to store our dictionaries # be nice | 3.248366 | 3 |
Microsoft/functions30-2.py | marconipoveda/Python101 | 0 | 6624534 | <reponame>marconipoveda/Python101<filename>Microsoft/functions30-2.py
def get_initial(name):
return name[0:1].upper()
fn=input('Enter your first name: ')
mn=input('Enter your middle name: ')
ln=input('Enter your last name: ')
# 0 - Before creating the function get_initial()
# fn_i = fn[0:1]
# mn_i = mn[0:1]
# ln_i = ln[0:1]
# 1 - Before calling function inside the print statement
# fn_i = get_initial(fn)
# mn_i = get_initial(mn)
# ln_i = get_initial(ln)
# 0 - Before calling function inside the print statement
# 1 - print('Your initials are: ' + fn_i + mn_i + ln_i)
print('Your initials are: ' + get_initial(fn) + get_initial(mn) + get_initial(ln)) | def get_initial(name):
return name[0:1].upper()
fn=input('Enter your first name: ')
mn=input('Enter your middle name: ')
ln=input('Enter your last name: ')
# 0 - Before creating the function get_initial()
# fn_i = fn[0:1]
# mn_i = mn[0:1]
# ln_i = ln[0:1]
# 1 - Before calling function inside the print statement
# fn_i = get_initial(fn)
# mn_i = get_initial(mn)
# ln_i = get_initial(ln)
# 0 - Before calling function inside the print statement
# 1 - print('Your initials are: ' + fn_i + mn_i + ln_i)
print('Your initials are: ' + get_initial(fn) + get_initial(mn) + get_initial(ln)) | en | 0.462938 | # 0 - Before creating the function get_initial() # fn_i = fn[0:1] # mn_i = mn[0:1] # ln_i = ln[0:1] # 1 - Before calling function inside the print statement # fn_i = get_initial(fn) # mn_i = get_initial(mn) # ln_i = get_initial(ln) # 0 - Before calling function inside the print statement # 1 - print('Your initials are: ' + fn_i + mn_i + ln_i) | 4.082369 | 4 |
avilla/core/service/sqlmodel.py | BlueGlassBlock/Avilla | 0 | 6624535 | <filename>avilla/core/service/sqlmodel.py
from typing import TYPE_CHECKING, Type
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel
from avilla.core.launch import LaunchComponent
from avilla.core.service import Service
from avilla.core.service.entity import ExportInterface
if TYPE_CHECKING:
from avilla.core import Avilla
class EngineProvider(ExportInterface):
engine: AsyncEngine
def __init__(self, engine: AsyncEngine):
self.engine = engine
def get(self) -> AsyncEngine:
return self.engine
class SqlmodelService(Service):
supported_interface_types = {EngineProvider}
supported_description_types = set()
url: str
engine: AsyncEngine
def __init__(self, url: str) -> None:
self.url = url
self.engine = create_async_engine(self.url, future=True)
super().__init__()
def get_interface(self, interface_type: Type[EngineProvider]):
return EngineProvider(self.engine)
def get_status(self, entity):
raise NotImplementedError
def set_status(self, entity, available: bool, description: str):
raise NotImplementedError
def set_current_status(self, available: bool, description: str):
raise NotImplementedError
async def launch_prepare(self, avilla: "Avilla"):
async with self.engine.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
async def launch_cleanup(self, avilla: "Avilla"):
await self.engine.dispose()
@property
def launch_component(self) -> LaunchComponent:
return LaunchComponent(
"storage.sqlmodel",
set(),
prepare=self.launch_prepare,
cleanup=self.launch_cleanup,
)
| <filename>avilla/core/service/sqlmodel.py
from typing import TYPE_CHECKING, Type
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel
from avilla.core.launch import LaunchComponent
from avilla.core.service import Service
from avilla.core.service.entity import ExportInterface
if TYPE_CHECKING:
from avilla.core import Avilla
class EngineProvider(ExportInterface):
engine: AsyncEngine
def __init__(self, engine: AsyncEngine):
self.engine = engine
def get(self) -> AsyncEngine:
return self.engine
class SqlmodelService(Service):
supported_interface_types = {EngineProvider}
supported_description_types = set()
url: str
engine: AsyncEngine
def __init__(self, url: str) -> None:
self.url = url
self.engine = create_async_engine(self.url, future=True)
super().__init__()
def get_interface(self, interface_type: Type[EngineProvider]):
return EngineProvider(self.engine)
def get_status(self, entity):
raise NotImplementedError
def set_status(self, entity, available: bool, description: str):
raise NotImplementedError
def set_current_status(self, available: bool, description: str):
raise NotImplementedError
async def launch_prepare(self, avilla: "Avilla"):
async with self.engine.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
async def launch_cleanup(self, avilla: "Avilla"):
await self.engine.dispose()
@property
def launch_component(self) -> LaunchComponent:
return LaunchComponent(
"storage.sqlmodel",
set(),
prepare=self.launch_prepare,
cleanup=self.launch_cleanup,
)
| none | 1 | 2.28615 | 2 | |
engage/adapter_pkg/datablox_engage_adapter/file_locator.py | mpi-sws-rse/datablox | 0 | 6624536 | import os
import os.path
import sys
def np(p):
"""normalize a path"""
return os.path.abspath(os.path.expanduser(p))
def check_dir(dirpath):
if not os.path.isdir(dirpath):
raise Exception("Could not find directory '%s' - is your Engage environment set up correctly?" % dirpath)
def check_file(filepath):
if not os.path.exists(filepath):
raise Exception("Could not find file '%s' - is your Engage environment set up correctly?" % filepath)
class FileLocator(object):
"""This class has methods to return the locations of various files and
directories used by Datablox and Engage, assuming that Datablox was deployed
by Engage.
"""
def __init__(self):
# First get the deployment home by searching up the directory tree.
# We need to resolve any symlinks first due to the new virtualenv structure
# on Ubuntu Linux 11.
self.dh = np(os.path.os.path.join(os.path.realpath(os.path.dirname(__file__)),
"../../../../../.."))
self.config_dir = os.path.join(self.dh, "config")
check_dir(self.config_dir)
self.engage_dir = os.path.join(self.dh, "engage")
check_dir(self.engage_dir)
self.blox_dir = os.path.join(self.dh, "blox")
check_dir(self.engage_dir)
self.installed_res_file = os.path.join(self.config_dir,
"installed_resources.json")
self.svcctl_exe = os.path.join(self.engage_dir,
"bin/svcctl")
check_file(self.svcctl_exe)
self.deployer_exe = os.path.join(self.engage_dir,
"bin/deployer")
check_file(self.deployer_exe)
self.master_pw_file = os.path.join(self.config_dir, "master.pw")
check_file(self.master_pw_file)
log_dir_ref_file = os.path.join(self.config_dir, "log_directory.txt")
check_file(log_dir_ref_file)
with open(log_dir_ref_file, "r") as f:
self.log_directory = f.read().rstrip()
assert self.log_directory, "%s does not seem to contain a valid directory" %\
log_dir_ref_file
self.sw_packages_dir = os.path.join(self.engage_dir, "sw_packages")
check_dir(self.sw_packages_dir)
def get_dh(self):
return self.dh
def get_blox_dir(self):
return self.blox_dir
def get_config_dir(self):
return self.config_dir
def get_installed_resources_file(self):
"""Return the path to the installed resources file.
"""
return self.installed_res_file
def is_installed_resources_file_present(self):
return os.path.exists(self.installed_res_file)
def move_installed_resources_file(self, backup_extn=".prev"):
"""Move the installed resources file to a backup file so that we
can write a new one.
"""
check_file(self.installed_res_file)
backup_name = self.installed_res_file + backup_extn
os.rename(self.installed_res_file, backup_name)
def get_svcctl_exe(self):
return self.svcctl_exe
def get_deployer_exe(self):
return self.deployer_exe
def get_master_pw_file(self):
return self.master_pw_file
def get_file_server_key_file(self):
return os.path.join(self.dh, "datablox_file_server_key")
def get_djm_server_dir(self):
return os.path.join(self.dh, "djm")
def get_log_directory(self):
return self.log_directory
def get_engage_distribution_file(self):
engage_dist_file = os.path.join(self.engage_dir, "engage-dist.tar.gz")
if not os.path.exists(engage_dist_file):
raise Exception("Engage distribution file not found at %s" %
engage_dist_file)
return engage_dist_file
def get_sw_packages_dir(self):
return self.sw_packages_dir
| import os
import os.path
import sys
def np(p):
"""normalize a path"""
return os.path.abspath(os.path.expanduser(p))
def check_dir(dirpath):
if not os.path.isdir(dirpath):
raise Exception("Could not find directory '%s' - is your Engage environment set up correctly?" % dirpath)
def check_file(filepath):
if not os.path.exists(filepath):
raise Exception("Could not find file '%s' - is your Engage environment set up correctly?" % filepath)
class FileLocator(object):
"""This class has methods to return the locations of various files and
directories used by Datablox and Engage, assuming that Datablox was deployed
by Engage.
"""
def __init__(self):
# First get the deployment home by searching up the directory tree.
# We need to resolve any symlinks first due to the new virtualenv structure
# on Ubuntu Linux 11.
self.dh = np(os.path.os.path.join(os.path.realpath(os.path.dirname(__file__)),
"../../../../../.."))
self.config_dir = os.path.join(self.dh, "config")
check_dir(self.config_dir)
self.engage_dir = os.path.join(self.dh, "engage")
check_dir(self.engage_dir)
self.blox_dir = os.path.join(self.dh, "blox")
check_dir(self.engage_dir)
self.installed_res_file = os.path.join(self.config_dir,
"installed_resources.json")
self.svcctl_exe = os.path.join(self.engage_dir,
"bin/svcctl")
check_file(self.svcctl_exe)
self.deployer_exe = os.path.join(self.engage_dir,
"bin/deployer")
check_file(self.deployer_exe)
self.master_pw_file = os.path.join(self.config_dir, "master.pw")
check_file(self.master_pw_file)
log_dir_ref_file = os.path.join(self.config_dir, "log_directory.txt")
check_file(log_dir_ref_file)
with open(log_dir_ref_file, "r") as f:
self.log_directory = f.read().rstrip()
assert self.log_directory, "%s does not seem to contain a valid directory" %\
log_dir_ref_file
self.sw_packages_dir = os.path.join(self.engage_dir, "sw_packages")
check_dir(self.sw_packages_dir)
def get_dh(self):
return self.dh
def get_blox_dir(self):
return self.blox_dir
def get_config_dir(self):
return self.config_dir
def get_installed_resources_file(self):
"""Return the path to the installed resources file.
"""
return self.installed_res_file
def is_installed_resources_file_present(self):
return os.path.exists(self.installed_res_file)
def move_installed_resources_file(self, backup_extn=".prev"):
"""Move the installed resources file to a backup file so that we
can write a new one.
"""
check_file(self.installed_res_file)
backup_name = self.installed_res_file + backup_extn
os.rename(self.installed_res_file, backup_name)
def get_svcctl_exe(self):
return self.svcctl_exe
def get_deployer_exe(self):
return self.deployer_exe
def get_master_pw_file(self):
return self.master_pw_file
def get_file_server_key_file(self):
return os.path.join(self.dh, "datablox_file_server_key")
def get_djm_server_dir(self):
return os.path.join(self.dh, "djm")
def get_log_directory(self):
return self.log_directory
def get_engage_distribution_file(self):
engage_dist_file = os.path.join(self.engage_dir, "engage-dist.tar.gz")
if not os.path.exists(engage_dist_file):
raise Exception("Engage distribution file not found at %s" %
engage_dist_file)
return engage_dist_file
def get_sw_packages_dir(self):
return self.sw_packages_dir
| en | 0.927274 | normalize a path This class has methods to return the locations of various files and directories used by Datablox and Engage, assuming that Datablox was deployed by Engage. # First get the deployment home by searching up the directory tree. # We need to resolve any symlinks first due to the new virtualenv structure # on Ubuntu Linux 11. Return the path to the installed resources file. Move the installed resources file to a backup file so that we can write a new one. | 2.484545 | 2 |
runtests.py | weijia/django-dhcp | 1 | 6624537 | import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="django_dhcp.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"django_dhcp",
],
SITE_ID=1,
NOSE_ARGS=['-s'],
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(failures)
if __name__ == '__main__':
run_tests(*sys.argv[1:]) | import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="django_dhcp.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"django_dhcp",
],
SITE_ID=1,
NOSE_ARGS=['-s'],
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(failures)
if __name__ == '__main__':
run_tests(*sys.argv[1:]) | en | 0.624159 | # Run tests | 1.988507 | 2 |
buildscripts/resmokelib/logging/handlers.py | MartinNeupauer/mongo | 1 | 6624538 | <filename>buildscripts/resmokelib/logging/handlers.py
"""
Additional handlers that are used as the base classes of the buildlogger
handler.
"""
from __future__ import absolute_import
import json
import logging
import sys
import threading
import warnings
import requests
import requests.auth
try:
import requests.packages.urllib3.exceptions as urllib3_exceptions
except ImportError:
# Versions of the requests package prior to 1.2.0 did not vendor the urllib3 package.
urllib3_exceptions = None
from . import flush
from .. import utils
_TIMEOUT_SECS = 10
class BufferedHandler(logging.Handler):
    """
    A handler class that buffers logging records in memory. Whenever
    each record is added to the buffer, a check is made to see if the
    buffer should be flushed. If it should, then flush() is expected to
    do what's needed.
    Subclasses must implement _flush_buffer_with_lock() to define what
    flushing the buffered records actually does.
    """
    def __init__(self, capacity, interval_secs):
        """
        Initializes the handler with the buffer size and timeout after
        which the buffer is flushed regardless.
        :param capacity: positive int; number of buffered records that
            triggers an early flush.
        :param interval_secs: positive number; seconds between periodic
            flushes.
        """
        logging.Handler.__init__(self)
        if not isinstance(capacity, int):
            raise TypeError("capacity must be an integer")
        elif capacity <= 0:
            raise ValueError("capacity must be a positive integer")
        if not isinstance(interval_secs, (int, float)):
            raise TypeError("interval_secs must be a number")
        elif interval_secs <= 0.0:
            raise ValueError("interval_secs must be a positive number")
        self.capacity = capacity
        self.interval_secs = interval_secs
        # self.__emit_lock prohibits concurrent access to 'self.__emit_buffer',
        # 'self.__flush_event', and self.__flush_scheduled_by_emit.
        self.__emit_lock = threading.Lock()
        self.__emit_buffer = []
        self.__flush_event = None  # A handle to the event that calls self.flush().
        self.__flush_scheduled_by_emit = False
        self.__flush_lock = threading.Lock()  # Serializes callers of self.flush().
    # We override createLock(), acquire(), and release() to be no-ops since emit(), flush(), and
    # close() serialize accesses to 'self.__emit_buffer' in a more granular way via
    # 'self.__emit_lock'.
    def createLock(self):
        pass
    def acquire(self):
        pass
    def release(self):
        pass
    def process_record(self, record):
        """
        Applies a transformation to the record before it gets added to
        the buffer.
        The default implementation returns 'record' unmodified.
        """
        return record
    def emit(self, record):
        """
        Emits a record.
        Append the record to the buffer after it has been transformed by
        process_record(). If the length of the buffer is greater than or
        equal to its capacity, then the flush() event is rescheduled to
        immediately process the buffer.
        """
        # The record is transformed before taking 'self.__emit_lock'.
        processed_record = self.process_record(record)
        with self.__emit_lock:
            self.__emit_buffer.append(processed_record)
            if self.__flush_event is None:
                # Now that we've added our first record to the buffer, we schedule a call to flush()
                # to occur 'self.interval_secs' seconds from now. 'self.__flush_event' should never
                # be None after this point.
                self.__flush_event = flush.flush_after(self, delay=self.interval_secs)
            if not self.__flush_scheduled_by_emit and len(self.__emit_buffer) >= self.capacity:
                # Attempt to flush the buffer early if we haven't already done so. We don't bother
                # calling flush.cancel() and flush.flush_after() when 'self.__flush_event' is
                # already scheduled to happen as soon as possible to avoid introducing unnecessary
                # delays in emit().
                if flush.cancel(self.__flush_event):
                    self.__flush_event = flush.flush_after(self, delay=0.0)
                    self.__flush_scheduled_by_emit = True
    def flush(self):
        """
        Ensures all logging output has been flushed.
        """
        self.__flush(close_called=False)
        with self.__emit_lock:
            if self.__flush_event is not None:
                # We cancel 'self.__flush_event' in case flush() was called by someone other than
                # the flush thread to avoid having multiple flush() events scheduled.
                flush.cancel(self.__flush_event)
                self.__flush_event = flush.flush_after(self, delay=self.interval_secs)
                self.__flush_scheduled_by_emit = False
    def __flush(self, close_called):
        """
        Swaps out the buffer and hands its records to
        _flush_buffer_with_lock().
        :param close_called: True when invoked from close().
        """
        with self.__emit_lock:
            buf = self.__emit_buffer
            self.__emit_buffer = []
        # The buffer 'buf' is flushed without holding 'self.__emit_lock' to avoid causing callers of
        # self.emit() to block behind the completion of a potentially long-running flush operation.
        if buf:
            with self.__flush_lock:
                self._flush_buffer_with_lock(buf, close_called)
    def _flush_buffer_with_lock(self, buf, close_called):
        """
        Flushes the buffered records; called with 'self.__flush_lock' held.
        Must be implemented by subclasses.
        """
        raise NotImplementedError("_flush_buffer_with_lock must be implemented by BufferedHandler"
                                  " subclasses")
    def close(self):
        """
        Flushes the buffer and tidies up any resources used by this
        handler.
        """
        with self.__emit_lock:
            if self.__flush_event is not None:
                flush.cancel(self.__flush_event)
        self.__flush(close_called=True)
        logging.Handler.close(self)
class HTTPHandler(object):
    """
    Sends JSON data to a web server using authenticated POST requests.
    """
    def __init__(self, url_root, username, password):
        """
        Initializes the handler with the necessary authentication
        credentials.
        """
        self.auth_handler = requests.auth.HTTPBasicAuth(username, password)
        self.url_root = url_root
    def _make_url(self, endpoint):
        # Join root and endpoint with exactly one slash between them and a
        # trailing slash at the end.
        return "%s/%s/" % (self.url_root.rstrip("/"), endpoint.strip("/"))
    def post(self, endpoint, data=None, headers=None, timeout_secs=_TIMEOUT_SECS):
        """
        Sends a POST request to the specified endpoint with the supplied
        data.
        Returns the response, either as a string or a JSON object based
        on the content type.
        """
        payload = json.dumps(utils.default_if_none(data, []), encoding="utf-8")
        request_headers = utils.default_if_none(headers, {})
        request_headers["Content-Type"] = "application/json; charset=utf-8"
        url = self._make_url(endpoint)
        # Python versions before 2.7.9 cannot validate TLS certificates, so
        # validation is disabled (and the resulting urllib3 warnings are
        # silenced) on those interpreters.
        should_validate_certificates = sys.version_info >= (2, 7, 9)
        with warnings.catch_warnings():
            if urllib3_exceptions is not None and not should_validate_certificates:
                for warning_name in ("InsecurePlatformWarning", "InsecureRequestWarning"):
                    # Older vendored urllib3 versions may not define these
                    # warning classes, hence the getattr() default.
                    warning_cls = getattr(urllib3_exceptions, warning_name, None)
                    if warning_cls is not None:
                        warnings.simplefilter("ignore", warning_cls)
            response = requests.post(url,
                                     data=payload,
                                     headers=request_headers,
                                     timeout=timeout_secs,
                                     auth=self.auth_handler,
                                     verify=should_validate_certificates)
        response.raise_for_status()
        if not response.encoding:
            response.encoding = "utf-8"
        content_type = response.headers["Content-Type"]
        if content_type.startswith("application/json"):
            return response.json()
        return response.text
| <filename>buildscripts/resmokelib/logging/handlers.py
"""
Additional handlers that are used as the base classes of the buildlogger
handler.
"""
from __future__ import absolute_import
import json
import logging
import sys
import threading
import warnings
import requests
import requests.auth
try:
import requests.packages.urllib3.exceptions as urllib3_exceptions
except ImportError:
# Versions of the requests package prior to 1.2.0 did not vendor the urllib3 package.
urllib3_exceptions = None
from . import flush
from .. import utils
_TIMEOUT_SECS = 10
class BufferedHandler(logging.Handler):
"""
A handler class that buffers logging records in memory. Whenever
each record is added to the buffer, a check is made to see if the
buffer should be flushed. If it should, then flush() is expected to
do what's needed.
"""
def __init__(self, capacity, interval_secs):
"""
Initializes the handler with the buffer size and timeout after
which the buffer is flushed regardless.
"""
logging.Handler.__init__(self)
if not isinstance(capacity, int):
raise TypeError("capacity must be an integer")
elif capacity <= 0:
raise ValueError("capacity must be a positive integer")
if not isinstance(interval_secs, (int, float)):
raise TypeError("interval_secs must be a number")
elif interval_secs <= 0.0:
raise ValueError("interval_secs must be a positive number")
self.capacity = capacity
self.interval_secs = interval_secs
# self.__emit_lock prohibits concurrent access to 'self.__emit_buffer',
# 'self.__flush_event', and self.__flush_scheduled_by_emit.
self.__emit_lock = threading.Lock()
self.__emit_buffer = []
self.__flush_event = None # A handle to the event that calls self.flush().
self.__flush_scheduled_by_emit = False
self.__flush_lock = threading.Lock() # Serializes callers of self.flush().
# We override createLock(), acquire(), and release() to be no-ops since emit(), flush(), and
# close() serialize accesses to 'self.__emit_buffer' in a more granular way via
# 'self.__emit_lock'.
def createLock(self):
pass
def acquire(self):
pass
def release(self):
pass
def process_record(self, record):
"""
Applies a transformation to the record before it gets added to
the buffer.
The default implementation returns 'record' unmodified.
"""
return record
def emit(self, record):
"""
Emits a record.
Append the record to the buffer after it has been transformed by
process_record(). If the length of the buffer is greater than or
equal to its capacity, then the flush() event is rescheduled to
immediately process the buffer.
"""
processed_record = self.process_record(record)
with self.__emit_lock:
self.__emit_buffer.append(processed_record)
if self.__flush_event is None:
# Now that we've added our first record to the buffer, we schedule a call to flush()
# to occur 'self.interval_secs' seconds from now. 'self.__flush_event' should never
# be None after this point.
self.__flush_event = flush.flush_after(self, delay=self.interval_secs)
if not self.__flush_scheduled_by_emit and len(self.__emit_buffer) >= self.capacity:
# Attempt to flush the buffer early if we haven't already done so. We don't bother
# calling flush.cancel() and flush.flush_after() when 'self.__flush_event' is
# already scheduled to happen as soon as possible to avoid introducing unnecessary
# delays in emit().
if flush.cancel(self.__flush_event):
self.__flush_event = flush.flush_after(self, delay=0.0)
self.__flush_scheduled_by_emit = True
def flush(self):
"""
Ensures all logging output has been flushed.
"""
self.__flush(close_called=False)
with self.__emit_lock:
if self.__flush_event is not None:
# We cancel 'self.__flush_event' in case flush() was called by someone other than
# the flush thread to avoid having multiple flush() events scheduled.
flush.cancel(self.__flush_event)
self.__flush_event = flush.flush_after(self, delay=self.interval_secs)
self.__flush_scheduled_by_emit = False
def __flush(self, close_called):
"""
Ensures all logging output has been flushed.
"""
with self.__emit_lock:
buf = self.__emit_buffer
self.__emit_buffer = []
# The buffer 'buf' is flushed without holding 'self.__emit_lock' to avoid causing callers of
# self.emit() to block behind the completion of a potentially long-running flush operation.
if buf:
with self.__flush_lock:
self._flush_buffer_with_lock(buf, close_called)
def _flush_buffer_with_lock(self, buf, close_called):
"""
Ensures all logging output has been flushed.
"""
raise NotImplementedError("_flush_buffer_with_lock must be implemented by BufferedHandler"
" subclasses")
def close(self):
"""
Flushes the buffer and tidies up any resources used by this
handler.
"""
with self.__emit_lock:
if self.__flush_event is not None:
flush.cancel(self.__flush_event)
self.__flush(close_called=True)
logging.Handler.close(self)
class HTTPHandler(object):
"""
A class which sends data to a web server using POST requests.
"""
def __init__(self, url_root, username, password):
"""
Initializes the handler with the necessary authentication
credentials.
"""
self.auth_handler = requests.auth.HTTPBasicAuth(username, password)
self.url_root = url_root
def _make_url(self, endpoint):
return "%s/%s/" % (self.url_root.rstrip("/"), endpoint.strip("/"))
def post(self, endpoint, data=None, headers=None, timeout_secs=_TIMEOUT_SECS):
"""
Sends a POST request to the specified endpoint with the supplied
data.
Returns the response, either as a string or a JSON object based
on the content type.
"""
data = utils.default_if_none(data, [])
data = json.dumps(data, encoding="utf-8")
headers = utils.default_if_none(headers, {})
headers["Content-Type"] = "application/json; charset=utf-8"
url = self._make_url(endpoint)
# Versions of Python earlier than 2.7.9 do not support certificate validation. So we
# disable certificate validation for older Python versions.
should_validate_certificates = sys.version_info >= (2, 7, 9)
with warnings.catch_warnings():
if urllib3_exceptions is not None and not should_validate_certificates:
try:
warnings.simplefilter("ignore", urllib3_exceptions.InsecurePlatformWarning)
except AttributeError:
# Versions of urllib3 prior to 1.10.3 didn't define InsecurePlatformWarning.
# Versions of requests prior to 2.6.0 didn't have a vendored copy of urllib3
# that defined InsecurePlatformWarning.
pass
try:
warnings.simplefilter("ignore", urllib3_exceptions.InsecureRequestWarning)
except AttributeError:
# Versions of urllib3 prior to 1.9 didn't define InsecureRequestWarning.
# Versions of requests prior to 2.4.0 didn't have a vendored copy of urllib3
# that defined InsecureRequestWarning.
pass
response = requests.post(url,
data=data,
headers=headers,
timeout=timeout_secs,
auth=self.auth_handler,
verify=should_validate_certificates)
response.raise_for_status()
if not response.encoding:
response.encoding = "utf-8"
headers = response.headers
if headers["Content-Type"].startswith("application/json"):
return response.json()
return response.text
| en | 0.894368 | Additional handlers that are used as the base classes of the buildlogger handler. # Versions of the requests package prior to 1.2.0 did not vendor the urllib3 package. A handler class that buffers logging records in memory. Whenever each record is added to the buffer, a check is made to see if the buffer should be flushed. If it should, then flush() is expected to do what's needed. Initializes the handler with the buffer size and timeout after which the buffer is flushed regardless. # self.__emit_lock prohibits concurrent access to 'self.__emit_buffer', # 'self.__flush_event', and self.__flush_scheduled_by_emit. # A handle to the event that calls self.flush(). # Serializes callers of self.flush(). # We override createLock(), acquire(), and release() to be no-ops since emit(), flush(), and # close() serialize accesses to 'self.__emit_buffer' in a more granular way via # 'self.__emit_lock'. Applies a transformation to the record before it gets added to the buffer. The default implementation returns 'record' unmodified. Emits a record. Append the record to the buffer after it has been transformed by process_record(). If the length of the buffer is greater than or equal to its capacity, then the flush() event is rescheduled to immediately process the buffer. # Now that we've added our first record to the buffer, we schedule a call to flush() # to occur 'self.interval_secs' seconds from now. 'self.__flush_event' should never # be None after this point. # Attempt to flush the buffer early if we haven't already done so. We don't bother # calling flush.cancel() and flush.flush_after() when 'self.__flush_event' is # already scheduled to happen as soon as possible to avoid introducing unnecessary # delays in emit(). Ensures all logging output has been flushed. # We cancel 'self.__flush_event' in case flush() was called by someone other than # the flush thread to avoid having multiple flush() events scheduled. Ensures all logging output has been flushed. 
# The buffer 'buf' is flushed without holding 'self.__emit_lock' to avoid causing callers of # self.emit() to block behind the completion of a potentially long-running flush operation. Ensures all logging output has been flushed. Flushes the buffer and tidies up any resources used by this handler. A class which sends data to a web server using POST requests. Initializes the handler with the necessary authentication credentials. Sends a POST request to the specified endpoint with the supplied data. Returns the response, either as a string or a JSON object based on the content type. # Versions of Python earlier than 2.7.9 do not support certificate validation. So we # disable certificate validation for older Python versions. # Versions of urllib3 prior to 1.10.3 didn't define InsecurePlatformWarning. # Versions of requests prior to 2.6.0 didn't have a vendored copy of urllib3 # that defined InsecurePlatformWarning. # Versions of urllib3 prior to 1.9 didn't define InsecureRequestWarning. # Versions of requests prior to 2.4.0 didn't have a vendored copy of urllib3 # that defined InsecureRequestWarning. | 2.2544 | 2 |
qclib/state_preparation/util/tree_utils.py | adjs/qclib | 1 | 6624539 | # Copyright 2021 qclib project.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
https://arxiv.org/abs/2108.10182
"""
from graphviz import Digraph
def is_leaf(tree):
    """
    :param tree: a tree node
    :return: True if tree is a leaf (has neither a left nor a right child)
    """
    # Return the boolean expression directly instead of the
    # if/return True/return False pattern.
    return tree.left is None and tree.right is None
def remove_leafs(tree):
    """Prune the tree in place: detach every leaf below *tree*, recursing
    into children that are internal nodes."""
    for side in ("left", "right"):
        child = getattr(tree, side)
        if not child:
            continue
        if is_leaf(child):
            # Unlink the leaf from its parent.
            setattr(tree, side, None)
        else:
            remove_leafs(child)
def leftmost(tree):
    """
    :param tree: a tree node
    :return: the leftmost child of tree (the right child when there is no
        left child), or None if tree is a leaf.
    """
    return tree.left or tree.right
def node_index(tree):
    """
    :param tree: a tree node
    :return: the total (breadth-first) index of the node in the tree.
    """
    nodes_above = 2 ** tree.level - 1  # nodes on all levels above this one
    return nodes_above + tree.index
def root_node(tree, level):
    """
    Walk up the parent links until reaching the requested level.
    :param tree: a tree node
    :param level: level of the subtree (0 for the tree root)
    :return: subtree root at level
    """
    node = tree
    while level < node.level:
        node = node.parent
    return node
def children(nodes):
    """
    Collect the direct children of every node, ordered left to right.
    :param nodes: a list with tree nodes
    :return: a list with nodes childs
    """
    result = []
    for node in nodes:
        result.extend(c for c in (node.left, node.right) if c)
    return result
def length(tree):
    """
    Count the total number of the tree nodes.
    :param tree: a tree node, or None
    :return: the total of nodes in the subtree
    """
    if not tree:
        return 0
    return 1 + length(tree.left) + length(tree.right)
def level_length(tree, level):
    """
    Count the total number of the tree nodes in the level.
    :param tree: a tree node, or None
    :param level: a tree level
    :return: the total of nodes in the subtree level
    """
    if not tree:
        return 0
    if tree.level >= level:
        # Reached the requested level; the recursion never descends past it.
        return 1
    return level_length(tree.left, level) + level_length(tree.right, level)
def height(root):
    """
    Count the number of levels in the tree.
    :param root: subtree root node
    :return: the total of levels in the subtree defined by root
    """
    n_levels = 0
    node = root
    while node:
        node = leftmost(node)
        n_levels += 1
    return n_levels
def left_view(root, stop_level):
    """
    List the leftmost node of each level from root down to stop_level.
    :param root: subtree root node
    :param stop_level: level below root to stop the search
    :return: list of leftmost nodes between root level and stop_level
    """
    nodes = []
    current = root
    while current and current.level <= stop_level:
        nodes.append(current)
        current = leftmost(current)
    return nodes
def subtree_level_index(root, tree):
    """
    :param root: subtree root node
    :param tree: a tree node
    :return: the index of the tree node relative to the subtree defined by root
    """
    depth = tree.level - root.level  # levels between root and tree
    return tree.index - root.index * 2 ** depth
def subtree_level_leftmost(root, level):
    """
    Descend from root to the leftmost node at the requested level.
    :param root: subtree root node
    :param level: level to search for the leftmost node
    :return: the leftmost tree node repective to the subtree defined by root
    """
    node = root
    while node and node.level < level:
        node = leftmost(node)
    return node
def subtree_level_nodes(tree, level, level_nodes):
    """
    Search and list all the nodes in the indicated level of the tree defined
    by the first value of tree (subtree root).
    :param tree: current tree node, starts with subtree root node
    :param level: level to search for the nodes
    :out param level_nodes: nodes of the level, appended left to right
    """
    if tree.level >= level:
        level_nodes.append(tree)
        return
    for child in (tree.left, tree.right):
        if child:
            subtree_level_nodes(child, level, level_nodes)
def tree_visual_representation(tree, dot=None):
    """
    Recursively build a graphviz Digraph mirroring the binary tree.
    :param tree: A binary tree, with str(tree) defined
    :param dot: graph under construction; created (and seeded with the root
        node) only on the outermost call
    :return: the populated graph
    """
    if dot is None:
        dot = Digraph()
        dot.node(str(tree))
    for child in (tree.left, tree.right):
        if child:
            dot.node(str(child))
            dot.edge(str(tree), str(child))
            dot = tree_visual_representation(child, dot=dot)
    return dot
| # Copyright 2021 qclib project.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
https://arxiv.org/abs/2108.10182
"""
from graphviz import Digraph
def is_leaf(tree):
"""
:param tree: a tree node
:return: True if tree is a leaf
"""
if tree.left is None and tree.right is None:
return True
return False
def remove_leafs(tree):
""" remove tree leafs """
if tree.left:
if is_leaf(tree.left):
tree.left = None
else:
remove_leafs(tree.left)
if tree.right:
if is_leaf(tree.right):
tree.right = None
else:
remove_leafs(tree.right)
def leftmost(tree):
"""
:param tree: a tree node
:return: the leftmost node relative to tree, or None if tree is leaf.
"""
if tree.left:
return tree.left
return tree.right
def node_index(tree):
"""
:param tree: a tree node
:return: the total index of the node in the tree.
"""
return 2**tree.level-1+tree.index
def root_node(tree, level):
"""
:param tree: a tree node
:param level: level of the subtree (0 for the tree root)
:return: subtree root at level
"""
root = tree
while root.level > level:
root = root.parent
return root
def children(nodes):
"""
Search and list all the nodes childs.
:param nodes: a list with tree nodes
:return: a list with nodes childs
"""
child = []
for node in nodes:
if node.left:
child.append(node.left)
if node.right:
child.append(node.right)
return child
def length(tree):
"""
Count the total number of the tree nodes.
:param tree: a tree node
:return: the total of nodes in the subtree
"""
if tree:
n_nodes = length(tree.left)
n_nodes += length(tree.right)
n_nodes += 1
return n_nodes
return 0
def level_length(tree, level):
"""
Count the total number of the tree nodes in the level.
:param tree: a tree node
:param level: a tree level
:return: the total of nodes in the subtree level
"""
if tree:
if tree.level < level:
n_nodes_level = level_length(tree.left, level)
n_nodes_level += level_length(tree.right, level)
return n_nodes_level
return 1
return 0
def height(root):
"""
Count the number of levels in the tree.
:param root: subtree root node
:return: the total of levels in the subtree defined by root
"""
n_levels = 0
left = root
while left:
n_levels += 1
left = leftmost(left)
return n_levels
def left_view(root, stop_level):
"""
:param root: subtree root node
:param stop_level: level below root to stop the search
:return: list of leftmost nodes between root level and stop_level
"""
branch = []
left = root
while left and left.level <= stop_level:
branch.append(left)
left = leftmost(left)
return branch
def subtree_level_index(root, tree):
"""
:param root: subtree root node
:param tree: a tree node
:return: the index of tree node repective to the subtree defined by root
"""
return tree.index - root.index * 2 ** (tree.level - root.level)
def subtree_level_leftmost(root, level):
"""
:param root: subtree root node
:param level: level to search for the leftmost node
:return: the leftmost tree node repective to the subtree defined by root
"""
left = root
while left and left.level < level:
left = leftmost(left)
return left
def subtree_level_nodes(tree, level, level_nodes):
"""
Search and list all the nodes in the indicated level of the tree defined by
the first value of tree (subtree root).
:param tree: current tree node, starts with subtree root node
:param level: level to search for the nodes
:out param level_nodes: a list with the level tree nodes repective to the
subtree defined by root, ordered from left to right
"""
if tree.level < level:
if tree.left:
subtree_level_nodes(tree.left, level, level_nodes)
if tree.right:
subtree_level_nodes(tree.right, level, level_nodes)
else:
level_nodes.append(tree)
def tree_visual_representation(tree, dot=None):
"""
:param tree: A binary tree, with str(tree) defined
"""
if dot is None:
dot = Digraph()
dot.node(str(tree))
if tree.left:
dot.node(str(tree.left))
dot.edge(str(tree), str(tree.left))
dot = tree_visual_representation(tree.left, dot=dot)
if tree.right:
dot.node(str(tree.right))
dot.edge(str(tree), str(tree.right))
dot = tree_visual_representation(tree.right, dot=dot)
return dot
| en | 0.820063 | # Copyright 2021 qclib project. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. https://arxiv.org/abs/2108.10182 :param tree: a tree node :return: True if tree is a leaf remove tree leafs :param tree: a tree node :return: the leftmost node relative to tree, or None if tree is leaf. :param tree: a tree node :return: the total index of the node in the tree. :param tree: a tree node :param level: level of the subtree (0 for the tree root) :return: subtree root at level Search and list all the nodes childs. :param nodes: a list with tree nodes :return: a list with nodes childs Count the total number of the tree nodes. :param tree: a tree node :return: the total of nodes in the subtree Count the total number of the tree nodes in the level. :param tree: a tree node :param level: a tree level :return: the total of nodes in the subtree level Count the number of levels in the tree. 
:param root: subtree root node :return: the total of levels in the subtree defined by root :param root: subtree root node :param stop_level: level below root to stop the search :return: list of leftmost nodes between root level and stop_level :param root: subtree root node :param tree: a tree node :return: the index of tree node repective to the subtree defined by root :param root: subtree root node :param level: level to search for the leftmost node :return: the leftmost tree node repective to the subtree defined by root Search and list all the nodes in the indicated level of the tree defined by the first value of tree (subtree root). :param tree: current tree node, starts with subtree root node :param level: level to search for the nodes :out param level_nodes: a list with the level tree nodes repective to the subtree defined by root, ordered from left to right :param tree: A binary tree, with str(tree) defined | 2.707849 | 3 |
tests/deriva/utils/catalog/manage/test_derivaCatalogToString.py | informatics-isi-edu/deriva-catalog-manage | 0 | 6624540 | from unittest import TestCase
import tempfile
import sys
import deriva.core.ermrest_model as em
from deriva.utils.catalog.manage.utils import TempErmrestCatalog, load_module_from_path
from deriva.core import get_credential
from deriva.core import tag as chaise_tags
from deriva.utils.catalog.manage.dump_catalog import DerivaCatalogToString
from urllib.parse import urlparse
from tests.deriva.utils.catalog.test_utils import *
class TestDerivaCatalogToString(TestCase):
    """Round-trip tests for DerivaCatalogToString.
    Each test dumps part of a freshly created catalog to Python source,
    imports that source as a module, and replays it against a second scratch
    catalog.
    NOTE(review): these are integration tests against a live ERMrest server
    ('dev.isrd.isi.edu'); they need network access and valid credentials for
    that host.
    """
    def setUp(self):
        self.server = 'dev.isrd.isi.edu'
        self.credentials = get_credential(self.server)
    def _load_generated_module(self, source, name):
        """Write the generated source to a scratch file and import it.
        :param source: Python source text produced by DerivaCatalogToString
        :param name: module (file) name to use, without the '.py' suffix
        :return: the imported module object
        """
        tdir = tempfile.mkdtemp()
        modfile = '{}/{}.py'.format(tdir, name)
        with open(modfile, mode='w') as f:
            print(source, file=f)
        return load_module_from_path(modfile)
    def test_substitute_variables(self):
        pass  # TODO: not implemented yet
    def test_variable_to_str(self):
        pass  # TODO: not implemented yet
    def test_tag_variables_to_str(self):
        pass  # TODO: not implemented yet
    def test_annotations_to_str(self):
        pass  # TODO: not implemented yet
    def test_schema_to_str(self):
        """Dump one schema and replay it into a fresh catalog."""
        catalog = create_catalog(self.server)
        test_catalog = None
        try:
            catalog.create_schema('TestSchema')
            generate_test_tables(catalog, 'TestSchema')
            stringer = DerivaCatalogToString(catalog.ermrest_catalog)
            schema_string = stringer.schema_to_str('TestSchema')
            m = self._load_generated_module(schema_string, 'TestSchema')
            test_catalog = create_catalog(self.server)
            m.main(test_catalog.ermrest_catalog, 'schema')
            m.main(test_catalog.ermrest_catalog, 'annotations')
            m.main(test_catalog.ermrest_catalog, 'acls')
            m.main(test_catalog.ermrest_catalog, 'comment')
        finally:
            # Delete both scratch catalogs from the server (the original
            # version leaked 'test_catalog').
            if test_catalog is not None:
                delete_catalog(test_catalog.ermrest_catalog)
            delete_catalog(catalog.ermrest_catalog)
    def test_catalog_to_str(self):
        """Dump a whole catalog and replay its annotations."""
        catalog = create_catalog(self.server)
        test_catalog = None
        try:
            catalog.create_schema('TestSchema')
            stringer = DerivaCatalogToString(catalog.ermrest_catalog)
            catalog_string = stringer.catalog_to_str()
            m = self._load_generated_module(catalog_string, 'TestCatalog')
            test_catalog = create_catalog(self.server)
            m.main(test_catalog.ermrest_catalog, 'annotations')
        finally:
            if test_catalog is not None:
                delete_catalog(test_catalog.ermrest_catalog)
            delete_catalog(catalog.ermrest_catalog)
    def test_table_annotations_to_str(self):
        pass  # TODO: not implemented yet
    def test_column_annotations_to_str(self):
        pass  # TODO: not implemented yet
    def test_foreign_key_defs_to_str(self):
        pass  # TODO: not implemented yet
    def test_key_defs_to_str(self):
        pass  # TODO: not implemented yet
    def test_column_defs_to_str(self):
        pass  # TODO: not implemented yet
    def test_table_def_to_str(self):
        pass  # TODO: not implemented yet
    def test_table_to_str(self):
        """Dump one table and replay its definition and metadata."""
        catalog = create_catalog(self.server)
        test_catalog = None
        try:
            catalog.create_schema('TestSchema')
            generate_test_tables(catalog, 'TestSchema')
            stringer = DerivaCatalogToString(catalog.ermrest_catalog)
            table_string = stringer.table_to_str('TestSchema', 'Table1')
            m = self._load_generated_module(table_string, 'TestTable')
            test_catalog = create_catalog(self.server)
            test_catalog.create_schema('TestSchema')
            m.main(test_catalog.ermrest_catalog, 'table')
            m.main(test_catalog.ermrest_catalog, 'annotations')
            m.main(test_catalog.ermrest_catalog, 'acls')
            m.main(test_catalog.ermrest_catalog, 'comment')
            m.main(test_catalog.ermrest_catalog, 'keys')
            m.main(test_catalog.ermrest_catalog, 'fkeys')
            # m.main(test_catalog, 'columns', replace=True, really=True)
        finally:
            if test_catalog is not None:
                delete_catalog(test_catalog.ermrest_catalog)
            delete_catalog(catalog.ermrest_catalog)
| from unittest import TestCase
import tempfile
import sys
import deriva.core.ermrest_model as em
from deriva.utils.catalog.manage.utils import TempErmrestCatalog, load_module_from_path
from deriva.core import get_credential
from deriva.core import tag as chaise_tags
from deriva.utils.catalog.manage.dump_catalog import DerivaCatalogToString
from urllib.parse import urlparse
from tests.deriva.utils.catalog.test_utils import *
class TestDerivaCatalogToString(TestCase):
    """Round-trip tests for DerivaCatalogToString against a live ERMrest server.

    Each test builds a scratch catalog on ``self.server``, serializes part of it
    to a generated Python module, loads that module, and replays it into a second
    scratch catalog.  Requires network access to the server and valid credentials.

    Fix over the original: the second ("replay") catalog was created inside the
    ``try`` block but never deleted, leaking a catalog on the server per test
    run; temporary directories from ``mkdtemp`` were leaked as well.
    """

    def setUp(self):
        # NOTE(review): hard-coded dev server; tests are skipped in effect if it
        # is unreachable or credentials are missing.
        self.server = 'dev.isrd.isi.edu'
        self.credentials = get_credential(self.server)

    @staticmethod
    def _load_generated_module(source_text, module_name):
        """Write *source_text* to a temp file and import it as a module.

        The temporary directory is removed as soon as the module is loaded.
        """
        with tempfile.TemporaryDirectory() as tdir:
            modfile = '{}/{}.py'.format(tdir, module_name)
            with open(modfile, mode='w') as f:
                print(source_text, file=f)
            return load_module_from_path(modfile)

    def test_substitute_variables(self):
        pass

    def test_variable_to_str(self):
        pass

    def test_tag_variables_to_str(self):
        pass

    def test_annotations_to_str(self):
        pass

    def test_schema_to_str(self):
        catalog = create_catalog(self.server)
        test_catalog = None
        try:
            catalog.create_schema('TestSchema')
            generate_test_tables(catalog, 'TestSchema')
            stringer = DerivaCatalogToString(catalog.ermrest_catalog)
            schema_string = stringer.schema_to_str('TestSchema')
            m = self._load_generated_module(schema_string, 'TestSchema')
            test_catalog = create_catalog(self.server)
            m.main(test_catalog.ermrest_catalog, 'schema')
            m.main(test_catalog.ermrest_catalog, 'annotations')
            m.main(test_catalog.ermrest_catalog, 'acls')
            m.main(test_catalog.ermrest_catalog, 'comment')
        finally:
            # Clean up BOTH catalogs; the replay catalog used to leak.
            if test_catalog is not None:
                delete_catalog(test_catalog.ermrest_catalog)
            delete_catalog(catalog.ermrest_catalog)

    def test_catalog_to_str(self):
        catalog = create_catalog(self.server)
        test_catalog = None
        try:
            catalog.create_schema('TestSchema')
            stringer = DerivaCatalogToString(catalog.ermrest_catalog)
            catalog_string = stringer.catalog_to_str()
            m = self._load_generated_module(catalog_string, 'TestCatalog')
            test_catalog = create_catalog(self.server)
            m.main(test_catalog.ermrest_catalog, 'annotations')
        finally:
            if test_catalog is not None:
                delete_catalog(test_catalog.ermrest_catalog)
            delete_catalog(catalog.ermrest_catalog)

    def test_table_annotations_to_str(self):
        pass

    def test_column_annotations_to_str(self):
        pass

    def test_foreign_key_defs_to_str(self):
        pass

    def test_key_defs_to_str(self):
        pass

    def test_column_defs_to_str(self):
        pass

    def test_table_def_to_str(self):
        pass

    def test_table_to_str(self):
        catalog = create_catalog(self.server)
        test_catalog = None
        try:
            catalog.create_schema('TestSchema')
            generate_test_tables(catalog, 'TestSchema')
            stringer = DerivaCatalogToString(catalog.ermrest_catalog)
            table_string = stringer.table_to_str('TestSchema', 'Table1')
            m = self._load_generated_module(table_string, 'TestTable')
            test_catalog = create_catalog(self.server)
            test_catalog.create_schema('TestSchema')
            m.main(test_catalog.ermrest_catalog, 'table')
            m.main(test_catalog.ermrest_catalog, 'annotations')
            m.main(test_catalog.ermrest_catalog, 'acls')
            m.main(test_catalog.ermrest_catalog, 'comment')
            m.main(test_catalog.ermrest_catalog, 'keys')
            m.main(test_catalog.ermrest_catalog, 'fkeys')
        finally:
            if test_catalog is not None:
                delete_catalog(test_catalog.ermrest_catalog)
            delete_catalog(catalog.ermrest_catalog)
| vi | 0.058601 | # m.main(test_catalog, 'columns', replace=True, really=True) | 2.273252 | 2 |
selfdrive/car/gm/interface.py | solardude2/openpilot | 0 | 6624541 | #!/usr/bin/env python3
from cereal import car
from math import fabs
from common.conversions import Conversions as CV
from common.params import Params
from selfdrive.car import STD_CARGO_KG, scale_rot_inertia, scale_tire_stiffness, gen_empty_fingerprint, get_safety_config
from selfdrive.car.gm.values import CAR, CruiseButtons, \
CarControllerParams, NO_ASCM
from selfdrive.car.interfaces import CarInterfaceBase
ButtonType = car.CarState.ButtonEvent.Type
EventName = car.CarEvent.EventName
GearShifter = car.CarState.GearShifter
def get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED):
  """Sigmoid-shaped steering feedforward shared by the per-model GM curves.

  The offset steering angle is scaled by ANGLE and soft-saturated through
  x / (1 + |x|) (bounded to (-1, 1)), then combined with a speed-dependent
  sigmoid term, a constant sigmoid term, and a speed-proportional term.
  The fitted constants are supplied by the per-model static methods.
  """
  scaled = ANGLE * (desired_angle + ANGLE_OFFSET)
  saturated = scaled / (1 + fabs(scaled))
  return (SIGMOID_SPEED * saturated * v_ego) + (SIGMOID * saturated) + (SPEED * v_ego)
class CarInterface(CarInterfaceBase):
@staticmethod
def get_pid_accel_limits(CP, current_speed, cruise_speed):
params = CarControllerParams()
return params.ACCEL_MIN, params.ACCEL_MAX
# Determined by iteratively plotting and minimizing error for f(angle, speed) = steer.
@staticmethod
def get_steer_feedforward_volt(desired_angle, v_ego):
ANGLE = 0.03093722278106523
ANGLE_OFFSET = 0.#46341000035928637
SIGMOID_SPEED = 0.07928458395144745
SIGMOID = 0.4983180128530419
SPEED = -0.0024896011696167266
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_acadia(desired_angle, v_ego):
ANGLE = 0.1314029550298617
ANGLE_OFFSET = 0.#8317776927522815
SIGMOID_SPEED = 0.03820691400292691
SIGMOID = 0.3785405719285944
SPEED = -0.0010868615264700465
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_bolt_euv(desired_angle, v_ego):
ANGLE = 0.0758345580739845
ANGLE_OFFSET = 0.#31396926577596984
SIGMOID_SPEED = 0.04367532050459129
SIGMOID = 0.43144116109994846
SPEED = -0.002654134623368279
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_bolt(desired_angle, v_ego):
ANGLE = 0.06370624896135679
ANGLE_OFFSET = 0.#32536345911579184
SIGMOID_SPEED = 0.06479105208670367
SIGMOID = 0.34485246691603205
SPEED = -0.0010645479469461995
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_silverado(desired_angle, v_ego):
ANGLE = 0.06539361463056717
ANGLE_OFFSET = -0.#8390269362439537
SIGMOID_SPEED = 0.023681877712247515
SIGMOID = 0.5709779025308087
SPEED = -0.0016656455765509301
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_suburban(desired_angle, v_ego):
ANGLE = 0.06562376600261893
ANGLE_OFFSET = 0.#-2.656819831714162
SIGMOID_SPEED = 0.04648878299738527
SIGMOID = 0.21826990273744493
SPEED = -0.001355528078762762
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
def get_steer_feedforward_function(self):
if self.CP.carFingerprint == CAR.VOLT or self.CP.carFingerprint == CAR.VOLT_NR:
return self.get_steer_feedforward_volt
elif self.CP.carFingerprint == CAR.ACADIA:
return self.get_steer_feedforward_acadia
elif self.CP.carFingerprint == CAR.BOLT_EUV:
return self.get_steer_feedforward_bolt_euv
elif self.CP.carFingerprint == CAR.BOLT_NR:
return self.get_steer_feedforward_bolt
elif self.CP.carFingerprint == CAR.SILVERADO_NR:
return self.get_steer_feedforward_silverado
elif self.CP.carFingerprint == CAR.SUBURBAN:
return self.get_steer_feedforward_suburban
else:
return CarInterfaceBase.get_steer_feedforward_default
@staticmethod
def get_params(candidate, fingerprint=gen_empty_fingerprint(), car_fw=None, disable_radar=False):
ret = CarInterfaceBase.get_std_params(candidate, fingerprint)
ret.carName = "gm"
ret.safetyConfigs = [get_safety_config(car.CarParams.SafetyModel.gm)]
ret.alternativeExperience = 1 # UNSAFE_DISABLE_DISENGAGE_ON_GAS # TODO: JJS this value should come from the toggle
ret.pcmCruise = False # stock cruise control is kept off for vehicles with an ASCM
# For vehicle that are using the stock ACC (presently either )
ret.openpilotLongitudinalControl = True # ASCM vehicles use OP for long
ret.radarOffCan = False # ASCM vehicles (typically) have radar TODO: This should be detected from the fingerprint, not assumed
# I'm not sure it's normal to read from Params() in interface.py... but
# It seems the values populated in controlsd.py are set after this
# Meaning the option wasn't returning true _in here_
ret.forceVoacc = Params().get_bool("ForceVoacc")
# These cars have been put into dashcam only due to both a lack of users and test coverage.
# These cars likely still work fine. Once a user confirms each car works and a test route is
# added to selfdrive/car/tests/routes.py, we can remove it from this list.
ret.dashcamOnly = candidate in {CAR.CADILLAC_ATS, CAR.HOLDEN_ASTRA, CAR.MALIBU, CAR.BUICK_REGAL}
# Default to Panda forwarding ACC
ret.safetyConfigs[0].safetyParam = 0
# Presence of a camera on the object bus is ok.
# Have to go to read_only if ASCM is online (ACC-enabled cars),
# or camera is on powertrain bus (LKA cars without ACC).
# LKAS only - no radar, no long
if candidate in NO_ASCM:
ret.openpilotLongitudinalControl = False
ret.radarOffCan = True
# TODO: How Do we detect vehicles using stock cam-based ACC?
#ret.pcmCruise = True
tire_stiffness_factor = 0.444 # not optimized yet
# Start with a baseline lateral tuning for all GM vehicles. Override tuning as needed in each model section below.
ret.minSteerSpeed = 7 * CV.MPH_TO_MS
ret.lateralTuning.pid.kpBP = [0.]
ret.lateralTuning.pid.kpV = [0.2]
ret.lateralTuning.pid.kiBP = [0.]
ret.lateralTuning.pid.kiV = [0.00]
ret.lateralTuning.pid.kf = 0.00004 # full torque for 20 deg at 80mph means 0.00007818594
ret.steerRateCost = 0.5
ret.steerActuatorDelay = 0.1 # Default delay, not measured yet
ret.enableGasInterceptor = 0x201 in fingerprint[0]
# # Check for Electronic Parking Brake
# TODO: JJS: Add param to cereal
# ret.hasEPB = 0x230 in fingerprint[0]
# baseline longitudinal tune
ret.longitudinalTuning.kpBP = [5., 35.]
ret.longitudinalTuning.kpV = [2.4, 1.5]
ret.longitudinalTuning.kiBP = [0.]
ret.longitudinalTuning.kiV = [0.36]
ret.steerLimitTimer = 0.4
ret.radarTimeStep = 0.0667 # GM radar runs at 15Hz instead of standard 20Hz
if ret.enableGasInterceptor:
ret.openpilotLongitudinalControl = True
if candidate == CAR.VOLT or candidate == CAR.VOLT_NR:
# supports stop and go, but initial engage must be above 18mph (which include conservatism)
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1607. + STD_CARGO_KG
ret.wheelbase = 2.69
ret.steerRatio = 17.7 # Stock 15.7, LiveParameters
ret.steerRateCost = 1.0
tire_stiffness_factor = 0.469 # Stock Michelin Energy Saver A/S, LiveParameters
ret.steerRatioRear = 0.
ret.centerToFront = 0.45 * ret.wheelbase # from Volt Gen 1
ret.lateralTuning.pid.kpBP = [0., 40.]
ret.lateralTuning.pid.kpV = [0., .16]
ret.lateralTuning.pid.kiBP = [0.]
ret.lateralTuning.pid.kiV = [.023]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [.6]
ret.lateralTuning.pid.kf = 1. # !!! ONLY for sigmoid feedforward !!!
# Only tuned to reduce oscillations. TODO.
ret.longitudinalTuning.kpBP = [5., 15., 35.]
ret.longitudinalTuning.kpV = [1.25, 1.6, 1.3]
ret.longitudinalTuning.kiBP = [5., 15., 35.]
ret.longitudinalTuning.kiV = [0.18, 0.31, 0.34]
ret.longitudinalTuning.kdBP = [5., 25.]
ret.longitudinalTuning.kdV = [0.6, 0.0]
elif candidate == CAR.MALIBU or candidate == CAR.MALIBU_NR:
# supports stop and go, but initial engage must be above 18mph (which include conservatism)
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1496. + STD_CARGO_KG
ret.wheelbase = 2.83
ret.steerRatio = 15.8
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4 # wild guess
elif candidate == CAR.HOLDEN_ASTRA:
ret.mass = 1363. + STD_CARGO_KG
ret.wheelbase = 2.662
# Remaining parameters copied from Volt for now
ret.centerToFront = ret.wheelbase * 0.4
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.steerRatio = 15.7
ret.steerRatioRear = 0.
elif candidate == CAR.ACADIA or candidate == CAR.ACADIA_NR:
ret.minEnableSpeed = -1. # engage speed is decided by pcm
ret.mass = 4353. * CV.LB_TO_KG + STD_CARGO_KG
ret.wheelbase = 2.86
ret.steerRatio = 14.4 # end to end is 13.46
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4
ret.lateralTuning.pid.kf = 1. # get_steer_feedforward_acadia()
elif candidate == CAR.BUICK_REGAL:
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 3779. * CV.LB_TO_KG + STD_CARGO_KG # (3849+3708)/2
ret.wheelbase = 2.83 # 111.4 inches in meters
ret.steerRatio = 14.4 # guess for tourx
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4 # guess for tourx
elif candidate == CAR.CADILLAC_ATS:
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1601. + STD_CARGO_KG
ret.wheelbase = 2.78
ret.steerRatio = 15.3
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.49
elif candidate == CAR.ESCALADE_ESV:
ret.minEnableSpeed = -1. # engage speed is decided by pcm
ret.mass = 2739. + STD_CARGO_KG
ret.wheelbase = 3.302
ret.steerRatio = 17.3
ret.centerToFront = ret.wheelbase * 0.49
ret.lateralTuning.pid.kpBP = [10., 41.0]
ret.lateralTuning.pid.kpV = [0.13, 0.24]
ret.lateralTuning.pid.kiBP = [10., 41.0]
ret.lateralTuning.pid.kiV = [0.01, 0.02]
ret.lateralTuning.pid.kf = 0.000045
tire_stiffness_factor = 1.0
elif candidate == CAR.BOLT_NR:
ret.minEnableSpeed = -1
ret.minSteerSpeed = 5 * CV.MPH_TO_MS
ret.mass = 1616. + STD_CARGO_KG
ret.wheelbase = 2.60096
ret.steerRatio = 16.8
ret.steerRatioRear = 0.
ret.centerToFront = 2.0828 #ret.wheelbase * 0.4 # wild guess
tire_stiffness_factor = 1.0
# TODO: Improve stability in turns
# still working on improving lateral
# TODO: Should steerRateCost and ActuatorDelay be converted to BPV arrays?
# TODO: Check if the actuator delay changes based on vehicle speed
ret.steerRateCost = 0.5
ret.steerActuatorDelay = 0.
ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.14, 0.24], [0.01, 0.021]]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.5]
ret.lateralTuning.pid.kf = 1. # for get_steer_feedforward_bolt()
# TODO: Needs refinement for stop and go, doesn't fully stop
# Assumes the Bolt is using L-Mode for regen braking
ret.longitudinalTuning.kpBP = [0., 35.]
ret.longitudinalTuning.kpV = [0.21, 0.46]
ret.longitudinalTuning.kiBP = [0., 35.]
ret.longitudinalTuning.kiV = [0.22, 0.33]
ret.stoppingDecelRate = 0.17 # reach stopping target smoothly, brake_travel/s while trying to stop
ret.stopAccel = 0. # Required acceleraton to keep vehicle stationary
ret.vEgoStopping = 0.6 # Speed at which the car goes into stopping state, when car starts requesting stopping accel
ret.vEgoStarting = 0.6 # Speed at which the car goes into starting state, when car starts requesting starting accel,
# vEgoStarting needs to be > or == vEgoStopping to avoid state transition oscillation
ret.stoppingControl = True
ret.longitudinalTuning.deadzoneBP = [0.]
ret.longitudinalTuning.deadzoneV = [0.]
elif candidate == CAR.EQUINOX_NR:
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 3500. * CV.LB_TO_KG + STD_CARGO_KG # (3849+3708)/2
ret.wheelbase = 2.72 #107.3 inches in meters
ret.steerRatio = 14.4 # guess for tourx
ret.steerRatioRear = 0. # unknown online
ret.centerToFront = ret.wheelbase * 0.4 # wild guess
elif candidate == CAR.TAHOE_NR:
ret.minEnableSpeed = -1. # engage speed is decided by pcmFalse
ret.minSteerSpeed = -1 * CV.MPH_TO_MS
ret.mass = 5602. * CV.LB_TO_KG + STD_CARGO_KG # (3849+3708)/2
ret.wheelbase = 2.95 #116 inches in meters
ret.steerRatio = 16.3 # guess for tourx
ret.steerRatioRear = 0. # unknown online
ret.centerToFront = 2.59 # ret.wheelbase * 0.4 # wild guess
ret.steerActuatorDelay = 0.2
ret.pcmCruise = True # TODO: see if this resolves cruiseMismatch
ret.openpilotLongitudinalControl = False # ASCM vehicles use OP for long
ret.radarOffCan = True # ASCM vehicles (typically) have radar
# According to JYoung, decrease MAX_LAT_ACCEL if it is understeering
# friction may need to be increased slowly as well
# I'm not sure what to do about centering / wandering
MAX_LAT_ACCEL = 2.5
ret.lateralTuning.init('torque')
ret.lateralTuning.torque.useSteeringAngle = True
ret.lateralTuning.torque.kp = 2.0 / MAX_LAT_ACCEL
ret.lateralTuning.torque.kf = 1.0 / MAX_LAT_ACCEL
ret.lateralTuning.torque.ki = 0.50 / MAX_LAT_ACCEL
ret.lateralTuning.torque.friction = 0.1
elif candidate == CAR.SILVERADO_NR:
ret.minEnableSpeed = -1.
ret.minSteerSpeed = -1 * CV.MPH_TO_MS
ret.mass = 2400. + STD_CARGO_KG
ret.wheelbase = 3.745
ret.steerRatio = 16.3
ret.pcmCruise = True
ret.centerToFront = ret.wheelbase * .49
ret.steerRateCost = .4
ret.steerActuatorDelay = 0.11
ret.lateralTuning.pid.kpBP = [11., 15.5, 22., 31.]
ret.lateralTuning.pid.kpV = [0.11, 0.14, 0.20, 0.25]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.05]
ret.lateralTuning.pid.kf = .6 # when turning right. use with get_steer_feedforward_silverado()
ret.lateralTuning.pid.kfLeft = .4 # when turning left. use with get_steer_feedforward_silverado()
ret.longitudinalTuning.kpBP = [5., 35.]
ret.longitudinalTuning.kpV = [2.8, 1.5]
ret.longitudinalTuning.kiBP = [5., 35.]
ret.longitudinalTuning.kiV = [0.37, 0.30]
elif candidate == CAR.SUBURBAN:
ret.minEnableSpeed = -1. # engage speed is decided by pcmFalse
ret.minSteerSpeed = -1 * CV.MPH_TO_MS
ret.mass = 2731. + STD_CARGO_KG
ret.wheelbase = 3.302
ret.steerRatio = 23.2 # LiveParams 17.3 From 2016 spec (unlisted for newer models) TODO: Use LiveParameters to find calculated
ret.centerToFront = ret.wheelbase * 0.49
ret.pcmCruise = True # TODO: see if this resolves cruiseMismatch
ret.openpilotLongitudinalControl = False # ASCM vehicles use OP for long
ret.radarOffCan = True # ASCM vehicles (typically) have radar
ret.steerActuatorDelay = 0.253 # Per <NAME> - I got 0.074
ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.11, 0.19], [0.02, 0.12]]
ret.lateralTuning.pid.kpBP = [10., 41.]
ret.lateralTuning.pid.kpV = [0.11, 0.19]
ret.lateralTuning.pid.kiBP = [10., 41.]
ret.lateralTuning.pid.kiV = [0.02, 0.12]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.6]
ret.lateralTuning.pid.kf = 1.0
ret.steerLimitTimer = 0.5
# ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]]
# ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.13, 0.24], [0.01, 0.06]]
# ret.lateralTuning.pid.kf = 0.000060
tire_stiffness_factor = 1.0
elif candidate == CAR.BOLT_EUV:
ret.minEnableSpeed = -1
ret.minSteerSpeed = 5 * CV.MPH_TO_MS
ret.mass = 1616. + STD_CARGO_KG
ret.wheelbase = 2.60096
ret.steerRatio = 16.8
ret.steerRatioRear = 0.
ret.centerToFront = 2.0828 #ret.wheelbase * 0.4 # wild guess
tire_stiffness_factor = 1.0
# TODO: Improve stability in turns
# still working on improving lateral
ret.steerRateCost = 0.5
ret.steerActuatorDelay = 0.
ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 40.0], [0., 40.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.1, 0.22], [0.01, 0.021]]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.6]
ret.lateralTuning.pid.kf = 1. # use with get_feedforward_bolt_euv
ret.pcmCruise = True # TODO: see if this resolves cruiseMismatch
ret.openpilotLongitudinalControl = False # Using Stock ACC
ret.radarOffCan = True # No Radar
# Note: No Long tuning as we are using stock long
if ret.forceVoacc:
ret.safetyConfigs[0].safetyParam = 1 # Inform panda to block ACC frames from camera
ret.openpilotLongitudinalControl = True # OP needs to know it's in charge...
ret.radarOffCan = True # Forced VOACC will blow up (controls mismatch probably) if ACC unit not disabled
ret.pcmCruise = False # Tells OP not to depend on the car's CC.
# TODO: get actual value, for now starting with reasonable value for
# civic and scaling by mass and wheelbase
ret.rotationalInertia = scale_rot_inertia(ret.mass, ret.wheelbase)
# TODO: start from empirically derived lateral slip stiffness for the civic and scale by
# mass and CG position, so all cars will have approximately similar dyn behaviors
ret.tireStiffnessFront, ret.tireStiffnessRear = scale_tire_stiffness(ret.mass, ret.wheelbase, ret.centerToFront,
tire_stiffness_factor=tire_stiffness_factor)
return ret
# returns a car.CarState
  def _update(self, c):
    """Read the latest CarState and derive button/car events for this frame.

    Returns a car.CarState message with buttonEvents and events populated.
    NOTE(review): ordering matters — buttonEvents must be attached to ``ret``
    before create_common_events and the button-press loop below run.
    """
    ret = self.CS.update(self.cp, self.cp_loopback, self.cp_body)

    ret.steeringRateLimited = self.CC.steer_rate_limited if self.CC is not None else False

    # Emit one ButtonEvent per cruise-button transition (press or release).
    buttonEvents = []

    if self.CS.cruise_buttons != self.CS.prev_cruise_buttons and self.CS.prev_cruise_buttons != CruiseButtons.INIT:
      be = car.CarState.ButtonEvent.new_message()
      be.type = ButtonType.unknown
      if self.CS.cruise_buttons != CruiseButtons.UNPRESS:
        be.pressed = True
        but = self.CS.cruise_buttons
      else:
        # On release, classify the event by the button that was just let go.
        be.pressed = False
        but = self.CS.prev_cruise_buttons
      if but == CruiseButtons.RES_ACCEL:
        if not (ret.cruiseState.enabled and ret.standstill):
          be.type = ButtonType.accelCruise # Suppress resume button if we're resuming from stop so we don't adjust speed.
      elif but == CruiseButtons.DECEL_SET:
        be.type = ButtonType.decelCruise
      elif but == CruiseButtons.CANCEL:
        be.type = ButtonType.cancel
      elif but == CruiseButtons.MAIN:
        be.type = ButtonType.altButton3
      buttonEvents.append(be)

    ret.buttonEvents = buttonEvents

    # TODO: JJS Move this to appropriate place (check other brands)
    EXTRA_GEARS = [GearShifter.sport, GearShifter.low, GearShifter.eco, GearShifter.manumatic]

    events = self.create_common_events(ret, extra_gears = EXTRA_GEARS, pcm_enable=self.CS.CP.pcmCruise)

    if ret.vEgo < self.CP.minEnableSpeed:
      events.add(EventName.belowEngageSpeed)
    if ret.cruiseState.standstill:
      events.add(EventName.resumeRequired)
    if ret.vEgo < self.CP.minSteerSpeed:
      events.add(car.CarEvent.EventName.belowSteerSpeed)

    # handle button presses
    for b in ret.buttonEvents:
      # do enable on both accel and decel buttons
      if b.type in (ButtonType.accelCruise, ButtonType.decelCruise) and not b.pressed:
        events.add(EventName.buttonEnable)
      # do disable on button down
      if b.type == ButtonType.cancel and b.pressed:
        events.add(EventName.buttonCancel)

    ret.events = events.to_msg()

    return ret
def apply(self, c):
ret = self.CC.update(c, self.CS)
return ret
| #!/usr/bin/env python3
from cereal import car
from math import fabs
from common.conversions import Conversions as CV
from common.params import Params
from selfdrive.car import STD_CARGO_KG, scale_rot_inertia, scale_tire_stiffness, gen_empty_fingerprint, get_safety_config
from selfdrive.car.gm.values import CAR, CruiseButtons, \
CarControllerParams, NO_ASCM
from selfdrive.car.interfaces import CarInterfaceBase
ButtonType = car.CarState.ButtonEvent.Type
EventName = car.CarEvent.EventName
GearShifter = car.CarState.GearShifter
def get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED):
  """Generic sigmoid-shaped steering feedforward.

  The offset steering angle is scaled by ANGLE and soft-saturated through
  x / (1 + |x|), then combined with speed-dependent and speed-proportional
  terms.  The per-model static methods on CarInterface supply the fitted
  constants.
  """
  x = ANGLE * (desired_angle + ANGLE_OFFSET)
  # Soft saturation: bounded to (-1, 1), monotone in x.
  sigmoid = x / (1 + fabs(x))
  return (SIGMOID_SPEED * sigmoid * v_ego) + (SIGMOID * sigmoid) + (SPEED * v_ego)
class CarInterface(CarInterfaceBase):
@staticmethod
def get_pid_accel_limits(CP, current_speed, cruise_speed):
params = CarControllerParams()
return params.ACCEL_MIN, params.ACCEL_MAX
# Determined by iteratively plotting and minimizing error for f(angle, speed) = steer.
@staticmethod
def get_steer_feedforward_volt(desired_angle, v_ego):
ANGLE = 0.03093722278106523
ANGLE_OFFSET = 0.#46341000035928637
SIGMOID_SPEED = 0.07928458395144745
SIGMOID = 0.4983180128530419
SPEED = -0.0024896011696167266
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_acadia(desired_angle, v_ego):
ANGLE = 0.1314029550298617
ANGLE_OFFSET = 0.#8317776927522815
SIGMOID_SPEED = 0.03820691400292691
SIGMOID = 0.3785405719285944
SPEED = -0.0010868615264700465
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_bolt_euv(desired_angle, v_ego):
ANGLE = 0.0758345580739845
ANGLE_OFFSET = 0.#31396926577596984
SIGMOID_SPEED = 0.04367532050459129
SIGMOID = 0.43144116109994846
SPEED = -0.002654134623368279
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_bolt(desired_angle, v_ego):
ANGLE = 0.06370624896135679
ANGLE_OFFSET = 0.#32536345911579184
SIGMOID_SPEED = 0.06479105208670367
SIGMOID = 0.34485246691603205
SPEED = -0.0010645479469461995
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_silverado(desired_angle, v_ego):
ANGLE = 0.06539361463056717
ANGLE_OFFSET = -0.#8390269362439537
SIGMOID_SPEED = 0.023681877712247515
SIGMOID = 0.5709779025308087
SPEED = -0.0016656455765509301
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
@staticmethod
def get_steer_feedforward_suburban(desired_angle, v_ego):
ANGLE = 0.06562376600261893
ANGLE_OFFSET = 0.#-2.656819831714162
SIGMOID_SPEED = 0.04648878299738527
SIGMOID = 0.21826990273744493
SPEED = -0.001355528078762762
return get_steer_feedforward_sigmoid(desired_angle, v_ego, ANGLE, ANGLE_OFFSET, SIGMOID_SPEED, SIGMOID, SPEED)
def get_steer_feedforward_function(self):
if self.CP.carFingerprint == CAR.VOLT or self.CP.carFingerprint == CAR.VOLT_NR:
return self.get_steer_feedforward_volt
elif self.CP.carFingerprint == CAR.ACADIA:
return self.get_steer_feedforward_acadia
elif self.CP.carFingerprint == CAR.BOLT_EUV:
return self.get_steer_feedforward_bolt_euv
elif self.CP.carFingerprint == CAR.BOLT_NR:
return self.get_steer_feedforward_bolt
elif self.CP.carFingerprint == CAR.SILVERADO_NR:
return self.get_steer_feedforward_silverado
elif self.CP.carFingerprint == CAR.SUBURBAN:
return self.get_steer_feedforward_suburban
else:
return CarInterfaceBase.get_steer_feedforward_default
@staticmethod
def get_params(candidate, fingerprint=gen_empty_fingerprint(), car_fw=None, disable_radar=False):
ret = CarInterfaceBase.get_std_params(candidate, fingerprint)
ret.carName = "gm"
ret.safetyConfigs = [get_safety_config(car.CarParams.SafetyModel.gm)]
ret.alternativeExperience = 1 # UNSAFE_DISABLE_DISENGAGE_ON_GAS # TODO: JJS this value should come from the toggle
ret.pcmCruise = False # stock cruise control is kept off for vehicles with an ASCM
# For vehicle that are using the stock ACC (presently either )
ret.openpilotLongitudinalControl = True # ASCM vehicles use OP for long
ret.radarOffCan = False # ASCM vehicles (typically) have radar TODO: This should be detected from the fingerprint, not assumed
# I'm not sure it's normal to read from Params() in interface.py... but
# It seems the values populated in controlsd.py are set after this
# Meaning the option wasn't returning true _in here_
ret.forceVoacc = Params().get_bool("ForceVoacc")
# These cars have been put into dashcam only due to both a lack of users and test coverage.
# These cars likely still work fine. Once a user confirms each car works and a test route is
# added to selfdrive/car/tests/routes.py, we can remove it from this list.
ret.dashcamOnly = candidate in {CAR.CADILLAC_ATS, CAR.HOLDEN_ASTRA, CAR.MALIBU, CAR.BUICK_REGAL}
# Default to Panda forwarding ACC
ret.safetyConfigs[0].safetyParam = 0
# Presence of a camera on the object bus is ok.
# Have to go to read_only if ASCM is online (ACC-enabled cars),
# or camera is on powertrain bus (LKA cars without ACC).
# LKAS only - no radar, no long
if candidate in NO_ASCM:
ret.openpilotLongitudinalControl = False
ret.radarOffCan = True
# TODO: How Do we detect vehicles using stock cam-based ACC?
#ret.pcmCruise = True
tire_stiffness_factor = 0.444 # not optimized yet
# Start with a baseline lateral tuning for all GM vehicles. Override tuning as needed in each model section below.
ret.minSteerSpeed = 7 * CV.MPH_TO_MS
ret.lateralTuning.pid.kpBP = [0.]
ret.lateralTuning.pid.kpV = [0.2]
ret.lateralTuning.pid.kiBP = [0.]
ret.lateralTuning.pid.kiV = [0.00]
ret.lateralTuning.pid.kf = 0.00004 # full torque for 20 deg at 80mph means 0.00007818594
ret.steerRateCost = 0.5
ret.steerActuatorDelay = 0.1 # Default delay, not measured yet
ret.enableGasInterceptor = 0x201 in fingerprint[0]
# # Check for Electronic Parking Brake
# TODO: JJS: Add param to cereal
# ret.hasEPB = 0x230 in fingerprint[0]
# baseline longitudinal tune
ret.longitudinalTuning.kpBP = [5., 35.]
ret.longitudinalTuning.kpV = [2.4, 1.5]
ret.longitudinalTuning.kiBP = [0.]
ret.longitudinalTuning.kiV = [0.36]
ret.steerLimitTimer = 0.4
ret.radarTimeStep = 0.0667 # GM radar runs at 15Hz instead of standard 20Hz
if ret.enableGasInterceptor:
ret.openpilotLongitudinalControl = True
if candidate == CAR.VOLT or candidate == CAR.VOLT_NR:
# supports stop and go, but initial engage must be above 18mph (which include conservatism)
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1607. + STD_CARGO_KG
ret.wheelbase = 2.69
ret.steerRatio = 17.7 # Stock 15.7, LiveParameters
ret.steerRateCost = 1.0
tire_stiffness_factor = 0.469 # Stock Michelin Energy Saver A/S, LiveParameters
ret.steerRatioRear = 0.
ret.centerToFront = 0.45 * ret.wheelbase # from Volt Gen 1
ret.lateralTuning.pid.kpBP = [0., 40.]
ret.lateralTuning.pid.kpV = [0., .16]
ret.lateralTuning.pid.kiBP = [0.]
ret.lateralTuning.pid.kiV = [.023]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [.6]
ret.lateralTuning.pid.kf = 1. # !!! ONLY for sigmoid feedforward !!!
# Only tuned to reduce oscillations. TODO.
ret.longitudinalTuning.kpBP = [5., 15., 35.]
ret.longitudinalTuning.kpV = [1.25, 1.6, 1.3]
ret.longitudinalTuning.kiBP = [5., 15., 35.]
ret.longitudinalTuning.kiV = [0.18, 0.31, 0.34]
ret.longitudinalTuning.kdBP = [5., 25.]
ret.longitudinalTuning.kdV = [0.6, 0.0]
elif candidate == CAR.MALIBU or candidate == CAR.MALIBU_NR:
# supports stop and go, but initial engage must be above 18mph (which include conservatism)
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1496. + STD_CARGO_KG
ret.wheelbase = 2.83
ret.steerRatio = 15.8
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4 # wild guess
elif candidate == CAR.HOLDEN_ASTRA:
ret.mass = 1363. + STD_CARGO_KG
ret.wheelbase = 2.662
# Remaining parameters copied from Volt for now
ret.centerToFront = ret.wheelbase * 0.4
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.steerRatio = 15.7
ret.steerRatioRear = 0.
elif candidate == CAR.ACADIA or candidate == CAR.ACADIA_NR:
ret.minEnableSpeed = -1. # engage speed is decided by pcm
ret.mass = 4353. * CV.LB_TO_KG + STD_CARGO_KG
ret.wheelbase = 2.86
ret.steerRatio = 14.4 # end to end is 13.46
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4
ret.lateralTuning.pid.kf = 1. # get_steer_feedforward_acadia()
elif candidate == CAR.BUICK_REGAL:
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 3779. * CV.LB_TO_KG + STD_CARGO_KG # (3849+3708)/2
ret.wheelbase = 2.83 # 111.4 inches in meters
ret.steerRatio = 14.4 # guess for tourx
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.4 # guess for tourx
elif candidate == CAR.CADILLAC_ATS:
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 1601. + STD_CARGO_KG
ret.wheelbase = 2.78
ret.steerRatio = 15.3
ret.steerRatioRear = 0.
ret.centerToFront = ret.wheelbase * 0.49
elif candidate == CAR.ESCALADE_ESV:
ret.minEnableSpeed = -1. # engage speed is decided by pcm
ret.mass = 2739. + STD_CARGO_KG
ret.wheelbase = 3.302
ret.steerRatio = 17.3
ret.centerToFront = ret.wheelbase * 0.49
ret.lateralTuning.pid.kpBP = [10., 41.0]
ret.lateralTuning.pid.kpV = [0.13, 0.24]
ret.lateralTuning.pid.kiBP = [10., 41.0]
ret.lateralTuning.pid.kiV = [0.01, 0.02]
ret.lateralTuning.pid.kf = 0.000045
tire_stiffness_factor = 1.0
elif candidate == CAR.BOLT_NR:
ret.minEnableSpeed = -1
ret.minSteerSpeed = 5 * CV.MPH_TO_MS
ret.mass = 1616. + STD_CARGO_KG
ret.wheelbase = 2.60096
ret.steerRatio = 16.8
ret.steerRatioRear = 0.
ret.centerToFront = 2.0828 #ret.wheelbase * 0.4 # wild guess
tire_stiffness_factor = 1.0
# TODO: Improve stability in turns
# still working on improving lateral
# TODO: Should steerRateCost and ActuatorDelay be converted to BPV arrays?
# TODO: Check if the actuator delay changes based on vehicle speed
ret.steerRateCost = 0.5
ret.steerActuatorDelay = 0.
ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.14, 0.24], [0.01, 0.021]]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.5]
ret.lateralTuning.pid.kf = 1. # for get_steer_feedforward_bolt()
# TODO: Needs refinement for stop and go, doesn't fully stop
# Assumes the Bolt is using L-Mode for regen braking
ret.longitudinalTuning.kpBP = [0., 35.]
ret.longitudinalTuning.kpV = [0.21, 0.46]
ret.longitudinalTuning.kiBP = [0., 35.]
ret.longitudinalTuning.kiV = [0.22, 0.33]
ret.stoppingDecelRate = 0.17 # reach stopping target smoothly, brake_travel/s while trying to stop
ret.stopAccel = 0. # Required acceleraton to keep vehicle stationary
ret.vEgoStopping = 0.6 # Speed at which the car goes into stopping state, when car starts requesting stopping accel
ret.vEgoStarting = 0.6 # Speed at which the car goes into starting state, when car starts requesting starting accel,
# vEgoStarting needs to be > or == vEgoStopping to avoid state transition oscillation
ret.stoppingControl = True
ret.longitudinalTuning.deadzoneBP = [0.]
ret.longitudinalTuning.deadzoneV = [0.]
elif candidate == CAR.EQUINOX_NR:
ret.minEnableSpeed = 18 * CV.MPH_TO_MS
ret.mass = 3500. * CV.LB_TO_KG + STD_CARGO_KG # (3849+3708)/2
ret.wheelbase = 2.72 #107.3 inches in meters
ret.steerRatio = 14.4 # guess for tourx
ret.steerRatioRear = 0. # unknown online
ret.centerToFront = ret.wheelbase * 0.4 # wild guess
elif candidate == CAR.TAHOE_NR:
ret.minEnableSpeed = -1. # engage speed is decided by pcmFalse
ret.minSteerSpeed = -1 * CV.MPH_TO_MS
ret.mass = 5602. * CV.LB_TO_KG + STD_CARGO_KG # (3849+3708)/2
ret.wheelbase = 2.95 #116 inches in meters
ret.steerRatio = 16.3 # guess for tourx
ret.steerRatioRear = 0. # unknown online
ret.centerToFront = 2.59 # ret.wheelbase * 0.4 # wild guess
ret.steerActuatorDelay = 0.2
ret.pcmCruise = True # TODO: see if this resolves cruiseMismatch
ret.openpilotLongitudinalControl = False # ASCM vehicles use OP for long
ret.radarOffCan = True # ASCM vehicles (typically) have radar
# According to JYoung, decrease MAX_LAT_ACCEL if it is understeering
# friction may need to be increased slowly as well
# I'm not sure what to do about centering / wandering
MAX_LAT_ACCEL = 2.5
ret.lateralTuning.init('torque')
ret.lateralTuning.torque.useSteeringAngle = True
ret.lateralTuning.torque.kp = 2.0 / MAX_LAT_ACCEL
ret.lateralTuning.torque.kf = 1.0 / MAX_LAT_ACCEL
ret.lateralTuning.torque.ki = 0.50 / MAX_LAT_ACCEL
ret.lateralTuning.torque.friction = 0.1
elif candidate == CAR.SILVERADO_NR:
ret.minEnableSpeed = -1.
ret.minSteerSpeed = -1 * CV.MPH_TO_MS
ret.mass = 2400. + STD_CARGO_KG
ret.wheelbase = 3.745
ret.steerRatio = 16.3
ret.pcmCruise = True
ret.centerToFront = ret.wheelbase * .49
ret.steerRateCost = .4
ret.steerActuatorDelay = 0.11
ret.lateralTuning.pid.kpBP = [11., 15.5, 22., 31.]
ret.lateralTuning.pid.kpV = [0.11, 0.14, 0.20, 0.25]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.05]
ret.lateralTuning.pid.kf = .6 # when turning right. use with get_steer_feedforward_silverado()
ret.lateralTuning.pid.kfLeft = .4 # when turning left. use with get_steer_feedforward_silverado()
ret.longitudinalTuning.kpBP = [5., 35.]
ret.longitudinalTuning.kpV = [2.8, 1.5]
ret.longitudinalTuning.kiBP = [5., 35.]
ret.longitudinalTuning.kiV = [0.37, 0.30]
elif candidate == CAR.SUBURBAN:
ret.minEnableSpeed = -1. # engage speed is decided by pcmFalse
ret.minSteerSpeed = -1 * CV.MPH_TO_MS
ret.mass = 2731. + STD_CARGO_KG
ret.wheelbase = 3.302
ret.steerRatio = 23.2 # LiveParams 17.3 From 2016 spec (unlisted for newer models) TODO: Use LiveParameters to find calculated
ret.centerToFront = ret.wheelbase * 0.49
ret.pcmCruise = True # TODO: see if this resolves cruiseMismatch
ret.openpilotLongitudinalControl = False # ASCM vehicles use OP for long
ret.radarOffCan = True # ASCM vehicles (typically) have radar
ret.steerActuatorDelay = 0.253 # Per <NAME> - I got 0.074
ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.11, 0.19], [0.02, 0.12]]
ret.lateralTuning.pid.kpBP = [10., 41.]
ret.lateralTuning.pid.kpV = [0.11, 0.19]
ret.lateralTuning.pid.kiBP = [10., 41.]
ret.lateralTuning.pid.kiV = [0.02, 0.12]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.6]
ret.lateralTuning.pid.kf = 1.0
ret.steerLimitTimer = 0.5
# ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]]
# ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.13, 0.24], [0.01, 0.06]]
# ret.lateralTuning.pid.kf = 0.000060
tire_stiffness_factor = 1.0
elif candidate == CAR.BOLT_EUV:
ret.minEnableSpeed = -1
ret.minSteerSpeed = 5 * CV.MPH_TO_MS
ret.mass = 1616. + STD_CARGO_KG
ret.wheelbase = 2.60096
ret.steerRatio = 16.8
ret.steerRatioRear = 0.
ret.centerToFront = 2.0828 #ret.wheelbase * 0.4 # wild guess
tire_stiffness_factor = 1.0
# TODO: Improve stability in turns
# still working on improving lateral
ret.steerRateCost = 0.5
ret.steerActuatorDelay = 0.
ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 40.0], [0., 40.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.1, 0.22], [0.01, 0.021]]
ret.lateralTuning.pid.kdBP = [0.]
ret.lateralTuning.pid.kdV = [0.6]
ret.lateralTuning.pid.kf = 1. # use with get_feedforward_bolt_euv
ret.pcmCruise = True # TODO: see if this resolves cruiseMismatch
ret.openpilotLongitudinalControl = False # Using Stock ACC
ret.radarOffCan = True # No Radar
# Note: No Long tuning as we are using stock long
if ret.forceVoacc:
ret.safetyConfigs[0].safetyParam = 1 # Inform panda to block ACC frames from camera
ret.openpilotLongitudinalControl = True # OP needs to know it's in charge...
ret.radarOffCan = True # Forced VOACC will blow up (controls mismatch probably) if ACC unit not disabled
ret.pcmCruise = False # Tells OP not to depend on the car's CC.
# TODO: get actual value, for now starting with reasonable value for
# civic and scaling by mass and wheelbase
ret.rotationalInertia = scale_rot_inertia(ret.mass, ret.wheelbase)
# TODO: start from empirically derived lateral slip stiffness for the civic and scale by
# mass and CG position, so all cars will have approximately similar dyn behaviors
ret.tireStiffnessFront, ret.tireStiffnessRear = scale_tire_stiffness(ret.mass, ret.wheelbase, ret.centerToFront,
tire_stiffness_factor=tire_stiffness_factor)
return ret
# returns a car.CarState
def _update(self, c):
  """Read the latest CAN state into a car.CarState and derive events.

  Builds ButtonEvents from cruise-button transitions, then adds the common
  and GM-specific safety events (below engage/steer speed, resume required,
  button enable/cancel).  Parameter ``c`` is unused in this implementation;
  the signature is dictated by the base interface.
  """
  ret = self.CS.update(self.cp, self.cp_loopback, self.cp_body)

  # No controller yet (e.g. during startup) means steering was not limited.
  ret.steeringRateLimited = self.CC.steer_rate_limited if self.CC is not None else False

  # Translate raw cruise-button transitions into a ButtonEvent.
  buttonEvents = []

  if self.CS.cruise_buttons != self.CS.prev_cruise_buttons and self.CS.prev_cruise_buttons != CruiseButtons.INIT:
    be = car.CarState.ButtonEvent.new_message()
    be.type = ButtonType.unknown
    if self.CS.cruise_buttons != CruiseButtons.UNPRESS:
      # A button was just pressed: report the new button.
      be.pressed = True
      but = self.CS.cruise_buttons
    else:
      # A button was just released: report the button that had been held.
      be.pressed = False
      but = self.CS.prev_cruise_buttons
    if but == CruiseButtons.RES_ACCEL:
      if not (ret.cruiseState.enabled and ret.standstill):
        be.type = ButtonType.accelCruise # Suppress resume button if we're resuming from stop so we don't adjust speed.
    elif but == CruiseButtons.DECEL_SET:
      be.type = ButtonType.decelCruise
    elif but == CruiseButtons.CANCEL:
      be.type = ButtonType.cancel
    elif but == CruiseButtons.MAIN:
      be.type = ButtonType.altButton3
    buttonEvents.append(be)

  ret.buttonEvents = buttonEvents

  # TODO: JJS Move this to appropriate place (check other brands)
  # Extra gear positions passed to create_common_events — presumably gears
  # that should not raise a wrong-gear alert; confirm in the base interface.
  EXTRA_GEARS = [GearShifter.sport, GearShifter.low, GearShifter.eco, GearShifter.manumatic]

  events = self.create_common_events(ret, extra_gears = EXTRA_GEARS, pcm_enable=self.CS.CP.pcmCruise)

  if ret.vEgo < self.CP.minEnableSpeed:
    events.add(EventName.belowEngageSpeed)
  if ret.cruiseState.standstill:
    events.add(EventName.resumeRequired)
  if ret.vEgo < self.CP.minSteerSpeed:
    events.add(car.CarEvent.EventName.belowSteerSpeed)

  # handle button presses
  for b in ret.buttonEvents:
    # do enable on both accel and decel buttons
    if b.type in (ButtonType.accelCruise, ButtonType.decelCruise) and not b.pressed:
      events.add(EventName.buttonEnable)
    # do disable on button down
    if b.type == ButtonType.cancel and b.pressed:
      events.add(EventName.buttonCancel)

  ret.events = events.to_msg()

  return ret
def apply(self, c):
  """Forward the control request *c* to the car controller and return its output."""
  # Pure delegation: the CarController does the work against the current CarState.
  return self.CC.update(c, self.CS)
| en | 0.856552 | #!/usr/bin/env python3 # Determined by iteratively plotting and minimizing error for f(angle, speed) = steer. #46341000035928637 #8317776927522815 #31396926577596984 #32536345911579184 #8390269362439537 #-2.656819831714162 # UNSAFE_DISABLE_DISENGAGE_ON_GAS # TODO: JJS this value should come from the toggle # stock cruise control is kept off for vehicles with an ASCM # For vehicle that are using the stock ACC (presently either ) # ASCM vehicles use OP for long # ASCM vehicles (typically) have radar TODO: This should be detected from the fingerprint, not assumed # I'm not sure it's normal to read from Params() in interface.py... but # It seems the values populated in controlsd.py are set after this # Meaning the option wasn't returning true _in here_ # These cars have been put into dashcam only due to both a lack of users and test coverage. # These cars likely still work fine. Once a user confirms each car works and a test route is # added to selfdrive/car/tests/routes.py, we can remove it from this list. # Default to Panda forwarding ACC # Presence of a camera on the object bus is ok. # Have to go to read_only if ASCM is online (ACC-enabled cars), # or camera is on powertrain bus (LKA cars without ACC). # LKAS only - no radar, no long # TODO: How Do we detect vehicles using stock cam-based ACC? #ret.pcmCruise = True # not optimized yet # Start with a baseline lateral tuning for all GM vehicles. Override tuning as needed in each model section below. # full torque for 20 deg at 80mph means 0.00007818594 # Default delay, not measured yet # # Check for Electronic Parking Brake # TODO: JJS: Add param to cereal # ret.hasEPB = 0x230 in fingerprint[0] # baseline longitudinal tune # GM radar runs at 15Hz instead of standard 20Hz # supports stop and go, but initial engage must be above 18mph (which include conservatism) # Stock 15.7, LiveParameters # Stock Michelin Energy Saver A/S, LiveParameters # from Volt Gen 1 # !!! ONLY for sigmoid feedforward !!! 
# Only tuned to reduce oscillations. TODO. # supports stop and go, but initial engage must be above 18mph (which include conservatism) # wild guess # Remaining parameters copied from Volt for now # engage speed is decided by pcm # end to end is 13.46 # get_steer_feedforward_acadia() # (3849+3708)/2 # 111.4 inches in meters # guess for tourx # guess for tourx # engage speed is decided by pcm #ret.wheelbase * 0.4 # wild guess # TODO: Improve stability in turns # still working on improving lateral # TODO: Should steerRateCost and ActuatorDelay be converted to BPV arrays? # TODO: Check if the actuator delay changes based on vehicle speed # for get_steer_feedforward_bolt() # TODO: Needs refinement for stop and go, doesn't fully stop # Assumes the Bolt is using L-Mode for regen braking # reach stopping target smoothly, brake_travel/s while trying to stop # Required acceleraton to keep vehicle stationary # Speed at which the car goes into stopping state, when car starts requesting stopping accel # Speed at which the car goes into starting state, when car starts requesting starting accel, # vEgoStarting needs to be > or == vEgoStopping to avoid state transition oscillation # (3849+3708)/2 #107.3 inches in meters # guess for tourx # unknown online # wild guess # engage speed is decided by pcmFalse # (3849+3708)/2 #116 inches in meters # guess for tourx # unknown online # ret.wheelbase * 0.4 # wild guess # TODO: see if this resolves cruiseMismatch # ASCM vehicles use OP for long # ASCM vehicles (typically) have radar # According to JYoung, decrease MAX_LAT_ACCEL if it is understeering # friction may need to be increased slowly as well # I'm not sure what to do about centering / wandering # when turning right. use with get_steer_feedforward_silverado() # when turning left. 
use with get_steer_feedforward_silverado() # engage speed is decided by pcmFalse # LiveParams 17.3 From 2016 spec (unlisted for newer models) TODO: Use LiveParameters to find calculated # TODO: see if this resolves cruiseMismatch # ASCM vehicles use OP for long # ASCM vehicles (typically) have radar # Per <NAME> - I got 0.074 # ret.lateralTuning.pid.kpBP, ret.lateralTuning.pid.kiBP = [[10., 41.0], [10., 41.0]] # ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.13, 0.24], [0.01, 0.06]] # ret.lateralTuning.pid.kf = 0.000060 #ret.wheelbase * 0.4 # wild guess # TODO: Improve stability in turns # still working on improving lateral # use with get_feedforward_bolt_euv # TODO: see if this resolves cruiseMismatch # Using Stock ACC # No Radar # Note: No Long tuning as we are using stock long # Inform panda to block ACC frames from camera # OP needs to know it's in charge... # Forced VOACC will blow up (controls mismatch probably) if ACC unit not disabled # Tells OP not to depend on the car's CC. # TODO: get actual value, for now starting with reasonable value for # civic and scaling by mass and wheelbase # TODO: start from empirically derived lateral slip stiffness for the civic and scale by # mass and CG position, so all cars will have approximately similar dyn behaviors # returns a car.CarState # Suppress resume button if we're resuming from stop so we don't adjust speed. # TODO: JJS Move this to appropriate place (check other brands) # handle button presses # do enable on both accel and decel buttons # do disable on button down | 2.241663 | 2 |
Practica07/EllipticCurvesTests.py | Argenis616/cryptography | 0 | 6624542 | import EllipticCurves as ec
c = ec.Curve(2, 3, 97)
def test_class():
    """Sanity-check Curve point-membership tests and the determinant."""
    # Two points the curve is expected to contain.
    for point in ((17, 10), (95, 31)):
        assert c.is_on_curve(point)
    # A point off the curve must be rejected.
    assert not c.is_on_curve((13, 13))
    # The point at infinity (represented as None) always lies on the curve.
    assert c.is_on_curve(None)
    assert c.determinant() == 275
P, Q = (17, 10), (95, 31)
def test_add_curve():
    """Point addition must be closed on the curve; P + (-P) is infinity."""
    total = ec.add_points(P, Q, c)
    assert c.is_on_curve(total)
    # (17, 87) is the inverse of P = (17, 10), so the sum is the point at
    # infinity (None), which still counts as lying on the curve.
    at_infinity = ec.add_points(P, (17, 87), c)
    assert c.is_on_curve(at_infinity) and at_infinity is None
    # Doubling a point must also land on the curve.
    doubled = ec.add_points(P, P, c)
    assert c.is_on_curve(doubled)
def test_scalar_mult():
    """Find the order of P by repeated addition, then cross-check scalar_multiplication."""
    current = ec.scalar_multiplication(P, 1, c)
    order = 1
    # Keep adding P until the running sum hits the point at infinity (None);
    # the number of terms accumulated is then the order of P.
    while current is not None:
        current = ec.add_points(P, current, c)
        order += 1
    # Multiplying P by its order must yield the point at infinity directly.
    assert ec.scalar_multiplication(P, order, c) is None
| import EllipticCurves as ec
c = ec.Curve(2, 3, 97)
def test_class():
assert c.is_on_curve((17, 10))
assert c.is_on_curve((95, 31))
assert not c.is_on_curve((13, 13))
assert c.is_on_curve(None)
assert c.determinant() == 275
P, Q = (17, 10), (95, 31)
def test_add_curve():
p_plus_q = ec.add_points(P, Q, c)
assert c.is_on_curve(p_plus_q)
inf = ec.add_points(P, (17, 87), c)
assert c.is_on_curve(inf) and inf == None
p_plus_p = ec.add_points(P, P, c)
assert c.is_on_curve(p_plus_p)
def test_scalar_mult():
k = 1
one_p = ec.scalar_multiplication(P, 1, c)
while one_p != None:
k += 1
one_p = ec.add_points(P, one_p, c)
assert ec.scalar_multiplication(P, k, c) == None
| none | 1 | 2.96779 | 3 | |
Saves_display.py | Vovadoes/Yandex-Race | 1 | 6624543 | <filename>Saves_display.py
import os
import datetime
import pygame
from Car import Car
from Image import Image
from Road import Text
from Save import Save
from Starter import Starter
def saves_dislpay(screen, size):
    """Show the save-selection screen.

    Displays one saved game at a time with its statistics; the left/right
    arrows cycle through saves sorted newest-first.  Returns the pressed
    button's ``Starter`` (e.g. back to the menu) or ``None`` if the window
    is closed.

    NOTE(review): the public name keeps the original typo ("dislpay")
    because callers import it under this name.
    """
    # Imported inside the function — presumably to break a circular import
    # with Menu/Button; confirm before hoisting to module level.
    from Menu import menu
    from Button import Button
    background_sprites = pygame.sprite.Group()
    all_sprites = pygame.sprite.Group()
    buttons_sprites = pygame.sprite.Group()
    arrows_sprites = pygame.sprite.Group()
    background = Image('data/Фон выбора машины.png')
    # Layout constants, scaled from the background image and window size.
    k_image_width = size[0] / background.image.get_width()
    k_image_height = size[1] / background.image.get_height()
    k_image_standart = min(k_image_width, k_image_height)
    X_BUTTON_EXIT = int(0.02 * size[0])
    Y_BUTTON_EXIT = int(0.9 * size[1])
    TEXT_Height = int(4 * k_image_standart)
    X_TEXT_BEGIN = int(0.20 * size[0])
    X_TEXT_END = int(0.38 * size[0])  # NOTE(review): unused below — confirm before removing
    Y_BLOCK_BEGIN_TEXT = 0
    Y_BLOCK_END_TEXT = size[1] - X_BUTTON_EXIT
    # Create the background sprite,
    background_sprite = pygame.sprite.Sprite(background_sprites)
    # set its image (stretched to fill the window),
    background_sprite.image = pygame.transform.scale(background.image, size)
    # and its bounding rect.
    background_sprite.rect = background_sprite.image.get_rect()
    buttons = []
    # A throwaway Button is built first only to measure its native height.
    button_exit = Button(Image("data/Кнопка.png"), 1, 1)
    k = size[1] * 0.07 / button_exit.rect.height
    button_exit = Button(Image("data/Кнопка.png"), k, k, buttons_sprites)
    button_exit.rect.x = X_BUTTON_EXIT
    button_exit.rect.y = Y_BUTTON_EXIT
    button_exit.set_text("Назад")
    # Pressing "Back" returns a Starter that re-opens the main menu.
    button_exit.starter = Starter(menu, screen, size)
    buttons.append(button_exit)
    # Info table image, scaled to fit the window and centred.
    table = Button(Image("data/table.png"), 1, 1)
    k = min(size[1] / table.rect.height,
            size[0] / table.rect.width)
    table = Button(Image("data/table.png"), k, k, background_sprites)
    table.rect.x = (size[0] - table.rect.width) // 2
    table.rect.y = (size[1] - table.rect.height) // 2
    # Left/right arrows for cycling through the saves (same measure-then-
    # rebuild pattern as the exit button).
    button_left = Button(Image(r"data/Стрелка влево.png"), 1, 1, x=0)
    button_right = Button(Image(r"data/Стрелка вправо.png"), 1, 1, x=0)
    k = size[1] * 0.1 / button_right.rect.height
    button_right = Button(button_right.last_image, k, k, arrows_sprites)
    button_right.rect.y = (size[1] - button_right.rect.height) // 2
    button_right.rect.x = size[0] - button_left.rect.width - 10
    k = size[1] * 0.1 / button_left.rect.height
    button_left = Button(button_left.last_image, k, k, arrows_sprites)
    button_left.rect.y = (size[1] - button_left.rect.height) // 2
    button_left.rect.x = 10
    # Load every known car so the "cars unlocked" counter can show a total.
    cars = []
    for i in os.listdir(os.path.join(Car.path_save)):
        car = Car().load(path=i)
        cars.append(car)
    texts = {}
    saves = Save.set_all_saves()
    index_save = 0
    print(saves)  # NOTE(review): debug output — consider removing or logging
    recalculate_car = True
    if len(saves) == 0:
        # Nothing to show: display a single "no saves" message and skip the
        # per-save refresh below.
        recalculate_car = False
        texts['error'] = Text(f"Сохранений нет", height=TEXT_Height, x=X_TEXT_BEGIN)
        texts['error'].value = ['']
    else:
        # Static labels; their .value fields are filled per selected save.
        texts['name'] = Text(f"Имя сохранения: ", height=TEXT_Height, x=X_TEXT_BEGIN)
        texts['cars'] = Text(f"Количество машин: ", height=TEXT_Height, x=X_TEXT_BEGIN)
        texts['roads'] = Text(f"Количество проездок: ", height=TEXT_Height, x=X_TEXT_BEGIN)
        texts['final_roads'] = Text(f"Количество завершенных проездок: ", height=TEXT_Height, x=X_TEXT_BEGIN)
        texts['date'] = Text(f"Дата сохранения: ", height=TEXT_Height, x=X_TEXT_BEGIN)
        # Newest saves first.
        saves.sort(key=lambda i: i['date'], reverse=True)
    # Spread the text rows evenly over the available vertical space.
    d_y_text = (Y_BLOCK_END_TEXT - Y_BLOCK_BEGIN_TEXT - (len(texts) * TEXT_Height)) / (
            len(texts) + 1)
    y_text = Y_BLOCK_BEGIN_TEXT + d_y_text
    for i in texts:
        texts[i].y = y_text
        y_text += TEXT_Height + d_y_text
    fps = 60
    running = True
    clock = pygame.time.Clock()
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            if event.type == pygame.MOUSEMOTION:
                # Highlight the button under the cursor, reset the others.
                for button in buttons:
                    if button.rect.collidepoint(event.pos):
                        button.change_picture(Image("data/Кнопка светлая.png"),
                                              button.deafult_k_image_width,
                                              button.deafult_k_image_height)
                    else:
                        button.set_deafult()
            if event.type == pygame.MOUSEBUTTONDOWN:
                for button in buttons:
                    if button.rect.collidepoint(event.pos):
                        if button.starter is not None:
                            # Hand control back to the caller (e.g. the menu).
                            return button.starter
                if button_left.rect.collidepoint(event.pos) or button_right.rect.collidepoint(
                        event.pos):
                    if len(saves) != 0:
                        # Cycle the selection; modulo wraps at both ends.
                        if button_left.rect.collidepoint(event.pos):
                            index_save = (index_save - 1) % len(saves)
                        else:
                            index_save = (index_save + 1) % len(saves)
                        recalculate_car = True
        if recalculate_car:
            # Reload the selected save from disk and refresh the statistics.
            save = Save().load(path=Save.get_path(saves[index_save]["name"]))
            print(save.specifications.name_cars)  # NOTE(review): debug output
            texts['name'].value = [saves[index_save]["name"]]
            texts['cars'].value = [len(save.specifications.name_cars), ' из ', len(cars)]
            texts['roads'].value = [len(save.road_and_car)]
            texts['final_roads'].value = [len(list(filter(lambda j: j.complete_trip, [i for i in save.road_and_car])))]
            texts['date'].value = [saves[index_save]["date"]]
            a = datetime.datetime(2022, 1, 26, 22, 18, 33, 253000)  # NOTE(review): unused — leftover experiment?
            recalculate_car = False
        # Redraw the scene back-to-front.
        screen.fill(pygame.Color((0, 0, 0)))
        background_sprites.draw(screen)
        all_sprites.draw(screen)
        arrows_sprites.draw(screen)
        buttons_sprites.draw(screen)
        for i in texts:
            texts[i].render(screen)
        for i in buttons:
            i.render_text(screen)
        clock.tick(fps)
        pygame.display.flip()
    return None
import os
import datetime
import pygame
from Car import Car
from Image import Image
from Road import Text
from Save import Save
from Starter import Starter
def saves_dislpay(screen, size):
from Menu import menu
from Button import Button
background_sprites = pygame.sprite.Group()
all_sprites = pygame.sprite.Group()
buttons_sprites = pygame.sprite.Group()
arrows_sprites = pygame.sprite.Group()
background = Image('data/Фон выбора машины.png')
# CONST
k_image_width = size[0] / background.image.get_width()
k_image_height = size[1] / background.image.get_height()
k_image_standart = min(k_image_width, k_image_height)
X_BUTTON_EXIT = int(0.02 * size[0])
Y_BUTTON_EXIT = int(0.9 * size[1])
TEXT_Height = int(4 * k_image_standart)
X_TEXT_BEGIN = int(0.20 * size[0])
X_TEXT_END = int(0.38 * size[0])
Y_BLOCK_BEGIN_TEXT = 0
Y_BLOCK_END_TEXT = size[1] - X_BUTTON_EXIT
# создадим спрайт
background_sprite = pygame.sprite.Sprite(background_sprites)
# определим его вид
background_sprite.image = pygame.transform.scale(background.image, size)
# и размеры
background_sprite.rect = background_sprite.image.get_rect()
buttons = []
button_exit = Button(Image("data/Кнопка.png"), 1, 1)
k = size[1] * 0.07 / button_exit.rect.height
button_exit = Button(Image("data/Кнопка.png"), k, k, buttons_sprites)
button_exit.rect.x = X_BUTTON_EXIT
button_exit.rect.y = Y_BUTTON_EXIT
button_exit.set_text("Назад")
button_exit.starter = Starter(menu, screen, size)
buttons.append(button_exit)
table = Button(Image("data/table.png"), 1, 1)
k = min(size[1] / table.rect.height,
size[0] / table.rect.width)
table = Button(Image("data/table.png"), k, k, background_sprites)
table.rect.x = (size[0] - table.rect.width) // 2
table.rect.y = (size[1] - table.rect.height) // 2
button_left = Button(Image(r"data/Стрелка влево.png"), 1, 1, x=0)
button_right = Button(Image(r"data/Стрелка вправо.png"), 1, 1, x=0)
k = size[1] * 0.1 / button_right.rect.height
button_right = Button(button_right.last_image, k, k, arrows_sprites)
button_right.rect.y = (size[1] - button_right.rect.height) // 2
button_right.rect.x = size[0] - button_left.rect.width - 10
k = size[1] * 0.1 / button_left.rect.height
button_left = Button(button_left.last_image, k, k, arrows_sprites)
button_left.rect.y = (size[1] - button_left.rect.height) // 2
button_left.rect.x = 10
cars = []
for i in os.listdir(os.path.join(Car.path_save)):
car = Car().load(path=i)
cars.append(car)
texts = {}
saves = Save.set_all_saves()
index_save = 0
print(saves)
recalculate_car = True
if len(saves) == 0:
recalculate_car = False
texts['error'] = Text(f"Сохранений нет", height=TEXT_Height, x=X_TEXT_BEGIN)
texts['error'].value = ['']
else:
texts['name'] = Text(f"Имя сохранения: ", height=TEXT_Height, x=X_TEXT_BEGIN)
texts['cars'] = Text(f"Количество машин: ", height=TEXT_Height, x=X_TEXT_BEGIN)
texts['roads'] = Text(f"Количество проездок: ", height=TEXT_Height, x=X_TEXT_BEGIN)
texts['final_roads'] = Text(f"Количество завершенных проездок: ", height=TEXT_Height, x=X_TEXT_BEGIN)
texts['date'] = Text(f"Дата сохранения: ", height=TEXT_Height, x=X_TEXT_BEGIN)
saves.sort(key=lambda i: i['date'], reverse=True)
d_y_text = (Y_BLOCK_END_TEXT - Y_BLOCK_BEGIN_TEXT - (len(texts) * TEXT_Height)) / (
len(texts) + 1)
y_text = Y_BLOCK_BEGIN_TEXT + d_y_text
for i in texts:
texts[i].y = y_text
y_text += TEXT_Height + d_y_text
fps = 60
running = True
clock = pygame.time.Clock()
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.MOUSEMOTION:
for button in buttons:
if button.rect.collidepoint(event.pos):
button.change_picture(Image("data/Кнопка светлая.png"),
button.deafult_k_image_width,
button.deafult_k_image_height)
else:
button.set_deafult()
if event.type == pygame.MOUSEBUTTONDOWN:
for button in buttons:
if button.rect.collidepoint(event.pos):
if button.starter is not None:
return button.starter
if button_left.rect.collidepoint(event.pos) or button_right.rect.collidepoint(
event.pos):
if len(saves) != 0:
if button_left.rect.collidepoint(event.pos):
index_save = (index_save - 1) % len(saves)
# print("button_left")
else:
index_save = (index_save + 1) % len(saves)
# print("button_right")
recalculate_car = True
if recalculate_car:
save = Save().load(path=Save.get_path(saves[index_save]["name"]))
print(save.specifications.name_cars)
texts['name'].value = [saves[index_save]["name"]]
texts['cars'].value = [len(save.specifications.name_cars), ' из ', len(cars)]
texts['roads'].value = [len(save.road_and_car)]
texts['final_roads'].value = [len(list(filter(lambda j: j.complete_trip, [i for i in save.road_and_car])))]
texts['date'].value = [saves[index_save]["date"]]
a = datetime.datetime(2022, 1, 26, 22, 18, 33, 253000)
recalculate_car = False
screen.fill(pygame.Color((0, 0, 0)))
background_sprites.draw(screen)
all_sprites.draw(screen)
arrows_sprites.draw(screen)
buttons_sprites.draw(screen)
for i in texts:
texts[i].render(screen)
for i in buttons:
i.render_text(screen)
clock.tick(fps)
pygame.display.flip()
return None | ru | 0.800362 | # CONST # создадим спрайт # определим его вид # и размеры # print("button_left") # print("button_right") | 3.361243 | 3 |
python/twisted_client_server/calculus/base.py | tardate/LittleCodingKata | 8 | 6624544 | """Calculation class.
An uber-simple implementation of some pure-python "business logic"
"""
class Calculation(object):
    """Uber-simple pure-Python "business logic": integer arithmetic.

    Every operand is coerced to ``int`` first; a value that cannot be
    coerced raises ``TypeError`` (not ``ValueError``) so callers see one
    uniform error type.
    """

    def _make_ints(self, *args):
        """Coerce *args* to a list of ints, raising TypeError on failure.

        The conversion must be eager: the original ``map(int, args)`` is
        lazy on Python 3, so the ValueError escaped this ``try`` block and
        surfaced at the caller's unpacking site instead of becoming a
        TypeError here.
        """
        try:
            return [int(arg) for arg in args]
        except ValueError:
            # Wrap args in a 1-tuple: ``"%s" % args`` with a multi-element
            # tuple raises "not all arguments converted" instead of
            # formatting the message.
            raise TypeError("Couldn't coerce arguments to integers: %s" % (args,))

    def add(self, a, b):
        """Return a + b after integer coercion."""
        a, b = self._make_ints(a, b)
        return a + b

    def subtract(self, a, b):
        """Return a - b after integer coercion."""
        a, b = self._make_ints(a, b)
        return a - b

    def multiply(self, a, b):
        """Return a * b after integer coercion."""
        a, b = self._make_ints(a, b)
        return a * b

    def divide(self, a, b):
        """Return a / b (true division; raises ZeroDivisionError if b == 0)."""
        a, b = self._make_ints(a, b)
        return a / b
| """Calculation class.
An uber-simple implementation of some pure-python "business logic"
"""
class Calculation(object):
def _make_ints(self, *args):
try:
return map(int, args)
except ValueError:
raise TypeError("Couldn't coerce arguments to integers: %s" % args)
def add(self, a, b):
a, b = self._make_ints(a, b)
return a + b
def subtract(self, a, b):
a, b = self._make_ints(a, b)
return a - b
def multiply(self, a, b):
a, b = self._make_ints(a, b)
return a * b
def divide(self, a, b):
a, b = self._make_ints(a, b)
return a / b
| en | 0.806254 | Calculation class. An uber-simple implementation of some pure-python "business logic" | 3.454022 | 3 |
pywinautofiles/examples/ForteAgentSample.py | jingring/pywinautolib | 0 | 6624545 | <filename>pywinautofiles/examples/ForteAgentSample.py<gh_stars>0
"""Perform some tests with Forte Agent
NOTE: Forte Agent has a very dynamic interface
e.g. whether it is free or not, whether it is still in the grace
period. For this reason this example script may or may not work well
for you"""
print __doc__
import time
from pprint import pprint
from pywinauto.application import Application
# start the application and wait for the Agent Dialog to be ready
app = Application().start_(r"c:\program files\agent\agent.exe")
while not app.Windows_():
time.sleep(.5)
# if the trial nag dialog pops up
if app.window_(title = "Forte Agent Trial").Exists():
#app.ForteAgentTrial.IdLikeToContinueUsingAgentfor7moredays.Click()
app.ForteAgentTrial.IdliketouseFreeAgent
app.ForteAgentTrial.OK.Click()
if app.window_(title = "Free Agent Registration").Exists():
app.FreeAgentRegistration.ImreallybusyRemindmein30.Click()
app.FreeAgentRegistration.OK.CloseClick()
if app.window_(title = "What's New Reminder").Exists():
app.WhatsNewReminder.ImreallybusyRemindmein90.Click()
app.WhatsNewReminder.OK.CloseClick()
# wait until the app is ready
app.FreeAgent.Wait("ready")
# if we get the Agent Setup wizard pops up close it
if app.AgentSetupWizard.Cancel.Exists(1):
app.AgentSetupWizard.Cancel.Click()
app.AgentSetupWizard2.Yes.Click()
# Select to emtpy trash
app.FreeAgent.MenuSelect("File->EmptyTrash")
app.EmptyTrash.No.Click()
# Select some more menus (typo not important :-)
app.FreeAgent.MenuSelect("File->Purge and Compact -> Compact All Folders")
app.FreeAgent.OK.Click()
#print app.FreeAgent.MenuItem("File->Purge and compact").GetProperties()
#app.FreeAgent.MenuSelect("File->Purge and Compact->PurgeFolder")
#app.PurgeFoldersInDesks.Cancel.Click()
# this is strange - when I do it by hand this is "Purge Folder" but during
# automation the text of the menu item is Purge Selected Folders
# FIXED - need to init the sub menu!
app.FreeAgent.MenuSelect("File->Purge and Compact->Purge Folder")
app.AgentTip.OK.Click()
app.FreeAgent.MenuSelect("File->Import and Export->Import Messages")
app.ImportMessages.Cancel.Click()
app.FreeAgent.MenuSelect("File->Import and Export->Import Address Book")
app.ImportAddresses.Cancel.Click()
app.FreeAgent.MenuSelect("File->Import and Export->Export Address Book")
app.ExportAddresses.Cancel.Click()
# pick something other then a file menu item
app.FreeAgent.MenuSelect("Tools->ApplyFiltersToFolder")
if app.ToolsApplyFilters.OK.Exists():
app.ToolsApplyFilters.OK.Click()
#app.AgentTip.OK.Click()
#app.ApplyFiltersToFolders.Cancel.Click()
print "==" * 20
print "The Agent File Menu..."
print "==" * 20
pprint (app.FreeAgent.MenuItems()[1])
try:
app.FreeAgent.MenuSelect("File->Print")
app.Print.Cancel.Click()
except:
print "Print Menu was probably disabled"
# quit Agent
app.FreeAgent.MenuSelect("File -> Exit")
| <filename>pywinautofiles/examples/ForteAgentSample.py<gh_stars>0
"""Perform some tests with Forte Agent
NOTE: Forte Agent has a very dynamic interface
e.g. whether it is free or not, whether it is still in the grace
period. For this reason this example script may or may not work well
for you"""
print __doc__
import time
from pprint import pprint
from pywinauto.application import Application
# start the application and wait for the Agent Dialog to be ready
app = Application().start_(r"c:\program files\agent\agent.exe")
while not app.Windows_():
time.sleep(.5)
# if the trial nag dialog pops up
if app.window_(title = "Forte Agent Trial").Exists():
#app.ForteAgentTrial.IdLikeToContinueUsingAgentfor7moredays.Click()
app.ForteAgentTrial.IdliketouseFreeAgent
app.ForteAgentTrial.OK.Click()
if app.window_(title = "Free Agent Registration").Exists():
app.FreeAgentRegistration.ImreallybusyRemindmein30.Click()
app.FreeAgentRegistration.OK.CloseClick()
if app.window_(title = "What's New Reminder").Exists():
app.WhatsNewReminder.ImreallybusyRemindmein90.Click()
app.WhatsNewReminder.OK.CloseClick()
# wait until the app is ready
app.FreeAgent.Wait("ready")
# if we get the Agent Setup wizard pops up close it
if app.AgentSetupWizard.Cancel.Exists(1):
app.AgentSetupWizard.Cancel.Click()
app.AgentSetupWizard2.Yes.Click()
# Select to emtpy trash
app.FreeAgent.MenuSelect("File->EmptyTrash")
app.EmptyTrash.No.Click()
# Select some more menus (typo not important :-)
app.FreeAgent.MenuSelect("File->Purge and Compact -> Compact All Folders")
app.FreeAgent.OK.Click()
#print app.FreeAgent.MenuItem("File->Purge and compact").GetProperties()
#app.FreeAgent.MenuSelect("File->Purge and Compact->PurgeFolder")
#app.PurgeFoldersInDesks.Cancel.Click()
# this is strange - when I do it by hand this is "Purge Folder" but during
# automation the text of the menu item is Purge Selected Folders
# FIXED - need to init the sub menu!
app.FreeAgent.MenuSelect("File->Purge and Compact->Purge Folder")
app.AgentTip.OK.Click()
app.FreeAgent.MenuSelect("File->Import and Export->Import Messages")
app.ImportMessages.Cancel.Click()
app.FreeAgent.MenuSelect("File->Import and Export->Import Address Book")
app.ImportAddresses.Cancel.Click()
app.FreeAgent.MenuSelect("File->Import and Export->Export Address Book")
app.ExportAddresses.Cancel.Click()
# pick something other then a file menu item
app.FreeAgent.MenuSelect("Tools->ApplyFiltersToFolder")
if app.ToolsApplyFilters.OK.Exists():
app.ToolsApplyFilters.OK.Click()
#app.AgentTip.OK.Click()
#app.ApplyFiltersToFolders.Cancel.Click()
print "==" * 20
print "The Agent File Menu..."
print "==" * 20
pprint (app.FreeAgent.MenuItems()[1])
try:
app.FreeAgent.MenuSelect("File->Print")
app.Print.Cancel.Click()
except:
print "Print Menu was probably disabled"
# quit Agent
app.FreeAgent.MenuSelect("File -> Exit")
| en | 0.733891 | Perform some tests with Forte Agent NOTE: Forte Agent has a very dynamic interface e.g. whether it is free or not, whether it is still in the grace period. For this reason this example script may or may not work well for you # start the application and wait for the Agent Dialog to be ready # if the trial nag dialog pops up #app.ForteAgentTrial.IdLikeToContinueUsingAgentfor7moredays.Click() # wait until the app is ready # if we get the Agent Setup wizard pops up close it # Select to emtpy trash # Select some more menus (typo not important :-) #print app.FreeAgent.MenuItem("File->Purge and compact").GetProperties() #app.FreeAgent.MenuSelect("File->Purge and Compact->PurgeFolder") #app.PurgeFoldersInDesks.Cancel.Click() # this is strange - when I do it by hand this is "Purge Folder" but during # automation the text of the menu item is Purge Selected Folders # FIXED - need to init the sub menu! # pick something other then a file menu item #app.AgentTip.OK.Click() #app.ApplyFiltersToFolders.Cancel.Click() # quit Agent | 2.402647 | 2 |
django_qingstor_storage/__init__.py | knktc/django-qingstor-storage | 2 | 6624546 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author:knktc
@contact:<EMAIL>
@create:2018-08-31 15:40
"""
__author__ = 'knktc'
__version__ = '0.1'
def main():
"""
main process
"""
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author:knktc
@contact:<EMAIL>
@create:2018-08-31 15:40
"""
__author__ = 'knktc'
__version__ = '0.1'
def main():
"""
main process
"""
if __name__ == '__main__':
main()
| en | 0.349228 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- @author:knktc @contact:<EMAIL> @create:2018-08-31 15:40 main process | 1.279036 | 1 |
level25.py | CoffeeTableEnnui/RedCircleGame | 0 | 6624547 | <filename>level25.py
import rectangles as r
import circles as c
import games as g
import pygame

# Level 25: an arena with three walls and nine horizontally patrolling enemies.
level = g.Game(100, 700, 100, 100)
level.addwall(50, 600, 390, 410)
level.addwall(160, 180, 50, 189)
level.addwall(390, 410, 690, 710)

# Every enemy on this level patrols the same horizontal range.
_PATROL_START, _PATROL_END = 79, 727

# (x, y, initial x-direction) for each small enemy.
_ENEMY_SPECS = [
    (79, 200, -1),
    (151, 250, 1),
    (223, 300, 1),
    (295, 350, 1),
    (439, 450, 1),
    (511, 500, 1),
    (583, 550, 1),
    (655, 600, 1),
    (727, 650, 1),
]

# Build all enemies from the spec table instead of nine hand-unrolled copies.
_enemies = []
for _x, _y, _xdir in _ENEMY_SPECS:
    _enemy = c.SmallEnemy(_x, _y)
    _enemy.xdir = _xdir
    _enemy.start, _enemy.end = _PATROL_START, _PATROL_END
    level.addleftright(_enemy)
    _enemies.append(_enemy)

# Keep the original per-enemy module-level names for backward compatibility
# with any code that references them directly.
(enemy0, enemy1, enemy2, enemy3, enemy4,
 enemy5, enemy6, enemy7, enemy8) = _enemies
| <filename>level25.py
import rectangles as r
import circles as c
import games as g
import pygame

# Level 25: an arena with three walls and nine horizontally patrolling enemies.
level = g.Game(100, 700, 100, 100)
level.addwall(50, 600, 390, 410)
level.addwall(160, 180, 50, 189)
level.addwall(390, 410, 690, 710)

# Every enemy on this level patrols the same horizontal range.
_PATROL_START, _PATROL_END = 79, 727

# (x, y, initial x-direction) for each small enemy.
_ENEMY_SPECS = [
    (79, 200, -1),
    (151, 250, 1),
    (223, 300, 1),
    (295, 350, 1),
    (439, 450, 1),
    (511, 500, 1),
    (583, 550, 1),
    (655, 600, 1),
    (727, 650, 1),
]

# Build all enemies from the spec table instead of nine hand-unrolled copies.
_enemies = []
for _x, _y, _xdir in _ENEMY_SPECS:
    _enemy = c.SmallEnemy(_x, _y)
    _enemy.xdir = _xdir
    _enemy.start, _enemy.end = _PATROL_START, _PATROL_END
    level.addleftright(_enemy)
    _enemies.append(_enemy)

# Keep the original per-enemy module-level names for backward compatibility
# with any code that references them directly.
(enemy0, enemy1, enemy2, enemy3, enemy4,
 enemy5, enemy6, enemy7, enemy8) = _enemies
| none | 1 | 3.021845 | 3 | |
src/Group the People give the Size/solution.py | sankalok/leetcode-questions | 0 | 6624548 | <reponame>sankalok/leetcode-questions<gh_stars>0
class Solution:
    def groupThePeople(self, groupSizes: List[int]) -> List[List[int]]:
        """Group people so that person ``i`` lands in a group of size ``groupSizes[i]``.

        Returns:
            A list of groups; each group is a list of people indices whose
            length equals the group size required by its members.
        """
        from collections import defaultdict

        # Bucket people indices by their required group size.  One pass over
        # the input replaces the original O(max_size * n) rescan per size,
        # and removes the unused `noGroups` computation.
        buckets = defaultdict(list)
        for person, size in enumerate(groupSizes):
            buckets[size].append(person)
        # Slice every bucket into chunks of exactly `size` members.
        groups = []
        for size, members in buckets.items():
            for start in range(0, len(members), size):
                groups.append(members[start:start + size])
        return groups
| class Solution:
def groupThePeople(self, groupSizes: List[int]) -> List[List[int]]:
groups = []
counter = 1
counterMax = max(groupSizes)
while(counter <= counterMax):
indexes = []
for i in range(0, len(groupSizes)):
if(groupSizes[i] == counter):
indexes.append(i)
groups.append(indexes)
counter += 1
subgroups = []
for i in range(0, len(groups)):
size = i + 1
noGroups = len(groups[i]) // size
count = 0
while(count < len(groups[i])):
subgroups.append(groups[i][count:count+size])
count = count + size
return subgroups | none | 1 | 3.282323 | 3 | |
sark/codeblock.py | yannayl/Sark | 1 | 6624549 | <reponame>yannayl/Sark
import networkx
import idaapi
from .code import lines, functions
from .core import get_func, fix_addresses
class CodeBlock(idaapi.BasicBlock):
    """A basic block with sark conveniences (lines, neighbors, coloring).

    Construct either from an effective address (``CodeBlock(ea)``, defaulting
    to the current screen location) or by wrapping an existing basic block
    (``CodeBlock(id, bb, fc)``) as done by `FlowChart._getitem`.
    """
    def __init__(self, id_ea=None, bb=None, fc=None):
        if bb is None and fc is None:
            # Address-based construction: resolve the containing block and
            # copy its state onto this instance.
            if id_ea is None:
                id_ea = idaapi.get_screen_ea()
            temp_codeblock = get_codeblock(id_ea)
            self.__dict__.update(temp_codeblock.__dict__)
        else:
            # Wrap an existing basic block belonging to flowchart `fc`.
            super(CodeBlock, self).__init__(id=id_ea, bb=bb, fc=fc)
    @property
    def lines(self):
        # All sark lines spanned by this block.
        return lines(self.startEA, self.endEA)
    @property
    def next(self):
        # Successor blocks of this block.
        return self.succs()
    @property
    def prev(self):
        # Predecessor blocks; populated when the flowchart was built with
        # the FC_PREDS flag (the default in sark's FlowChart).
        return self.preds()
    def set_color(self, color=None):
        """Set the block's background color; `None` clears it.

        Colors both the individual lines and the graph node of the
        containing function's flowchart.
        """
        for line in self.lines:
            line.color = color
        if color is None:
            # Clear the graph-node background color.
            idaapi.clr_node_info2(self._fc._q.bounds.startEA, self.id, idaapi.NIF_BG_COLOR)
        else:
            node_info = idaapi.node_info_t()
            node_info.bg_color = color
            idaapi.set_node_info2(self._fc._q.bounds.startEA, self.id, node_info, idaapi.NIF_BG_COLOR)
    @property
    def color(self):
        """Background color of the graph node, or `None` when unset/invalid."""
        node_info = idaapi.node_info_t()
        success = idaapi.get_node_info2(node_info, self._fc._q.bounds.startEA, self.id)
        if not success:
            return None
        if not node_info.valid_bg_color():
            return None
        return node_info.bg_color
    @color.setter
    def color(self, color):
        self.set_color(color)
    def __repr__(self):
        return "<CodeBlock(startEA=0x{:08X}, endEA=0x{:08X})>".format(self.startEA, self.endEA)
    def __eq__(self, other):
        # NOTE(review): equality is by start address only; assumes `other`
        # exposes `startEA` (another CodeBlock in the same IDB).
        return self.startEA == other.startEA
class FlowChart(idaapi.FlowChart):
    """Flowchart wrapper that yields sark `CodeBlock` objects."""

    def __init__(self, f=None, bounds=None, flags=idaapi.FC_PREDS):
        if f is None and bounds is None:
            # Neither a function nor bounds given: use the screen location.
            f = idaapi.get_screen_ea()
        if f is not None:
            # Accept plain addresses as well as function objects.
            f = get_func(f)
        super(FlowChart, self).__init__(f=f, bounds=bounds, flags=flags)

    def _getitem(self, index):
        raw_block = self._q[index]
        return CodeBlock(index, raw_block, self)
def get_flowchart(ea=None):
    """Return a `FlowChart` for the function containing `ea`.

    Defaults to the current screen address when `ea` is None.
    """
    if ea is None:
        ea = idaapi.get_screen_ea()
    return FlowChart(idaapi.get_func(ea))
def get_codeblock(ea=None):
    """Return the `CodeBlock` containing `ea`, or None when not found.

    Defaults to the current screen address when `ea` is None.
    """
    if ea is None:
        ea = idaapi.get_screen_ea()
    for block in get_flowchart(ea):
        if block.startEA <= ea < block.endEA:
            return block
    return None
def get_block_start(ea):
    """Return the start address of the IDA Graph block containing `ea`."""
    block = get_codeblock(ea)
    return block.startEA
def get_nx_graph(ea):
    """Convert an IDA flowchart to a NetworkX directed graph."""
    graph = networkx.DiGraph()
    for block in FlowChart(idaapi.get_func(ea)):
        # Add the node explicitly so edge-less blocks are included too.
        graph.add_node(block.startEA)
        graph.add_edges_from((pred.startEA, block.startEA) for pred in block.preds())
        graph.add_edges_from((block.startEA, succ.startEA) for succ in block.succs())
    return graph
def codeblocks(start=None, end=None, full=True):
    """Get all `CodeBlock`s in a given range.

    Args:
        start - start address of the range. If `None` uses IDB start.
        end - end address of the range. If `None` uses IDB end.
        full - `True` is required to change node info (e.g. color). `False` causes faster iteration.
    """
    if not full:
        # Fast path: a single flowchart over the raw address range.
        start, end = fix_addresses(start, end)
        for block in FlowChart(bounds=(start, end)):
            yield block
        return
    # Full path: per-function flowcharts, needed for node-info changes.
    for function in functions(start, end):
        for block in FlowChart(f=function.func_t):
            yield block
| import networkx
import idaapi
from .code import lines, functions
from .core import get_func, fix_addresses
class CodeBlock(idaapi.BasicBlock):
    """A basic block with sark conveniences (lines, neighbors, coloring).

    Construct either from an effective address (``CodeBlock(ea)``, defaulting
    to the current screen location) or by wrapping an existing basic block
    (``CodeBlock(id, bb, fc)``) as done by `FlowChart._getitem`.
    """
    def __init__(self, id_ea=None, bb=None, fc=None):
        if bb is None and fc is None:
            # Address-based construction: resolve the containing block and
            # copy its state onto this instance.
            if id_ea is None:
                id_ea = idaapi.get_screen_ea()
            temp_codeblock = get_codeblock(id_ea)
            self.__dict__.update(temp_codeblock.__dict__)
        else:
            # Wrap an existing basic block belonging to flowchart `fc`.
            super(CodeBlock, self).__init__(id=id_ea, bb=bb, fc=fc)
    @property
    def lines(self):
        # All sark lines spanned by this block.
        return lines(self.startEA, self.endEA)
    @property
    def next(self):
        # Successor blocks of this block.
        return self.succs()
    @property
    def prev(self):
        # Predecessor blocks; populated when the flowchart was built with
        # the FC_PREDS flag (the default in sark's FlowChart).
        return self.preds()
    def set_color(self, color=None):
        """Set the block's background color; `None` clears it.

        Colors both the individual lines and the graph node of the
        containing function's flowchart.
        """
        for line in self.lines:
            line.color = color
        if color is None:
            # Clear the graph-node background color.
            idaapi.clr_node_info2(self._fc._q.bounds.startEA, self.id, idaapi.NIF_BG_COLOR)
        else:
            node_info = idaapi.node_info_t()
            node_info.bg_color = color
            idaapi.set_node_info2(self._fc._q.bounds.startEA, self.id, node_info, idaapi.NIF_BG_COLOR)
    @property
    def color(self):
        """Background color of the graph node, or `None` when unset/invalid."""
        node_info = idaapi.node_info_t()
        success = idaapi.get_node_info2(node_info, self._fc._q.bounds.startEA, self.id)
        if not success:
            return None
        if not node_info.valid_bg_color():
            return None
        return node_info.bg_color
    @color.setter
    def color(self, color):
        self.set_color(color)
    def __repr__(self):
        return "<CodeBlock(startEA=0x{:08X}, endEA=0x{:08X})>".format(self.startEA, self.endEA)
    def __eq__(self, other):
        # NOTE(review): equality is by start address only; assumes `other`
        # exposes `startEA` (another CodeBlock in the same IDB).
        return self.startEA == other.startEA
class FlowChart(idaapi.FlowChart):
    """Flowchart wrapper that yields sark `CodeBlock` objects."""

    def __init__(self, f=None, bounds=None, flags=idaapi.FC_PREDS):
        if f is None and bounds is None:
            # Neither a function nor bounds given: use the screen location.
            f = idaapi.get_screen_ea()
        if f is not None:
            # Accept plain addresses as well as function objects.
            f = get_func(f)
        super(FlowChart, self).__init__(f=f, bounds=bounds, flags=flags)

    def _getitem(self, index):
        raw_block = self._q[index]
        return CodeBlock(index, raw_block, self)
def get_flowchart(ea=None):
    """Return a `FlowChart` for the function containing `ea`.

    Defaults to the current screen address when `ea` is None.
    """
    if ea is None:
        ea = idaapi.get_screen_ea()
    return FlowChart(idaapi.get_func(ea))
def get_codeblock(ea=None):
    """Return the `CodeBlock` containing `ea`, or None when not found.

    Defaults to the current screen address when `ea` is None.
    """
    if ea is None:
        ea = idaapi.get_screen_ea()
    for block in get_flowchart(ea):
        if block.startEA <= ea < block.endEA:
            return block
    return None
def get_block_start(ea):
    """Return the start address of the IDA Graph block containing `ea`."""
    block = get_codeblock(ea)
    return block.startEA
def get_nx_graph(ea):
    """Convert an IDA flowchart to a NetworkX directed graph."""
    graph = networkx.DiGraph()
    for block in FlowChart(idaapi.get_func(ea)):
        # Add the node explicitly so edge-less blocks are included too.
        graph.add_node(block.startEA)
        graph.add_edges_from((pred.startEA, block.startEA) for pred in block.preds())
        graph.add_edges_from((block.startEA, succ.startEA) for succ in block.succs())
    return graph
def codeblocks(start=None, end=None, full=True):
"""Get all `CodeBlock`s in a given range.
Args:
start - start address of the range. If `None` uses IDB start.
end - end address of the range. If `None` uses IDB end.
full - `True` is required to change node info (e.g. color). `False` causes faster iteration.
"""
if full:
for function in functions(start, end):
fc = FlowChart(f=function.func_t)
for block in fc:
yield block
else:
start, end = fix_addresses(start, end)
for code_block in FlowChart(bounds=(start, end)):
yield code_block | en | 0.781006 | Get the start address of an IDA Graph block. Convert an IDA flowchart to a NetworkX graph. # Make sure all nodes are added (including edge-less nodes) Get all `CodeBlock`s in a given range. Args: start - start address of the range. If `None` uses IDB start. end - end address of the range. If `None` uses IDB end. full - `True` is required to change node info (e.g. color). `False` causes faster iteration. | 2.435236 | 2 |
src/notification.py | maknotavailable/security-hub | 2 | 6624550 | <reponame>maknotavailable/security-hub
import boto3 as b3
from botocore.exceptions import ClientError
import logging
# Custom functions
import utils
# Format logging
# Module-level logger named after this module.
log = logging.getLogger(__name__)
# NOTE(review): basicConfig at import time configures the root logger
# globally; fine for a script, reconsider if imported as a library.
logging.basicConfig(level=logging.INFO,
                    format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s')
class Email():
    """Thin wrapper around AWS SES for sending plain-text notification emails."""

    def __init__(self):
        """Authenticate email server and prepare configuration."""
        # AWS credentials and region come from the secrets store.
        ACCESS_KEY = utils.get_secret("ACCESS_KEY_ID", "aws")
        SECRET_KEY = utils.get_secret("SECRET_ACCESS_KEY", "aws")
        AWS_REGION = utils.get_secret("REGION", "aws")
        # Create a new SES client for the configured region.
        self.client = b3.client('ses',
                                aws_access_key_id=ACCESS_KEY,
                                aws_secret_access_key=SECRET_KEY,
                                region_name=AWS_REGION)
        # Sender address and comma-separated recipient list.
        self.SENDER = utils.get_secret("sender", "email")
        recipients = utils.get_secret("receiver", "email")
        self.RECEIVERS = recipients.split(",")
        # Character set used for subject and body.
        self.CHARSET = "UTF-8"

    def send(self, subject: str, body: str):
        """Send a plain-text email with the given subject and body.

        SES errors are logged, not raised.
        """
        try:
            response = self.client.send_email(
                Destination={
                    'ToAddresses': self.RECEIVERS
                },
                Message={
                    'Body': {
                        'Text': {
                            'Charset': self.CHARSET,
                            'Data': body,
                        },
                    },
                    'Subject': {
                        'Charset': self.CHARSET,
                        'Data': subject,
                    },
                },
                Source=self.SENDER,
            )
        except ClientError as e:
            log.error(e.response['Error']['Message'])
        else:
            # Fixed: the original statement ended with a stray trailing comma
            # (building a useless tuple) and formatted eagerly with `%`;
            # defer formatting to the logging framework instead.
            log.info("Email sent! Message ID: %s", response['MessageId'])
from botocore.exceptions import ClientError
import logging
# Custom functions
import utils
# Format logging
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO,
format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s')
class Email():
def __init__(self):
"""Authenticate email server and prepare configuration"""
# AWS access
ACCESS_KEY = utils.get_secret("ACCESS_KEY_ID", "aws")
SECRET_KEY = utils.get_secret("SECRET_ACCESS_KEY", "aws")
AWS_REGION = utils.get_secret("REGION", "aws")
# Create a new SES resource and specify a region.
self.client = b3.client('ses',
aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY,
region_name = AWS_REGION)
# Sender, receiver
self.SENDER = utils.get_secret("sender", "email")
recipients = utils.get_secret("receiver", "email")
self.RECEIVERS = recipients.split(",")
# Config
self.CHARSET = "UTF-8"
def send(self, subject: str, body: str):
"""Append payload and send email"""
# Try to send the email.
try:
#Provide the contents of the email.
response = self.client.send_email(
Destination={
'ToAddresses': self.RECEIVERS
},
Message={
'Body': {
'Text': {
'Charset': self.CHARSET,
'Data': body,
},
},
'Subject': {
'Charset': self.CHARSET,
'Data': subject,
},
},
Source = self.SENDER,
)
except ClientError as e:
log.error(e.response['Error']['Message'])
else:
log.info("Email sent! Message ID: %s" % response['MessageId']), | en | 0.703457 | # Custom functions # Format logging Authenticate email server and prepare configuration # AWS access # Create a new SES resource and specify a region. # Sender, receiver # Config Append payload and send email # Try to send the email. #Provide the contents of the email. | 2.34953 | 2 |