""" The Plaid API The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501 Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from plaid.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) def lazy_import(): from plaid.model.account_base import AccountBase from plaid.model.processor_number import ProcessorNumber globals()['AccountBase'] = AccountBase globals()['ProcessorNumber'] = ProcessorNumber class ProcessorAuthGetResponse(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = { } validations = { } @cached_property def additional_properties_type(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'request_id': (str,), # noqa: E501 'numbers': (ProcessorNumber,), # noqa: E501 'account': (AccountBase,), # noqa: E501 } @cached_property def discriminator(): return None attribute_map = { 'request_id': 'request_id', # noqa: E501 'numbers': 'numbers', # noqa: E501 'account': 'account', # noqa: E501 } _composed_schemas = {} required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args def __init__(self, request_id, numbers, account, *args, **kwargs): # noqa: E501 """ProcessorAuthGetResponse - a model defined in OpenAPI Args: request_id (str): A unique identifier for the request, which can be used for troubleshooting. This identifier, like all Plaid identifiers, is case sensitive. numbers (ProcessorNumber): account (AccountBase): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. 
Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) """ _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) self.request_id = request_id self.numbers = numbers self.account = account for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ self.additional_properties_type is None: # discard variable. continue setattr(self, var_name, var_value)
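
# Usage sketch (editor's addition, not part of the generated file). How a
# response of this type is typically obtained; `client` and the request
# object are assumptions standing in for a configured Plaid API client:
#
#     response = client.processor_auth_get(request)   # hypothetical client call
#     print(response.request_id)    # str
#     print(response.numbers)       # ProcessorNumber instance
#     print(response.account)       # AccountBase instance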
{ "content_hash": "cfec61e05338b43785c71a0a0dd272c0", "timestamp": "", "source": "github", "line_count": 189, "max_line_length": 169, "avg_line_length": 39.83068783068783, "alnum_prop": 0.5661530286928799, "repo_name": "plaid/plaid-python", "id": "9217ef322284ebcd5bf2af4f3987099bbf807d52", "size": "7528", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plaid/model/processor_auth_get_response.py", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "323" }, { "name": "Makefile", "bytes": "622" }, { "name": "Mustache", "bytes": "125163" }, { "name": "Python", "bytes": "9342874" } ], "symlink_target": "" }
# --- kabrapratik28/Stanford_courses :: cs224n/assignment1/utils/treebank.py (apache-2.0) ---

import cPickle as pickle
import numpy as np
import os
import random


class StanfordSentiment:
    def __init__(self, path=None, tablesize=1000000):
        if not path:
            path = "utils/datasets/stanfordSentimentTreebank"

        self.path = path
        self.tablesize = tablesize

    def tokens(self):
        if hasattr(self, "_tokens") and self._tokens:
            return self._tokens

        tokens = dict()
        tokenfreq = dict()
        wordcount = 0
        revtokens = []
        idx = 0

        for sentence in self.sentences():
            for w in sentence:
                wordcount += 1
                if w not in tokens:
                    tokens[w] = idx
                    revtokens += [w]
                    tokenfreq[w] = 1
                    idx += 1
                else:
                    tokenfreq[w] += 1

        tokens["UNK"] = idx
        revtokens += ["UNK"]
        tokenfreq["UNK"] = 1
        wordcount += 1

        self._tokens = tokens
        self._tokenfreq = tokenfreq
        self._wordcount = wordcount
        self._revtokens = revtokens
        return self._tokens

    def sentences(self):
        if hasattr(self, "_sentences") and self._sentences:
            return self._sentences

        sentences = []
        with open(self.path + "/datasetSentences.txt", "r") as f:
            first = True
            for line in f:
                if first:
                    first = False
                    continue

                splitted = line.strip().split()[1:]
                # Deal with some peculiar encoding issues with this file
                sentences += [[w.lower().decode("utf-8").encode('latin1')
                               for w in splitted]]

        self._sentences = sentences
        self._sentlengths = np.array([len(s) for s in sentences])
        self._cumsentlen = np.cumsum(self._sentlengths)

        return self._sentences

    def numSentences(self):
        if hasattr(self, "_numSentences") and self._numSentences:
            return self._numSentences
        else:
            self._numSentences = len(self.sentences())
            return self._numSentences

    def allSentences(self):
        if hasattr(self, "_allsentences") and self._allsentences:
            return self._allsentences

        sentences = self.sentences()
        rejectProb = self.rejectProb()
        tokens = self.tokens()
        allsentences = [[w for w in s
                         if 0 >= rejectProb[tokens[w]]
                         or random.random() >= rejectProb[tokens[w]]]
                        for s in sentences * 30]

        allsentences = [s for s in allsentences if len(s) > 1]

        self._allsentences = allsentences

        return self._allsentences

    def getRandomContext(self, C=5):
        allsent = self.allSentences()
        sentID = random.randint(0, len(allsent) - 1)
        sent = allsent[sentID]
        wordID = random.randint(0, len(sent) - 1)

        context = sent[max(0, wordID - C):wordID]
        if wordID + 1 < len(sent):
            context += sent[wordID + 1:min(len(sent), wordID + C + 1)]

        centerword = sent[wordID]
        context = [w for w in context if w != centerword]

        if len(context) > 0:
            return centerword, context
        else:
            return self.getRandomContext(C)

    def sent_labels(self):
        if hasattr(self, "_sent_labels") and self._sent_labels:
            return self._sent_labels

        dictionary = dict()
        phrases = 0
        with open(self.path + "/dictionary.txt", "r") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                splitted = line.split("|")
                dictionary[splitted[0].lower()] = int(splitted[1])
                phrases += 1

        labels = [0.0] * phrases
        with open(self.path + "/sentiment_labels.txt", "r") as f:
            first = True
            for line in f:
                if first:
                    first = False
                    continue

                line = line.strip()
                if not line:
                    continue
                splitted = line.split("|")
                labels[int(splitted[0])] = float(splitted[1])

        sent_labels = [0.0] * self.numSentences()
        sentences = self.sentences()
        for i in xrange(self.numSentences()):
            sentence = sentences[i]
            full_sent = " ".join(sentence).replace('-lrb-', '(').replace('-rrb-', ')')
            sent_labels[i] = labels[dictionary[full_sent]]

        self._sent_labels = sent_labels
        return self._sent_labels

    def dataset_split(self):
        if hasattr(self, "_split") and self._split:
            return self._split

        split = [[] for i in xrange(3)]
        with open(self.path + "/datasetSplit.txt", "r") as f:
            first = True
            for line in f:
                if first:
                    first = False
                    continue

                splitted = line.strip().split(",")
                split[int(splitted[1]) - 1] += [int(splitted[0]) - 1]

        self._split = split
        return self._split

    def getRandomTrainSentence(self):
        split = self.dataset_split()
        sentId = split[0][random.randint(0, len(split[0]) - 1)]
        return self.sentences()[sentId], self.categorify(self.sent_labels()[sentId])

    def categorify(self, label):
        if label <= 0.2:
            return 0
        elif label <= 0.4:
            return 1
        elif label <= 0.6:
            return 2
        elif label <= 0.8:
            return 3
        else:
            return 4

    def getDevSentences(self):
        return self.getSplitSentences(2)

    def getTestSentences(self):
        return self.getSplitSentences(1)

    def getTrainSentences(self):
        return self.getSplitSentences(0)

    def getSplitSentences(self, split=0):
        ds_split = self.dataset_split()
        return [(self.sentences()[i], self.categorify(self.sent_labels()[i]))
                for i in ds_split[split]]

    def sampleTable(self):
        if hasattr(self, '_sampleTable') and self._sampleTable is not None:
            return self._sampleTable

        nTokens = len(self.tokens())
        samplingFreq = np.zeros((nTokens,))
        self.allSentences()
        # (Editor's fix: the original looped over a throwaway variable and
        # advanced `i` by hand; iterating over the index directly is
        # equivalent and clearer.)
        for i in xrange(nTokens):
            w = self._revtokens[i]
            if w in self._tokenfreq:
                freq = 1.0 * self._tokenfreq[w]
                # Reweigh
                freq = freq ** 0.75
            else:
                freq = 0.0
            samplingFreq[i] = freq

        samplingFreq /= np.sum(samplingFreq)
        samplingFreq = np.cumsum(samplingFreq) * self.tablesize

        self._sampleTable = [0] * self.tablesize

        j = 0
        for i in xrange(self.tablesize):
            while i > samplingFreq[j]:
                j += 1
            self._sampleTable[i] = j

        return self._sampleTable

    def rejectProb(self):
        if hasattr(self, '_rejectProb') and self._rejectProb is not None:
            return self._rejectProb

        threshold = 1e-5 * self._wordcount

        nTokens = len(self.tokens())
        rejectProb = np.zeros((nTokens,))
        for i in xrange(nTokens):
            w = self._revtokens[i]
            freq = 1.0 * self._tokenfreq[w]
            # Reweigh
            rejectProb[i] = max(0, 1 - np.sqrt(threshold / freq))

        self._rejectProb = rejectProb
        return self._rejectProb

    def sampleTokenIdx(self):
        return self.sampleTable()[random.randint(0, self.tablesize - 1)]
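
# Usage sketch (editor's addition): how the assignment code typically drives
# this class. Assumes the treebank files exist at the default path and that
# this runs under Python 2, like the module above.
#
#     dataset = StanfordSentiment()
#     tokens = dataset.tokens()              # word -> index, includes "UNK"
#     centerword, context = dataset.getRandomContext(C=5)
#     negative = dataset.sampleTokenIdx()    # drawn from the unigram^0.75 table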
{ "content_hash": "e5bf7d8d206257b4c20c5f8109146553", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 103, "avg_line_length": 30.8, "alnum_prop": 0.526636628677445, "repo_name": "kabrapratik28/Stanford_courses", "id": "4ae128d3ce9c38a87db687118bde999ebdfaf530", "size": "7593", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "cs224n/assignment1/utils/treebank.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jupyter Notebook", "bytes": "11387872" }, { "name": "Makefile", "bytes": "183" }, { "name": "Python", "bytes": "729990" }, { "name": "Shell", "bytes": "6036" } ], "symlink_target": "" }
# --- beeftornado/sentry :: src/sentry/models/rawevent.py (bsd-3-clause) ---

from __future__ import absolute_import

from django.db import models
from django.utils import timezone

from sentry.db.models import Model, NodeField, FlexibleForeignKey, sane_repr
from sentry.db.models.manager import BaseManager
from sentry.utils.canonical import CanonicalKeyView


def ref_func(x):
    return x.project_id or x.project.id


class RawEvent(Model):
    __core__ = False

    project = FlexibleForeignKey("sentry.Project")
    event_id = models.CharField(max_length=32, null=True)
    datetime = models.DateTimeField(default=timezone.now)
    data = NodeField(
        blank=True, null=True, ref_func=ref_func, ref_version=1,
        wrapper=CanonicalKeyView
    )

    objects = BaseManager()

    class Meta:
        app_label = "sentry"
        db_table = "sentry_rawevent"
        unique_together = (("project", "event_id"),)

    __repr__ = sane_repr("project_id")
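
# Usage sketch (editor's addition): storing a raw event by hand. Field values
# are illustrative; in Sentry this model is normally written by the event
# ingestion pipeline rather than directly.
#
#     RawEvent.objects.create(
#         project=project,               # an existing sentry.Project
#         event_id="a" * 32,
#         data={"message": "hello"},     # serialized through NodeField
#     )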
{ "content_hash": "654b3839ee7b57698fca0c4dcae224e0", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 89, "avg_line_length": 27.4375, "alnum_prop": 0.6993166287015945, "repo_name": "beeftornado/sentry", "id": "e197a6998fa90d5f4209cfabdc5bf0cedaab90a6", "size": "878", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/sentry/models/rawevent.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "157195" }, { "name": "HTML", "bytes": "197026" }, { "name": "JavaScript", "bytes": "380379" }, { "name": "Makefile", "bytes": "2832" }, { "name": "Python", "bytes": "6473603" } ], "symlink_target": "" }
"""Test Zeroconf component setup process.""" import pytest from zeroconf import InterfaceChoice, ServiceInfo, ServiceStateChange from homeassistant.components import zeroconf from homeassistant.components.zeroconf import CONF_DEFAULT_INTERFACE from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.generated import zeroconf as zc_gen from homeassistant.setup import async_setup_component from tests.async_mock import patch NON_UTF8_VALUE = b"ABCDEF\x8a" NON_ASCII_KEY = b"non-ascii-key\x8a" PROPERTIES = { b"macaddress": b"ABCDEF012345", b"non-utf8-value": NON_UTF8_VALUE, NON_ASCII_KEY: None, } HOMEKIT_STATUS_UNPAIRED = b"1" HOMEKIT_STATUS_PAIRED = b"0" @pytest.fixture def mock_zeroconf(): """Mock zeroconf.""" with patch("homeassistant.components.zeroconf.HaZeroconf") as mock_zc: yield mock_zc.return_value def service_update_mock(zeroconf, services, handlers): """Call service update handler.""" for service in services: handlers[0](zeroconf, service, f"name.{service}", ServiceStateChange.Added) def get_service_info_mock(service_type, name): """Return service info for get_service_info.""" return ServiceInfo( service_type, name, addresses=[b"\n\x00\x00\x14"], port=80, weight=0, priority=0, server="name.local.", properties=PROPERTIES, ) def get_homekit_info_mock(model, pairing_status): """Return homekit info for get_service_info for an homekit device.""" def mock_homekit_info(service_type, name): return ServiceInfo( service_type, name, addresses=[b"\n\x00\x00\x14"], port=80, weight=0, priority=0, server="name.local.", properties={b"md": model.encode(), b"sf": pairing_status}, ) return mock_homekit_info async def test_setup(hass, mock_zeroconf): """Test configured options for a device are loaded via config entry.""" with patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow, patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ) as mock_service_browser: mock_zeroconf.get_service_info.side_effect = get_service_info_mock assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) assert len(mock_service_browser.mock_calls) == 1 expected_flow_calls = 0 for matching_components in zc_gen.ZEROCONF.values(): expected_flow_calls += len(matching_components) assert len(mock_config_flow.mock_calls) == expected_flow_calls # Test instance is set. 
assert "zeroconf" in hass.data assert await hass.components.zeroconf.async_get_instance() is mock_zeroconf async def test_setup_with_default_interface(hass, mock_zeroconf): """Test default interface config.""" with patch.object(hass.config_entries.flow, "async_init"), patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ): mock_zeroconf.get_service_info.side_effect = get_service_info_mock assert await async_setup_component( hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {CONF_DEFAULT_INTERFACE: True}} ) assert mock_zeroconf.called_with(interface_choice=InterfaceChoice.Default) async def test_setup_without_default_interface(hass, mock_zeroconf): """Test without default interface config.""" with patch.object(hass.config_entries.flow, "async_init"), patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ): mock_zeroconf.get_service_info.side_effect = get_service_info_mock assert await async_setup_component( hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {CONF_DEFAULT_INTERFACE: False}} ) assert mock_zeroconf.called_with() async def test_homekit_match_partial_space(hass, mock_zeroconf): """Test configured options for a device are loaded via config entry.""" with patch.dict( zc_gen.ZEROCONF, {zeroconf.HOMEKIT_TYPE: ["homekit_controller"]}, clear=True ), patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow, patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ) as mock_service_browser: mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock( "LIFX bulb", HOMEKIT_STATUS_UNPAIRED ) assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "lifx" async def test_homekit_match_partial_dash(hass, mock_zeroconf): """Test configured options for a device are loaded via config entry.""" with patch.dict( zc_gen.ZEROCONF, {zeroconf.HOMEKIT_TYPE: ["homekit_controller"]}, clear=True ), patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow, patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ) as mock_service_browser: mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock( "Rachio-fa46ba", HOMEKIT_STATUS_UNPAIRED ) assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "rachio" async def test_homekit_match_full(hass, mock_zeroconf): """Test configured options for a device are loaded via config entry.""" with patch.dict( zc_gen.ZEROCONF, {zeroconf.HOMEKIT_TYPE: ["homekit_controller"]}, clear=True ), patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow, patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ) as mock_service_browser: mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock( "BSB002", HOMEKIT_STATUS_UNPAIRED ) assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) homekit_mock = get_homekit_info_mock("BSB002", HOMEKIT_STATUS_UNPAIRED) info = homekit_mock("_hap._tcp.local.", "BSB002._hap._tcp.local.") import pprint pprint.pprint(["homekit", info]) assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "hue" async def test_homekit_already_paired(hass, mock_zeroconf): 
"""Test that an already paired device is sent to homekit_controller.""" with patch.dict( zc_gen.ZEROCONF, {zeroconf.HOMEKIT_TYPE: ["homekit_controller"]}, clear=True ), patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow, patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ) as mock_service_browser: mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock( "tado", HOMEKIT_STATUS_PAIRED ) assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 2 assert mock_config_flow.mock_calls[0][1][0] == "tado" assert mock_config_flow.mock_calls[1][1][0] == "homekit_controller" async def test_homekit_invalid_paring_status(hass, mock_zeroconf): """Test that missing paring data is not sent to homekit_controller.""" with patch.dict( zc_gen.ZEROCONF, {zeroconf.HOMEKIT_TYPE: ["homekit_controller"]}, clear=True ), patch.object( hass.config_entries.flow, "async_init" ) as mock_config_flow, patch.object( zeroconf, "HaServiceBrowser", side_effect=service_update_mock ) as mock_service_browser: mock_zeroconf.get_service_info.side_effect = get_homekit_info_mock( "tado", b"invalid" ) assert await async_setup_component(hass, zeroconf.DOMAIN, {zeroconf.DOMAIN: {}}) assert len(mock_service_browser.mock_calls) == 1 assert len(mock_config_flow.mock_calls) == 1 assert mock_config_flow.mock_calls[0][1][0] == "tado" async def test_info_from_service_non_utf8(hass): """Test info_from_service handles non UTF-8 property keys and values correctly.""" service_type = "_test._tcp.local." info = zeroconf.info_from_service( get_service_info_mock(service_type, f"test.{service_type}") ) raw_info = info["properties"].pop("_raw", False) assert raw_info assert len(raw_info) == len(PROPERTIES) - 1 assert NON_ASCII_KEY not in raw_info assert len(info["properties"]) <= len(raw_info) assert "non-utf8-value" not in info["properties"] assert raw_info["non-utf8-value"] is NON_UTF8_VALUE async def test_get_instance(hass, mock_zeroconf): """Test we get an instance.""" assert await hass.components.zeroconf.async_get_instance() is mock_zeroconf hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() assert len(mock_zeroconf.ha_close.mock_calls) == 1
{ "content_hash": "f583788283af4947ec73b57ff831a7c0", "timestamp": "", "source": "github", "line_count": 238, "max_line_length": 88, "avg_line_length": 38.95798319327731, "alnum_prop": 0.6769844693701467, "repo_name": "robbiet480/home-assistant", "id": "45b1d9b1171df4f706bd6b83aa0aec4d063d172d", "size": "9272", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "tests/components/zeroconf/test_init.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "18837456" }, { "name": "Shell", "bytes": "6846" } ], "symlink_target": "" }
""" This script fills the contents of doc/user_guide/API.rst based on the updated Altair schema. """ from os.path import abspath, dirname, join import sys import types # Import Altair from head ROOT_DIR = abspath(join(dirname(__file__), "..")) sys.path.insert(0, ROOT_DIR) import altair as alt # noqa: E402 API_FILENAME = join(ROOT_DIR, "doc", "user_guide", "API.rst") API_TEMPLATE = """\ .. _API: API Reference ============= This is the class and function reference of Altair, and the following content is generated automatically from the code documentation strings. Please refer to the `full user guide <http://altair-viz.github.io>`_ for further details, as this low-level documentation may not be enough to give full guidelines on their use. Top-Level Objects ----------------- .. currentmodule:: altair .. autosummary:: :toctree: generated/toplevel/ :nosignatures: {toplevel_charts} Encoding Channels ----------------- .. currentmodule:: altair .. autosummary:: :toctree: generated/channels/ :nosignatures: {encoding_wrappers} API Functions ------------- .. currentmodule:: altair .. autosummary:: :toctree: generated/api/ :nosignatures: {api_functions} Low-Level Schema Wrappers ------------------------- .. currentmodule:: altair .. autosummary:: :toctree: generated/core/ :nosignatures: {lowlevel_wrappers} """ def iter_objects( mod, ignore_private=True, restrict_to_type=None, restrict_to_subclass=None ): for name in dir(mod): obj = getattr(mod, name) if ignore_private: if name.startswith("_"): continue if restrict_to_type is not None: if not isinstance(obj, restrict_to_type): continue if restrict_to_subclass is not None: if not (isinstance(obj, type) and issubclass(obj, restrict_to_subclass)): continue yield name def toplevel_charts(): return sorted(iter_objects(alt.api, restrict_to_subclass=alt.TopLevelMixin)) def encoding_wrappers(): return sorted(iter_objects(alt.channels, restrict_to_subclass=alt.SchemaBase)) def api_functions(): return sorted(iter_objects(alt.api, restrict_to_type=types.FunctionType)) def lowlevel_wrappers(): return sorted(iter_objects(alt.schema.core, restrict_to_subclass=alt.SchemaBase)) def write_api_file(): print("Updating API docs\n ->{}".format(API_FILENAME)) sep = "\n " with open(API_FILENAME, "w") as f: f.write( API_TEMPLATE.format( toplevel_charts=sep.join(toplevel_charts()), api_functions=sep.join(api_functions()), encoding_wrappers=sep.join(encoding_wrappers()), lowlevel_wrappers=sep.join(lowlevel_wrappers()), ) ) if __name__ == "__main__": write_api_file()
{ "content_hash": "5d920d5695fcfd49d5100dde1d16bbce", "timestamp": "", "source": "github", "line_count": 118, "max_line_length": 85, "avg_line_length": 24.24576271186441, "alnum_prop": 0.6399860188745194, "repo_name": "jakevdp/altair", "id": "a70c3aa734a665fdc03486ce017d719505105ee6", "size": "2861", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tools/generate_api_docs.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "343" }, { "name": "Python", "bytes": "5353045" }, { "name": "TeX", "bytes": "2684" } ], "symlink_target": "" }
# --- eunchong/build :: scripts/slave/recipe_modules/isolate/api.py (bsd-3-clause) ---

from recipe_engine import recipe_api


class IsolateApi(recipe_api.RecipeApi):
  """APIs for interacting with isolates."""

  def __init__(self, **kwargs):
    super(IsolateApi, self).__init__(**kwargs)
    self._isolate_server = 'https://isolateserver.appspot.com'
    self._isolated_tests = {}

  @property
  def isolate_server(self):
    """URL of Isolate server to use, default is a production one."""
    return self._isolate_server

  @isolate_server.setter
  def isolate_server(self, value):
    """Changes URL of Isolate server to use."""
    self._isolate_server = value

  def set_isolate_environment(self, config):
    """Modifies the config to include isolate related GYP_DEFINES.

    Modifies the passed Config (which should generally be api.chromium.c) to
    set up the appropriate GYP_DEFINES to prepare all necessary files to do
    this after compile. This must be called early in your recipe; definitely
    before the checkout and runhooks steps.
    """
    config.gyp_env.GYP_DEFINES['test_isolation_mode'] = 'prepare'

  def clean_isolated_files(self, build_dir):
    """Cleans out all *.isolated files from the build directory in
    preparation for the compile. Needed in order to ensure isolates are
    rebuilt properly because their dependencies are currently not completely
    described to gyp.

    Should be invoked before compilation in both 'archive' or 'prepare' modes
    (see 'set_isolate_environment').
    """
    self.m.python(
        'clean isolated files',
        self.resource('find_isolated_tests.py'),
        [
          '--build-dir', build_dir,
          '--clean-isolated-files',
        ])

  def find_isolated_tests(self, build_dir, targets=None, **kwargs):
    """Returns a step which finds all *.isolated files in a build directory.

    Useful only with 'archive' isolation mode (see 'set_isolate_environment').
    In 'prepare' mode use 'isolate_tests' instead.

    Assigns the dict {target name -> *.isolated file hash} to the
    swarm_hashes build property. This implies this step can currently only
    be run once per recipe.

    If |targets| is None, the step will use all *.isolated files it finds.
    Otherwise, it will verify that all |targets| are found and will use only
    them. If some expected targets are missing, will abort the build.
    """
    step_result = self.m.python(
        'find isolated tests',
        self.resource('find_isolated_tests.py'),
        [
          '--build-dir', build_dir,
          '--output-json', self.m.json.output(),
        ],
        step_test_data=lambda: (self.test_api.output_json(targets)),
        **kwargs)

    assert isinstance(step_result.json.output, dict)
    self._isolated_tests = step_result.json.output
    if targets is not None and (
            step_result.presentation.status != self.m.step.FAILURE):
      found = set(step_result.json.output)
      expected = set(targets)
      if found >= expected:
        # Limit result only to |expected|.
        self._isolated_tests = {
          target: step_result.json.output[target] for target in expected
        }
      else:
        # Some expected targets are missing? Fail the step.
        step_result.presentation.status = self.m.step.FAILURE
        step_result.presentation.logs['missing.isolates'] = (
            ['Failed to find *.isolated files:'] + list(expected - found))
    step_result.presentation.properties['swarm_hashes'] = self._isolated_tests
    # No isolated files found? That looks suspicious, emit warning.
    if (not self._isolated_tests and
        step_result.presentation.status != self.m.step.FAILURE):
      step_result.presentation.status = self.m.step.WARNING

  def isolate_tests(self, build_dir, targets=None, verbose=False,
                    set_swarm_hashes=True, **kwargs):
    """Archives prepared tests in |build_dir| to isolate server.

    Works only if Chromium was compiled with test_isolation_mode=='prepare'.
    See set_isolate_environment().

    In that mode src/tools/isolate_driver.py is invoked by ninja during
    compilation to produce *.isolated.gen.json files that describe how to
    archive tests.

    This step then uses *.isolated.gen.json files to actually perform the
    archival. By archiving all tests at once it is able to reduce the total
    amount of work. Tests share many common files, and such files are
    processed only once.

    Assigns the dict {target name -> *.isolated file hash} to the
    swarm_hashes build property (also accessible as 'isolated_tests'
    property). This implies this step can currently only be run once per
    recipe.
    """
    # TODO(vadimsh): Always require |targets| to be passed explicitly.
    # Currently chromium_trybot, blink_trybot and swarming/canary recipes
    # rely on targets autodiscovery. The code path in chromium_trybot that
    # needs it is being deprecated in favor of *_ng builders, that pass
    # targets explicitly.
    if targets is None:
      # Ninja builds <target>.isolated.gen.json files via isolate_driver.py.
      paths = self.m.file.glob(
          'find isolated targets',
          build_dir.join('*.isolated.gen.json'),
          test_data=[
            build_dir.join('dummy_target_%d.isolated.gen.json' % i)
            for i in (1, 2)
          ])
      targets = []
      for p in paths:
        name = self.m.path.basename(p)
        assert name.endswith('.isolated.gen.json'), name
        targets.append(name[:-len('.isolated.gen.json')])

    # No isolated tests found.
    if not targets:  # pragma: no cover
      return

    input_files = [build_dir.join('%s.isolated.gen.json' % t)
                   for t in targets]

    try:
      # TODO(vadimsh): Differentiate between bad *.isolate and upload errors.
      # Raise InfraFailure on upload errors.
      args = [
        self.m.swarming_client.path,
        'batcharchive',
        '--dump-json', self.m.json.output(),
        '--isolate-server', self._isolate_server,
      ] + (['--verbose'] if verbose else []) + input_files
      return self.m.python(
          'isolate tests', self.resource('isolate.py'), args,
          step_test_data=lambda: (self.test_api.output_json(targets)),
          **kwargs)
    finally:
      step_result = self.m.step.active_result
      self._isolated_tests = step_result.json.output
      if self._isolated_tests:
        presentation = step_result.presentation
        if set_swarm_hashes:
          presentation.properties['swarm_hashes'] = self._isolated_tests
        missing = sorted(
            t for t, h in self._isolated_tests.iteritems() if not h)
        if missing:
          step_result.presentation.logs['failed to isolate'] = (
              ['Failed to isolate following targets:'] +
              missing +
              ['', 'See logs for more information.']
          )
          for k in missing:
            self._isolated_tests.pop(k)

  @property
  def isolated_tests(self):
    """The dictionary of 'target name -> isolated hash' for this run.

    These come either from the incoming swarm_hashes build property, or from
    calling find_isolated_tests, above, at some point during the run.
    """
    hashes = self.m.properties.get('swarm_hashes', self._isolated_tests)
    # Be robust in the case where swarm_hashes is an empty string
    # instead of an empty dictionary, or similar.
    if not hashes:
      return {}  # pragma: no cover
    return {
      k.encode('ascii'): v.encode('ascii')
      for k, v in hashes.iteritems()
    }

  @property
  def _run_isolated_path(self):
    """Returns the path to run_isolated.py."""
    return self.m.swarming_client.path.join('run_isolated.py')

  def run_isolated(self, name, isolate_hash, args=None, **kwargs):
    """Runs an isolated test."""
    cmd = [
        '--isolated', isolate_hash,
        '-I', self.isolate_server,
        '--verbose',
    ]
    if args:
      cmd.append('--')
      cmd.extend(args)
    self.m.python(name, self._run_isolated_path, cmd, **kwargs)

  def remove_build_metadata(self):
    """Removes the build metadata embedded in the build artifacts."""
    args = [
        '--build-dir', self.m.chromium.output_dir,
        '--src-dir', self.m.path['checkout'],
    ]
    # Turn the failures during this step into warnings, it's a best effort
    # step that shouldn't break the build for now.
    try:
      self.m.python('remove_build_metadata',
                    self.resource('remove_build_metadata.py'),
                    args=args,
                    cwd=self.m.path['slave_build'])
    except self.m.step.StepFailure:
      step_result = self.m.step.active_result
      step_result.presentation.status = self.m.step.WARNING

  def compare_build_artifacts(self, first_dir, second_dir):
    """Compare the artifacts from 2 builds."""
    args = [
        '--first-build-dir', first_dir,
        '--second-build-dir', second_dir,
        '--target-platform', self.m.chromium.c.TARGET_PLATFORM,
    ]
    self.m.python('compare_build_artifacts',
                  self.resource('compare_build_artifacts.py'),
                  args=args,
                  cwd=self.m.path['slave_build'])
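
# Usage sketch (editor's addition): how a recipe would typically drive this
# module. The surrounding recipe structure and the 'browser_tests' target
# are assumptions, not something this file defines.
#
#     def RunSteps(api):
#       api.isolate.clean_isolated_files(api.chromium.output_dir)
#       api.chromium.compile(targets=['browser_tests'])
#       api.isolate.isolate_tests(api.chromium.output_dir,
#                                 targets=['browser_tests'])
#       # api.isolate.isolated_tests now maps target name -> isolated hash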
{ "content_hash": "b7dd1817964957bb2b0217c55192fa7a", "timestamp": "", "source": "github", "line_count": 229, "max_line_length": 80, "avg_line_length": 39.624454148471614, "alnum_prop": 0.6454705752700022, "repo_name": "eunchong/build", "id": "69d7836af3337924e7e345d65defb86790140fc8", "size": "9237", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scripts/slave/recipe_modules/isolate/api.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "3128" }, { "name": "CSS", "bytes": "211818" }, { "name": "HTML", "bytes": "429981" }, { "name": "JavaScript", "bytes": "75624" }, { "name": "Makefile", "bytes": "21204" }, { "name": "Python", "bytes": "6143109" }, { "name": "Shell", "bytes": "23512" } ], "symlink_target": "" }
# --- glove747/liberty-neutron :: neutron/tests/unit/plugins/ml2/drivers/l2pop/test_db.py (apache-2.0) ---

from neutron.common import constants
from neutron import context
from neutron.plugins.ml2.drivers.l2pop import db as l2pop_db
from neutron.tests.common import helpers
from neutron.tests.unit import testlib_api


class TestL2PopulationDBTestCase(testlib_api.SqlTestCase):
    def setUp(self):
        super(TestL2PopulationDBTestCase, self).setUp()
        self.db_mixin = l2pop_db.L2populationDbMixin()

    def test_get_agent_by_host(self):
        # Register an L2 agent + a bunch of other agents on the same host
        helpers.register_l3_agent()
        helpers.register_dhcp_agent()
        helpers.register_ovs_agent()
        agent = self.db_mixin.get_agent_by_host(
            context.get_admin_context().session, helpers.HOST)
        self.assertEqual(constants.AGENT_TYPE_OVS, agent.agent_type)

    def test_get_agent_by_host_no_candidate(self):
        # Register a bunch of non-L2 agents on the same host
        helpers.register_l3_agent()
        helpers.register_dhcp_agent()
        agent = self.db_mixin.get_agent_by_host(
            context.get_admin_context().session, helpers.HOST)
        self.assertIsNone(agent)
{ "content_hash": "629933d5ca0d515d9537d707371b6aa4", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 72, "avg_line_length": 40.67857142857143, "alnum_prop": 0.6962247585601404, "repo_name": "glove747/liberty-neutron", "id": "e4bf6dc59e2c880cb9d5d76b5ff7e6859d6feef5", "size": "1745", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "neutron/tests/unit/plugins/ml2/drivers/l2pop/test_db.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "1047" }, { "name": "Python", "bytes": "7559351" }, { "name": "Shell", "bytes": "14832" } ], "symlink_target": "" }
# --- mircealungu/Unified-Multilanguage-Reader :: src/umr/session.py (mit) ---

import functools

import flask
from flask import request

from umr.zeeguu_server import ZEEGUU_LOGIN


def with_session(view):
    """Decorator that checks whether a session is available, either:

    - as a cookie
    - as a GET or POST parameter

    If it is, it sets the sessionID field on the request object, which can
    be used within the decorated functions. In case of no session, the user
    is redirected to the login form.
    """
    @functools.wraps(view)
    def wrapped_view(*args, **kwargs):
        request.sessionID = None
        if 'sessionID' in request.cookies:
            request.sessionID = request.cookies.get('sessionID')
        else:
            return flask.redirect(ZEEGUU_LOGIN + '?next=' + request.url)
        return view(*args, **kwargs)
    return wrapped_view
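
# Usage sketch (editor's addition): protecting a Flask view with the
# decorator above. The route and view name are illustrative.
#
#     @app.route('/read/<article_id>')
#     @with_session
#     def read_article(article_id):
#         session_id = request.sessionID   # set by the decorator
#         ...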
{ "content_hash": "0aaf4355388c4a5abc0219c417021220", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 72, "avg_line_length": 28.357142857142858, "alnum_prop": 0.6662468513853904, "repo_name": "mircealungu/Unified-Multilanguage-Reader", "id": "4e67f136aecaa152dc56ccfda83993913fec896e", "size": "794", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/umr/session.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "426360" }, { "name": "HTML", "bytes": "11688" }, { "name": "JavaScript", "bytes": "203391" }, { "name": "Python", "bytes": "7620" }, { "name": "Shell", "bytes": "3909" } ], "symlink_target": "" }
"""docstring """ __revision__ = '0.1' global_read_user_interceptor = None global_access_secret_key = None global_login_page = None
{ "content_hash": "f30cf28d1397efd9a7d201f4545cce1b", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 35, "avg_line_length": 16.625, "alnum_prop": 0.6917293233082706, "repo_name": "YuelianINC/django_render", "id": "77df751efc28955cd654508ce12856a53d8545b1", "size": "221", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "django_render/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "20505" }, { "name": "Shell", "bytes": "96" } ], "symlink_target": "" }
# --- nikolas-hermanns/flash-test :: doc/source/conf.py (apache-2.0) ---

import os
import sys

sys.path.insert(0, os.path.abspath('../..'))

# -- General configuration ----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    #'sphinx.ext.intersphinx',
    'oslosphinx'
]

# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'flash-test'
copyright = u'2013, OpenStack Foundation'

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output --------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     u'%s Documentation' % project,
     u'OpenStack Foundation', 'manual'),
]

# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
{ "content_hash": "41e16de566a09bf9eee3e91b6833c4a6", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 79, "avg_line_length": 30.950819672131146, "alnum_prop": 0.6721398305084746, "repo_name": "nikolas-hermanns/flash-test", "id": "92a221c52cf11655bc6d8d707456dfeb0be22b99", "size": "2458", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/source/conf.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "32536" } ], "symlink_target": "" }
"""SCons.Tool.latex Tool-specific initialization for LaTeX. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/latex.py 3897 2009/01/13 06:45:54 scons" import SCons.Action import SCons.Defaults import SCons.Scanner.LaTeX import SCons.Util import SCons.Tool import SCons.Tool.tex LaTeXAction = None def LaTeXAuxFunction(target = None, source= None, env=None): result = SCons.Tool.tex.InternalLaTeXAuxAction( LaTeXAction, target, source, env ) return result LaTeXAuxAction = SCons.Action.Action(LaTeXAuxFunction, strfunction=SCons.Tool.tex.TeXLaTeXStrFunction) def generate(env): """Add Builders and construction variables for LaTeX to an Environment.""" global LaTeXAction if LaTeXAction is None: LaTeXAction = SCons.Action.Action('$LATEXCOM', '$LATEXCOMSTR') import dvi dvi.generate(env) import pdf pdf.generate(env) bld = env['BUILDERS']['DVI'] bld.add_action('.ltx', LaTeXAuxAction) bld.add_action('.latex', LaTeXAuxAction) bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter) bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter) env['LATEX'] = 'latex' env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}' env['LATEXRETRIES'] = 3 def exists(env): return env.Detect('latex')
{ "content_hash": "d7b4eaa774cd9efdb6919f3ac9933b42", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 89, "avg_line_length": 35.64473684210526, "alnum_prop": 0.7279438907345884, "repo_name": "amyvmiwei/chromium", "id": "63b093ff3c14817e2844a3887e706fbe73796b4f", "size": "2709", "binary": false, "copies": "3", "ref": "refs/heads/trunk", "path": "third_party/scons/scons-local/SCons/Tool/latex.py", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
# --- coordt/django-navbar :: setup.py (mit) ---

from setuptools import setup, find_packages
import os

try:
    reqs = open(os.path.join(os.path.dirname(__file__),
                             'requirements.txt')).read()
except (IOError, OSError):
    reqs = ''

setup(
    name='django-navbar',
    version=__import__('navbar').__version__,
    description='Reusable django application managing navigation menus with '
                'permissions, auto selection and crumbs.',
    long_description=open('docs/overview.txt').read(),
    author='Doug Napoleone',
    author_email='doug.napoleone@gmail.com',
    url='http://code.google.com/p/django-navbar/',
    packages=find_packages(),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
    include_package_data=True,
    install_requires=reqs,
)
{ "content_hash": "8dc1b26dd752712ecc7d1b57d68d24ba", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 83, "avg_line_length": 33.13333333333333, "alnum_prop": 0.6378269617706237, "repo_name": "coordt/django-navbar", "id": "881673c26ed52bb0dc5c7f3840a9a40093d662a2", "size": "994", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1156" }, { "name": "Python", "bytes": "21079" } ], "symlink_target": "" }
# --- SEL-Columbia/commcare-hq :: corehq/apps/domainsync/config.py (bsd-3-clause) ---

from django.conf import settings
from dimagi.utils.modules import to_function
import copy
from dimagi.utils.couch.database import get_db


def _extract_domains(doc):
    if "domain" in doc and doc["domain"]:
        return [doc["domain"]]
    elif "domains" in doc and doc["domains"]:
        return doc["domains"]
    return []


class DocumentTransform():
    # For coupling reasons, we have to bundle the original document
    # with its attachments so that we can properly deal with it
    # across databases. We also need the source database to fetch
    # the attachments.
    def __init__(self, doc, database):
        self._attachments = {}
        self.attachments = {}
        self.database = database
        if "_attachments" in doc and doc['_attachments']:
            self._attachments = doc["_attachments"]
            del doc["_attachments"]
            self.attachments = dict(
                (k, self.database.fetch_attachment(doc["_id"], k))
                for k in self._attachments)
        self.doc = doc


class TargetSyncConfig():
    def __init__(self, target, transform):
        self.domain = target
        self.transform_function = to_function(transform, failhard=True)

    def update_domains(self, doc, sourcedomain):
        if "domain" in doc and doc["domain"] == sourcedomain:
            doc["domain"] = self.domain
        elif "domains" in doc and sourcedomain in doc["domains"]:
            doc["domains"].remove(sourcedomain)
            doc["domains"].append(self.domain)
        else:
            raise ValueError("Source domain %s not found in doc %s!" %
                             (sourcedomain, doc))

    def transform(self, doc, sourcedomain, sourcedb):
        self.update_domains(doc, sourcedomain)
        pretransform = DocumentTransform(doc, sourcedb)
        return self.transform_function(pretransform)


class DomainSyncConfig():
    """
    Deals with syncing domains. In your settings file you should define:

        DOMAIN_SYNCS = {
            sourcedomain1: {
                "domain": targetdomain1,
                "transform": path.to.transformfunction1
            },
            sourcedomain2: {...}
        }
    """
    def __init__(self):
        self.mapping = {}
        self.old_database = get_db()
        if hasattr(settings, "DOMAIN_SYNC_DATABASE_NAME"):
            # match a slash, followed by words or underscores
            # followed by the end of the string
            #re.sub("(?<=/)(\w_-)+$", settings.DOMAIN_SYNC_DATABASE_NAME, self.old_database.uri)
            self.database = self.old_database.server.get_or_create_db(
                settings.DOMAIN_SYNC_DATABASE_NAME)
        if hasattr(settings, "DOMAIN_SYNCS"):
            for domain, targetconfig in settings.DOMAIN_SYNCS.items():
                self.mapping[domain] = TargetSyncConfig(**targetconfig)

    def get_transforms(self, doc):
        transforms = []
        # always sync certain global documents
        if "doc_type" in doc and doc["doc_type"] == "CommCareBuild":
            return [DocumentTransform(doc, self.old_database)]
        for domain in _extract_domains(doc):
            if domain in self.mapping:
                doccopy = copy.deepcopy(doc)
                transformed = self.mapping[domain].transform(
                    doccopy, domain, self.old_database)
                if transformed:
                    transforms.append(transformed)
        return transforms

    def save(self, transform):
        return save(transform, self.database)


def save(transform, database):
    # this is a fancy save method because we do some special casing
    # with the attachments
    database.save_doc(transform.doc, force_update=True)
    for k, attach in transform.attachments.items():
        database.put_attachment(
            transform.doc, attach, name=k,
            content_type=transform._attachments[k]["content_type"])


global_config = DomainSyncConfig()
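
# Usage sketch (editor's addition): the settings needed to activate a sync,
# plus a pass-through transform. All names here are illustrative.
#
#     # settings.py
#     DOMAIN_SYNC_DATABASE_NAME = "commcarehq_synced"
#     DOMAIN_SYNCS = {
#         "source-domain": {
#             "domain": "target-domain",
#             "transform": "path.to.identity_transform",
#         },
#     }
#
#     # path/to module (hypothetical)
#     def identity_transform(document_transform):
#         return document_transform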
{ "content_hash": "93c03a552697be9887ac91691838ffb8", "timestamp": "", "source": "github", "line_count": 99, "max_line_length": 114, "avg_line_length": 39.98989898989899, "alnum_prop": 0.6150543066430917, "repo_name": "SEL-Columbia/commcare-hq", "id": "015b57b24c99083d6153f8566e194b7428ba6073", "size": "3959", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "corehq/apps/domainsync/config.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ActionScript", "bytes": "15950" }, { "name": "CSS", "bytes": "768322" }, { "name": "JavaScript", "bytes": "2647080" }, { "name": "Python", "bytes": "7806659" }, { "name": "Shell", "bytes": "28569" } ], "symlink_target": "" }
import array import six.moves.cPickle as pickle import json from collections import defaultdict from gzip import GzipFile from os.path import getmtime import struct from time import time import os from io import BufferedReader from hashlib import md5 from itertools import chain from tempfile import NamedTemporaryFile import sys from six.moves import range from swift.common.exceptions import RingLoadError from swift.common.utils import hash_path, validate_configuration from swift.common.ring.utils import tiers_for_dev def calc_replica_count(replica2part2dev_id): base = len(replica2part2dev_id) - 1 extra = 1.0 * len(replica2part2dev_id[-1]) / len(replica2part2dev_id[0]) return base + extra class RingData(object): """Partitioned consistent hashing ring data (used for serialization).""" def __init__(self, replica2part2dev_id, devs, part_shift, next_part_power=None): self.devs = devs self._replica2part2dev_id = replica2part2dev_id self._part_shift = part_shift self.next_part_power = next_part_power for dev in self.devs: if dev is not None: dev.setdefault("region", 1) @property def replica_count(self): """Number of replicas (full or partial) used in the ring.""" return calc_replica_count(self._replica2part2dev_id) @classmethod def deserialize_v1(cls, gz_file, metadata_only=False): """ Deserialize a v1 ring file into a dictionary with `devs`, `part_shift`, and `replica2part2dev_id` keys. If the optional kwarg `metadata_only` is True, then the `replica2part2dev_id` is not loaded and that key in the returned dictionary just has the value `[]`. :param file gz_file: An opened file-like object which has already consumed the 6 bytes of magic and version. :param bool metadata_only: If True, only load `devs` and `part_shift` :returns: A dict containing `devs`, `part_shift`, and `replica2part2dev_id` """ json_len, = struct.unpack('!I', gz_file.read(4)) ring_dict = json.loads(gz_file.read(json_len).decode('ascii')) ring_dict['replica2part2dev_id'] = [] if metadata_only: return ring_dict byteswap = (ring_dict.get('byteorder', sys.byteorder) != sys.byteorder) partition_count = 1 << (32 - ring_dict['part_shift']) for x in range(ring_dict['replica_count']): part2dev = array.array('H', gz_file.read(2 * partition_count)) if byteswap: part2dev.byteswap() ring_dict['replica2part2dev_id'].append(part2dev) return ring_dict @classmethod def load(cls, filename, metadata_only=False): """ Load ring data from a file. :param filename: Path to a file serialized by the save() method. :param bool metadata_only: If True, only load `devs` and `part_shift`. :returns: A RingData instance containing the loaded data. 
""" gz_file = GzipFile(filename, 'rb') # Python 2.6 GzipFile doesn't support BufferedIO if hasattr(gz_file, '_checkReadable'): gz_file = BufferedReader(gz_file) # See if the file is in the new format magic = gz_file.read(4) if magic == b'R1NG': format_version, = struct.unpack('!H', gz_file.read(2)) if format_version == 1: ring_data = cls.deserialize_v1( gz_file, metadata_only=metadata_only) else: raise Exception('Unknown ring format version %d' % format_version) else: # Assume old-style pickled ring gz_file.seek(0) ring_data = pickle.load(gz_file) if not hasattr(ring_data, 'devs'): ring_data = RingData(ring_data['replica2part2dev_id'], ring_data['devs'], ring_data['part_shift'], ring_data.get('next_part_power')) return ring_data def serialize_v1(self, file_obj): # Write out new-style serialization magic and version: file_obj.write(struct.pack('!4sH', b'R1NG', 1)) ring = self.to_dict() # Only include next_part_power if it is set in the # builder, otherwise just ignore it _text = {'devs': ring['devs'], 'part_shift': ring['part_shift'], 'replica_count': len(ring['replica2part2dev_id']), 'byteorder': sys.byteorder} next_part_power = ring.get('next_part_power') if next_part_power is not None: _text['next_part_power'] = next_part_power json_text = json.dumps(_text, sort_keys=True, ensure_ascii=True).encode('ascii') json_len = len(json_text) file_obj.write(struct.pack('!I', json_len)) file_obj.write(json_text) for part2dev_id in ring['replica2part2dev_id']: file_obj.write(part2dev_id.tostring()) def save(self, filename, mtime=1300507380.0): """ Serialize this RingData instance to disk. :param filename: File into which this instance should be serialized. :param mtime: time used to override mtime for gzip, default or None if the caller wants to include time """ # Override the timestamp so that the same ring data creates # the same bytes on disk. This makes a checksum comparison a # good way to see if two rings are identical. tempf = NamedTemporaryFile(dir=".", prefix=filename, delete=False) gz_file = GzipFile(filename, mode='wb', fileobj=tempf, mtime=mtime) self.serialize_v1(gz_file) gz_file.close() tempf.flush() os.fsync(tempf.fileno()) tempf.close() os.chmod(tempf.name, 0o644) os.rename(tempf.name, filename) def to_dict(self): return {'devs': self.devs, 'replica2part2dev_id': self._replica2part2dev_id, 'part_shift': self._part_shift, 'next_part_power': self.next_part_power} class Ring(object): """ Partitioned consistent hashing ring. 
:param serialized_path: path to serialized RingData instance :param reload_time: time interval in seconds to check for a ring change :param ring_name: ring name string (basically specified from policy) :param validation_hook: hook point to validate ring configuration ontime :raises RingLoadError: if the loaded ring data violates its constraint """ def __init__(self, serialized_path, reload_time=15, ring_name=None, validation_hook=lambda ring_data: None): # can't use the ring unless HASH_PATH_SUFFIX is set validate_configuration() if ring_name: self.serialized_path = os.path.join(serialized_path, ring_name + '.ring.gz') else: self.serialized_path = os.path.join(serialized_path) self.reload_time = reload_time self._validation_hook = validation_hook self._reload(force=True) def _reload(self, force=False): self._rtime = time() + self.reload_time if force or self.has_changed(): ring_data = RingData.load(self.serialized_path) try: self._validation_hook(ring_data) except RingLoadError: if force: raise else: # In runtime reload at working server, it's ok to use old # ring data if the new ring data is invalid. return self._mtime = getmtime(self.serialized_path) self._devs = ring_data.devs # NOTE(akscram): Replication parameters like replication_ip # and replication_port are required for # replication process. An old replication # ring doesn't contain this parameters into # device. Old-style pickled rings won't have # region information. for dev in self._devs: if dev: dev.setdefault('region', 1) if 'ip' in dev: dev.setdefault('replication_ip', dev['ip']) if 'port' in dev: dev.setdefault('replication_port', dev['port']) self._replica2part2dev_id = ring_data._replica2part2dev_id self._part_shift = ring_data._part_shift self._rebuild_tier_data() # Do this now, when we know the data has changed, rather than # doing it on every call to get_more_nodes(). # # Since this is to speed up the finding of handoffs, we only # consider devices with at least one partition assigned. This # way, a region, zone, or server with no partitions assigned # does not count toward our totals, thereby keeping the early # bailouts in get_more_nodes() working. 
dev_ids_with_parts = set() for part2dev_id in self._replica2part2dev_id: for dev_id in part2dev_id: dev_ids_with_parts.add(dev_id) regions = set() zones = set() ips = set() self._num_devs = 0 for dev in self._devs: if dev and dev['id'] in dev_ids_with_parts: regions.add(dev['region']) zones.add((dev['region'], dev['zone'])) ips.add((dev['region'], dev['zone'], dev['ip'])) self._num_devs += 1 self._num_regions = len(regions) self._num_zones = len(zones) self._num_ips = len(ips) self._next_part_power = ring_data.next_part_power @property def next_part_power(self): return self._next_part_power @property def part_power(self): return 32 - self._part_shift def _rebuild_tier_data(self): self.tier2devs = defaultdict(list) for dev in self._devs: if not dev: continue for tier in tiers_for_dev(dev): self.tier2devs[tier].append(dev) tiers_by_length = defaultdict(list) for tier in self.tier2devs: tiers_by_length[len(tier)].append(tier) self.tiers_by_length = sorted(tiers_by_length.values(), key=lambda x: len(x[0])) for tiers in self.tiers_by_length: tiers.sort() @property def replica_count(self): """Number of replicas (full or partial) used in the ring.""" return calc_replica_count(self._replica2part2dev_id) @property def partition_count(self): """Number of partitions in the ring.""" return len(self._replica2part2dev_id[0]) @property def devs(self): """devices in the ring""" if time() > self._rtime: self._reload() return self._devs def has_changed(self): """ Check to see if the ring on disk is different than the current one in memory. :returns: True if the ring on disk has changed, False otherwise """ return getmtime(self.serialized_path) != self._mtime def _get_part_nodes(self, part): part_nodes = [] seen_ids = set() for r2p2d in self._replica2part2dev_id: if part < len(r2p2d): dev_id = r2p2d[part] if dev_id not in seen_ids: part_nodes.append(self.devs[dev_id]) seen_ids.add(dev_id) return [dict(node, index=i) for i, node in enumerate(part_nodes)] def get_part(self, account, container=None, obj=None): """ Get the partition for an account/container/object. :param account: account name :param container: container name :param obj: object name :returns: the partition number """ key = hash_path(account, container, obj, raw_digest=True) if time() > self._rtime: self._reload() part = struct.unpack_from('>I', key)[0] >> self._part_shift return part def get_part_nodes(self, part): """ Get the nodes that are responsible for the partition. If one node is responsible for more than one replica of the same partition, it will only appear in the output once. :param part: partition to get nodes for :returns: list of node dicts See :func:`get_nodes` for a description of the node dicts. """ if time() > self._rtime: self._reload() return self._get_part_nodes(part) def get_nodes(self, account, container=None, obj=None): """ Get the partition and nodes for an account/container/object. If a node is responsible for more than one replica, it will only appear in the output once. 
:param account: account name :param container: container name :param obj: object name :returns: a tuple of (partition, list of node dicts) Each node dict will have at least the following keys: ====== =============================================================== id unique integer identifier amongst devices index offset into the primary node list for the partition weight a float of the relative weight of this device as compared to others; this indicates how many partitions the builder will try to assign to this device zone integer indicating which zone the device is in; a given partition will not be assigned to multiple devices within the same zone ip the ip address of the device port the tcp port of the device device the device's name on disk (sdb1, for example) meta general use 'extra' field; for example: the online date, the hardware description ====== =============================================================== """ part = self.get_part(account, container, obj) return part, self._get_part_nodes(part) def get_more_nodes(self, part): """ Generator to get extra nodes for a partition for hinted handoff. The handoff nodes will try to be in zones other than the primary zones, will take into account the device weights, and will usually keep the same sequences of handoffs even with ring changes. :param part: partition to get handoff nodes for :returns: generator of node dicts See :func:`get_nodes` for a description of the node dicts. """ if time() > self._rtime: self._reload() primary_nodes = self._get_part_nodes(part) used = set(d['id'] for d in primary_nodes) same_regions = set(d['region'] for d in primary_nodes) same_zones = set((d['region'], d['zone']) for d in primary_nodes) same_ips = set( (d['region'], d['zone'], d['ip']) for d in primary_nodes) parts = len(self._replica2part2dev_id[0]) part_hash = md5(str(part).encode('ascii')).digest() start = struct.unpack_from('>I', part_hash)[0] >> self._part_shift inc = int(parts / 65536) or 1 # Multiple loops for execution speed; the checks and bookkeeping get # simpler as you go along hit_all_regions = len(same_regions) == self._num_regions for handoff_part in chain(range(start, parts, inc), range(inc - ((parts - start) % inc), start, inc)): if hit_all_regions: # At this point, there are no regions left untouched, so we # can stop looking. break for part2dev_id in self._replica2part2dev_id: if handoff_part < len(part2dev_id): dev_id = part2dev_id[handoff_part] dev = self._devs[dev_id] region = dev['region'] if dev_id not in used and region not in same_regions: yield dev used.add(dev_id) same_regions.add(region) zone = dev['zone'] ip = (region, zone, dev['ip']) same_zones.add((region, zone)) same_ips.add(ip) if len(same_regions) == self._num_regions: hit_all_regions = True break hit_all_zones = len(same_zones) == self._num_zones for handoff_part in chain(range(start, parts, inc), range(inc - ((parts - start) % inc), start, inc)): if hit_all_zones: # Much like we stopped looking for fresh regions before, we # can now stop looking for fresh zones; there are no more. 
break for part2dev_id in self._replica2part2dev_id: if handoff_part < len(part2dev_id): dev_id = part2dev_id[handoff_part] dev = self._devs[dev_id] zone = (dev['region'], dev['zone']) if dev_id not in used and zone not in same_zones: yield dev used.add(dev_id) same_zones.add(zone) ip = zone + (dev['ip'],) same_ips.add(ip) if len(same_zones) == self._num_zones: hit_all_zones = True break hit_all_ips = len(same_ips) == self._num_ips for handoff_part in chain(range(start, parts, inc), range(inc - ((parts - start) % inc), start, inc)): if hit_all_ips: # We've exhausted the pool of unused backends, so stop # looking. break for part2dev_id in self._replica2part2dev_id: if handoff_part < len(part2dev_id): dev_id = part2dev_id[handoff_part] dev = self._devs[dev_id] ip = (dev['region'], dev['zone'], dev['ip']) if dev_id not in used and ip not in same_ips: yield dev used.add(dev_id) same_ips.add(ip) if len(same_ips) == self._num_ips: hit_all_ips = True break hit_all_devs = len(used) == self._num_devs for handoff_part in chain(range(start, parts, inc), range(inc - ((parts - start) % inc), start, inc)): if hit_all_devs: # We've used every device we have, so let's stop looking for # unused devices now. break for part2dev_id in self._replica2part2dev_id: if handoff_part < len(part2dev_id): dev_id = part2dev_id[handoff_part] if dev_id not in used: yield self._devs[dev_id] used.add(dev_id) if len(used) == self._num_devs: hit_all_devs = True break
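# ---------------------------------------------------------------------------
# Usage sketch (editor's addition, not part of the original module).  It
# assumes the enclosing class above is named ``Ring`` (the class statement is
# not visible in this excerpt) and that a serialized object ring exists under
# /etc/swift -- both the name and the path are illustrative assumptions.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    # Load the ring; devs and the lookup methods transparently re-check the
    # on-disk file every reload_time seconds via has_changed().
    ring = Ring('/etc/swift', ring_name='object')
    # get_nodes() hashes account/container/object to a partition and returns
    # the primary node dicts responsible for it.
    part, nodes = ring.get_nodes('AUTH_test', 'photos', 'cat.jpg')
    print('partition %d -> %r' % (part, [n['device'] for n in nodes]))
    # get_more_nodes() yields handoff devices, preferring unused regions,
    # then zones, then IPs, exactly as the loops above implement.
    handoffs = [n['device'] for n in list(ring.get_more_nodes(part))[:3]]
    print('first handoffs: %r' % (handoffs,))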
{ "content_hash": "93cc3f6a8dc2d75a21893def1e190edb", "timestamp": "", "source": "github", "line_count": 496, "max_line_length": 79, "avg_line_length": 40.25, "alnum_prop": 0.5418753756762172, "repo_name": "matthewoliver/swift", "id": "fc63f041ee9318a0c19803a0e1c0b55474fba657", "size": "20559", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "swift/common/ring/ring.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "248" }, { "name": "PHP", "bytes": "377" }, { "name": "Python", "bytes": "10042645" }, { "name": "Shell", "bytes": "3075" } ], "symlink_target": "" }
"""empty message Revision ID: 4fb92f3ede9e Revises: 2b269c4557de Create Date: 2014-03-14 12:12:41.912515 """ # revision identifiers, used by Alembic. revision = '4fb92f3ede9e' down_revision = '2b269c4557de' from alembic import op from datetime import datetime import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('permission', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('reader', sa.Boolean(), nullable=True), sa.Column('editor', sa.Boolean(), nullable=True), sa.Column('admin', sa.Boolean(), nullable=True), sa.Column('account_id', sa.Integer(), nullable=True), sa.Column('project_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['account_id'], ['account.id'], ), sa.ForeignKeyConstraint(['project_id'], ['project.id'], ), sa.PrimaryKeyConstraint('id') ) #op.drop_table('role') ### end Alembic commands ### projects = sa.sql.table('project', sa.sql.column('id', sa.Integer), sa.sql.column('author_id', sa.Integer) ) perms = sa.sql.table('permission', sa.sql.column('project_id', sa.Integer), sa.sql.column('account_id', sa.Integer), sa.sql.column('reader', sa.Boolean), sa.sql.column('editor', sa.Boolean), sa.sql.column('admin', sa.Boolean), sa.sql.column('created_at', sa.DateTime), sa.sql.column('updated_at', sa.DateTime) ) connection = op.get_bind() rp = connection.execute(projects.select()) if rp is not None: perm_seed = [] for row in rp.fetchall(): row = dict(zip(row.keys(), tuple(row))) perm_seed.append({ 'account_id': row.get('author_id'), 'project_id': row.get('id'), 'admin': True, 'reader': True, 'editor': True, 'created_at': datetime.utcnow(), 'updated_at': datetime.utcnow() }) op.bulk_insert(perms, perm_seed) def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_table('permission') ### end Alembic commands ###
{ "content_hash": "7e8b8529d0ceca02cd9b9960609c2a88", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 63, "avg_line_length": 31.30263157894737, "alnum_prop": 0.5977301387137453, "repo_name": "4bic/grano", "id": "4ed7944c533a28079bff55e52b97819cafdef499", "size": "2379", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "grano/alembic/versions/4fb92f3ede9e_.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "100" }, { "name": "Mako", "bytes": "412" }, { "name": "Python", "bytes": "237445" } ], "symlink_target": "" }
"""Module for constructing <colgroup> tag.""" from __future__ import absolute_import from ...lib.utils import validate_attribute_values from ...templates.html.tags import colgroup ATTRIBUTES = { 'align': { 'description': 'Aligns the content in a column group', 'values': ['left', 'right', 'center', 'justify', 'char'] }, 'char': { 'description': 'Aligns the content in a column group to a ' 'character', 'values': None }, 'charoff': { 'description': 'Sets the number of characters the content ' 'will be aligned from the character specified ' 'by the char attribute', 'values': None }, 'span': { 'description': 'Specifies the number of columns a column ' 'group should span', 'values': None }, 'valign': { 'description': 'Vertical aligns the content in a column group', 'values': ['top', 'middle', 'bottom', 'baseline'] }, 'width': { 'description': 'Specifies the width of a column group', 'values': None } } class ColGroup(object): """Class for constructing colgroup tag. Args: align (str): Aligns the content in a column group. char (str): Aligns the content in a column group to a character. charoff (int): Sets the number of characters the content will be aligned from the character specified by the char attribute. span (int): Specifies the number of columns a column group should span. valign (str): Vertical aligns the content in a column group. width (str): Specifies the width of a column group. .. versionadded:: 0.2.0 """ def __init__(self, align=None, char=None, charoff=None, span=None, valign=None, width=None): self.tag = 'colgroup' validate_attribute_values(tag=self.tag, attribute_name='align', attribute_value=align, default_values=ATTRIBUTES['align']['values']) self.validate_char_attribute(align=align, value=char) self.validate_charoff_attribute(align=align, char=char, value=charoff) validate_attribute_values( tag=self.tag, attribute_name='valign', attribute_value=valign, default_values=ATTRIBUTES['valign']['values']) self.values = {'align': align, 'char': char, 'charoff': charoff, 'span': span, 'valign': valign, 'width': width} def construct(self): """Returns the constructed colgroup tag <colgroup>.""" return colgroup.render(self.values) def validate_char_attribute(self, align, value): """Validates char attribute. The char attribute can only be used if the align attribute is set to "char". """ if not value: return if value and align != 'char': raise AttributeError('<colgroup>: The char attribute can only be ' 'used if the align attribute is set to ' '"char".') def validate_charoff_attribute(self, align, char, value): """Validates charoff attribute. The charoff attribute can only be used if the char attribute is specified and the align attribute is set to "char". """ if not value: return if value and (not char or align != 'char'): raise AttributeError('<colgroup>: The charoff attribute can only ' 'be used if the char attribute is specified ' 'and the align attribute is set to "char".')
{ "content_hash": "01a8ea80192bcb4cb87565bdbf78e093", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 79, "avg_line_length": 37.09345794392523, "alnum_prop": 0.5399344923154447, "repo_name": "bharadwajyarlagadda/korona", "id": "aa66c02a9cff2650e6dacff336ead02506a3dcd6", "size": "3993", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "korona/html/tags/colgroup.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "150" }, { "name": "Python", "bytes": "199694" } ], "symlink_target": "" }
import paddle.v2 as paddle
import os
import gzip
from paddle.v2.reader.creator import cloud_reader
import paddle.v2.dataset.uci_housing as uci_housing

etcd_ip = os.getenv("ETCD_IP")
etcd_endpoint = "http://" + etcd_ip + ":" + "2379"
trainer_id = int(os.getenv("PADDLE_INIT_TRAINER_ID"))


def main():
    # init
    paddle.init()

    # network config
    x = paddle.layer.data(name='x', type=paddle.data_type.dense_vector(13))
    y_predict = paddle.layer.fc(
        input=x,
        size=1,
        act=paddle.activation.Linear(),
        param_attr=paddle.attr.Param(learning_rate=1e-3))
    y = paddle.layer.data(name='y', type=paddle.data_type.dense_vector(1))
    cost = paddle.layer.square_error_cost(input=y_predict, label=y)

    # create parameters
    parameters = paddle.parameters.create(cost)

    # create optimizer
    optimizer = paddle.optimizer.Momentum(momentum=0)

    trainer = paddle.trainer.SGD(cost=cost,
                                 parameters=parameters,
                                 update_equation=optimizer,
                                 is_local=False,
                                 pserver_spec=etcd_endpoint,
                                 use_etcd=True)

    feeding = {'x': 0, 'y': 1}

    # event_handler to print training and testing info
    def event_handler(event):
        if isinstance(event, paddle.event.EndIteration):
            if event.batch_id % 100 == 0:
                print "Pass %d, Batch %d, Cost %f" % (
                    event.pass_id, event.batch_id, event.cost)

        if isinstance(event, paddle.event.EndPass):
            result = trainer.test(
                reader=paddle.batch(
                    uci_housing.test(), batch_size=2),
                feeding=feeding)
            print "Test %d, Cost %f" % (event.pass_id, result.cost)
            # trainer_id is an int (parsed above); the original compared it
            # against the string "0", which never matched, so checkpoints
            # were never written.
            if trainer_id == 0:
                with gzip.open("fit-a-line_pass_%05d.tar.gz" % event.pass_id,
                               "w") as f:
                    parameters.to_tar(f)

    # training
    trainer.train(
        reader=paddle.batch(
            paddle.reader.shuffle(
                cloud_reader([
                    "/pfs/dlnel/public/dataset/uci_housing/uci_housing_train-*"
                ], etcd_endpoint),
                buf_size=500),
            batch_size=2),
        feeding=feeding,
        event_handler=event_handler,
        num_passes=30)


if __name__ == '__main__':
    main()
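# ---------------------------------------------------------------------------
# Launch sketch (editor's addition, not part of the original script).  The
# environment variables read at import time above must be provided by the
# cluster launcher; the values shown here are examples only.
# ---------------------------------------------------------------------------
#   $ export ETCD_IP=192.168.1.10        # etcd host used to locate pservers
#   $ export PADDLE_INIT_TRAINER_ID=0    # trainer 0 also saves checkpoints
#   $ python train_ft.py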
{ "content_hash": "f20c8f93d8c5f14a9c8f7e89888feefb", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 79, "avg_line_length": 33.04054054054054, "alnum_prop": 0.5435582822085889, "repo_name": "PaddlePaddle/cloud", "id": "ed5e53aefd6da37002800e2f31ad27429d75b494", "size": "3058", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "demo/fit_a_line/train_ft.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1103" }, { "name": "Go", "bytes": "200594" }, { "name": "HTML", "bytes": "8917" }, { "name": "JavaScript", "bytes": "270250" }, { "name": "Python", "bytes": "150171" }, { "name": "Shell", "bytes": "13748" } ], "symlink_target": "" }
import requests
from bs4 import BeautifulSoup as mksoup

BASE = "https://www.iblocklist.com"


def get_value_from(url):
    # Each list page carries its subscription URL as the value of the last
    # <input> element; serialize that tag and take the second-to-last
    # quote-delimited field, i.e. the value attribute.
    soup = mksoup(requests.get(BASE + url).text)
    return str(soup.find_all("input")[-1]).split("\"")[-2]


# Scrape the index page and map each list's display name to its page link.
soup = mksoup(requests.get("https://www.iblocklist.com/lists.php").text)
links = {}
for row in soup.find_all("tr")[1:]:
    section = str(list(row.children)[0])
    pieces = section.split("\"")
    links[pieces[2].split("<")[0][1:]] = pieces[1]

# Visit every list page and print only values that look like real URLs.
for link in links:
    value = get_value_from(links[link])
    if "http" in value:
        print value
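# ---------------------------------------------------------------------------
# Parsing sketch (editor's addition, not part of the original script): what
# get_value_from() extracts, demonstrated on canned HTML so no network access
# is needed.  The list id in the sample URL is made up.  Kept commented out
# so the script's stdout (a plain URL list) stays clean.
# ---------------------------------------------------------------------------
# sample = ('<form><input type="hidden" '
#           'value="http://list.iblocklist.com/?list=xyz"/></form>')
# soup = mksoup(sample)
# # Same extraction as get_value_from(): serialize the last <input>, split on
# # double quotes, and the second-to-last field is the value attribute.
# print(str(soup.find_all("input")[-1]).split("\"")[-2])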
{ "content_hash": "db9c30f067711d86c31cecaa44c6e632", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 72, "avg_line_length": 25.217391304347824, "alnum_prop": 0.6362068965517241, "repo_name": "susman/Ultimate-Blocklist", "id": "072c3875410b1f4b9951179bf1f777301e849b4f", "size": "671", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "get_url.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "671" }, { "name": "Shell", "bytes": "1983" } ], "symlink_target": "" }
# # Parse tree nodes # from __future__ import absolute_import import cython cython.declare(sys=object, os=object, copy=object, Builtin=object, error=object, warning=object, Naming=object, PyrexTypes=object, py_object_type=object, ModuleScope=object, LocalScope=object, ClosureScope=object, StructOrUnionScope=object, PyClassScope=object, CppClassScope=object, UtilityCode=object, EncodedString=object, absolute_path_length=cython.Py_ssize_t, error_type=object) import sys, os, copy from itertools import chain from . import Builtin from .Errors import error, warning, InternalError, CompileError from . import Naming from . import PyrexTypes from . import TypeSlots from .PyrexTypes import py_object_type, error_type from .Symtab import (ModuleScope, LocalScope, ClosureScope, StructOrUnionScope, PyClassScope, CppClassScope, TemplateScope) from .Code import UtilityCode from .StringEncoding import EncodedString, escape_byte_string, split_string_literal from . import Future from . import Options from . import DebugFlags absolute_path_length = 0 def relative_position(pos): """ We embed the relative filename in the generated C file, since we don't want to have to regenerate and compile all the source code whenever the Python install directory moves (which could happen, e.g,. when distributing binaries.) INPUT: a position tuple -- (absolute filename, line number column position) OUTPUT: relative filename line number AUTHOR: William Stein """ global absolute_path_length if absolute_path_length==0: absolute_path_length = len(os.path.abspath(os.getcwd())) return (pos[0].get_filenametable_entry()[absolute_path_length+1:], pos[1]) def embed_position(pos, docstring): if not Options.embed_pos_in_docstring: return docstring pos_line = u'File: %s (starting at line %s)' % relative_position(pos) if docstring is None: # unicode string return EncodedString(pos_line) # make sure we can encode the filename in the docstring encoding # otherwise make the docstring a unicode string encoding = docstring.encoding if encoding is not None: try: pos_line.encode(encoding) except UnicodeEncodeError: encoding = None if not docstring: # reuse the string encoding of the original docstring doc = EncodedString(pos_line) else: doc = EncodedString(pos_line + u'\n' + docstring) doc.encoding = encoding return doc def _analyse_signature_annotation(annotation, env): base_type = None explicit_pytype = explicit_ctype = False if annotation.is_dict_literal: for name, value in annotation.key_value_pairs: if not name.is_string_literal: continue if name.value in ('type', b'type'): explicit_pytype = True if not explicit_ctype: annotation = value elif name.value in ('ctype', b'ctype'): explicit_ctype = True annotation = value if explicit_pytype and explicit_ctype: warning(annotation.pos, "Duplicate type declarations found in signature annotation") arg_type = annotation.analyse_as_type(env) if arg_type is not None: if explicit_pytype and not explicit_ctype and not arg_type.is_pyobject: warning(annotation.pos, "Python type declaration in signature annotation does not refer to a Python type") base_type = CAnalysedBaseTypeNode( annotation.pos, type=arg_type, is_arg=True) else: warning(annotation.pos, "Unknown type declaration found in signature annotation") return base_type, arg_type def write_func_call(func, codewriter_class): def f(*args, **kwds): if len(args) > 1 and isinstance(args[1], codewriter_class): # here we annotate the code with this function call # but only if new code is generated node, code = args[:2] marker = ' /* %s -> %s.%s %s */' % ( 
' ' * code.call_level, node.__class__.__name__, func.__name__, node.pos[1:]) pristine = code.buffer.stream.tell() code.putln(marker) start = code.buffer.stream.tell() code.call_level += 4 res = func(*args, **kwds) code.call_level -= 4 if start == code.buffer.stream.tell(): # no code written => undo writing marker code.buffer.stream.truncate(pristine) else: marker = marker.replace('->', '<-', 1) code.putln(marker) return res else: return func(*args, **kwds) return f class VerboseCodeWriter(type): # Set this as a metaclass to trace function calls in code. # This slows down code generation and makes much larger files. def __new__(cls, name, bases, attrs): from types import FunctionType from .Code import CCodeWriter attrs = dict(attrs) for mname, m in attrs.items(): if isinstance(m, FunctionType): attrs[mname] = write_func_call(m, CCodeWriter) return super(VerboseCodeWriter, cls).__new__(cls, name, bases, attrs) class CheckAnalysers(type): """Metaclass to check that type analysis functions return a node. """ methods = set(['analyse_types', 'analyse_expressions', 'analyse_target_types']) def __new__(cls, name, bases, attrs): from types import FunctionType def check(name, func): def call(*args, **kwargs): retval = func(*args, **kwargs) if retval is None: print name, args, kwargs return retval return call attrs = dict(attrs) for mname, m in attrs.items(): if isinstance(m, FunctionType) and mname in cls.methods: attrs[mname] = check(mname, m) return super(CheckAnalysers, cls).__new__(cls, name, bases, attrs) class Node(object): # pos (string, int, int) Source file position # is_name boolean Is a NameNode # is_literal boolean Is a ConstNode #__metaclass__ = CheckAnalysers if DebugFlags.debug_trace_code_generation: __metaclass__ = VerboseCodeWriter is_name = 0 is_none = 0 is_nonecheck = 0 is_literal = 0 is_terminator = 0 temps = None # All descendants should set child_attrs to a list of the attributes # containing nodes considered "children" in the tree. Each such attribute # can either contain a single node or a list of nodes. See Visitor.py. child_attrs = None cf_state = None # This may be an additional (or 'actual') type that will be checked when # this node is coerced to another type. This could be useful to set when # the actual type to which it can coerce is known, but you want to leave # the type a py_object_type coercion_type = None def __init__(self, pos, **kw): self.pos = pos self.__dict__.update(kw) gil_message = "Operation" nogil_check = None def gil_error(self, env=None): error(self.pos, "%s not allowed without gil" % self.gil_message) cpp_message = "Operation" def cpp_check(self, env): if not env.is_cpp(): self.cpp_error() def cpp_error(self): error(self.pos, "%s only allowed in c++" % self.cpp_message) def clone_node(self): """Clone the node. This is defined as a shallow copy, except for member lists amongst the child attributes (from get_child_accessors) which are also copied. Lists containing child nodes are thus seen as a way for the node to hold multiple children directly; the list is not treated as a separate level in the tree.""" result = copy.copy(self) for attrname in result.child_attrs: value = getattr(result, attrname) if isinstance(value, list): setattr(result, attrname, [x for x in value]) return result # # There are 3 phases of parse tree processing, applied in order to # all the statements in a given scope-block: # # (0) analyse_declarations # Make symbol table entries for all declarations at the current # level, both explicit (def, cdef, etc.) 
and implicit (assignment # to an otherwise undeclared name). # # (1) analyse_expressions # Determine the result types of expressions and fill in the # 'type' attribute of each ExprNode. Insert coercion nodes into the # tree where needed to convert to and from Python objects. # Allocate temporary locals for intermediate results. Fill # in the 'result_code' attribute of each ExprNode with a C code # fragment. # # (2) generate_code # Emit C code for all declarations, statements and expressions. # Recursively applies the 3 processing phases to the bodies of # functions. # def analyse_declarations(self, env): pass def analyse_expressions(self, env): raise InternalError("analyse_expressions not implemented for %s" % \ self.__class__.__name__) def generate_code(self, code): raise InternalError("generate_code not implemented for %s" % \ self.__class__.__name__) def annotate(self, code): # mro does the wrong thing if isinstance(self, BlockNode): self.body.annotate(code) def end_pos(self): try: return self._end_pos except AttributeError: pos = self.pos if not self.child_attrs: self._end_pos = pos return pos for attr in self.child_attrs: child = getattr(self, attr) # Sometimes lists, sometimes nodes if child is None: pass elif isinstance(child, list): for c in child: pos = max(pos, c.end_pos()) else: pos = max(pos, child.end_pos()) self._end_pos = pos return pos def dump(self, level=0, filter_out=("pos",), cutoff=100, encountered=None): """Debug helper method that returns a recursive string representation of this node. """ if cutoff == 0: return "<...nesting level cutoff...>" if encountered is None: encountered = set() if id(self) in encountered: return "<%s (0x%x) -- already output>" % (self.__class__.__name__, id(self)) encountered.add(id(self)) def dump_child(x, level): if isinstance(x, Node): return x.dump(level, filter_out, cutoff-1, encountered) elif isinstance(x, list): return "[%s]" % ", ".join([dump_child(item, level) for item in x]) else: return repr(x) attrs = [(key, value) for key, value in self.__dict__.items() if key not in filter_out] if len(attrs) == 0: return "<%s (0x%x)>" % (self.__class__.__name__, id(self)) else: indent = " " * level res = "<%s (0x%x)\n" % (self.__class__.__name__, id(self)) for key, value in attrs: res += "%s %s: %s\n" % (indent, key, dump_child(value, level + 1)) res += "%s>" % indent return res def dump_pos(self, mark_column=False, marker='(#)'): """Debug helper method that returns the source code context of this node as a string. """ if not self.pos: return u'' source_desc, line, col = self.pos contents = source_desc.get_lines(encoding='ASCII', error_handling='ignore') # line numbers start at 1 lines = contents[max(0,line-3):line] current = lines[-1] if mark_column: current = current[:col] + marker + current[col:] lines[-1] = current.rstrip() + u' # <<<<<<<<<<<<<<\n' lines += contents[line:line+2] return u'"%s":%d:%d\n%s\n' % ( source_desc.get_escaped_description(), line, col, u''.join(lines)) class CompilerDirectivesNode(Node): """ Sets compiler directives for the children nodes """ # directives {string:value} A dictionary holding the right value for # *all* possible directives. 
# body Node child_attrs = ["body"] def analyse_declarations(self, env): old = env.directives env.directives = self.directives self.body.analyse_declarations(env) env.directives = old def analyse_expressions(self, env): old = env.directives env.directives = self.directives self.body = self.body.analyse_expressions(env) env.directives = old return self def generate_function_definitions(self, env, code): env_old = env.directives code_old = code.globalstate.directives code.globalstate.directives = self.directives self.body.generate_function_definitions(env, code) env.directives = env_old code.globalstate.directives = code_old def generate_execution_code(self, code): old = code.globalstate.directives code.globalstate.directives = self.directives self.body.generate_execution_code(code) code.globalstate.directives = old def annotate(self, code): old = code.globalstate.directives code.globalstate.directives = self.directives self.body.annotate(code) code.globalstate.directives = old class BlockNode(object): # Mixin class for nodes representing a declaration block. def generate_cached_builtins_decls(self, env, code): entries = env.global_scope().undeclared_cached_builtins for entry in entries: code.globalstate.add_cached_builtin_decl(entry) del entries[:] def generate_lambda_definitions(self, env, code): for node in env.lambda_defs: node.generate_function_definitions(env, code) class StatListNode(Node): # stats a list of StatNode child_attrs = ["stats"] @staticmethod def create_analysed(pos, env, *args, **kw): node = StatListNode(pos, *args, **kw) return node # No node-specific analysis needed def analyse_declarations(self, env): #print "StatListNode.analyse_declarations" ### for stat in self.stats: stat.analyse_declarations(env) def analyse_expressions(self, env): #print "StatListNode.analyse_expressions" ### self.stats = [ stat.analyse_expressions(env) for stat in self.stats ] return self def generate_function_definitions(self, env, code): #print "StatListNode.generate_function_definitions" ### for stat in self.stats: stat.generate_function_definitions(env, code) def generate_execution_code(self, code): #print "StatListNode.generate_execution_code" ### for stat in self.stats: code.mark_pos(stat.pos) stat.generate_execution_code(code) def annotate(self, code): for stat in self.stats: stat.annotate(code) class StatNode(Node): # # Code generation for statements is split into the following subphases: # # (1) generate_function_definitions # Emit C code for the definitions of any structs, # unions, enums and functions defined in the current # scope-block. # # (2) generate_execution_code # Emit C code for executable statements. # def generate_function_definitions(self, env, code): pass def generate_execution_code(self, code): raise InternalError("generate_execution_code not implemented for %s" % \ self.__class__.__name__) class CDefExternNode(StatNode): # include_file string or None # body StatNode child_attrs = ["body"] def analyse_declarations(self, env): if self.include_file: env.add_include_file(self.include_file) old_cinclude_flag = env.in_cinclude env.in_cinclude = 1 self.body.analyse_declarations(env) env.in_cinclude = old_cinclude_flag def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass def annotate(self, code): self.body.annotate(code) class CDeclaratorNode(Node): # Part of a C declaration. 
# # Processing during analyse_declarations phase: # # analyse # Returns (name, type) pair where name is the # CNameDeclaratorNode of the name being declared # and type is the type it is being declared as. # # calling_convention string Calling convention of CFuncDeclaratorNode # for which this is a base child_attrs = [] calling_convention = "" def analyse_templates(self): # Only C++ functions have templates. return None class CNameDeclaratorNode(CDeclaratorNode): # name string The Cython name being declared # cname string or None C name, if specified # default ExprNode or None the value assigned on declaration child_attrs = ['default'] default = None def analyse(self, base_type, env, nonempty = 0): if nonempty and self.name == '': # May have mistaken the name for the type. if base_type.is_ptr or base_type.is_array or base_type.is_buffer: error(self.pos, "Missing argument name") elif base_type.is_void: error(self.pos, "Use spam() rather than spam(void) to declare a function with no arguments.") else: self.name = base_type.declaration_code("", for_display=1, pyrex=1) base_type = py_object_type if base_type.is_fused and env.fused_to_specific: base_type = base_type.specialize(env.fused_to_specific) self.type = base_type return self, base_type class CPtrDeclaratorNode(CDeclaratorNode): # base CDeclaratorNode child_attrs = ["base"] def analyse(self, base_type, env, nonempty = 0): if base_type.is_pyobject: error(self.pos, "Pointer base type cannot be a Python object") ptr_type = PyrexTypes.c_ptr_type(base_type) return self.base.analyse(ptr_type, env, nonempty = nonempty) class CReferenceDeclaratorNode(CDeclaratorNode): # base CDeclaratorNode child_attrs = ["base"] def analyse(self, base_type, env, nonempty = 0): if base_type.is_pyobject: error(self.pos, "Reference base type cannot be a Python object") ref_type = PyrexTypes.c_ref_type(base_type) return self.base.analyse(ref_type, env, nonempty = nonempty) class CArrayDeclaratorNode(CDeclaratorNode): # base CDeclaratorNode # dimension ExprNode child_attrs = ["base", "dimension"] def analyse(self, base_type, env, nonempty = 0): if (base_type.is_cpp_class and base_type.is_template_type()) or base_type.is_cfunction: from .ExprNodes import TupleNode if isinstance(self.dimension, TupleNode): args = self.dimension.args else: args = self.dimension, values = [v.analyse_as_type(env) for v in args] if None in values: ix = values.index(None) error(args[ix].pos, "Template parameter not a type") base_type = error_type else: base_type = base_type.specialize_here(self.pos, values) return self.base.analyse(base_type, env, nonempty = nonempty) if self.dimension: self.dimension = self.dimension.analyse_const_expression(env) if not self.dimension.type.is_int: error(self.dimension.pos, "Array dimension not integer") size = self.dimension.get_constant_c_result_code() if size is not None: try: size = int(size) except ValueError: # runtime constant? 
pass else: size = None if not base_type.is_complete(): error(self.pos, "Array element type '%s' is incomplete" % base_type) if base_type.is_pyobject: error(self.pos, "Array element cannot be a Python object") if base_type.is_cfunction: error(self.pos, "Array element cannot be a function") array_type = PyrexTypes.c_array_type(base_type, size) return self.base.analyse(array_type, env, nonempty = nonempty) class CFuncDeclaratorNode(CDeclaratorNode): # base CDeclaratorNode # args [CArgDeclNode] # templates [TemplatePlaceholderType] # has_varargs boolean # exception_value ConstNode # exception_check boolean True if PyErr_Occurred check needed # nogil boolean Can be called without gil # with_gil boolean Acquire gil around function body # is_const_method boolean Whether this is a const method child_attrs = ["base", "args", "exception_value"] overridable = 0 optional_arg_count = 0 is_const_method = 0 templates = None def analyse_templates(self): if isinstance(self.base, CArrayDeclaratorNode): from .ExprNodes import TupleNode, NameNode template_node = self.base.dimension if isinstance(template_node, TupleNode): template_nodes = template_node.args elif isinstance(template_node, NameNode): template_nodes = [template_node] else: error(template_node.pos, "Template arguments must be a list of names") return None self.templates = [] for template in template_nodes: if isinstance(template, NameNode): self.templates.append(PyrexTypes.TemplatePlaceholderType(template.name)) else: error(template.pos, "Template arguments must be a list of names") self.base = self.base.base return self.templates else: return None def analyse(self, return_type, env, nonempty = 0, directive_locals = {}): if nonempty: nonempty -= 1 func_type_args = [] for i, arg_node in enumerate(self.args): name_declarator, type = arg_node.analyse( env, nonempty=nonempty, is_self_arg=(i == 0 and env.is_c_class_scope and 'staticmethod' not in env.directives)) name = name_declarator.name if name in directive_locals: type_node = directive_locals[name] other_type = type_node.analyse_as_type(env) if other_type is None: error(type_node.pos, "Not a type") elif (type is not PyrexTypes.py_object_type and not type.same_as(other_type)): error(self.base.pos, "Signature does not agree with previous declaration") error(type_node.pos, "Previous declaration here") else: type = other_type if name_declarator.cname: error(self.pos, "Function argument cannot have C name specification") if i==0 and env.is_c_class_scope and type.is_unspecified: # fix the type of self type = env.parent_type # Turn *[] argument into ** if type.is_array: type = PyrexTypes.c_ptr_type(type.base_type) # Catch attempted C-style func(void) decl if type.is_void: error(arg_node.pos, "Use spam() rather than spam(void) to declare a function with no arguments.") func_type_args.append( PyrexTypes.CFuncTypeArg(name, type, arg_node.pos)) if arg_node.default: self.optional_arg_count += 1 elif self.optional_arg_count: error(self.pos, "Non-default argument follows default argument") exc_val = None exc_check = 0 if self.exception_check == '+': env.add_include_file('ios') # for std::ios_base::failure env.add_include_file('new') # for std::bad_alloc env.add_include_file('stdexcept') env.add_include_file('typeinfo') # for std::bad_cast if (return_type.is_pyobject and (self.exception_value or self.exception_check) and self.exception_check != '+'): error(self.pos, "Exception clause not allowed for function returning Python object") else: if self.exception_value: self.exception_value = 
self.exception_value.analyse_const_expression(env) if self.exception_check == '+': exc_val_type = self.exception_value.type if (not exc_val_type.is_error and not exc_val_type.is_pyobject and not (exc_val_type.is_cfunction and not exc_val_type.return_type.is_pyobject and not exc_val_type.args)): error(self.exception_value.pos, "Exception value must be a Python exception or cdef function with no arguments.") exc_val = self.exception_value else: self.exception_value = self.exception_value.coerce_to( return_type, env).analyse_const_expression(env) exc_val = self.exception_value.get_constant_c_result_code() if exc_val is None: raise InternalError( "get_constant_c_result_code not implemented for %s" % self.exception_value.__class__.__name__) if not return_type.assignable_from(self.exception_value.type): error(self.exception_value.pos, "Exception value incompatible with function return type") exc_check = self.exception_check if return_type.is_cfunction: error(self.pos, "Function cannot return a function") func_type = PyrexTypes.CFuncType( return_type, func_type_args, self.has_varargs, optional_arg_count = self.optional_arg_count, exception_value = exc_val, exception_check = exc_check, calling_convention = self.base.calling_convention, nogil = self.nogil, with_gil = self.with_gil, is_overridable = self.overridable, is_const_method = self.is_const_method, templates = self.templates) if self.optional_arg_count: if func_type.is_fused: # This is a bit of a hack... When we need to create specialized CFuncTypes # on the fly because the cdef is defined in a pxd, we need to declare the specialized optional arg # struct def declare_opt_arg_struct(func_type, fused_cname): self.declare_optional_arg_struct(func_type, env, fused_cname) func_type.declare_opt_arg_struct = declare_opt_arg_struct else: self.declare_optional_arg_struct(func_type, env) callspec = env.directives['callspec'] if callspec: current = func_type.calling_convention if current and current != callspec: error(self.pos, "cannot have both '%s' and '%s' " "calling conventions" % (current, callspec)) func_type.calling_convention = callspec return self.base.analyse(func_type, env) def declare_optional_arg_struct(self, func_type, env, fused_cname=None): """ Declares the optional argument struct (the struct used to hold the values for optional arguments). For fused cdef functions, this is deferred as analyse_declarations is called only once (on the fused cdef function). 
""" scope = StructOrUnionScope() arg_count_member = '%sn' % Naming.pyrex_prefix scope.declare_var(arg_count_member, PyrexTypes.c_int_type, self.pos) for arg in func_type.args[len(func_type.args)-self.optional_arg_count:]: scope.declare_var(arg.name, arg.type, arg.pos, allow_pyobject = 1) struct_cname = env.mangle(Naming.opt_arg_prefix, self.base.name) if fused_cname is not None: struct_cname = PyrexTypes.get_fused_cname(fused_cname, struct_cname) op_args_struct = env.global_scope().declare_struct_or_union( name = struct_cname, kind = 'struct', scope = scope, typedef_flag = 0, pos = self.pos, cname = struct_cname) op_args_struct.defined_in_pxd = 1 op_args_struct.used = 1 func_type.op_arg_struct = PyrexTypes.c_ptr_type(op_args_struct.type) class CConstDeclaratorNode(CDeclaratorNode): # base CDeclaratorNode child_attrs = ["base"] def analyse(self, base_type, env, nonempty = 0): if base_type.is_pyobject: error(self.pos, "Const base type cannot be a Python object") const = PyrexTypes.c_const_type(base_type) return self.base.analyse(const, env, nonempty = nonempty) class CArgDeclNode(Node): # Item in a function declaration argument list. # # base_type CBaseTypeNode # declarator CDeclaratorNode # not_none boolean Tagged with 'not None' # or_none boolean Tagged with 'or None' # accept_none boolean Resolved boolean for not_none/or_none # default ExprNode or None # default_value PyObjectConst constant for default value # annotation ExprNode or None Py3 function arg annotation # is_self_arg boolean Is the "self" arg of an extension type method # is_type_arg boolean Is the "class" arg of an extension type classmethod # is_kw_only boolean Is a keyword-only argument # is_dynamic boolean Non-literal arg stored inside CyFunction child_attrs = ["base_type", "declarator", "default", "annotation"] is_self_arg = 0 is_type_arg = 0 is_generic = 1 kw_only = 0 not_none = 0 or_none = 0 type = None name_declarator = None default_value = None annotation = None is_dynamic = 0 def analyse(self, env, nonempty = 0, is_self_arg = False): if is_self_arg: self.base_type.is_self_arg = self.is_self_arg = True if self.type is None: # The parser may misinterpret names as types. We fix that here. if isinstance(self.declarator, CNameDeclaratorNode) and self.declarator.name == '': if nonempty: if self.base_type.is_basic_c_type: # char, short, long called "int" type = self.base_type.analyse(env, could_be_name=True) arg_name = type.empty_declaration_code() else: arg_name = self.base_type.name self.declarator.name = EncodedString(arg_name) self.base_type.name = None self.base_type.is_basic_c_type = False could_be_name = True else: could_be_name = False self.base_type.is_arg = True base_type = self.base_type.analyse(env, could_be_name=could_be_name) if hasattr(self.base_type, 'arg_name') and self.base_type.arg_name: self.declarator.name = self.base_type.arg_name # The parser is unable to resolve the ambiguity of [] as part of the # type (e.g. in buffers) or empty declarator (as with arrays). # This is only arises for empty multi-dimensional arrays. 
if (base_type.is_array and isinstance(self.base_type, TemplatedTypeNode) and isinstance(self.declarator, CArrayDeclaratorNode)): declarator = self.declarator while isinstance(declarator.base, CArrayDeclaratorNode): declarator = declarator.base declarator.base = self.base_type.array_declarator base_type = base_type.base_type # inject type declaration from annotations if self.annotation and env.directives['annotation_typing'] and self.base_type.name is None: arg_type = self.inject_type_from_annotations(env) if arg_type is not None: base_type = arg_type return self.declarator.analyse(base_type, env, nonempty=nonempty) else: return self.name_declarator, self.type def inject_type_from_annotations(self, env): annotation = self.annotation if not annotation: return None base_type, arg_type = _analyse_signature_annotation(annotation, env) if base_type is not None: self.base_type = base_type return arg_type def calculate_default_value_code(self, code): if self.default_value is None: if self.default: if self.default.is_literal: # will not output any code, just assign the result_code self.default.generate_evaluation_code(code) return self.type.cast_code(self.default.result()) self.default_value = code.get_argument_default_const(self.type) return self.default_value def annotate(self, code): if self.default: self.default.annotate(code) def generate_assignment_code(self, code, target=None): default = self.default if default is None or default.is_literal: return if target is None: target = self.calculate_default_value_code(code) default.generate_evaluation_code(code) default.make_owned_reference(code) result = default.result_as(self.type) code.putln("%s = %s;" % (target, result)) if self.type.is_pyobject: code.put_giveref(default.result()) default.generate_post_assignment_code(code) default.free_temps(code) class CBaseTypeNode(Node): # Abstract base class for C base type nodes. # # Processing during analyse_declarations phase: # # analyse # Returns the type. def analyse_as_type(self, env): return self.analyse(env) class CAnalysedBaseTypeNode(Node): # type type child_attrs = [] def analyse(self, env, could_be_name = False): return self.type class CSimpleBaseTypeNode(CBaseTypeNode): # name string # module_path [string] Qualifying name components # is_basic_c_type boolean # signed boolean # longness integer # complex boolean # is_self_arg boolean Is self argument of C method # ##is_type_arg boolean Is type argument of class method child_attrs = [] arg_name = None # in case the argument name was interpreted as a type module_path = [] is_basic_c_type = False complex = False def analyse(self, env, could_be_name = False): # Return type descriptor. #print "CSimpleBaseTypeNode.analyse: is_self_arg =", self.is_self_arg ### type = None if self.is_basic_c_type: type = PyrexTypes.simple_c_type(self.signed, self.longness, self.name) if not type: error(self.pos, "Unrecognised type modifier combination") elif self.name == "object" and not self.module_path: type = py_object_type elif self.name is None: if self.is_self_arg and env.is_c_class_scope: #print "CSimpleBaseTypeNode.analyse: defaulting to parent type" ### type = env.parent_type ## elif self.is_type_arg and env.is_c_class_scope: ## type = Builtin.type_type else: type = py_object_type else: if self.module_path: # Maybe it's a nested C++ class. scope = env for item in self.module_path: entry = scope.lookup(item) if entry is not None and entry.is_cpp_class: scope = entry.type.scope else: scope = None break if scope is None: # Maybe it's a cimport. 
scope = env.find_imported_module(self.module_path, self.pos) if scope: scope.fused_to_specific = env.fused_to_specific else: scope = env if scope: if scope.is_c_class_scope: scope = scope.global_scope() type = scope.lookup_type(self.name) if type is not None: pass elif could_be_name: if self.is_self_arg and env.is_c_class_scope: type = env.parent_type ## elif self.is_type_arg and env.is_c_class_scope: ## type = Builtin.type_type else: type = py_object_type self.arg_name = EncodedString(self.name) else: if self.templates: if not self.name in self.templates: error(self.pos, "'%s' is not a type identifier" % self.name) type = PyrexTypes.TemplatePlaceholderType(self.name) else: error(self.pos, "'%s' is not a type identifier" % self.name) if self.complex: if not type.is_numeric or type.is_complex: error(self.pos, "can only complexify c numeric types") type = PyrexTypes.CComplexType(type) type.create_declaration_utility_code(env) elif type is Builtin.complex_type: # Special case: optimise builtin complex type into C's # double complex. The parser cannot do this (as for the # normal scalar types) as the user may have redeclared the # 'complex' type. Testing for the exact type here works. type = PyrexTypes.c_double_complex_type type.create_declaration_utility_code(env) self.complex = True if type: return type else: return PyrexTypes.error_type class MemoryViewSliceTypeNode(CBaseTypeNode): name = 'memoryview' child_attrs = ['base_type_node', 'axes'] def analyse(self, env, could_be_name = False): base_type = self.base_type_node.analyse(env) if base_type.is_error: return base_type from . import MemoryView try: axes_specs = MemoryView.get_axes_specs(env, self.axes) except CompileError, e: error(e.position, e.message_only) self.type = PyrexTypes.ErrorType() return self.type if not MemoryView.validate_axes(self.pos, axes_specs): self.type = error_type else: MemoryView.validate_memslice_dtype(self.pos, base_type) self.type = PyrexTypes.MemoryViewSliceType(base_type, axes_specs) self.use_memview_utilities(env) return self.type def use_memview_utilities(self, env): from . import MemoryView env.use_utility_code(MemoryView.view_utility_code) class CNestedBaseTypeNode(CBaseTypeNode): # For C++ classes that live inside other C++ classes. 
# name string # base_type CBaseTypeNode child_attrs = ['base_type'] def analyse(self, env, could_be_name = None): base_type = self.base_type.analyse(env) if base_type is PyrexTypes.error_type: return PyrexTypes.error_type if not base_type.is_cpp_class: error(self.pos, "'%s' is not a valid type scope" % base_type) return PyrexTypes.error_type type_entry = base_type.scope.lookup_here(self.name) if not type_entry or not type_entry.is_type: error(self.pos, "'%s.%s' is not a type identifier" % (base_type, self.name)) return PyrexTypes.error_type return type_entry.type class TemplatedTypeNode(CBaseTypeNode): # After parsing: # positional_args [ExprNode] List of positional arguments # keyword_args DictNode Keyword arguments # base_type_node CBaseTypeNode # After analysis: # type PyrexTypes.BufferType or PyrexTypes.CppClassType ...containing the right options child_attrs = ["base_type_node", "positional_args", "keyword_args", "dtype_node"] dtype_node = None name = None def analyse(self, env, could_be_name = False, base_type = None): if base_type is None: base_type = self.base_type_node.analyse(env) if base_type.is_error: return base_type if base_type.is_cpp_class and base_type.is_template_type(): # Templated class if self.keyword_args and self.keyword_args.key_value_pairs: error(self.pos, "c++ templates cannot take keyword arguments") self.type = PyrexTypes.error_type else: template_types = [] for template_node in self.positional_args: type = template_node.analyse_as_type(env) if type is None: error(template_node.pos, "unknown type in template argument") return error_type template_types.append(type) self.type = base_type.specialize_here(self.pos, template_types) elif base_type.is_pyobject: # Buffer from . import Buffer options = Buffer.analyse_buffer_options( self.pos, env, self.positional_args, self.keyword_args, base_type.buffer_defaults) if sys.version_info[0] < 3: # Py 2.x enforces byte strings as keyword arguments ... options = dict([ (name.encode('ASCII'), value) for name, value in options.items() ]) self.type = PyrexTypes.BufferType(base_type, **options) else: # Array empty_declarator = CNameDeclaratorNode(self.pos, name="", cname=None) if len(self.positional_args) > 1 or self.keyword_args.key_value_pairs: error(self.pos, "invalid array declaration") self.type = PyrexTypes.error_type else: # It would be nice to merge this class with CArrayDeclaratorNode, # but arrays are part of the declaration, not the type... 
if not self.positional_args: dimension = None else: dimension = self.positional_args[0] self.array_declarator = CArrayDeclaratorNode(self.pos, base = empty_declarator, dimension = dimension) self.type = self.array_declarator.analyse(base_type, env)[1] if self.type.is_fused and env.fused_to_specific: self.type = self.type.specialize(env.fused_to_specific) return self.type class CComplexBaseTypeNode(CBaseTypeNode): # base_type CBaseTypeNode # declarator CDeclaratorNode child_attrs = ["base_type", "declarator"] def analyse(self, env, could_be_name = False): base = self.base_type.analyse(env, could_be_name) _, type = self.declarator.analyse(base, env) return type class CTupleBaseTypeNode(CBaseTypeNode): # components [CBaseTypeNode] child_attrs = ["components"] def analyse(self, env, could_be_name=False): component_types = [] for c in self.components: type = c.analyse(env) if type.is_pyobject: error(c.pos, "Tuple types can't (yet) contain Python objects.") return error_type component_types.append(type) entry = env.declare_tuple_type(self.pos, component_types) entry.used = True return entry.type class FusedTypeNode(CBaseTypeNode): """ Represents a fused type in a ctypedef statement: ctypedef cython.fused_type(int, long, long long) integral name str name of this fused type types [CSimpleBaseTypeNode] is the list of types to be fused """ child_attrs = [] def analyse_declarations(self, env): type = self.analyse(env) entry = env.declare_typedef(self.name, type, self.pos) # Omit the typedef declaration that self.declarator would produce entry.in_cinclude = True def analyse(self, env, could_be_name = False): types = [] for type_node in self.types: type = type_node.analyse_as_type(env) if not type: error(type_node.pos, "Not a type") continue if type in types: error(type_node.pos, "Type specified multiple times") else: types.append(type) # if len(self.types) == 1: # return types[0] return PyrexTypes.FusedType(types, name=self.name) class CConstTypeNode(CBaseTypeNode): # base_type CBaseTypeNode child_attrs = ["base_type"] def analyse(self, env, could_be_name = False): base = self.base_type.analyse(env, could_be_name) if base.is_pyobject: error(self.pos, "Const base type cannot be a Python object") return PyrexTypes.c_const_type(base) class CVarDefNode(StatNode): # C variable definition or forward/extern function declaration. # # visibility 'private' or 'public' or 'extern' # base_type CBaseTypeNode # declarators [CDeclaratorNode] # in_pxd boolean # api boolean # overridable boolean whether it is a cpdef # modifiers ['inline'] # decorators [cython.locals(...)] or None # directive_locals { string : NameNode } locals defined by cython.locals(...) 
child_attrs = ["base_type", "declarators"] decorators = None directive_locals = None def analyse_declarations(self, env, dest_scope = None): if self.directive_locals is None: self.directive_locals = {} if not dest_scope: dest_scope = env self.dest_scope = dest_scope if self.declarators: templates = self.declarators[0].analyse_templates() else: templates = None if templates is not None: if self.visibility != 'extern': error(self.pos, "Only extern functions allowed") if len(self.declarators) > 1: error(self.declarators[1].pos, "Can't multiply declare template types") env = TemplateScope('func_template', env) env.directives = env.outer_scope.directives for template_param in templates: env.declare_type(template_param.name, template_param, self.pos) base_type = self.base_type.analyse(env) if base_type.is_fused and not self.in_pxd and (env.is_c_class_scope or env.is_module_scope): error(self.pos, "Fused types not allowed here") return error_type self.entry = None visibility = self.visibility for declarator in self.declarators: if (len(self.declarators) > 1 and not isinstance(declarator, CNameDeclaratorNode) and env.directives['warn.multiple_declarators']): warning(declarator.pos, "Non-trivial type declarators in shared declaration (e.g. mix of pointers and values). " + "Each pointer declaration should be on its own line.", 1) create_extern_wrapper = (self.overridable and self.visibility == 'extern' and env.is_module_scope) if create_extern_wrapper: declarator.overridable = False if isinstance(declarator, CFuncDeclaratorNode): name_declarator, type = declarator.analyse(base_type, env, directive_locals=self.directive_locals) else: name_declarator, type = declarator.analyse(base_type, env) if not type.is_complete(): if not (self.visibility == 'extern' and type.is_array or type.is_memoryviewslice): error(declarator.pos, "Variable type '%s' is incomplete" % type) if self.visibility == 'extern' and type.is_pyobject: error(declarator.pos, "Python object cannot be declared extern") name = name_declarator.name cname = name_declarator.cname if name == '': error(declarator.pos, "Missing name in declaration.") return if type.is_cfunction: self.entry = dest_scope.declare_cfunction(name, type, declarator.pos, cname=cname, visibility=self.visibility, in_pxd=self.in_pxd, api=self.api, modifiers=self.modifiers, overridable=self.overridable) if self.entry is not None: self.entry.directive_locals = copy.copy(self.directive_locals) if 'staticmethod' in env.directives: type.is_static_method = True if create_extern_wrapper: self.entry.type.create_to_py_utility_code(env) self.entry.create_wrapper = True else: if self.directive_locals: error(self.pos, "Decorators can only be followed by functions") self.entry = dest_scope.declare_var(name, type, declarator.pos, cname=cname, visibility=visibility, in_pxd=self.in_pxd, api=self.api, is_cdef=1) if Options.docstrings: self.entry.doc = embed_position(self.pos, self.doc) class CStructOrUnionDefNode(StatNode): # name string # cname string or None # kind "struct" or "union" # typedef_flag boolean # visibility "public" or "private" # api boolean # in_pxd boolean # attributes [CVarDefNode] or None # entry Entry # packed boolean child_attrs = ["attributes"] def declare(self, env, scope=None): self.entry = env.declare_struct_or_union( self.name, self.kind, scope, self.typedef_flag, self.pos, self.cname, visibility = self.visibility, api = self.api, packed = self.packed) def analyse_declarations(self, env): scope = None if self.attributes is not None: scope = 
StructOrUnionScope(self.name) self.declare(env, scope) if self.attributes is not None: if self.in_pxd and not env.in_cinclude: self.entry.defined_in_pxd = 1 for attr in self.attributes: attr.analyse_declarations(env, scope) if self.visibility != 'extern': for attr in scope.var_entries: type = attr.type while type.is_array: type = type.base_type if type == self.entry.type: error(attr.pos, "Struct cannot contain itself as a member.") def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class CppClassNode(CStructOrUnionDefNode, BlockNode): # name string # cname string or None # visibility "extern" # in_pxd boolean # attributes [CVarDefNode] or None # entry Entry # base_classes [CBaseTypeNode] # templates [string] or None # decorators [DecoratorNode] or None decorators = None def declare(self, env): if self.templates is None: template_types = None else: template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates] self.entry = env.declare_cpp_class( self.name, None, self.pos, self.cname, base_classes = [], visibility = self.visibility, templates = template_types) def analyse_declarations(self, env): scope = None if self.attributes is not None: scope = CppClassScope(self.name, env, templates = self.templates) def base_ok(base_class): if base_class.is_cpp_class or base_class.is_struct: return True else: error(self.pos, "Base class '%s' not a struct or class." % base_class) base_class_types = filter(base_ok, [b.analyse(scope or env) for b in self.base_classes]) if self.templates is None: template_types = None else: template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates] self.entry = env.declare_cpp_class( self.name, scope, self.pos, self.cname, base_class_types, visibility = self.visibility, templates = template_types) if self.entry is None: return self.entry.is_cpp_class = 1 if scope is not None: scope.type = self.entry.type defined_funcs = [] def func_attributes(attributes): for attr in attributes: if isinstance(attr, CFuncDefNode): yield attr elif isinstance(attr, CompilerDirectivesNode): for sub_attr in func_attributes(attr.body.stats): yield sub_attr if self.attributes is not None: if self.in_pxd and not env.in_cinclude: self.entry.defined_in_pxd = 1 for attr in self.attributes: attr.analyse_declarations(scope) for func in func_attributes(self.attributes): defined_funcs.append(func) if self.templates is not None: func.template_declaration = "template <typename %s>" % ", typename ".join(self.templates) self.body = StatListNode(self.pos, stats=defined_funcs) self.scope = scope def analyse_expressions(self, env): self.body = self.body.analyse_expressions(self.entry.type.scope) return self def generate_function_definitions(self, env, code): self.body.generate_function_definitions(self.entry.type.scope, code) def generate_execution_code(self, code): self.body.generate_execution_code(code) def annotate(self, code): self.body.annotate(code) class CEnumDefNode(StatNode): # name string or None # cname string or None # items [CEnumDefItemNode] # typedef_flag boolean # visibility "public" or "private" or "extern" # api boolean # in_pxd boolean # create_wrapper boolean # entry Entry child_attrs = ["items"] def declare(self, env): self.entry = env.declare_enum(self.name, self.pos, cname = self.cname, typedef_flag = self.typedef_flag, visibility = self.visibility, api = self.api, create_wrapper = self.create_wrapper) def analyse_declarations(self, env): if self.items is not 
None: if self.in_pxd and not env.in_cinclude: self.entry.defined_in_pxd = 1 for item in self.items: item.analyse_declarations(env, self.entry) def analyse_expressions(self, env): return self def generate_execution_code(self, code): if self.visibility == 'public' or self.api: code.mark_pos(self.pos) temp = code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=True) for item in self.entry.enum_values: code.putln("%s = PyInt_FromLong(%s); %s" % ( temp, item.cname, code.error_goto_if_null(temp, item.pos))) code.put_gotref(temp) code.putln('if (PyDict_SetItemString(%s, "%s", %s) < 0) %s' % ( Naming.moddict_cname, item.name, temp, code.error_goto(item.pos))) code.put_decref_clear(temp, PyrexTypes.py_object_type) code.funcstate.release_temp(temp) class CEnumDefItemNode(StatNode): # name string # cname string or None # value ExprNode or None child_attrs = ["value"] def analyse_declarations(self, env, enum_entry): if self.value: self.value = self.value.analyse_const_expression(env) if not self.value.type.is_int: self.value = self.value.coerce_to(PyrexTypes.c_int_type, env) self.value = self.value.analyse_const_expression(env) entry = env.declare_const(self.name, enum_entry.type, self.value, self.pos, cname = self.cname, visibility = enum_entry.visibility, api = enum_entry.api, create_wrapper = enum_entry.create_wrapper) enum_entry.enum_values.append(entry) if enum_entry.name: enum_entry.type.values.append(entry.cname) class CTypeDefNode(StatNode): # base_type CBaseTypeNode # declarator CDeclaratorNode # visibility "public" or "private" # api boolean # in_pxd boolean child_attrs = ["base_type", "declarator"] def analyse_declarations(self, env): base = self.base_type.analyse(env) name_declarator, type = self.declarator.analyse(base, env) name = name_declarator.name cname = name_declarator.cname entry = env.declare_typedef(name, type, self.pos, cname = cname, visibility = self.visibility, api = self.api) if type.is_fused: entry.in_cinclude = True if self.in_pxd and not env.in_cinclude: entry.defined_in_pxd = 1 def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class FuncDefNode(StatNode, BlockNode): # Base class for function definition nodes. # # return_type PyrexType # #filename string C name of filename string const # entry Symtab.Entry # needs_closure boolean Whether or not this function has inner functions/classes/yield # needs_outer_scope boolean Whether or not this function requires outer scope # pymethdef_required boolean Force Python method struct generation # directive_locals { string : ExprNode } locals defined by cython.locals(...) # directive_returns [ExprNode] type defined by cython.returns(...) # star_arg PyArgDeclNode or None * argument # starstar_arg PyArgDeclNode or None ** argument # # is_async_def boolean is a Coroutine function # # has_fused_arguments boolean # Whether this cdef function has fused parameters. 
This is needed # by AnalyseDeclarationsTransform, so it can replace CFuncDefNodes # with fused argument types with a FusedCFuncDefNode py_func = None needs_closure = False needs_outer_scope = False pymethdef_required = False is_generator = False is_generator_body = False is_async_def = False modifiers = [] has_fused_arguments = False star_arg = None starstar_arg = None is_cyfunction = False code_object = None def analyse_default_values(self, env): default_seen = 0 for arg in self.args: if arg.default: default_seen = 1 if arg.is_generic: arg.default = arg.default.analyse_types(env) arg.default = arg.default.coerce_to(arg.type, env) else: error(arg.pos, "This argument cannot have a default value") arg.default = None elif arg.kw_only: default_seen = 1 elif default_seen: error(arg.pos, "Non-default argument following default argument") def analyse_annotations(self, env): for arg in self.args: if arg.annotation: arg.annotation = arg.annotation.analyse_types(env) def align_argument_type(self, env, arg): # @cython.locals() directive_locals = self.directive_locals orig_type = arg.type if arg.name in directive_locals: type_node = directive_locals[arg.name] other_type = type_node.analyse_as_type(env) elif isinstance(arg, CArgDeclNode) and arg.annotation and env.directives['annotation_typing']: type_node = arg.annotation other_type = arg.inject_type_from_annotations(env) if other_type is None: return arg else: return arg if other_type is None: error(type_node.pos, "Not a type") elif (orig_type is not PyrexTypes.py_object_type and not orig_type.same_as(other_type)): error(arg.base_type.pos, "Signature does not agree with previous declaration") error(type_node.pos, "Previous declaration here") else: arg.type = other_type return arg def need_gil_acquisition(self, lenv): return 0 def create_local_scope(self, env): genv = env while genv.is_py_class_scope or genv.is_c_class_scope: genv = genv.outer_scope if self.needs_closure: lenv = ClosureScope(name=self.entry.name, outer_scope = genv, parent_scope = env, scope_name=self.entry.cname) else: lenv = LocalScope(name=self.entry.name, outer_scope=genv, parent_scope=env) lenv.return_type = self.return_type type = self.entry.type if type.is_cfunction: lenv.nogil = type.nogil and not type.with_gil self.local_scope = lenv lenv.directives = env.directives return lenv def generate_function_body(self, env, code): self.body.generate_execution_code(code) def generate_function_definitions(self, env, code): from . import Buffer if self.return_type.is_memoryviewslice: from . 
import MemoryView lenv = self.local_scope if lenv.is_closure_scope and not lenv.is_passthrough: outer_scope_cname = "%s->%s" % (Naming.cur_scope_cname, Naming.outer_scope_cname) else: outer_scope_cname = Naming.outer_scope_cname lenv.mangle_closure_cnames(outer_scope_cname) # Generate closure function definitions self.body.generate_function_definitions(lenv, code) # generate lambda function definitions self.generate_lambda_definitions(lenv, code) is_getbuffer_slot = (self.entry.name == "__getbuffer__" and self.entry.scope.is_c_class_scope) is_releasebuffer_slot = (self.entry.name == "__releasebuffer__" and self.entry.scope.is_c_class_scope) is_buffer_slot = is_getbuffer_slot or is_releasebuffer_slot if is_buffer_slot: if 'cython_unused' not in self.modifiers: self.modifiers = self.modifiers + ['cython_unused'] preprocessor_guard = self.get_preprocessor_guard() profile = code.globalstate.directives['profile'] linetrace = code.globalstate.directives['linetrace'] if profile or linetrace: code.globalstate.use_utility_code( UtilityCode.load_cached("Profile", "Profile.c")) # Generate C code for header and body of function code.enter_cfunc_scope() code.return_from_error_cleanup_label = code.new_label() code.funcstate.gil_owned = not lenv.nogil # ----- Top-level constants used by this function code.mark_pos(self.pos) self.generate_cached_builtins_decls(lenv, code) # ----- Function header code.putln("") if preprocessor_guard: code.putln(preprocessor_guard) with_pymethdef = (self.needs_assignment_synthesis(env, code) or self.pymethdef_required) if self.py_func: self.py_func.generate_function_header(code, with_pymethdef = with_pymethdef, proto_only=True) self.generate_function_header(code, with_pymethdef = with_pymethdef) # ----- Local variable declarations # Find function scope cenv = env while cenv.is_py_class_scope or cenv.is_c_class_scope: cenv = cenv.outer_scope if self.needs_closure: code.put(lenv.scope_class.type.declaration_code(Naming.cur_scope_cname)) code.putln(";") elif self.needs_outer_scope: if lenv.is_passthrough: code.put(lenv.scope_class.type.declaration_code(Naming.cur_scope_cname)) code.putln(";") code.put(cenv.scope_class.type.declaration_code(Naming.outer_scope_cname)) code.putln(";") self.generate_argument_declarations(lenv, code) for entry in lenv.var_entries: if not (entry.in_closure or entry.is_arg): code.put_var_declaration(entry) # Initialize the return variable __pyx_r init = "" if not self.return_type.is_void: if self.return_type.is_pyobject: init = " = NULL" elif self.return_type.is_memoryviewslice: init = ' = ' + MemoryView.memslice_entry_init code.putln( "%s%s;" % (self.return_type.declaration_code(Naming.retval_cname), init)) tempvardecl_code = code.insertion_point() self.generate_keyword_list(code) # ----- Extern library function declarations lenv.generate_library_function_declarations(code) # ----- GIL acquisition acquire_gil = self.acquire_gil # See if we need to acquire the GIL for variable declarations, or for # refnanny only # Closures are not currently possible for cdef nogil functions, # but check them anyway have_object_args = self.needs_closure or self.needs_outer_scope for arg in lenv.arg_entries: if arg.type.is_pyobject: have_object_args = True break acquire_gil_for_var_decls_only = ( lenv.nogil and lenv.has_with_gil_block and (have_object_args or lenv.buffer_entries)) acquire_gil_for_refnanny_only = ( lenv.nogil and lenv.has_with_gil_block and not acquire_gil_for_var_decls_only) use_refnanny = not lenv.nogil or lenv.has_with_gil_block if acquire_gil or 
acquire_gil_for_var_decls_only: code.put_ensure_gil() code.funcstate.gil_owned = True elif lenv.nogil and lenv.has_with_gil_block: code.declare_gilstate() if profile or linetrace: tempvardecl_code.put_trace_declarations() code_object = self.code_object.calculate_result_code(code) if self.code_object else None code.put_trace_frame_init(code_object) # ----- set up refnanny if use_refnanny: tempvardecl_code.put_declare_refcount_context() code.put_setup_refcount_context( self.entry.name, acquire_gil=acquire_gil_for_refnanny_only) # ----- Automatic lead-ins for certain special functions if is_getbuffer_slot: self.getbuffer_init(code) # ----- Create closure scope object if self.needs_closure: tp_slot = TypeSlots.ConstructorSlot("tp_new", '__new__') slot_func_cname = TypeSlots.get_slot_function(lenv.scope_class.type.scope, tp_slot) if not slot_func_cname: slot_func_cname = '%s->tp_new' % lenv.scope_class.type.typeptr_cname code.putln("%s = (%s)%s(%s, %s, NULL);" % ( Naming.cur_scope_cname, lenv.scope_class.type.empty_declaration_code(), slot_func_cname, lenv.scope_class.type.typeptr_cname, Naming.empty_tuple)) code.putln("if (unlikely(!%s)) {" % Naming.cur_scope_cname) if is_getbuffer_slot: self.getbuffer_error_cleanup(code) if use_refnanny: code.put_finish_refcount_context() if acquire_gil or acquire_gil_for_var_decls_only: code.put_release_ensured_gil() # FIXME: what if the error return value is a Python value? err_val = self.error_value() if err_val is None: if not self.caller_will_check_exceptions(): warning(self.entry.pos, "Unraisable exception in function '%s'." % self.entry.qualified_name, 0) code.put_unraisable(self.entry.qualified_name, lenv.nogil) #if self.return_type.is_void: code.putln("return;") else: code.putln("return %s;" % err_val) code.putln("}") code.put_gotref(Naming.cur_scope_cname) # Note that it is unsafe to decref the scope at this point. if self.needs_outer_scope: if self.is_cyfunction: code.putln("%s = (%s) __Pyx_CyFunction_GetClosure(%s);" % ( outer_scope_cname, cenv.scope_class.type.empty_declaration_code(), Naming.self_cname)) else: code.putln("%s = (%s) %s;" % ( outer_scope_cname, cenv.scope_class.type.empty_declaration_code(), Naming.self_cname)) if lenv.is_passthrough: code.putln("%s = %s;" % (Naming.cur_scope_cname, outer_scope_cname)) elif self.needs_closure: # inner closures own a reference to their outer parent code.put_incref(outer_scope_cname, cenv.scope_class.type) code.put_giveref(outer_scope_cname) # ----- Trace function call if profile or linetrace: # this looks a bit late, but if we don't get here due to a # fatal error beforehand, it's not really worth tracing code.put_trace_call(self.entry.name, self.pos, nogil=not code.funcstate.gil_owned) code.funcstate.can_trace = True # ----- Fetch arguments self.generate_argument_parsing_code(env, code) # If an argument is assigned to in the body, we must # incref it to properly keep track of refcounts. is_cdef = isinstance(self, CFuncDefNode) for entry in lenv.arg_entries: if entry.type.is_pyobject: if (acquire_gil or len(entry.cf_assignments) > 1) and not entry.in_closure: code.put_var_incref(entry) # Note: defaults are always incref-ed. For def functions, we # acquire arguments from object conversion, so we have # new references.
If we are a cdef function, we need to # incref our arguments elif is_cdef and entry.type.is_memoryviewslice and len(entry.cf_assignments) > 1: code.put_incref_memoryviewslice(entry.cname, have_gil=code.funcstate.gil_owned) for entry in lenv.var_entries: if entry.is_arg and len(entry.cf_assignments) > 1: if entry.xdecref_cleanup: code.put_var_xincref(entry) else: code.put_var_incref(entry) # ----- Initialise local buffer auxiliary variables for entry in lenv.var_entries + lenv.arg_entries: if entry.type.is_buffer and entry.buffer_aux.buflocal_nd_var.used: Buffer.put_init_vars(entry, code) # ----- Check and convert arguments self.generate_argument_type_tests(code) # ----- Acquire buffer arguments for entry in lenv.arg_entries: if entry.type.is_buffer: Buffer.put_acquire_arg_buffer(entry, code, self.pos) if acquire_gil_for_var_decls_only: code.put_release_ensured_gil() code.funcstate.gil_owned = False # ------------------------- # ----- Function body ----- # ------------------------- self.generate_function_body(env, code) code.mark_pos(self.pos, trace=False) code.putln("") code.putln("/* function exit code */") # ----- Default return value if not self.body.is_terminator: if self.return_type.is_pyobject: #if self.return_type.is_extension_type: # lhs = "(PyObject *)%s" % Naming.retval_cname #else: lhs = Naming.retval_cname code.put_init_to_py_none(lhs, self.return_type) else: val = self.return_type.default_value if val: code.putln("%s = %s;" % (Naming.retval_cname, val)) # ----- Error cleanup if code.error_label in code.labels_used: if not self.body.is_terminator: code.put_goto(code.return_label) code.put_label(code.error_label) for cname, type in code.funcstate.all_managed_temps(): code.put_xdecref(cname, type, have_gil=not lenv.nogil) # Clean up buffers -- this calls a Python function # so need to save and restore error state buffers_present = len(lenv.buffer_entries) > 0 memslice_entries = [e for e in lenv.entries.itervalues() if e.type.is_memoryviewslice] if buffers_present: code.globalstate.use_utility_code(restore_exception_utility_code) code.putln("{ PyObject *__pyx_type, *__pyx_value, *__pyx_tb;") code.putln("__Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);") for entry in lenv.buffer_entries: Buffer.put_release_buffer_code(code, entry) #code.putln("%s = 0;" % entry.cname) code.putln("__Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}") if self.return_type.is_memoryviewslice: MemoryView.put_init_entry(Naming.retval_cname, code) err_val = Naming.retval_cname else: err_val = self.error_value() exc_check = self.caller_will_check_exceptions() if err_val is not None or exc_check: # TODO: Fix exception tracing (though currently unused by cProfile). # code.globalstate.use_utility_code(get_exception_tuple_utility_code) # code.put_trace_exception() if lenv.nogil and not lenv.has_with_gil_block: code.putln("{") code.put_ensure_gil() code.put_add_traceback(self.entry.qualified_name) if lenv.nogil and not lenv.has_with_gil_block: code.put_release_ensured_gil() code.putln("}") else: warning(self.entry.pos, "Unraisable exception in function '%s'." % self.entry.qualified_name, 0) code.put_unraisable(self.entry.qualified_name, lenv.nogil) default_retval = self.return_type.default_value if err_val is None and default_retval: err_val = default_retval if err_val is not None: code.putln("%s = %s;" % (Naming.retval_cname, err_val)) if is_getbuffer_slot: self.getbuffer_error_cleanup(code) # If we are using the non-error cleanup section we should # jump past it if we have an error. 
The if-test below determines # whether this section is used. if buffers_present or is_getbuffer_slot or self.return_type.is_memoryviewslice: code.put_goto(code.return_from_error_cleanup_label) # ----- Non-error return cleanup code.put_label(code.return_label) for entry in lenv.buffer_entries: if entry.used: Buffer.put_release_buffer_code(code, entry) if is_getbuffer_slot: self.getbuffer_normal_cleanup(code) if self.return_type.is_memoryviewslice: # See if our return value is uninitialized on non-error return # from . import MemoryView # MemoryView.err_if_nogil_initialized_check(self.pos, env) cond = code.unlikely(self.return_type.error_condition( Naming.retval_cname)) code.putln( 'if (%s) {' % cond) if env.nogil: code.put_ensure_gil() code.putln( 'PyErr_SetString(' 'PyExc_TypeError,' '"Memoryview return value is not initialized");') if env.nogil: code.put_release_ensured_gil() code.putln( '}') # ----- Return cleanup for both error and no-error return code.put_label(code.return_from_error_cleanup_label) for entry in lenv.var_entries: if not entry.used or entry.in_closure: continue if entry.type.is_memoryviewslice: code.put_xdecref_memoryviewslice(entry.cname, have_gil=not lenv.nogil) elif entry.type.is_pyobject: if not entry.is_arg or len(entry.cf_assignments) > 1: if entry.xdecref_cleanup: code.put_var_xdecref(entry) else: code.put_var_decref(entry) # Decref any increfed args for entry in lenv.arg_entries: if entry.type.is_pyobject: if ((acquire_gil or len(entry.cf_assignments) > 1) and not entry.in_closure): code.put_var_decref(entry) elif (entry.type.is_memoryviewslice and (not is_cdef or len(entry.cf_assignments) > 1)): # decref slices of def functions and acquired slices from cdef # functions, but not borrowed slices from cdef functions. code.put_xdecref_memoryviewslice(entry.cname, have_gil=not lenv.nogil) if self.needs_closure: code.put_decref(Naming.cur_scope_cname, lenv.scope_class.type) # ----- Return # This code is duplicated in ModuleNode.generate_module_init_func if not lenv.nogil: default_retval = self.return_type.default_value err_val = self.error_value() if err_val is None and default_retval: err_val = default_retval # FIXME: why is err_val not used? if self.return_type.is_pyobject: code.put_xgiveref(self.return_type.as_pyobject(Naming.retval_cname)) if self.entry.is_special and self.entry.name == "__hash__": # Returning -1 for __hash__ is supposed to signal an error # We do as Python instances do and coerce -1 into -2.
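# --- Hedged illustration (not part of the compiler) ----------------------
# A minimal pure-Python model of the -1 -> -2 fixup that the C code emitted
# just below performs for __hash__ results: CPython reserves -1 in tp_hash
# as an error indicator, so a legitimate hash value of -1 must be remapped.
def _example_fixup_hash_result(h):
    # mirrors: if (unlikely(retval == -1) && !PyErr_Occurred()) retval = -2;
    return -2 if h == -1 else h

assert _example_fixup_hash_result(-1) == -2
assert _example_fixup_hash_result(42) == 42
# --------------------------------------------------------------------------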
code.putln("if (unlikely(%s == -1) && !PyErr_Occurred()) %s = -2;" % ( Naming.retval_cname, Naming.retval_cname)) if profile or linetrace: code.funcstate.can_trace = False if self.return_type.is_pyobject: code.put_trace_return(Naming.retval_cname, nogil=not code.funcstate.gil_owned) else: code.put_trace_return("Py_None", nogil=not code.funcstate.gil_owned) if not lenv.nogil: # GIL holding function code.put_finish_refcount_context() if acquire_gil or (lenv.nogil and lenv.has_with_gil_block): # release the GIL (note that with-gil blocks acquire it on exit in their EnsureGILNode) code.put_release_ensured_gil() code.funcstate.gil_owned = False if not self.return_type.is_void: code.putln("return %s;" % Naming.retval_cname) code.putln("}") if preprocessor_guard: code.putln("#endif /*!(%s)*/" % preprocessor_guard) # ----- Go back and insert temp variable declarations tempvardecl_code.put_temp_declarations(code.funcstate) # ----- Python version code.exit_cfunc_scope() if self.py_func: self.py_func.generate_function_definitions(env, code) self.generate_wrapper_functions(code) def declare_argument(self, env, arg): if arg.type.is_void: error(arg.pos, "Invalid use of 'void'") elif not arg.type.is_complete() and not (arg.type.is_array or arg.type.is_memoryviewslice): error(arg.pos, "Argument type '%s' is incomplete" % arg.type) return env.declare_arg(arg.name, arg.type, arg.pos) def generate_arg_type_test(self, arg, code): # Generate type test for one argument. if arg.type.typeobj_is_available(): code.globalstate.use_utility_code( UtilityCode.load_cached("ArgTypeTest", "FunctionArguments.c")) typeptr_cname = arg.type.typeptr_cname arg_code = "((PyObject *)%s)" % arg.entry.cname code.putln( 'if (unlikely(!__Pyx_ArgTypeTest(%s, %s, %d, "%s", %s))) %s' % ( arg_code, typeptr_cname, arg.accept_none, arg.name, arg.type.is_builtin_type, code.error_goto(arg.pos))) else: error(arg.pos, "Cannot test type of extern C class " "without type object name specification") def generate_arg_none_check(self, arg, code): # Generate None check for one argument. if arg.type.is_memoryviewslice: cname = "%s.memview" % arg.entry.cname else: cname = arg.entry.cname code.putln('if (unlikely(((PyObject *)%s) == Py_None)) {' % cname) code.putln('''PyErr_Format(PyExc_TypeError, "Argument '%%.%ds' must not be None", "%s"); %s''' % ( max(200, len(arg.name)), arg.name, code.error_goto(arg.pos))) code.putln('}') def generate_wrapper_functions(self, code): pass def generate_execution_code(self, code): code.mark_pos(self.pos) # Evaluate and store argument default values for arg in self.args: if not arg.is_dynamic: arg.generate_assignment_code(code) # # Special code for the __getbuffer__ function # def getbuffer_init(self, code): info = self.local_scope.arg_entries[1].cname # Python 3.0 betas have a bug in memoryview which makes it call # getbuffer with a NULL parameter. For now we work around this; # the following block should be removed when this bug is fixed. 
code.putln("if (%s != NULL) {" % info) code.putln("%s->obj = Py_None; __Pyx_INCREF(Py_None);" % info) code.put_giveref("%s->obj" % info) # Do not refnanny object within structs code.putln("}") def getbuffer_error_cleanup(self, code): info = self.local_scope.arg_entries[1].cname code.putln("if (%s != NULL && %s->obj != NULL) {" % (info, info)) code.put_gotref("%s->obj" % info) code.putln("__Pyx_DECREF(%s->obj); %s->obj = NULL;" % (info, info)) code.putln("}") def getbuffer_normal_cleanup(self, code): info = self.local_scope.arg_entries[1].cname code.putln("if (%s != NULL && %s->obj == Py_None) {" % (info, info)) code.put_gotref("Py_None") code.putln("__Pyx_DECREF(Py_None); %s->obj = NULL;" % info) code.putln("}") def get_preprocessor_guard(self): if not self.entry.is_special: return None name = self.entry.name slot = TypeSlots.method_name_to_slot.get(name) if not slot: return None if name == '__long__' and not self.entry.scope.lookup_here('__int__'): return None if name in ("__getbuffer__", "__releasebuffer__") and self.entry.scope.is_c_class_scope: return None return slot.preprocessor_guard_code() class CFuncDefNode(FuncDefNode): # C function definition. # # modifiers ['inline'] # visibility 'private' or 'public' or 'extern' # base_type CBaseTypeNode # declarator CDeclaratorNode # cfunc_declarator the CFuncDeclarator of this function # (this is also available through declarator or a # base thereof) # body StatListNode # api boolean # decorators [DecoratorNode] list of decorators # # with_gil boolean Acquire GIL around body # type CFuncType # py_func wrapper for calling from Python # overridable whether or not this is a cpdef function # inline_in_pxd whether this is an inline function in a pxd file # template_declaration String or None Used for c++ class methods # is_const_method whether this is a const method # is_static_method whether this is a static method # is_c_class_method whether this is a cclass method child_attrs = ["base_type", "declarator", "body", "py_func_stat"] inline_in_pxd = False decorators = None directive_locals = None directive_returns = None override = None template_declaration = None is_const_method = False py_func_stat = None def unqualified_name(self): return self.entry.name def analyse_declarations(self, env): self.is_c_class_method = env.is_c_class_scope if self.directive_locals is None: self.directive_locals = {} self.directive_locals.update(env.directives['locals']) if self.directive_returns is not None: base_type = self.directive_returns.analyse_as_type(env) if base_type is None: error(self.directive_returns.pos, "Not a type") base_type = PyrexTypes.error_type else: base_type = self.base_type.analyse(env) self.is_static_method = 'staticmethod' in env.directives and not env.lookup_here('staticmethod') # The 2 here is because we need both function and argument names. if isinstance(self.declarator, CFuncDeclaratorNode): name_declarator, type = self.declarator.analyse(base_type, env, nonempty = 2 * (self.body is not None), directive_locals = self.directive_locals) else: name_declarator, type = self.declarator.analyse(base_type, env, nonempty = 2 * (self.body is not None)) if not type.is_cfunction: error(self.pos, "Suite attached to non-function declaration") # Remember the actual type according to the function header # written here, because the type in the symbol table entry # may be different if we're overriding a C method inherited # from the base type of an extension type. 
self.type = type type.is_overridable = self.overridable declarator = self.declarator while not hasattr(declarator, 'args'): declarator = declarator.base self.cfunc_declarator = declarator self.args = declarator.args opt_arg_count = self.cfunc_declarator.optional_arg_count if (self.visibility == 'public' or self.api) and opt_arg_count: error(self.cfunc_declarator.pos, "Function with optional arguments may not be declared " "public or api") if (type.exception_check == '+' and self.visibility != 'extern'): warning(self.cfunc_declarator.pos, "Only extern functions can throw C++ exceptions.") for formal_arg, type_arg in zip(self.args, type.args): self.align_argument_type(env, type_arg) formal_arg.type = type_arg.type formal_arg.name = type_arg.name formal_arg.cname = type_arg.cname self._validate_type_visibility(type_arg.type, type_arg.pos, env) if type_arg.type.is_fused: self.has_fused_arguments = True if type_arg.type.is_buffer and 'inline' in self.modifiers: warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1) if type_arg.type.is_buffer: if self.type.nogil: error(formal_arg.pos, "Buffer may not be acquired without the GIL. " "Consider using memoryview slices instead.") elif 'inline' in self.modifiers: warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1) self._validate_type_visibility(type.return_type, self.pos, env) name = name_declarator.name cname = name_declarator.cname type.is_const_method = self.is_const_method type.is_static_method = self.is_static_method self.entry = env.declare_cfunction( name, type, self.pos, cname=cname, visibility=self.visibility, api=self.api, defining=self.body is not None, modifiers=self.modifiers, overridable=self.overridable) self.entry.inline_func_in_pxd = self.inline_in_pxd self.return_type = type.return_type if self.return_type.is_array and self.visibility != 'extern': error(self.pos, "Function cannot return an array") if self.return_type.is_cpp_class: self.return_type.check_nullary_constructor(self.pos, "used as a return value") if self.overridable and not env.is_module_scope and not self.is_static_method: if len(self.args) < 1 or not self.args[0].type.is_pyobject: # An error will be produced in the cdef function self.overridable = False self.declare_cpdef_wrapper(env) self.create_local_scope(env) def declare_cpdef_wrapper(self, env): if self.overridable: if self.is_static_method: # TODO(robertwb): Finish this up, perhaps via more function refactoring. 
error(self.pos, "static cpdef methods not yet supported") name = self.entry.name py_func_body = self.call_self_node(is_module_scope = env.is_module_scope) if self.is_static_method: from .ExprNodes import NameNode decorators = [DecoratorNode(self.pos, decorator=NameNode(self.pos, name='staticmethod'))] decorators[0].decorator.analyse_types(env) else: decorators = [] self.py_func = DefNode(pos = self.pos, name = self.entry.name, args = self.args, star_arg = None, starstar_arg = None, doc = self.doc, body = py_func_body, decorators = decorators, is_wrapper = 1) self.py_func.is_module_scope = env.is_module_scope self.py_func.analyse_declarations(env) self.py_func_stat = StatListNode(pos = self.pos, stats = [self.py_func]) self.py_func.type = PyrexTypes.py_object_type self.entry.as_variable = self.py_func.entry self.entry.used = self.entry.as_variable.used = True # Reset scope entry the above cfunction env.entries[name] = self.entry if (not self.entry.is_final_cmethod and (not env.is_module_scope or Options.lookup_module_cpdef)): self.override = OverrideCheckNode(self.pos, py_func = self.py_func) self.body = StatListNode(self.pos, stats=[self.override, self.body]) def _validate_type_visibility(self, type, pos, env): """ Ensure that types used in cdef functions are public or api, or defined in a C header. """ public_or_api = (self.visibility == 'public' or self.api) entry = getattr(type, 'entry', None) if public_or_api and entry and env.is_module_scope: if not (entry.visibility in ('public', 'extern') or entry.api or entry.in_cinclude): error(pos, "Function declared public or api may not have " "private types") def call_self_node(self, omit_optional_args=0, is_module_scope=0): from . import ExprNodes args = self.type.args if omit_optional_args: args = args[:len(args) - self.type.optional_arg_count] arg_names = [arg.name for arg in args] if is_module_scope: cfunc = ExprNodes.NameNode(self.pos, name=self.entry.name) call_arg_names = arg_names skip_dispatch = Options.lookup_module_cpdef elif self.type.is_static_method: class_entry = self.entry.scope.parent_type.entry class_node = ExprNodes.NameNode(self.pos, name=class_entry.name) class_node.entry = class_entry cfunc = ExprNodes.AttributeNode(self.pos, obj=class_node, attribute=self.entry.name) # Calling static c(p)def methods on an instance disallowed. # TODO(robertwb): Support by passing self to check for override? 
skip_dispatch = True else: type_entry = self.type.args[0].type.entry type_arg = ExprNodes.NameNode(self.pos, name=type_entry.name) type_arg.entry = type_entry cfunc = ExprNodes.AttributeNode(self.pos, obj=type_arg, attribute=self.entry.name) skip_dispatch = not is_module_scope or Options.lookup_module_cpdef c_call = ExprNodes.SimpleCallNode( self.pos, function=cfunc, args=[ExprNodes.NameNode(self.pos, name=n) for n in arg_names], wrapper_call=skip_dispatch) return ReturnStatNode(pos=self.pos, return_type=PyrexTypes.py_object_type, value=c_call) def declare_arguments(self, env): for arg in self.type.args: if not arg.name: error(arg.pos, "Missing argument name") self.declare_argument(env, arg) def need_gil_acquisition(self, lenv): return self.type.with_gil def nogil_check(self, env): type = self.type with_gil = type.with_gil if type.nogil and not with_gil: if type.return_type.is_pyobject: error(self.pos, "Function with Python return type cannot be declared nogil") for entry in self.local_scope.var_entries: if entry.type.is_pyobject and not entry.in_with_gil_block: error(self.pos, "Function declared nogil has Python locals or temporaries") def analyse_expressions(self, env): self.local_scope.directives = env.directives if self.py_func is not None: # this will also analyse the default values self.py_func = self.py_func.analyse_expressions(env) else: self.analyse_default_values(env) self.analyse_annotations(env) self.acquire_gil = self.need_gil_acquisition(self.local_scope) return self def needs_assignment_synthesis(self, env, code=None): return False def generate_function_header(self, code, with_pymethdef, with_opt_args = 1, with_dispatch = 1, cname = None): scope = self.local_scope arg_decls = [] type = self.type for arg in type.args[:len(type.args)-type.optional_arg_count]: arg_decl = arg.declaration_code() entry = scope.lookup(arg.name) if not entry.cf_used: arg_decl = 'CYTHON_UNUSED %s' % arg_decl arg_decls.append(arg_decl) if with_dispatch and self.overridable: dispatch_arg = PyrexTypes.c_int_type.declaration_code( Naming.skip_dispatch_cname) if self.override: arg_decls.append(dispatch_arg) else: arg_decls.append('CYTHON_UNUSED %s' % dispatch_arg) if type.optional_arg_count and with_opt_args: arg_decls.append(type.op_arg_struct.declaration_code(Naming.optional_args_cname)) if type.has_varargs: arg_decls.append("...") if not arg_decls: arg_decls = ["void"] if cname is None: cname = self.entry.func_cname entity = type.function_header_code(cname, ', '.join(arg_decls)) if self.entry.visibility == 'private' and '::' not in cname: storage_class = "static " else: storage_class = "" dll_linkage = None modifiers = code.build_function_modifiers(self.entry.func_modifiers) header = self.return_type.declaration_code(entity, dll_linkage=dll_linkage) #print (storage_class, modifiers, header) needs_proto = self.is_c_class_method if self.template_declaration: if needs_proto: code.globalstate.parts['module_declarations'].putln(self.template_declaration) code.putln(self.template_declaration) if needs_proto: code.globalstate.parts['module_declarations'].putln("%s%s%s; /* proto*/" % (storage_class, modifiers, header)) code.putln("%s%s%s {" % (storage_class, modifiers, header)) def generate_argument_declarations(self, env, code): scope = self.local_scope for arg in self.args: if arg.default: entry = scope.lookup(arg.name) if self.override or entry.cf_used: result = arg.calculate_default_value_code(code) code.putln('%s = %s;' % ( arg.type.declaration_code(arg.cname), result)) def generate_keyword_list(self, 
code): pass def generate_argument_parsing_code(self, env, code): i = 0 used = 0 scope = self.local_scope if self.type.optional_arg_count: code.putln('if (%s) {' % Naming.optional_args_cname) for arg in self.args: if arg.default: entry = scope.lookup(arg.name) if self.override or entry.cf_used: code.putln('if (%s->%sn > %s) {' % (Naming.optional_args_cname, Naming.pyrex_prefix, i)) declarator = arg.declarator while not hasattr(declarator, 'name'): declarator = declarator.base code.putln('%s = %s->%s;' % (arg.cname, Naming.optional_args_cname, self.type.opt_arg_cname(declarator.name))) used += 1 i += 1 for _ in range(used): code.putln('}') code.putln('}') # Move arguments into closure if required def put_into_closure(entry): if entry.in_closure and not arg.default: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) code.put_var_incref(entry) code.put_var_giveref(entry) for arg in self.args: put_into_closure(scope.lookup_here(arg.name)) def generate_argument_conversion_code(self, code): pass def generate_argument_type_tests(self, code): # Generate type tests for args whose type in a parent # class is a supertype of the declared type. for arg in self.type.args: if arg.needs_type_test: self.generate_arg_type_test(arg, code) elif arg.type.is_pyobject and not arg.accept_none: self.generate_arg_none_check(arg, code) def generate_execution_code(self, code): super(CFuncDefNode, self).generate_execution_code(code) if self.py_func_stat: self.py_func_stat.generate_execution_code(code) def error_value(self): if self.return_type.is_pyobject: return "0" else: #return None return self.entry.type.exception_value def caller_will_check_exceptions(self): return self.entry.type.exception_check def generate_wrapper_functions(self, code): # If the C signature of a function has changed, we need to generate # wrappers to put in the slots here. k = 0 entry = self.entry func_type = entry.type while entry.prev_entry is not None: k += 1 entry = entry.prev_entry entry.func_cname = "%s%swrap_%s" % (self.entry.func_cname, Naming.pyrex_prefix, k) code.putln() self.generate_function_header(code, 0, with_dispatch = entry.type.is_overridable, with_opt_args = entry.type.optional_arg_count, cname = entry.func_cname) if not self.return_type.is_void: code.put('return ') args = self.type.args arglist = [arg.cname for arg in args[:len(args)-self.type.optional_arg_count]] if entry.type.is_overridable: arglist.append(Naming.skip_dispatch_cname) elif func_type.is_overridable: arglist.append('0') if entry.type.optional_arg_count: arglist.append(Naming.optional_args_cname) elif func_type.optional_arg_count: arglist.append('NULL') code.putln('%s(%s);' % (self.entry.func_cname, ', '.join(arglist))) code.putln('}') class PyArgDeclNode(Node): # Argument which must be a Python object (used # for * and ** arguments). # # name string # entry Symtab.Entry # annotation ExprNode or None Py3 argument annotation child_attrs = [] is_self_arg = False is_type_arg = False def generate_function_definitions(self, env, code): self.entry.generate_function_definitions(env, code) class DecoratorNode(Node): # A decorator # # decorator NameNode or CallNode or AttributeNode child_attrs = ['decorator'] class DefNode(FuncDefNode): # A Python function definition. 
# # name string the Python name of the function # lambda_name string the internal name of a lambda 'function' # decorators [DecoratorNode] list of decorators # args [CArgDeclNode] formal arguments # doc EncodedString or None # body StatListNode # return_type_annotation # ExprNode or None the Py3 return type annotation # # The following subnode is constructed internally # when the def statement is inside a Python class definition. # # fused_py_func DefNode The original fused cpdef DefNode # (in case this is a specialization) # specialized_cpdefs [DefNode] list of specialized cpdef DefNodes # py_cfunc_node PyCFunctionNode/InnerFunctionNode The PyCFunction to create and assign # # decorator_indirection IndirectionNode Used to remove __Pyx_Method_ClassMethod for fused functions child_attrs = ["args", "star_arg", "starstar_arg", "body", "decorators", "return_type_annotation"] lambda_name = None reqd_kw_flags_cname = "0" is_wrapper = 0 no_assignment_synthesis = 0 decorators = None return_type_annotation = None entry = None acquire_gil = 0 self_in_stararg = 0 py_cfunc_node = None requires_classobj = False defaults_struct = None # Dynamic kwrds structure name doc = None fused_py_func = False specialized_cpdefs = None py_wrapper = None py_wrapper_required = True func_cname = None defaults_getter = None def __init__(self, pos, **kwds): FuncDefNode.__init__(self, pos, **kwds) k = rk = r = 0 for arg in self.args: if arg.kw_only: k += 1 if not arg.default: rk += 1 if not arg.default: r += 1 self.num_kwonly_args = k self.num_required_kw_args = rk self.num_required_args = r def as_cfunction(self, cfunc=None, scope=None, overridable=True, returns=None, modifiers=None): if self.star_arg: error(self.star_arg.pos, "cdef function cannot have star argument") if self.starstar_arg: error(self.starstar_arg.pos, "cdef function cannot have starstar argument") if cfunc is None: cfunc_args = [] for formal_arg in self.args: name_declarator, type = formal_arg.analyse(scope, nonempty=1) cfunc_args.append(PyrexTypes.CFuncTypeArg(name = name_declarator.name, cname = None, type = py_object_type, pos = formal_arg.pos)) cfunc_type = PyrexTypes.CFuncType(return_type = py_object_type, args = cfunc_args, has_varargs = False, exception_value = None, exception_check = False, nogil = False, with_gil = False, is_overridable = overridable) cfunc = CVarDefNode(self.pos, type=cfunc_type) else: if scope is None: scope = cfunc.scope cfunc_type = cfunc.type if len(self.args) != len(cfunc_type.args) or cfunc_type.has_varargs: error(self.pos, "wrong number of arguments") error(cfunc.pos, "previous declaration here") for i, (formal_arg, type_arg) in enumerate(zip(self.args, cfunc_type.args)): name_declarator, type = formal_arg.analyse(scope, nonempty=1, is_self_arg = (i == 0 and scope.is_c_class_scope)) if type is None or type is PyrexTypes.py_object_type: formal_arg.type = type_arg.type formal_arg.name_declarator = name_declarator from . 
import ExprNodes if cfunc_type.exception_value is None: exception_value = None else: exception_value = ExprNodes.ConstNode(self.pos, value=cfunc_type.exception_value, type=cfunc_type.return_type) declarator = CFuncDeclaratorNode(self.pos, base = CNameDeclaratorNode(self.pos, name=self.name, cname=None), args = self.args, has_varargs = False, exception_check = cfunc_type.exception_check, exception_value = exception_value, with_gil = cfunc_type.with_gil, nogil = cfunc_type.nogil) return CFuncDefNode(self.pos, modifiers = modifiers or [], base_type = CAnalysedBaseTypeNode(self.pos, type=cfunc_type.return_type), declarator = declarator, body = self.body, doc = self.doc, overridable = cfunc_type.is_overridable, type = cfunc_type, with_gil = cfunc_type.with_gil, nogil = cfunc_type.nogil, visibility = 'private', api = False, directive_locals = getattr(cfunc, 'directive_locals', {}), directive_returns = returns) def is_cdef_func_compatible(self): """Determines if the function's signature is compatible with a cdef function. This can be used before calling .as_cfunction() to see if that will be successful. """ if self.needs_closure: return False if self.star_arg or self.starstar_arg: return False return True def analyse_declarations(self, env): self.is_classmethod = self.is_staticmethod = False if self.decorators: for decorator in self.decorators: func = decorator.decorator if func.is_name: self.is_classmethod |= func.name == 'classmethod' self.is_staticmethod |= func.name == 'staticmethod' if self.is_classmethod and env.lookup_here('classmethod'): # classmethod() was overridden - not much we can do here ... self.is_classmethod = False if self.is_staticmethod and env.lookup_here('staticmethod'): # staticmethod() was overridden - not much we can do here ... self.is_staticmethod = False if self.name == '__new__' and env.is_py_class_scope: self.is_staticmethod = 1 self.analyse_argument_types(env) if self.name == '<lambda>': self.declare_lambda_function(env) else: self.declare_pyfunction(env) self.analyse_signature(env) self.return_type = self.entry.signature.return_type() # if a signature annotation provides a more specific return object type, use it if self.return_type is py_object_type and self.return_type_annotation: if env.directives['annotation_typing'] and not self.entry.is_special: _, return_type = _analyse_signature_annotation(self.return_type_annotation, env) if return_type and return_type.is_pyobject: self.return_type = return_type self.create_local_scope(env) self.py_wrapper = DefNodeWrapper( self.pos, target=self, name=self.entry.name, args=self.args, star_arg=self.star_arg, starstar_arg=self.starstar_arg, return_type=self.return_type) self.py_wrapper.analyse_declarations(env) def analyse_argument_types(self, env): self.directive_locals = env.directives['locals'] allow_none_for_extension_args = env.directives['allow_none_for_extension_args'] f2s = env.fused_to_specific env.fused_to_specific = None for arg in self.args: if hasattr(arg, 'name'): name_declarator = None else: base_type = arg.base_type.analyse(env) name_declarator, type = \ arg.declarator.analyse(base_type, env) arg.name = name_declarator.name arg.type = type if type.is_fused: self.has_fused_arguments = True self.align_argument_type(env, arg) if name_declarator and name_declarator.cname: error(self.pos, "Python function argument cannot have C name specification") arg.type = arg.type.as_argument_type() arg.hdr_type = None arg.needs_conversion = 0 arg.needs_type_test = 0 arg.is_generic = 1 if arg.type.is_pyobject or 
arg.type.is_buffer or arg.type.is_memoryviewslice: if arg.or_none: arg.accept_none = True elif arg.not_none: arg.accept_none = False elif (arg.type.is_extension_type or arg.type.is_builtin_type or arg.type.is_buffer or arg.type.is_memoryviewslice): if arg.default and arg.default.constant_result is None: # special case: def func(MyType obj = None) arg.accept_none = True else: # default depends on compiler directive arg.accept_none = allow_none_for_extension_args else: # probably just a plain 'object' arg.accept_none = True else: arg.accept_none = True # won't be used, but must be there if arg.not_none: error(arg.pos, "Only Python type arguments can have 'not None'") if arg.or_none: error(arg.pos, "Only Python type arguments can have 'or None'") env.fused_to_specific = f2s def analyse_signature(self, env): if self.entry.is_special: if self.decorators: error(self.pos, "special functions of cdef classes cannot have decorators") self.entry.trivial_signature = len(self.args) == 1 and not (self.star_arg or self.starstar_arg) elif not env.directives['always_allow_keywords'] and not (self.star_arg or self.starstar_arg): # Use the simpler calling signature for zero- and one-argument functions. if self.entry.signature is TypeSlots.pyfunction_signature: if len(self.args) == 0: self.entry.signature = TypeSlots.pyfunction_noargs elif len(self.args) == 1: if self.args[0].default is None and not self.args[0].kw_only: self.entry.signature = TypeSlots.pyfunction_onearg elif self.entry.signature is TypeSlots.pymethod_signature: if len(self.args) == 1: self.entry.signature = TypeSlots.unaryfunc elif len(self.args) == 2: if self.args[1].default is None and not self.args[1].kw_only: self.entry.signature = TypeSlots.ibinaryfunc sig = self.entry.signature nfixed = sig.num_fixed_args() if sig is TypeSlots.pymethod_signature and nfixed == 1 \ and len(self.args) == 0 and self.star_arg: # this is the only case where a diverging number of # arguments is not an error - when we have no explicit # 'self' parameter as in method(*args) sig = self.entry.signature = TypeSlots.pyfunction_signature # self is not 'really' used self.self_in_stararg = 1 nfixed = 0 if self.is_staticmethod and env.is_c_class_scope: nfixed = 0 self.self_in_stararg = True # FIXME: why for staticmethods? 
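# --- Hedged aside (illustrative helper, not compiler API) -----------------
# The signature narrowing earlier in analyse_signature() mirrors CPython's
# calling conventions: zero-argument functions can use a METH_NOARGS-style
# slot and one-argument functions a METH_O-style slot, skipping tuple and
# keyword unpacking entirely. A rough Python model of the selection rule:
def _example_pick_signature(num_args, has_starargs, first_has_default,
                            first_is_kw_only):
    if has_starargs:
        return 'pyfunction_signature'   # generic (*args, **kwargs) entry
    if num_args == 0:
        return 'pyfunction_noargs'      # METH_NOARGS-style
    if num_args == 1 and not first_has_default and not first_is_kw_only:
        return 'pyfunction_onearg'      # METH_O-style
    return 'pyfunction_signature'

assert _example_pick_signature(1, False, False, False) == 'pyfunction_onearg'
# --------------------------------------------------------------------------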
self.entry.signature = sig = copy.copy(sig) sig.fixed_arg_format = "*" sig.is_staticmethod = True sig.has_generic_args = True if ((self.is_classmethod or self.is_staticmethod) and self.has_fused_arguments and env.is_c_class_scope): del self.decorator_indirection.stats[:] for i in range(min(nfixed, len(self.args))): arg = self.args[i] arg.is_generic = 0 if sig.is_self_arg(i) and not self.is_staticmethod: if self.is_classmethod: arg.is_type_arg = 1 arg.hdr_type = arg.type = Builtin.type_type else: arg.is_self_arg = 1 arg.hdr_type = arg.type = env.parent_type arg.needs_conversion = 0 else: arg.hdr_type = sig.fixed_arg_type(i) if not arg.type.same_as(arg.hdr_type): if arg.hdr_type.is_pyobject and arg.type.is_pyobject: arg.needs_type_test = 1 else: arg.needs_conversion = 1 if arg.needs_conversion: arg.hdr_cname = Naming.arg_prefix + arg.name else: arg.hdr_cname = Naming.var_prefix + arg.name if nfixed > len(self.args): self.bad_signature() return elif nfixed < len(self.args): if not sig.has_generic_args: self.bad_signature() for arg in self.args: if arg.is_generic and \ (arg.type.is_extension_type or arg.type.is_builtin_type): arg.needs_type_test = 1 def bad_signature(self): sig = self.entry.signature expected_str = "%d" % sig.num_fixed_args() if sig.has_generic_args: expected_str += " or more" name = self.name if name.startswith("__") and name.endswith("__"): desc = "Special method" else: desc = "Method" error(self.pos, "%s %s has wrong number of arguments " "(%d declared, %s expected)" % ( desc, self.name, len(self.args), expected_str)) def declare_pyfunction(self, env): #print "DefNode.declare_pyfunction:", self.name, "in", env ### name = self.name entry = env.lookup_here(name) if entry: if entry.is_final_cmethod and not env.parent_type.is_final_type: error(self.pos, "Only final types can have final Python (def/cpdef) methods") if (entry.type.is_cfunction and not entry.is_builtin_cmethod and not self.is_wrapper): warning(self.pos, "Overriding cdef method with def method.", 5) entry = env.declare_pyfunction(name, self.pos, allow_redefine=not self.is_wrapper) self.entry = entry prefix = env.next_id(env.scope_prefix) self.entry.pyfunc_cname = Naming.pyfunc_prefix + prefix + name if Options.docstrings: entry.doc = embed_position(self.pos, self.doc) entry.doc_cname = Naming.funcdoc_prefix + prefix + name if entry.is_special: if entry.name in TypeSlots.invisible or not entry.doc or (entry.name in '__getattr__' and env.directives['fast_getattr']): entry.wrapperbase_cname = None else: entry.wrapperbase_cname = Naming.wrapperbase_prefix + prefix + name else: entry.doc = None def declare_lambda_function(self, env): entry = env.declare_lambda_function(self.lambda_name, self.pos) entry.doc = None self.entry = entry self.entry.pyfunc_cname = entry.cname def declare_arguments(self, env): for arg in self.args: if not arg.name: error(arg.pos, "Missing argument name") if arg.needs_conversion: arg.entry = env.declare_var(arg.name, arg.type, arg.pos) if arg.type.is_pyobject: arg.entry.init = "0" else: arg.entry = self.declare_argument(env, arg) arg.entry.is_arg = 1 arg.entry.used = 1 arg.entry.is_self_arg = arg.is_self_arg self.declare_python_arg(env, self.star_arg) self.declare_python_arg(env, self.starstar_arg) def declare_python_arg(self, env, arg): if arg: if env.directives['infer_types'] != False: type = PyrexTypes.unspecified_type else: type = py_object_type entry = env.declare_var(arg.name, type, arg.pos) entry.is_arg = 1 entry.used = 1 entry.init = "0" entry.xdecref_cleanup = 1 arg.entry = entry def 
analyse_expressions(self, env): self.local_scope.directives = env.directives self.analyse_default_values(env) self.analyse_annotations(env) if self.return_type_annotation: self.return_type_annotation = self.return_type_annotation.analyse_types(env) if not self.needs_assignment_synthesis(env) and self.decorators: for decorator in self.decorators[::-1]: decorator.decorator = decorator.decorator.analyse_expressions(env) self.py_wrapper.prepare_argument_coercion(env) return self def needs_assignment_synthesis(self, env, code=None): if self.is_staticmethod: return True if self.is_wrapper or self.specialized_cpdefs or self.entry.is_fused_specialized: return False if self.no_assignment_synthesis: return False # Should enable for module level as well, that will require more testing... if self.entry.is_anonymous: return True if env.is_module_scope: if code is None: return env.directives['binding'] else: return code.globalstate.directives['binding'] return env.is_py_class_scope or env.is_closure_scope def error_value(self): return self.entry.signature.error_value def caller_will_check_exceptions(self): return self.entry.signature.exception_check def generate_function_definitions(self, env, code): if self.defaults_getter: self.defaults_getter.generate_function_definitions(env, code) # Before closure cnames are mangled if self.py_wrapper_required: # func_cname might be modified by @cname self.py_wrapper.func_cname = self.entry.func_cname self.py_wrapper.generate_function_definitions(env, code) FuncDefNode.generate_function_definitions(self, env, code) def generate_function_header(self, code, with_pymethdef, proto_only=0): if proto_only: if self.py_wrapper_required: self.py_wrapper.generate_function_header( code, with_pymethdef, True) return arg_code_list = [] if self.entry.signature.has_dummy_arg: self_arg = 'PyObject *%s' % Naming.self_cname if not self.needs_outer_scope: self_arg = 'CYTHON_UNUSED ' + self_arg arg_code_list.append(self_arg) def arg_decl_code(arg): entry = arg.entry if entry.in_closure: cname = entry.original_cname else: cname = entry.cname decl = entry.type.declaration_code(cname) if not entry.cf_used: decl = 'CYTHON_UNUSED ' + decl return decl for arg in self.args: arg_code_list.append(arg_decl_code(arg)) if self.star_arg: arg_code_list.append(arg_decl_code(self.star_arg)) if self.starstar_arg: arg_code_list.append(arg_decl_code(self.starstar_arg)) arg_code = ', '.join(arg_code_list) dc = self.return_type.declaration_code(self.entry.pyfunc_cname) decls_code = code.globalstate['decls'] preprocessor_guard = self.get_preprocessor_guard() if preprocessor_guard: decls_code.putln(preprocessor_guard) decls_code.putln( "static %s(%s); /* proto */" % (dc, arg_code)) if preprocessor_guard: decls_code.putln("#endif") code.putln("static %s(%s) {" % (dc, arg_code)) def generate_argument_declarations(self, env, code): pass def generate_keyword_list(self, code): pass def generate_argument_parsing_code(self, env, code): # Move arguments into closure if required def put_into_closure(entry): if entry.in_closure: code.putln('%s = %s;' % (entry.cname, entry.original_cname)) code.put_var_incref(entry) code.put_var_giveref(entry) for arg in self.args: put_into_closure(arg.entry) for arg in self.star_arg, self.starstar_arg: if arg: put_into_closure(arg.entry) def generate_argument_type_tests(self, code): pass class DefNodeWrapper(FuncDefNode): # DefNode python wrapper code generator defnode = None target = None # Target DefNode def __init__(self, *args, **kwargs): FuncDefNode.__init__(self, *args, 
**kwargs) self.num_kwonly_args = self.target.num_kwonly_args self.num_required_kw_args = self.target.num_required_kw_args self.num_required_args = self.target.num_required_args self.self_in_stararg = self.target.self_in_stararg self.signature = None def analyse_declarations(self, env): target_entry = self.target.entry name = self.name prefix = env.next_id(env.scope_prefix) target_entry.func_cname = Naming.pywrap_prefix + prefix + name target_entry.pymethdef_cname = Naming.pymethdef_prefix + prefix + name self.signature = target_entry.signature def prepare_argument_coercion(self, env): # This is only really required for Cython utility code at this time, # everything else can be done during code generation. But we expand # all utility code here, simply because we cannot easily distinguish # different code types. for arg in self.args: if not arg.type.is_pyobject: if not arg.type.create_from_py_utility_code(env): pass # will fail later elif arg.hdr_type and not arg.hdr_type.is_pyobject: if not arg.hdr_type.create_to_py_utility_code(env): pass # will fail later if self.starstar_arg and not self.starstar_arg.entry.cf_used: # we will set the kwargs argument to NULL instead of a new dict # and must therefore correct the control flow state entry = self.starstar_arg.entry entry.xdecref_cleanup = 1 for ass in entry.cf_assignments: if not ass.is_arg and ass.lhs.is_name: ass.lhs.cf_maybe_null = True def signature_has_nongeneric_args(self): argcount = len(self.args) if argcount == 0 or ( argcount == 1 and (self.args[0].is_self_arg or self.args[0].is_type_arg)): return 0 return 1 def signature_has_generic_args(self): return self.signature.has_generic_args def generate_function_body(self, code): args = [] if self.signature.has_dummy_arg: args.append(Naming.self_cname) for arg in self.args: if arg.hdr_type and not (arg.type.is_memoryviewslice or arg.type.is_struct or arg.type.is_complex): args.append(arg.type.cast_code(arg.entry.cname)) else: args.append(arg.entry.cname) if self.star_arg: args.append(self.star_arg.entry.cname) if self.starstar_arg: args.append(self.starstar_arg.entry.cname) args = ', '.join(args) if not self.return_type.is_void: code.put('%s = ' % Naming.retval_cname) code.putln('%s(%s);' % ( self.target.entry.pyfunc_cname, args)) def generate_function_definitions(self, env, code): lenv = self.target.local_scope # Generate C code for header and body of function code.mark_pos(self.pos) code.putln("") code.putln("/* Python wrapper */") preprocessor_guard = self.target.get_preprocessor_guard() if preprocessor_guard: code.putln(preprocessor_guard) code.enter_cfunc_scope() code.return_from_error_cleanup_label = code.new_label() with_pymethdef = (self.target.needs_assignment_synthesis(env, code) or self.target.pymethdef_required) self.generate_function_header(code, with_pymethdef) self.generate_argument_declarations(lenv, code) tempvardecl_code = code.insertion_point() if self.return_type.is_pyobject: retval_init = ' = 0' else: retval_init = '' if not self.return_type.is_void: code.putln('%s%s;' % ( self.return_type.declaration_code(Naming.retval_cname), retval_init)) code.put_declare_refcount_context() code.put_setup_refcount_context('%s (wrapper)' % self.name) self.generate_argument_parsing_code(lenv, code) self.generate_argument_type_tests(code) self.generate_function_body(code) # ----- Go back and insert temp variable declarations tempvardecl_code.put_temp_declarations(code.funcstate) code.mark_pos(self.pos) code.putln("") code.putln("/* function exit code */") # ----- Error cleanup if 
code.error_label in code.labels_used: code.put_goto(code.return_label) code.put_label(code.error_label) for cname, type in code.funcstate.all_managed_temps(): code.put_xdecref(cname, type) err_val = self.error_value() if err_val is not None: code.putln("%s = %s;" % (Naming.retval_cname, err_val)) # ----- Non-error return cleanup code.put_label(code.return_label) for entry in lenv.var_entries: if entry.is_arg and entry.type.is_pyobject: code.put_var_decref(entry) code.put_finish_refcount_context() if not self.return_type.is_void: code.putln("return %s;" % Naming.retval_cname) code.putln('}') code.exit_cfunc_scope() if preprocessor_guard: code.putln("#endif /*!(%s)*/" % preprocessor_guard) def generate_function_header(self, code, with_pymethdef, proto_only=0): arg_code_list = [] sig = self.signature if sig.has_dummy_arg or self.self_in_stararg: arg_code = "PyObject *%s" % Naming.self_cname if not sig.has_dummy_arg: arg_code = 'CYTHON_UNUSED ' + arg_code arg_code_list.append(arg_code) for arg in self.args: if not arg.is_generic: if arg.is_self_arg or arg.is_type_arg: arg_code_list.append("PyObject *%s" % arg.hdr_cname) else: arg_code_list.append( arg.hdr_type.declaration_code(arg.hdr_cname)) entry = self.target.entry if not entry.is_special and sig.method_flags() == [TypeSlots.method_noargs]: arg_code_list.append("CYTHON_UNUSED PyObject *unused") if entry.scope.is_c_class_scope and entry.name == "__ipow__": arg_code_list.append("CYTHON_UNUSED PyObject *unused") if sig.has_generic_args: arg_code_list.append( "PyObject *%s, PyObject *%s" % (Naming.args_cname, Naming.kwds_cname)) arg_code = ", ".join(arg_code_list) # Prevent warning: unused function '__pyx_pw_5numpy_7ndarray_1__getbuffer__' mf = "" if (entry.name in ("__getbuffer__", "__releasebuffer__") and entry.scope.is_c_class_scope): mf = "CYTHON_UNUSED " with_pymethdef = False dc = self.return_type.declaration_code(entry.func_cname) header = "static %s%s(%s)" % (mf, dc, arg_code) code.putln("%s; /*proto*/" % header) if proto_only: if self.target.fused_py_func: # If we are the specialized version of the cpdef, we still # want the prototype for the "fused cpdef", in case we're # checking to see if our method was overridden in Python self.target.fused_py_func.generate_function_header( code, with_pymethdef, proto_only=True) return if (Options.docstrings and entry.doc and not self.target.fused_py_func and not entry.scope.is_property_scope and (not entry.is_special or entry.wrapperbase_cname)): # h_code = code.globalstate['h_code'] docstr = entry.doc if docstr.is_unicode: docstr = docstr.utf8encode() code.putln( 'static char %s[] = "%s";' % ( entry.doc_cname, split_string_literal(escape_byte_string(docstr)))) if entry.is_special: code.putln('#if CYTHON_COMPILING_IN_CPYTHON') code.putln( "struct wrapperbase %s;" % entry.wrapperbase_cname) code.putln('#endif') if with_pymethdef or self.target.fused_py_func: code.put( "static PyMethodDef %s = " % entry.pymethdef_cname) code.put_pymethoddef(self.target.entry, ";", allow_skip=False) code.putln("%s {" % header) def generate_argument_declarations(self, env, code): for arg in self.args: if arg.is_generic: if arg.needs_conversion: code.putln("PyObject *%s = 0;" % arg.hdr_cname) else: code.put_var_declaration(arg.entry) for entry in env.var_entries: if entry.is_arg: code.put_var_declaration(entry) def generate_argument_parsing_code(self, env, code): # Generate fast equivalent of PyArg_ParseTuple call for # generic arguments, if any, including args/kwargs old_error_label = code.new_error_label() 
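# --- Hedged analogue (plain Python, illustrative only) --------------------
# The surrounding generate_argument_parsing_code() swaps in a local error
# label so that argument-unpacking failures run the * / ** cleanup code
# before propagating; in exception terms the control flow is roughly:
def _example_unpack_with_cleanup(unpack, cleanup):
    try:
        return unpack()
    except Exception:
        cleanup()   # xdecref star/starstar temporaries, add traceback
        raise

_cleanup_ran = []
try:
    _example_unpack_with_cleanup(lambda: 1 // 0,
                                 lambda: _cleanup_ran.append(True))
except ZeroDivisionError:
    pass
assert _cleanup_ran == [True]
# --------------------------------------------------------------------------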
our_error_label = code.error_label end_label = code.new_label("argument_unpacking_done") has_kwonly_args = self.num_kwonly_args > 0 has_star_or_kw_args = self.star_arg is not None \ or self.starstar_arg is not None or has_kwonly_args for arg in self.args: if not arg.type.is_pyobject: if not arg.type.create_from_py_utility_code(env): pass # will fail later if not self.signature_has_generic_args(): if has_star_or_kw_args: error(self.pos, "This method cannot have * or keyword arguments") self.generate_argument_conversion_code(code) elif not self.signature_has_nongeneric_args(): # func(*args) or func(**kw) or func(*args, **kw) self.generate_stararg_copy_code(code) else: self.generate_tuple_and_keyword_parsing_code(self.args, end_label, code) code.error_label = old_error_label if code.label_used(our_error_label): if not code.label_used(end_label): code.put_goto(end_label) code.put_label(our_error_label) if has_star_or_kw_args: self.generate_arg_decref(self.star_arg, code) if self.starstar_arg: if self.starstar_arg.entry.xdecref_cleanup: code.put_var_xdecref_clear(self.starstar_arg.entry) else: code.put_var_decref_clear(self.starstar_arg.entry) code.put_add_traceback(self.target.entry.qualified_name) code.put_finish_refcount_context() code.putln("return %s;" % self.error_value()) if code.label_used(end_label): code.put_label(end_label) def generate_arg_xdecref(self, arg, code): if arg: code.put_var_xdecref_clear(arg.entry) def generate_arg_decref(self, arg, code): if arg: code.put_var_decref_clear(arg.entry) def generate_stararg_copy_code(self, code): if not self.star_arg: code.globalstate.use_utility_code( UtilityCode.load_cached("RaiseArgTupleInvalid", "FunctionArguments.c")) code.putln("if (unlikely(PyTuple_GET_SIZE(%s) > 0)) {" % Naming.args_cname) code.put('__Pyx_RaiseArgtupleInvalid("%s", 1, 0, 0, PyTuple_GET_SIZE(%s)); return %s;' % ( self.name, Naming.args_cname, self.error_value())) code.putln("}") if self.starstar_arg: if self.star_arg or not self.starstar_arg.entry.cf_used: kwarg_check = "unlikely(%s)" % Naming.kwds_cname else: kwarg_check = "%s" % Naming.kwds_cname else: kwarg_check = "unlikely(%s) && unlikely(PyDict_Size(%s) > 0)" % ( Naming.kwds_cname, Naming.kwds_cname) code.globalstate.use_utility_code( UtilityCode.load_cached("KeywordStringCheck", "FunctionArguments.c")) code.putln( "if (%s && unlikely(!__Pyx_CheckKeywordStrings(%s, \"%s\", %d))) return %s;" % ( kwarg_check, Naming.kwds_cname, self.name, bool(self.starstar_arg), self.error_value())) if self.starstar_arg and self.starstar_arg.entry.cf_used: if all(ref.node.allow_null for ref in self.starstar_arg.entry.cf_references): code.putln("if (%s) {" % kwarg_check) code.putln("%s = PyDict_Copy(%s); if (unlikely(!%s)) return %s;" % ( self.starstar_arg.entry.cname, Naming.kwds_cname, self.starstar_arg.entry.cname, self.error_value())) code.put_gotref(self.starstar_arg.entry.cname) code.putln("} else {") code.putln("%s = NULL;" % (self.starstar_arg.entry.cname,)) code.putln("}") self.starstar_arg.entry.xdecref_cleanup = 1 else: code.put("%s = (%s) ? 
PyDict_Copy(%s) : PyDict_New(); " % ( self.starstar_arg.entry.cname, Naming.kwds_cname, Naming.kwds_cname)) code.putln("if (unlikely(!%s)) return %s;" % ( self.starstar_arg.entry.cname, self.error_value())) self.starstar_arg.entry.xdecref_cleanup = 0 code.put_gotref(self.starstar_arg.entry.cname) if self.self_in_stararg and not self.target.is_staticmethod: # need to create a new tuple with 'self' inserted as first item code.put("%s = PyTuple_New(PyTuple_GET_SIZE(%s)+1); if (unlikely(!%s)) " % ( self.star_arg.entry.cname, Naming.args_cname, self.star_arg.entry.cname)) if self.starstar_arg and self.starstar_arg.entry.cf_used: code.putln("{") code.put_xdecref_clear(self.starstar_arg.entry.cname, py_object_type) code.putln("return %s;" % self.error_value()) code.putln("}") else: code.putln("return %s;" % self.error_value()) code.put_gotref(self.star_arg.entry.cname) code.put_incref(Naming.self_cname, py_object_type) code.put_giveref(Naming.self_cname) code.putln("PyTuple_SET_ITEM(%s, 0, %s);" % ( self.star_arg.entry.cname, Naming.self_cname)) temp = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False) code.putln("for (%s=0; %s < PyTuple_GET_SIZE(%s); %s++) {" % ( temp, temp, Naming.args_cname, temp)) code.putln("PyObject* item = PyTuple_GET_ITEM(%s, %s);" % ( Naming.args_cname, temp)) code.put_incref("item", py_object_type) code.put_giveref("item") code.putln("PyTuple_SET_ITEM(%s, %s+1, item);" % ( self.star_arg.entry.cname, temp)) code.putln("}") code.funcstate.release_temp(temp) self.star_arg.entry.xdecref_cleanup = 0 elif self.star_arg: code.put_incref(Naming.args_cname, py_object_type) code.putln("%s = %s;" % ( self.star_arg.entry.cname, Naming.args_cname)) self.star_arg.entry.xdecref_cleanup = 0 def generate_tuple_and_keyword_parsing_code(self, args, success_label, code): argtuple_error_label = code.new_label("argtuple_error") positional_args = [] required_kw_only_args = [] optional_kw_only_args = [] for arg in args: if arg.is_generic: if arg.default: if not arg.is_self_arg and not arg.is_type_arg: if arg.kw_only: optional_kw_only_args.append(arg) else: positional_args.append(arg) elif arg.kw_only: required_kw_only_args.append(arg) elif not arg.is_self_arg and not arg.is_type_arg: positional_args.append(arg) # sort required kw-only args before optional ones to avoid special # cases in the unpacking code kw_only_args = required_kw_only_args + optional_kw_only_args min_positional_args = self.num_required_args - self.num_required_kw_args if len(args) > 0 and (args[0].is_self_arg or args[0].is_type_arg): min_positional_args -= 1 max_positional_args = len(positional_args) has_fixed_positional_count = not self.star_arg and \ min_positional_args == max_positional_args has_kw_only_args = bool(kw_only_args) if self.num_required_kw_args: code.globalstate.use_utility_code( UtilityCode.load_cached("RaiseKeywordRequired", "FunctionArguments.c")) if self.starstar_arg or self.star_arg: self.generate_stararg_init_code(max_positional_args, code) code.putln('{') all_args = tuple(positional_args) + tuple(kw_only_args) code.putln("static PyObject **%s[] = {%s,0};" % ( Naming.pykwdlist_cname, ','.join([ '&%s' % code.intern_identifier(arg.name) for arg in all_args ]))) # Before being converted and assigned to the target variables, # borrowed references to all unpacked argument values are # collected into a local PyObject* array called "values", # regardless if they were taken from default arguments, # positional arguments or keyword arguments. 
Note that # C-typed default arguments are handled at conversion time, # so their array value is NULL in the end if no argument # was passed for them. self.generate_argument_values_setup_code(all_args, code) # --- optimised code when we receive keyword arguments code.putln("if (%s(%s)) {" % ( (self.num_required_kw_args > 0) and "likely" or "unlikely", Naming.kwds_cname)) self.generate_keyword_unpacking_code( min_positional_args, max_positional_args, has_fixed_positional_count, has_kw_only_args, all_args, argtuple_error_label, code) # --- optimised code when we do not receive any keyword arguments if (self.num_required_kw_args and min_positional_args > 0) or min_positional_args == max_positional_args: # Python raises arg tuple related errors first, so we must # check the length here if min_positional_args == max_positional_args and not self.star_arg: compare = '!=' else: compare = '<' code.putln('} else if (PyTuple_GET_SIZE(%s) %s %d) {' % ( Naming.args_cname, compare, min_positional_args)) code.put_goto(argtuple_error_label) if self.num_required_kw_args: # pure error case: keywords required but not passed if max_positional_args > min_positional_args and not self.star_arg: code.putln('} else if (PyTuple_GET_SIZE(%s) > %d) {' % ( Naming.args_cname, max_positional_args)) code.put_goto(argtuple_error_label) code.putln('} else {') for i, arg in enumerate(kw_only_args): if not arg.default: pystring_cname = code.intern_identifier(arg.name) # required keyword-only argument missing code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' % ( self.name, pystring_cname)) code.putln(code.error_goto(self.pos)) break else: # optimised tuple unpacking code code.putln('} else {') if min_positional_args == max_positional_args: # parse the exact number of positional arguments from # the args tuple for i, arg in enumerate(positional_args): code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (i, Naming.args_cname, i)) else: # parse the positional arguments from the variable length # args tuple and reject illegal argument tuple sizes code.putln('switch (PyTuple_GET_SIZE(%s)) {' % Naming.args_cname) if self.star_arg: code.putln('default:') reversed_args = list(enumerate(positional_args))[::-1] for i, arg in reversed_args: if i >= min_positional_args-1: code.put('case %2d: ' % (i+1)) code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (i, Naming.args_cname, i)) if min_positional_args == 0: code.put('case 0: ') code.putln('break;') if self.star_arg: if min_positional_args: for i in range(min_positional_args-1, -1, -1): code.putln('case %2d:' % i) code.put_goto(argtuple_error_label) else: code.put('default: ') code.put_goto(argtuple_error_label) code.putln('}') code.putln('}') # end of the conditional unpacking blocks # Convert arg values to their final type and assign them. # Also inject non-Python default arguments, which cannot # live in the values[] array.
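        # As a rough sketch (illustrative names, not verbatim generated
        # output), for "def f(a, int b=1)" this final conversion/assignment
        # step renders along the lines of:
        #
        #     __pyx_v_a = values[0];
        #     if (values[1]) {
        #         __pyx_v_b = __Pyx_PyInt_As_int(values[1]);
        #     } else {
        #         __pyx_v_b = ((int)1);   /* C-typed default, injected here */
        #     }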
for i, arg in enumerate(all_args): self.generate_arg_assignment(arg, "values[%d]" % i, code) code.putln('}') # end of the whole argument unpacking block if code.label_used(argtuple_error_label): code.put_goto(success_label) code.put_label(argtuple_error_label) code.globalstate.use_utility_code( UtilityCode.load_cached("RaiseArgTupleInvalid", "FunctionArguments.c")) code.put('__Pyx_RaiseArgtupleInvalid("%s", %d, %d, %d, PyTuple_GET_SIZE(%s)); ' % ( self.name, has_fixed_positional_count, min_positional_args, max_positional_args, Naming.args_cname)) code.putln(code.error_goto(self.pos)) def generate_arg_assignment(self, arg, item, code): if arg.type.is_pyobject: # Python default arguments were already stored in 'item' at the very beginning if arg.is_generic: item = PyrexTypes.typecast(arg.type, PyrexTypes.py_object_type, item) entry = arg.entry code.putln("%s = %s;" % (entry.cname, item)) else: func = arg.type.from_py_function if func: if arg.default: # C-typed default arguments must be handled here code.putln('if (%s) {' % item) rhs = "%s(%s)" % (func, item) if arg.type.is_enum: rhs = arg.type.cast_code(rhs) code.putln("%s = %s; %s" % ( arg.entry.cname, rhs, code.error_goto_if(arg.type.error_condition(arg.entry.cname), arg.pos))) if arg.default: code.putln('} else {') code.putln( "%s = %s;" % ( arg.entry.cname, arg.calculate_default_value_code(code))) if arg.type.is_memoryviewslice: code.put_incref_memoryviewslice(arg.entry.cname, have_gil=True) code.putln('}') else: error(arg.pos, "Cannot convert Python object argument to type '%s'" % arg.type) def generate_stararg_init_code(self, max_positional_args, code): if self.starstar_arg: self.starstar_arg.entry.xdecref_cleanup = 0 code.putln('%s = PyDict_New(); if (unlikely(!%s)) return %s;' % ( self.starstar_arg.entry.cname, self.starstar_arg.entry.cname, self.error_value())) code.put_gotref(self.starstar_arg.entry.cname) if self.star_arg: self.star_arg.entry.xdecref_cleanup = 0 code.putln('if (PyTuple_GET_SIZE(%s) > %d) {' % ( Naming.args_cname, max_positional_args)) code.putln('%s = PyTuple_GetSlice(%s, %d, PyTuple_GET_SIZE(%s));' % ( self.star_arg.entry.cname, Naming.args_cname, max_positional_args, Naming.args_cname)) code.putln("if (unlikely(!%s)) {" % self.star_arg.entry.cname) if self.starstar_arg: code.put_decref_clear(self.starstar_arg.entry.cname, py_object_type) code.put_finish_refcount_context() code.putln('return %s;' % self.error_value()) code.putln('}') code.put_gotref(self.star_arg.entry.cname) code.putln('} else {') code.put("%s = %s; " % (self.star_arg.entry.cname, Naming.empty_tuple)) code.put_incref(Naming.empty_tuple, py_object_type) code.putln('}') def generate_argument_values_setup_code(self, args, code): max_args = len(args) # the 'values' array collects borrowed references to arguments # before doing any type coercion etc. 
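        # For three generic arguments, for example, the line emitted just
        # below expands to:
        #
        #     PyObject* values[3] = {0,0,0};
        #
        # Python-level defaults are then pre-filled as borrowed references so
        # that received arguments can simply overwrite their slots.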
code.putln("PyObject* values[%d] = {%s};" % ( max_args, ','.join('0'*max_args))) if self.target.defaults_struct: code.putln('%s *%s = __Pyx_CyFunction_Defaults(%s, %s);' % ( self.target.defaults_struct, Naming.dynamic_args_cname, self.target.defaults_struct, Naming.self_cname)) # assign borrowed Python default values to the values array, # so that they can be overwritten by received arguments below for i, arg in enumerate(args): if arg.default and arg.type.is_pyobject: default_value = arg.calculate_default_value_code(code) code.putln('values[%d] = %s;' % (i, arg.type.as_pyobject(default_value))) def generate_keyword_unpacking_code(self, min_positional_args, max_positional_args, has_fixed_positional_count, has_kw_only_args, all_args, argtuple_error_label, code): code.putln('Py_ssize_t kw_args;') code.putln('const Py_ssize_t pos_args = PyTuple_GET_SIZE(%s);' % Naming.args_cname) # copy the values from the args tuple and check that it's not too long code.putln('switch (pos_args) {') if self.star_arg: code.putln('default:') for i in range(max_positional_args-1, -1, -1): code.put('case %2d: ' % (i+1)) code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % ( i, Naming.args_cname, i)) code.putln('case 0: break;') if not self.star_arg: code.put('default: ') # more arguments than allowed code.put_goto(argtuple_error_label) code.putln('}') # The code above is very often (but not always) the same as # the optimised non-kwargs tuple unpacking code, so we keep # the code block above at the very top, before the following # 'external' PyDict_Size() call, to make it easy for the C # compiler to merge the two separate tuple unpacking # implementations into one when they turn out to be identical. # If we received kwargs, fill up the positional/required # arguments with values from the kw dict code.putln('kw_args = PyDict_Size(%s);' % Naming.kwds_cname) if self.num_required_args or max_positional_args > 0: last_required_arg = -1 for i, arg in enumerate(all_args): if not arg.default: last_required_arg = i if last_required_arg < max_positional_args: last_required_arg = max_positional_args-1 if max_positional_args > 0: code.putln('switch (pos_args) {') for i, arg in enumerate(all_args[:last_required_arg+1]): if max_positional_args > 0 and i <= max_positional_args: if self.star_arg and i == max_positional_args: code.putln('default:') else: code.putln('case %2d:' % i) pystring_cname = code.intern_identifier(arg.name) if arg.default: if arg.kw_only: # optional kw-only args are handled separately below continue code.putln('if (kw_args > 0) {') # don't overwrite default argument code.putln('PyObject* value = PyDict_GetItem(%s, %s);' % ( Naming.kwds_cname, pystring_cname)) code.putln('if (value) { values[%d] = value; kw_args--; }' % i) code.putln('}') else: code.putln('if (likely((values[%d] = PyDict_GetItem(%s, %s)) != 0)) kw_args--;' % ( i, Naming.kwds_cname, pystring_cname)) if i < min_positional_args: if i == 0: # special case: we know arg 0 is missing code.put('else ') code.put_goto(argtuple_error_label) else: # print the correct number of values (args or # kwargs) that were passed into positional # arguments up to this point code.putln('else {') code.globalstate.use_utility_code( UtilityCode.load_cached("RaiseArgTupleInvalid", "FunctionArguments.c")) code.put('__Pyx_RaiseArgtupleInvalid("%s", %d, %d, %d, %d); ' % ( self.name, has_fixed_positional_count, min_positional_args, max_positional_args, i)) code.putln(code.error_goto(self.pos)) code.putln('}') elif arg.kw_only: code.putln('else {') 
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' %( self.name, pystring_cname)) code.putln(code.error_goto(self.pos)) code.putln('}') if max_positional_args > 0: code.putln('}') if has_kw_only_args: # unpack optional keyword-only arguments separately because # checking for interned strings in a dict is faster than iterating self.generate_optional_kwonly_args_unpacking_code(all_args, code) code.putln('if (unlikely(kw_args > 0)) {') # non-positional/-required kw args left in dict: default args, # kw-only args, **kwargs or error # # This is sort of a catch-all: except for checking required # arguments, this will always do the right thing for unpacking # keyword arguments, so that we can concentrate on optimising # common cases above. if max_positional_args == 0: pos_arg_count = "0" elif self.star_arg: code.putln("const Py_ssize_t used_pos_args = (pos_args < %d) ? pos_args : %d;" % ( max_positional_args, max_positional_args)) pos_arg_count = "used_pos_args" else: pos_arg_count = "pos_args" code.globalstate.use_utility_code( UtilityCode.load_cached("ParseKeywords", "FunctionArguments.c")) code.putln( 'if (unlikely(__Pyx_ParseOptionalKeywords(%s, %s, %s, values, %s, "%s") < 0)) %s' % ( Naming.kwds_cname, Naming.pykwdlist_cname, self.starstar_arg and self.starstar_arg.entry.cname or '0', pos_arg_count, self.name, code.error_goto(self.pos))) code.putln('}') def generate_optional_kwonly_args_unpacking_code(self, all_args, code): optional_args = [] first_optional_arg = -1 for i, arg in enumerate(all_args): if not arg.kw_only or not arg.default: continue if not optional_args: first_optional_arg = i optional_args.append(arg.name) if optional_args: if len(optional_args) > 1: # if we receive more than the named kwargs, we either have **kwargs # (in which case we must iterate anyway) or it's an error (which we # also handle during iteration) => skip this part if there are more code.putln('if (kw_args > 0 && %s(kw_args <= %d)) {' % ( not self.starstar_arg and 'likely' or '', len(optional_args))) code.putln('Py_ssize_t index;') # not unrolling the loop here reduces the C code overhead code.putln('for (index = %d; index < %d && kw_args > 0; index++) {' % ( first_optional_arg, first_optional_arg + len(optional_args))) else: code.putln('if (kw_args == 1) {') code.putln('const Py_ssize_t index = %d;' % first_optional_arg) code.putln('PyObject* value = PyDict_GetItem(%s, *%s[index]);' % ( Naming.kwds_cname, Naming.pykwdlist_cname)) code.putln('if (value) { values[index] = value; kw_args--; }') if len(optional_args) > 1: code.putln('}') code.putln('}') def generate_argument_conversion_code(self, code): # Generate code to convert arguments from signature type to # declared type, if needed. Also copies signature arguments # into closure fields. for arg in self.args: if arg.needs_conversion: self.generate_arg_conversion(arg, code) def generate_arg_conversion(self, arg, code): # Generate conversion code for one argument. 
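        # A sketch only (illustrative names): an argument declared "int b"
        # but received as PyObject* from the signature is converted roughly as
        #
        #     __pyx_v_b = __Pyx_PyInt_As_int(__pyx_arg_b);
        #     if ((__pyx_v_b == (int)-1) && PyErr_Occurred()) goto __pyx_L1_error;
        #
        # while the reverse direction boxes the C value via its to_py function.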
old_type = arg.hdr_type new_type = arg.type if old_type.is_pyobject: if arg.default: code.putln("if (%s) {" % arg.hdr_cname) else: code.putln("assert(%s); {" % arg.hdr_cname) self.generate_arg_conversion_from_pyobject(arg, code) code.putln("}") elif new_type.is_pyobject: self.generate_arg_conversion_to_pyobject(arg, code) else: if new_type.assignable_from(old_type): code.putln( "%s = %s;" % (arg.entry.cname, arg.hdr_cname)) else: error(arg.pos, "Cannot convert 1 argument from '%s' to '%s'" % (old_type, new_type)) def generate_arg_conversion_from_pyobject(self, arg, code): new_type = arg.type func = new_type.from_py_function # copied from CoerceFromPyTypeNode if func: lhs = arg.entry.cname rhs = "%s(%s)" % (func, arg.hdr_cname) if new_type.is_enum: rhs = PyrexTypes.typecast(new_type, PyrexTypes.c_long_type, rhs) code.putln("%s = %s; %s" % ( lhs, rhs, code.error_goto_if(new_type.error_condition(arg.entry.cname), arg.pos))) else: error(arg.pos, "Cannot convert Python object argument to type '%s'" % new_type) def generate_arg_conversion_to_pyobject(self, arg, code): old_type = arg.hdr_type func = old_type.to_py_function if func: code.putln("%s = %s(%s); %s" % ( arg.entry.cname, func, arg.hdr_cname, code.error_goto_if_null(arg.entry.cname, arg.pos))) code.put_var_gotref(arg.entry) else: error(arg.pos, "Cannot convert argument of type '%s' to Python object" % old_type) def generate_argument_type_tests(self, code): # Generate type tests for args whose signature # type is PyObject * and whose declared type is # a subtype thereof. for arg in self.args: if arg.needs_type_test: self.generate_arg_type_test(arg, code) elif not arg.accept_none and (arg.type.is_pyobject or arg.type.is_buffer or arg.type.is_memoryviewslice): self.generate_arg_none_check(arg, code) def error_value(self): return self.signature.error_value class GeneratorDefNode(DefNode): # Generator function node that creates a new generator instance when called. 
# # gbody GeneratorBodyDefNode the function implementing the generator # is_generator = True is_coroutine = False needs_closure = True child_attrs = DefNode.child_attrs + ["gbody"] def __init__(self, pos, **kwargs): # XXX: doesn't actually need a body kwargs['body'] = StatListNode(pos, stats=[], is_terminator=True) super(GeneratorDefNode, self).__init__(pos, **kwargs) def analyse_declarations(self, env): super(GeneratorDefNode, self).analyse_declarations(env) self.gbody.local_scope = self.local_scope self.gbody.analyse_declarations(env) def generate_function_body(self, env, code): body_cname = self.gbody.entry.func_cname name = code.intern_identifier(self.name) qualname = code.intern_identifier(self.qualname) code.putln('{') code.putln('__pyx_CoroutineObject *gen = __Pyx_%s_New(' '(__pyx_coroutine_body_t) %s, (PyObject *) %s, %s, %s); %s' % ( 'Coroutine' if self.is_coroutine else 'Generator', body_cname, Naming.cur_scope_cname, name, qualname, code.error_goto_if_null('gen', self.pos))) code.put_decref(Naming.cur_scope_cname, py_object_type) if self.requires_classobj: classobj_cname = 'gen->classobj' code.putln('%s = __Pyx_CyFunction_GetClassObj(%s);' % ( classobj_cname, Naming.self_cname)) code.put_incref(classobj_cname, py_object_type) code.put_giveref(classobj_cname) code.put_finish_refcount_context() code.putln('return (PyObject *) gen;') code.putln('}') def generate_function_definitions(self, env, code): env.use_utility_code(UtilityCode.load_cached( 'Coroutine' if self.is_coroutine else 'Generator', "Coroutine.c")) self.gbody.generate_function_header(code, proto=True) super(GeneratorDefNode, self).generate_function_definitions(env, code) self.gbody.generate_function_definitions(env, code) class AsyncDefNode(GeneratorDefNode): is_coroutine = True class GeneratorBodyDefNode(DefNode): # Main code body of a generator implemented as a DefNode. 
# is_generator_body = True is_inlined = False inlined_comprehension_type = None # container type for inlined comprehensions def __init__(self, pos=None, name=None, body=None): super(GeneratorBodyDefNode, self).__init__( pos=pos, body=body, name=name, doc=None, args=[], star_arg=None, starstar_arg=None) def declare_generator_body(self, env): prefix = env.next_id(env.scope_prefix) name = env.next_id('generator') cname = Naming.genbody_prefix + prefix + name entry = env.declare_var(None, py_object_type, self.pos, cname=cname, visibility='private') entry.func_cname = cname entry.qualified_name = EncodedString(self.name) self.entry = entry def analyse_declarations(self, env): self.analyse_argument_types(env) self.declare_generator_body(env) def generate_function_header(self, code, proto=False): header = "static PyObject *%s(__pyx_CoroutineObject *%s, PyObject *%s)" % ( self.entry.func_cname, Naming.generator_cname, Naming.sent_value_cname) if proto: code.putln('%s; /* proto */' % header) else: code.putln('%s /* generator body */\n{' % header) def generate_function_definitions(self, env, code): lenv = self.local_scope # Generate closure function definitions self.body.generate_function_definitions(lenv, code) # Generate C code for header and body of function code.enter_cfunc_scope() code.return_from_error_cleanup_label = code.new_label() # ----- Top-level constants used by this function code.mark_pos(self.pos) self.generate_cached_builtins_decls(lenv, code) # ----- Function header code.putln("") self.generate_function_header(code) closure_init_code = code.insertion_point() # ----- Local variables code.putln("PyObject *%s = NULL;" % Naming.retval_cname) tempvardecl_code = code.insertion_point() code.put_declare_refcount_context() code.put_setup_refcount_context(self.entry.name) # ----- Resume switch point. 
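        # The resume code filled in near the end of this function dispatches
        # on the generator's stored label; it renders roughly as (label and
        # variable names illustrative):
        #
        #     switch (__pyx_generator->resume_label) {
        #         case 0: goto __pyx_L_first_run;
        #         case 1: goto __pyx_L_yield_1;   /* one case per yield */
        #         default: return NULL;           /* CPython raises the error */
        #     }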
code.funcstate.init_closure_temps(lenv.scope_class.type.scope) resume_code = code.insertion_point() first_run_label = code.new_label('first_run') code.use_label(first_run_label) code.put_label(first_run_label) code.putln('%s' % (code.error_goto_if_null(Naming.sent_value_cname, self.pos))) # ----- prepare target container for inlined comprehension if self.is_inlined and self.inlined_comprehension_type is not None: target_type = self.inlined_comprehension_type if target_type is Builtin.list_type: comp_init = 'PyList_New(0)' elif target_type is Builtin.set_type: comp_init = 'PySet_New(NULL)' elif target_type is Builtin.dict_type: comp_init = 'PyDict_New()' else: raise InternalError( "invalid type of inlined comprehension: %s" % target_type) code.putln("%s = %s; %s" % ( Naming.retval_cname, comp_init, code.error_goto_if_null(Naming.retval_cname, self.pos))) code.put_gotref(Naming.retval_cname) # ----- Function body self.generate_function_body(env, code) # ----- Closure initialization if lenv.scope_class.type.scope.entries: closure_init_code.putln('%s = %s;' % ( lenv.scope_class.type.declaration_code(Naming.cur_scope_cname), lenv.scope_class.type.cast_code('%s->closure' % Naming.generator_cname))) code.mark_pos(self.pos) code.putln("") code.putln("/* function exit code */") # on normal generator termination, we do not take the exception propagation # path: no traceback info is required and not creating it is much faster if not self.is_inlined and not self.body.is_terminator: code.putln('PyErr_SetNone(PyExc_StopIteration);') # ----- Error cleanup if code.error_label in code.labels_used: if not self.body.is_terminator: code.put_goto(code.return_label) code.put_label(code.error_label) if self.is_inlined and self.inlined_comprehension_type is not None: code.put_xdecref_clear(Naming.retval_cname, py_object_type) if Future.generator_stop in env.global_scope().context.future_directives: # PEP 479: turn accidental StopIteration exceptions into a RuntimeError code.globalstate.use_utility_code(UtilityCode.load_cached("pep479", "Coroutine.c")) code.putln("if (unlikely(PyErr_ExceptionMatches(PyExc_StopIteration))) " "__Pyx_Generator_Replace_StopIteration();") for cname, type in code.funcstate.all_managed_temps(): code.put_xdecref(cname, type) code.put_add_traceback(self.entry.qualified_name) # ----- Non-error return cleanup code.put_label(code.return_label) if self.is_inlined: code.put_xgiveref(Naming.retval_cname) else: code.put_xdecref_clear(Naming.retval_cname, py_object_type) code.putln('%s->resume_label = -1;' % Naming.generator_cname) # clean up as early as possible to help breaking any reference cycles code.putln('__Pyx_Coroutine_clear((PyObject*)%s);' % Naming.generator_cname) code.put_finish_refcount_context() code.putln("return %s;" % Naming.retval_cname) code.putln("}") # ----- Go back and insert temp variable declarations tempvardecl_code.put_temp_declarations(code.funcstate) # ----- Generator resume code resume_code.putln("switch (%s->resume_label) {" % ( Naming.generator_cname)) resume_code.putln("case 0: goto %s;" % first_run_label) for i, label in code.yield_labels: resume_code.putln("case %d: goto %s;" % (i, label)) resume_code.putln("default: /* CPython raises the right error here */") resume_code.put_finish_refcount_context() resume_code.putln("return NULL;") resume_code.putln("}") code.exit_cfunc_scope() class OverrideCheckNode(StatNode): # A Node for dispatching to the def method if it # is overridden. 
# # py_func # # args # func_temp # body child_attrs = ['body'] body = None def analyse_expressions(self, env): self.args = env.arg_entries if self.py_func.is_module_scope: first_arg = 0 else: first_arg = 1 from . import ExprNodes self.func_node = ExprNodes.RawCNameExprNode(self.pos, py_object_type) call_node = ExprNodes.SimpleCallNode( self.pos, function=self.func_node, args=[ ExprNodes.NameNode(self.pos, name=arg.name) for arg in self.args[first_arg:] ]) if env.return_type.is_void or env.return_type.is_returncode: self.body = StatListNode(self.pos, stats=[ ExprStatNode(self.pos, expr=call_node), ReturnStatNode(self.pos, value=None)]) else: self.body = ReturnStatNode(self.pos, value=call_node) self.body = self.body.analyse_expressions(env) return self def generate_execution_code(self, code): interned_attr_cname = code.intern_identifier(self.py_func.entry.name) # Check to see if we are an extension type if self.py_func.is_module_scope: self_arg = "((PyObject *)%s)" % Naming.module_cname else: self_arg = "((PyObject *)%s)" % self.args[0].cname code.putln("/* Check if called by wrapper */") code.putln("if (unlikely(%s)) ;" % Naming.skip_dispatch_cname) code.putln("/* Check if overridden in Python */") if self.py_func.is_module_scope: code.putln("else {") else: code.putln("else if (unlikely(Py_TYPE(%s)->tp_dictoffset != 0)) {" % self_arg) func_node_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) self.func_node.set_cname(func_node_temp) # need to get attribute manually--scope would return cdef method code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectGetAttrStr", "ObjectHandling.c")) err = code.error_goto_if_null(func_node_temp, self.pos) code.putln("%s = __Pyx_PyObject_GetAttrStr(%s, %s); %s" % ( func_node_temp, self_arg, interned_attr_cname, err)) code.put_gotref(func_node_temp) is_builtin_function_or_method = "PyCFunction_Check(%s)" % func_node_temp is_overridden = "(PyCFunction_GET_FUNCTION(%s) != (PyCFunction)%s)" % ( func_node_temp, self.py_func.entry.func_cname) code.putln("if (!%s || %s) {" % (is_builtin_function_or_method, is_overridden)) self.body.generate_execution_code(code) code.putln("}") code.put_decref_clear(func_node_temp, PyrexTypes.py_object_type) code.funcstate.release_temp(func_node_temp) code.putln("}") class ClassDefNode(StatNode, BlockNode): pass class PyClassDefNode(ClassDefNode): # A Python class definition. # # name EncodedString Name of the class # doc string or None # body StatNode Attribute definition code # entry Symtab.Entry # scope PyClassScope # decorators [DecoratorNode] list of decorators or None # # The following subnodes are constructed internally: # # dict DictNode Class dictionary or Py3 namespace # classobj ClassNode Class object # target NameNode Variable to assign class object to child_attrs = ["body", "dict", "metaclass", "mkw", "bases", "class_result", "target", "class_cell", "decorators"] decorators = None class_result = None is_py3_style_class = False # Python3 style class (kwargs) metaclass = None mkw = None def __init__(self, pos, name, bases, doc, body, decorators=None, keyword_args=None, force_py3_semantics=False): StatNode.__init__(self, pos) self.name = name self.doc = doc self.body = body self.decorators = decorators self.bases = bases from . 
import ExprNodes if self.doc and Options.docstrings: doc = embed_position(self.pos, self.doc) doc_node = ExprNodes.StringNode(pos, value=doc) else: doc_node = None allow_py2_metaclass = not force_py3_semantics if keyword_args: allow_py2_metaclass = False self.is_py3_style_class = True if keyword_args.is_dict_literal: if keyword_args.key_value_pairs: for i, item in list(enumerate(keyword_args.key_value_pairs))[::-1]: if item.key.value == 'metaclass': if self.metaclass is not None: error(item.pos, "keyword argument 'metaclass' passed multiple times") # special case: we already know the metaclass, # so we don't need to do the "build kwargs, # find metaclass" dance at runtime self.metaclass = item.value del keyword_args.key_value_pairs[i] self.mkw = keyword_args else: assert self.metaclass is not None else: # MergedDictNode self.mkw = ExprNodes.ProxyNode(keyword_args) if force_py3_semantics or self.bases or self.mkw or self.metaclass: if self.metaclass is None: if keyword_args and not keyword_args.is_dict_literal: # **kwargs may contain 'metaclass' arg mkdict = self.mkw else: mkdict = None if (not mkdict and self.bases.is_sequence_constructor and not self.bases.args): pass # no base classes => no inherited metaclass else: self.metaclass = ExprNodes.PyClassMetaclassNode( pos, mkw=mkdict, bases=self.bases) needs_metaclass_calculation = False else: needs_metaclass_calculation = True self.dict = ExprNodes.PyClassNamespaceNode( pos, name=name, doc=doc_node, metaclass=self.metaclass, bases=self.bases, mkw=self.mkw) self.classobj = ExprNodes.Py3ClassNode( pos, name=name, bases=self.bases, dict=self.dict, doc=doc_node, metaclass=self.metaclass, mkw=self.mkw, calculate_metaclass=needs_metaclass_calculation, allow_py2_metaclass=allow_py2_metaclass) else: # no bases, no metaclass => old style class creation self.dict = ExprNodes.DictNode(pos, key_value_pairs=[]) self.classobj = ExprNodes.ClassNode( pos, name=name, bases=bases, dict=self.dict, doc=doc_node) self.target = ExprNodes.NameNode(pos, name=name) self.class_cell = ExprNodes.ClassCellInjectorNode(self.pos) def as_cclass(self): """ Return this node as if it were declared as an extension class """ if self.is_py3_style_class: error(self.classobj.pos, "Python3 style class could not be represented as C class") return bases = self.classobj.bases.args if len(bases) == 0: base_class_name = None base_class_module = None elif len(bases) == 1: base = bases[0] path = [] from .ExprNodes import AttributeNode, NameNode while isinstance(base, AttributeNode): path.insert(0, base.attribute) base = base.obj if isinstance(base, NameNode): path.insert(0, base.name) base_class_name = path[-1] if len(path) > 1: base_class_module = u'.'.join(path[:-1]) else: base_class_module = None else: error(self.classobj.bases.args.pos, "Invalid base class") else: error(self.classobj.bases.args.pos, "C class may only have one base class") return None return CClassDefNode(self.pos, visibility = 'private', module_name = None, class_name = self.name, base_class_module = base_class_module, base_class_name = base_class_name, decorators = self.decorators, body = self.body, in_pxd = False, doc = self.doc) def create_scope(self, env): genv = env while genv.is_py_class_scope or genv.is_c_class_scope: genv = genv.outer_scope cenv = self.scope = PyClassScope(name = self.name, outer_scope = genv) return cenv def analyse_declarations(self, env): class_result = self.classobj if self.decorators: from .ExprNodes import SimpleCallNode for decorator in self.decorators[::-1]: class_result = 
SimpleCallNode( decorator.pos, function = decorator.decorator, args = [class_result]) self.decorators = None self.class_result = class_result self.class_result.analyse_declarations(env) self.target.analyse_target_declaration(env) cenv = self.create_scope(env) cenv.directives = env.directives cenv.class_obj_cname = self.target.entry.cname self.body.analyse_declarations(cenv) def analyse_expressions(self, env): if self.bases: self.bases = self.bases.analyse_expressions(env) if self.metaclass: self.metaclass = self.metaclass.analyse_expressions(env) if self.mkw: self.mkw = self.mkw.analyse_expressions(env) self.dict = self.dict.analyse_expressions(env) self.class_result = self.class_result.analyse_expressions(env) genv = env.global_scope() cenv = self.scope self.body = self.body.analyse_expressions(cenv) self.target.analyse_target_expression(env, self.classobj) self.class_cell = self.class_cell.analyse_expressions(cenv) return self def generate_function_definitions(self, env, code): self.generate_lambda_definitions(self.scope, code) self.body.generate_function_definitions(self.scope, code) def generate_execution_code(self, code): code.mark_pos(self.pos) code.pyclass_stack.append(self) cenv = self.scope if self.bases: self.bases.generate_evaluation_code(code) if self.mkw: self.mkw.generate_evaluation_code(code) if self.metaclass: self.metaclass.generate_evaluation_code(code) self.dict.generate_evaluation_code(code) cenv.namespace_cname = cenv.class_obj_cname = self.dict.result() self.class_cell.generate_evaluation_code(code) self.body.generate_execution_code(code) self.class_result.generate_evaluation_code(code) self.class_cell.generate_injection_code( code, self.class_result.result()) self.class_cell.generate_disposal_code(code) cenv.namespace_cname = cenv.class_obj_cname = self.classobj.result() self.target.generate_assignment_code(self.class_result, code) self.dict.generate_disposal_code(code) self.dict.free_temps(code) if self.metaclass: self.metaclass.generate_disposal_code(code) self.metaclass.free_temps(code) if self.mkw: self.mkw.generate_disposal_code(code) self.mkw.free_temps(code) if self.bases: self.bases.generate_disposal_code(code) self.bases.free_temps(code) code.pyclass_stack.pop() class CClassDefNode(ClassDefNode): # An extension type definition. # # visibility 'private' or 'public' or 'extern' # typedef_flag boolean # api boolean # module_name string or None For import of extern type objects # class_name string Unqualified name of class # as_name string or None Name to declare as in this scope # base_class_module string or None Module containing the base class # base_class_name string or None Name of the base class # objstruct_name string or None Specified C name of object struct # typeobj_name string or None Specified C name of type object # in_pxd boolean Is in a .pxd file # decorators [DecoratorNode] list of decorators or None # doc string or None # body StatNode or None # entry Symtab.Entry # base_type PyExtensionType or None # buffer_defaults_node DictNode or None Declares defaults for a buffer # buffer_defaults_pos child_attrs = ["body"] buffer_defaults_node = None buffer_defaults_pos = None typedef_flag = False api = False objstruct_name = None typeobj_name = None decorators = None shadow = False def buffer_defaults(self, env): if not hasattr(self, '_buffer_defaults'): from . 
import Buffer if self.buffer_defaults_node: self._buffer_defaults = Buffer.analyse_buffer_options( self.buffer_defaults_pos, env, [], self.buffer_defaults_node, need_complete=False) else: self._buffer_defaults = None return self._buffer_defaults def declare(self, env): if self.module_name and self.visibility != 'extern': module_path = self.module_name.split(".") home_scope = env.find_imported_module(module_path, self.pos) if not home_scope: return None else: home_scope = env self.entry = home_scope.declare_c_class( name = self.class_name, pos = self.pos, defining = 0, implementing = 0, module_name = self.module_name, base_type = None, objstruct_cname = self.objstruct_name, typeobj_cname = self.typeobj_name, visibility = self.visibility, typedef_flag = self.typedef_flag, api = self.api, buffer_defaults = self.buffer_defaults(env), shadow = self.shadow) def analyse_declarations(self, env): #print "CClassDefNode.analyse_declarations:", self.class_name #print "...visibility =", self.visibility #print "...module_name =", self.module_name if env.in_cinclude and not self.objstruct_name: error(self.pos, "Object struct name specification required for " "C class defined in 'extern from' block") if self.decorators: error(self.pos, "Decorators not allowed on cdef classes (used on type '%s')" % self.class_name) self.base_type = None # Now that module imports are cached, we need to # import the modules for extern classes. if self.module_name: self.module = None for module in env.cimported_modules: if module.name == self.module_name: self.module = module if self.module is None: self.module = ModuleScope(self.module_name, None, env.context) self.module.has_extern_class = 1 env.add_imported_module(self.module) if self.base_class_name: if self.base_class_module: base_class_scope = env.find_module(self.base_class_module, self.pos) else: base_class_scope = env if self.base_class_name == 'object': # extension classes are special and don't need to inherit from object if base_class_scope is None or base_class_scope.lookup('object') is None: self.base_class_name = None self.base_class_module = None base_class_scope = None if base_class_scope: base_class_entry = base_class_scope.find(self.base_class_name, self.pos) if base_class_entry: if not base_class_entry.is_type: error(self.pos, "'%s' is not a type name" % self.base_class_name) elif not base_class_entry.type.is_extension_type and \ not (base_class_entry.type.is_builtin_type and base_class_entry.type.objstruct_cname): error(self.pos, "'%s' is not an extension type" % self.base_class_name) elif not base_class_entry.type.is_complete(): error(self.pos, "Base class '%s' of type '%s' is incomplete" % ( self.base_class_name, self.class_name)) elif base_class_entry.type.scope and base_class_entry.type.scope.directives and \ base_class_entry.type.is_final_type: error(self.pos, "Base class '%s' of type '%s' is final" % ( self.base_class_name, self.class_name)) elif base_class_entry.type.is_builtin_type and \ base_class_entry.type.name in ('tuple', 'str', 'bytes'): error(self.pos, "inheritance from PyVarObject types like '%s' is not currently supported" % base_class_entry.type.name) else: self.base_type = base_class_entry.type if env.directives.get('freelist', 0) > 0: warning(self.pos, "freelists cannot be used on subtypes, only the base class can manage them", 1) has_body = self.body is not None if has_body and self.base_type and not self.base_type.scope: # To properly initialize inherited attributes, the base type must # be analysed before this type. 
self.base_type.defered_declarations.append(lambda : self.analyse_declarations(env)) return if self.module_name and self.visibility != 'extern': module_path = self.module_name.split(".") home_scope = env.find_imported_module(module_path, self.pos) if not home_scope: return else: home_scope = env if self.visibility == 'extern': if (self.module_name == '__builtin__' and self.class_name in Builtin.builtin_types and env.qualified_name[:8] != 'cpython.'): # allow overloaded names for cimporting from cpython warning(self.pos, "%s already a builtin Cython type" % self.class_name, 1) self.entry = home_scope.declare_c_class( name = self.class_name, pos = self.pos, defining = has_body and self.in_pxd, implementing = has_body and not self.in_pxd, module_name = self.module_name, base_type = self.base_type, objstruct_cname = self.objstruct_name, typeobj_cname = self.typeobj_name, visibility = self.visibility, typedef_flag = self.typedef_flag, api = self.api, buffer_defaults = self.buffer_defaults(env), shadow = self.shadow) if self.shadow: home_scope.lookup(self.class_name).as_variable = self.entry if home_scope is not env and self.visibility == 'extern': env.add_imported_entry(self.class_name, self.entry, self.pos) self.scope = scope = self.entry.type.scope if scope is not None: scope.directives = env.directives if self.doc and Options.docstrings: scope.doc = embed_position(self.pos, self.doc) if has_body: self.body.analyse_declarations(scope) if self.in_pxd: scope.defined = 1 else: scope.implemented = 1 env.allocate_vtable_names(self.entry) for thunk in self.entry.type.defered_declarations: thunk() def analyse_expressions(self, env): if self.body: scope = self.entry.type.scope self.body = self.body.analyse_expressions(scope) return self def generate_function_definitions(self, env, code): if self.body: self.generate_lambda_definitions(self.scope, code) self.body.generate_function_definitions(self.scope, code) def generate_execution_code(self, code): # This is needed to generate evaluation code for # default values of method arguments. code.mark_pos(self.pos) if self.body: self.body.generate_execution_code(code) def annotate(self, code): if self.body: self.body.annotate(code) class PropertyNode(StatNode): # Definition of a property in an extension type. # # name string # doc EncodedString or None Doc string # entry Symtab.Entry # body StatListNode child_attrs = ["body"] def analyse_declarations(self, env): self.entry = env.declare_property(self.name, self.doc, self.pos) self.entry.scope.directives = env.directives self.body.analyse_declarations(self.entry.scope) def analyse_expressions(self, env): self.body = self.body.analyse_expressions(env) return self def generate_function_definitions(self, env, code): self.body.generate_function_definitions(env, code) def generate_execution_code(self, code): pass def annotate(self, code): self.body.annotate(code) class GlobalNode(StatNode): # Global variable declaration. # # names [string] child_attrs = [] def analyse_declarations(self, env): for name in self.names: env.declare_global(name, self.pos) def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class NonlocalNode(StatNode): # Nonlocal variable declaration via the 'nonlocal' keyword. 
# # names [string] child_attrs = [] def analyse_declarations(self, env): for name in self.names: env.declare_nonlocal(name, self.pos) def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class ExprStatNode(StatNode): # Expression used as a statement. # # expr ExprNode child_attrs = ["expr"] def analyse_declarations(self, env): from . import ExprNodes if isinstance(self.expr, ExprNodes.GeneralCallNode): func = self.expr.function.as_cython_attribute() if func == u'declare': args, kwds = self.expr.explicit_args_kwds() if len(args): error(self.expr.pos, "Variable names must be specified.") for var, type_node in kwds.key_value_pairs: type = type_node.analyse_as_type(env) if type is None: error(type_node.pos, "Unknown type") else: env.declare_var(var.value, type, var.pos, is_cdef = True) self.__class__ = PassStatNode def analyse_expressions(self, env): self.expr.result_is_used = False # hint that .result() may safely be left empty self.expr = self.expr.analyse_expressions(env) return self def nogil_check(self, env): if self.expr.type.is_pyobject and self.expr.is_temp: self.gil_error() gil_message = "Discarding owned Python object" def generate_execution_code(self, code): code.mark_pos(self.pos) self.expr.generate_evaluation_code(code) if not self.expr.is_temp and self.expr.result(): code.putln("%s;" % self.expr.result()) self.expr.generate_disposal_code(code) self.expr.free_temps(code) def generate_function_definitions(self, env, code): self.expr.generate_function_definitions(env, code) def annotate(self, code): self.expr.annotate(code) class AssignmentNode(StatNode): # Abstract base class for assignment nodes. # # The analyse_expressions and generate_execution_code # phases of assignments are split into two sub-phases # each, to enable all the right hand sides of a # parallel assignment to be evaluated before assigning # to any of the left hand sides. def analyse_expressions(self, env): node = self.analyse_types(env) if isinstance(node, AssignmentNode) and not isinstance(node, ParallelAssignmentNode): if node.rhs.type.is_ptr and node.rhs.is_ephemeral(): error(self.pos, "Storing unsafe C derivative of temporary Python reference") return node # def analyse_expressions(self, env): # self.analyse_expressions_1(env) # self.analyse_expressions_2(env) def generate_execution_code(self, code): code.mark_pos(self.pos) self.generate_rhs_evaluation_code(code) self.generate_assignment_code(code) class SingleAssignmentNode(AssignmentNode): # The simplest case: # # a = b # # lhs ExprNode Left hand side # rhs ExprNode Right hand side # first bool Is this guaranteed the first assignment to lhs? # is_overloaded_assignment bool Is this assignment done via an overloaded operator= child_attrs = ["lhs", "rhs"] first = False is_overloaded_assignment = False declaration_only = False def analyse_declarations(self, env): from . 
import ExprNodes # handle declarations of the form x = cython.foo() if isinstance(self.rhs, ExprNodes.CallNode): func_name = self.rhs.function.as_cython_attribute() if func_name: args, kwds = self.rhs.explicit_args_kwds() if func_name in ['declare', 'typedef']: if len(args) > 2 or kwds is not None: error(self.rhs.pos, "Can only declare one type at a time.") return type = args[0].analyse_as_type(env) if type is None: error(args[0].pos, "Unknown type") return lhs = self.lhs if func_name == 'declare': if isinstance(lhs, ExprNodes.NameNode): vars = [(lhs.name, lhs.pos)] elif isinstance(lhs, ExprNodes.TupleNode): vars = [(var.name, var.pos) for var in lhs.args] else: error(lhs.pos, "Invalid declaration") return for var, pos in vars: env.declare_var(var, type, pos, is_cdef = True) if len(args) == 2: # we have a value self.rhs = args[1] else: self.declaration_only = True else: self.declaration_only = True if not isinstance(lhs, ExprNodes.NameNode): error(lhs.pos, "Invalid declaration.") env.declare_typedef(lhs.name, type, self.pos, visibility='private') elif func_name in ['struct', 'union']: self.declaration_only = True if len(args) > 0 or kwds is None: error(self.rhs.pos, "Struct or union members must be given by name.") return members = [] for member, type_node in kwds.key_value_pairs: type = type_node.analyse_as_type(env) if type is None: error(type_node.pos, "Unknown type") else: members.append((member.value, type, member.pos)) if len(members) < len(kwds.key_value_pairs): return if not isinstance(self.lhs, ExprNodes.NameNode): error(self.lhs.pos, "Invalid declaration.") name = self.lhs.name scope = StructOrUnionScope(name) env.declare_struct_or_union(name, func_name, scope, False, self.rhs.pos) for member, type, pos in members: scope.declare_var(member, type, pos) elif func_name == 'fused_type': # dtype = cython.fused_type(...) self.declaration_only = True if kwds: error(self.rhs.function.pos, "fused_type does not take keyword arguments") fusednode = FusedTypeNode(self.rhs.pos, name=self.lhs.name, types=args) fusednode.analyse_declarations(env) if self.declaration_only: return else: self.lhs.analyse_target_declaration(env) def analyse_types(self, env, use_temp=0): from . 
import ExprNodes self.rhs = self.rhs.analyse_types(env) unrolled_assignment = self.unroll_rhs(env) if unrolled_assignment: return unrolled_assignment self.lhs = self.lhs.analyse_target_types(env) self.lhs.gil_assignment_check(env) unrolled_assignment = self.unroll_lhs(env) if unrolled_assignment: return unrolled_assignment if self.lhs.memslice_broadcast or self.rhs.memslice_broadcast: self.lhs.memslice_broadcast = True self.rhs.memslice_broadcast = True if (self.lhs.is_subscript and not self.rhs.type.is_memoryviewslice and (self.lhs.memslice_slice or self.lhs.is_memslice_copy) and (self.lhs.type.dtype.assignable_from(self.rhs.type) or self.rhs.type.is_pyobject)): # scalar slice assignment self.lhs.is_memslice_scalar_assignment = True dtype = self.lhs.type.dtype elif self.lhs.type.is_array: if not isinstance(self.lhs, ExprNodes.SliceIndexNode): # cannot assign to C array, only to its full slice self.lhs = ExprNodes.SliceIndexNode( self.lhs.pos, base=self.lhs, start=None, stop=None) self.lhs = self.lhs.analyse_target_types(env) dtype = self.lhs.type else: dtype = self.lhs.type if self.lhs.type.is_cpp_class: op = env.lookup_operator_for_types(self.pos, '=', [self.lhs.type, self.rhs.type]) if op: rhs = self.rhs self.is_overloaded_assignment = 1 else: rhs = self.rhs.coerce_to(dtype, env) else: rhs = self.rhs.coerce_to(dtype, env) if use_temp or rhs.is_attribute or ( not rhs.is_name and not rhs.is_literal and rhs.type.is_pyobject): # things like (cdef) attribute access are not safe (traverses pointers) rhs = rhs.coerce_to_temp(env) elif rhs.type.is_pyobject: rhs = rhs.coerce_to_simple(env) self.rhs = rhs return self def unroll(self, node, target_size, env): from . import ExprNodes, UtilNodes base = node start_node = stop_node = step_node = check_node = None if node.type.is_ctuple: slice_size = node.type.size elif node.type.is_ptr or node.type.is_array: while isinstance(node, ExprNodes.SliceIndexNode) and not (node.start or node.stop): base = node = node.base if isinstance(node, ExprNodes.SliceIndexNode): base = node.base start_node = node.start if start_node: start_node = start_node.coerce_to(PyrexTypes.c_py_ssize_t_type, env) stop_node = node.stop if stop_node: stop_node = stop_node.coerce_to(PyrexTypes.c_py_ssize_t_type, env) else: if node.type.is_array and node.type.size: stop_node = ExprNodes.IntNode( self.pos, value=str(node.type.size), constant_result=(node.type.size if isinstance(node.type.size, (int, long)) else ExprNodes.constant_value_not_set)) else: error(self.pos, "C array iteration requires known end index") return step_node = None #node.step if step_node: step_node = step_node.coerce_to(PyrexTypes.c_py_ssize_t_type, env) # TODO: Factor out SliceIndexNode.generate_slice_guard_code() for use here. 
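        # With constant bounds the slice length is known at compile time:
        # for a[1:7:2], for instance, (7 - 1) / 2 == 3 unrolled items.
        # Any non-constant bound raises ValueError below and aborts the
        # unrolling with an error.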
def get_const(node, none_value): if node is None: return none_value elif node.has_constant_result(): return node.constant_result else: raise ValueError("Not a constant.") try: slice_size = (get_const(stop_node, None) - get_const(start_node, 0)) / get_const(step_node, 1) except ValueError: error(self.pos, "C array assignment currently requires known endpoints") return elif node.type.is_array: slice_size = node.type.size if not isinstance(slice_size, (int, long)): return # might still work when coercing to Python else: return else: return if slice_size != target_size: error(self.pos, "Assignment to/from slice of wrong length, expected %s, got %s" % ( slice_size, target_size)) return items = [] base = UtilNodes.LetRefNode(base) refs = [base] if start_node and not start_node.is_literal: start_node = UtilNodes.LetRefNode(start_node) refs.append(start_node) if stop_node and not stop_node.is_literal: stop_node = UtilNodes.LetRefNode(stop_node) refs.append(stop_node) if step_node and not step_node.is_literal: step_node = UtilNodes.LetRefNode(step_node) refs.append(step_node) for ix in range(target_size): ix_node = ExprNodes.IntNode(self.pos, value=str(ix), constant_result=ix, type=PyrexTypes.c_py_ssize_t_type) if step_node is not None: if step_node.has_constant_result(): step_value = ix_node.constant_result * step_node.constant_result ix_node = ExprNodes.IntNode(self.pos, value=str(step_value), constant_result=step_value) else: ix_node = ExprNodes.MulNode(self.pos, operator='*', operand1=step_node, operand2=ix_node) if start_node is not None: if start_node.has_constant_result() and ix_node.has_constant_result(): index_value = ix_node.constant_result + start_node.constant_result ix_node = ExprNodes.IntNode(self.pos, value=str(index_value), constant_result=index_value) else: ix_node = ExprNodes.AddNode( self.pos, operator='+', operand1=start_node, operand2=ix_node) items.append(ExprNodes.IndexNode(self.pos, base=base, index=ix_node.analyse_types(env))) return check_node, refs, items def unroll_assignments(self, refs, check_node, lhs_list, rhs_list, env): from . import UtilNodes assignments = [] for lhs, rhs in zip(lhs_list, rhs_list): assignments.append(SingleAssignmentNode(self.pos, lhs=lhs, rhs=rhs, first=self.first)) all = ParallelAssignmentNode(pos=self.pos, stats=assignments).analyse_expressions(env) if check_node: all = StatListNode(pos=self.pos, stats=[check_node, all]) for ref in refs[::-1]: all = UtilNodes.LetNode(ref, all) return all def unroll_rhs(self, env): from . import ExprNodes if not isinstance(self.lhs, ExprNodes.TupleNode): return if any(arg.is_starred for arg in self.lhs.args): return unrolled = self.unroll(self.rhs, len(self.lhs.args), env) if not unrolled: return check_node, refs, rhs = unrolled return self.unroll_assignments(refs, check_node, self.lhs.args, rhs, env) def unroll_lhs(self, env): if self.lhs.type.is_ctuple: # Handled directly. return from . 
import ExprNodes, UtilNodes if not isinstance(self.rhs, ExprNodes.TupleNode): return unrolled = self.unroll(self.lhs, len(self.rhs.args), env) if not unrolled: return check_node, refs, lhs = unrolled return self.unroll_assignments(refs, check_node, lhs, self.rhs.args, env) def generate_rhs_evaluation_code(self, code): self.rhs.generate_evaluation_code(code) def generate_assignment_code(self, code): if self.is_overloaded_assignment: self.lhs.generate_assignment_code(self.rhs, code, overloaded_assignment=True) else: self.lhs.generate_assignment_code(self.rhs, code) def generate_function_definitions(self, env, code): self.rhs.generate_function_definitions(env, code) def annotate(self, code): self.lhs.annotate(code) self.rhs.annotate(code) class CascadedAssignmentNode(AssignmentNode): # An assignment with multiple left hand sides: # # a = b = c # # lhs_list [ExprNode] Left hand sides # rhs ExprNode Right hand sides # # Used internally: # # coerced_values [ExprNode] RHS coerced to all distinct LHS types # cloned_values [ExprNode] cloned RHS value for each LHS # assignment_overloads [Bool] If each assignment uses a C++ operator= child_attrs = ["lhs_list", "rhs", "coerced_values", "cloned_values"] cloned_values = None coerced_values = None assignment_overloads = None def analyse_declarations(self, env): for lhs in self.lhs_list: lhs.analyse_target_declaration(env) def analyse_types(self, env, use_temp=0): from .ExprNodes import CloneNode, ProxyNode # collect distinct types used on the LHS lhs_types = set() for lhs in self.lhs_list: lhs.analyse_target_types(env) lhs.gil_assignment_check(env) lhs_types.add(lhs.type) rhs = self.rhs.analyse_types(env) # common special case: only one type needed on the LHS => coerce only once if len(lhs_types) == 1: # Avoid coercion for overloaded assignment operators. 
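            # Sketch: for "a = b = c" with two targets of the same C int type,
            # "c" is converted once (roughly "tmp = __Pyx_PyInt_As_int(c);")
            # and each LHS is then assigned from a CloneNode of that single
            # coerced result, so the conversion is not repeated per target.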
            if next(iter(lhs_types)).is_cpp_class:
                op = env.lookup_operator('=', [lhs, self.rhs])
                if not op:
                    rhs = rhs.coerce_to(lhs_types.pop(), env)
            else:
                rhs = rhs.coerce_to(lhs_types.pop(), env)

        if not rhs.is_name and not rhs.is_literal and (
                use_temp or rhs.is_attribute or rhs.type.is_pyobject):
            rhs = rhs.coerce_to_temp(env)
        else:
            rhs = rhs.coerce_to_simple(env)
        self.rhs = ProxyNode(rhs) if rhs.is_temp else rhs

        # clone RHS and coerce it to all distinct LHS types
        self.coerced_values = []
        coerced_values = {}
        self.assignment_overloads = []
        for lhs in self.lhs_list:
            # record exactly one overload flag per lhs; the flags are zipped
            # against lhs_list again in generate_assignment_code()
            overloaded = False
            if lhs.type.is_cpp_class:
                op = env.lookup_operator('=', [lhs, self.rhs])
                if op:
                    overloaded = True
            self.assignment_overloads.append(overloaded)
            if not overloaded and lhs.type not in coerced_values and lhs.type != rhs.type:
                rhs = CloneNode(self.rhs).coerce_to(lhs.type, env)
                self.coerced_values.append(rhs)
                coerced_values[lhs.type] = rhs

        # clone coerced values for all LHS assignments
        self.cloned_values = []
        for lhs in self.lhs_list:
            rhs = coerced_values.get(lhs.type, self.rhs)
            self.cloned_values.append(CloneNode(rhs))
        return self

    def generate_rhs_evaluation_code(self, code):
        self.rhs.generate_evaluation_code(code)

    def generate_assignment_code(self, code):
        # prepare all coercions
        for rhs in self.coerced_values:
            rhs.generate_evaluation_code(code)
        # assign clones to LHS
        for lhs, rhs, overload in zip(self.lhs_list, self.cloned_values, self.assignment_overloads):
            rhs.generate_evaluation_code(code)
            lhs.generate_assignment_code(rhs, code, overloaded_assignment=overload)
        # dispose of coerced values and original RHS
        for rhs_value in self.coerced_values:
            rhs_value.generate_disposal_code(code)
            rhs_value.free_temps(code)
        self.rhs.generate_disposal_code(code)
        self.rhs.free_temps(code)

    def generate_function_definitions(self, env, code):
        self.rhs.generate_function_definitions(env, code)

    def annotate(self, code):
        for rhs in self.coerced_values:
            rhs.annotate(code)
        for lhs, rhs in zip(self.lhs_list, self.cloned_values):
            lhs.annotate(code)
            rhs.annotate(code)
        self.rhs.annotate(code)


class ParallelAssignmentNode(AssignmentNode):
    # A combined packing/unpacking assignment:
    #
    #    a, b, c = d, e, f
    #
    # This has been rearranged by the parser into
    #
    #    a = d ; b = e ; c = f
    #
    # but we must evaluate all the right hand sides
    # before assigning to any of the left hand sides.
    #
    #  stats     [AssignmentNode]   The constituent assignments

    child_attrs = ["stats"]

    def analyse_declarations(self, env):
        for stat in self.stats:
            stat.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.stats = [stat.analyse_types(env, use_temp=1)
                      for stat in self.stats]
        return self

#    def analyse_expressions(self, env):
#        for stat in self.stats:
#            stat.analyse_expressions_1(env, use_temp=1)
#        for stat in self.stats:
#            stat.analyse_expressions_2(env)

    def generate_execution_code(self, code):
        code.mark_pos(self.pos)
        for stat in self.stats:
            stat.generate_rhs_evaluation_code(code)
        for stat in self.stats:
            stat.generate_assignment_code(code)

    def generate_function_definitions(self, env, code):
        for stat in self.stats:
            stat.generate_function_definitions(env, code)

    def annotate(self, code):
        for stat in self.stats:
            stat.annotate(code)


class InPlaceAssignmentNode(AssignmentNode):
    # An in place arithmetic operand:
    #
    #    a += b
    #    a -= b
    #    ...
# # lhs ExprNode Left hand side # rhs ExprNode Right hand side # operator char one of "+-*/%^&|" # # This code is a bit tricky because in order to obey Python # semantics the sub-expressions (e.g. indices) of the lhs must # not be evaluated twice. So we must re-use the values calculated # in evaluation phase for the assignment phase as well. # Fortunately, the type of the lhs node is fairly constrained # (it must be a NameNode, AttributeNode, or IndexNode). child_attrs = ["lhs", "rhs"] def analyse_declarations(self, env): self.lhs.analyse_target_declaration(env) def analyse_types(self, env): self.rhs = self.rhs.analyse_types(env) self.lhs = self.lhs.analyse_target_types(env) # When assigning to a fully indexed buffer or memoryview, coerce the rhs if (self.lhs.is_subscript and (self.lhs.memslice_index or self.lhs.is_buffer_access)): self.rhs = self.rhs.coerce_to(self.lhs.type, env) elif self.lhs.type.is_string and self.operator in '+-': # use pointer arithmetic for char* LHS instead of string concat self.rhs = self.rhs.coerce_to(PyrexTypes.c_py_ssize_t_type, env) return self def generate_execution_code(self, code): code.mark_pos(self.pos) self.rhs.generate_evaluation_code(code) self.lhs.generate_subexpr_evaluation_code(code) c_op = self.operator if c_op == "//": c_op = "/" elif c_op == "**": error(self.pos, "No C inplace power operator") if self.lhs.is_subscript and self.lhs.is_buffer_access: if self.lhs.type.is_pyobject: error(self.pos, "In-place operators not allowed on object buffers in this release.") if (c_op in ('/', '%') and self.lhs.type.is_int and not code.globalstate.directives['cdivision']): error(self.pos, "In-place non-c divide operators not allowed on int buffers.") self.lhs.generate_buffer_setitem_code(self.rhs, code, c_op) else: # C++ # TODO: make sure overload is declared code.putln("%s %s= %s;" % (self.lhs.result(), c_op, self.rhs.result())) self.lhs.generate_subexpr_disposal_code(code) self.lhs.free_subexpr_temps(code) self.rhs.generate_disposal_code(code) self.rhs.free_temps(code) def annotate(self, code): self.lhs.annotate(code) self.rhs.annotate(code) def create_binop_node(self): from . 
import ExprNodes return ExprNodes.binop_node(self.pos, self.operator, self.lhs, self.rhs) class PrintStatNode(StatNode): # print statement # # arg_tuple TupleNode # stream ExprNode or None (stdout) # append_newline boolean child_attrs = ["arg_tuple", "stream"] def analyse_expressions(self, env): if self.stream: stream = self.stream.analyse_expressions(env) self.stream = stream.coerce_to_pyobject(env) arg_tuple = self.arg_tuple.analyse_expressions(env) self.arg_tuple = arg_tuple.coerce_to_pyobject(env) env.use_utility_code(printing_utility_code) if len(self.arg_tuple.args) == 1 and self.append_newline: env.use_utility_code(printing_one_utility_code) return self nogil_check = Node.gil_error gil_message = "Python print statement" def generate_execution_code(self, code): code.mark_pos(self.pos) if self.stream: self.stream.generate_evaluation_code(code) stream_result = self.stream.py_result() else: stream_result = '0' if len(self.arg_tuple.args) == 1 and self.append_newline: arg = self.arg_tuple.args[0] arg.generate_evaluation_code(code) code.putln( "if (__Pyx_PrintOne(%s, %s) < 0) %s" % ( stream_result, arg.py_result(), code.error_goto(self.pos))) arg.generate_disposal_code(code) arg.free_temps(code) else: self.arg_tuple.generate_evaluation_code(code) code.putln( "if (__Pyx_Print(%s, %s, %d) < 0) %s" % ( stream_result, self.arg_tuple.py_result(), self.append_newline, code.error_goto(self.pos))) self.arg_tuple.generate_disposal_code(code) self.arg_tuple.free_temps(code) if self.stream: self.stream.generate_disposal_code(code) self.stream.free_temps(code) def generate_function_definitions(self, env, code): if self.stream: self.stream.generate_function_definitions(env, code) self.arg_tuple.generate_function_definitions(env, code) def annotate(self, code): if self.stream: self.stream.annotate(code) self.arg_tuple.annotate(code) class ExecStatNode(StatNode): # exec statement # # args [ExprNode] child_attrs = ["args"] def analyse_expressions(self, env): for i, arg in enumerate(self.args): arg = arg.analyse_expressions(env) arg = arg.coerce_to_pyobject(env) self.args[i] = arg env.use_utility_code(Builtin.pyexec_utility_code) return self nogil_check = Node.gil_error gil_message = "Python exec statement" def generate_execution_code(self, code): code.mark_pos(self.pos) args = [] for arg in self.args: arg.generate_evaluation_code(code) args.append( arg.py_result() ) args = tuple(args + ['0', '0'][:3-len(args)]) temp_result = code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=True) code.putln("%s = __Pyx_PyExec3(%s, %s, %s);" % ( (temp_result,) + args)) for arg in self.args: arg.generate_disposal_code(code) arg.free_temps(code) code.putln( code.error_goto_if_null(temp_result, self.pos)) code.put_gotref(temp_result) code.put_decref_clear(temp_result, py_object_type) code.funcstate.release_temp(temp_result) def annotate(self, code): for arg in self.args: arg.annotate(code) class DelStatNode(StatNode): # del statement # # args [ExprNode] child_attrs = ["args"] ignore_nonexisting = False def analyse_declarations(self, env): for arg in self.args: arg.analyse_target_declaration(env) def analyse_expressions(self, env): for i, arg in enumerate(self.args): arg = self.args[i] = arg.analyse_target_expression(env, None) if arg.type.is_pyobject or (arg.is_name and arg.type.is_memoryviewslice): if arg.is_name and arg.entry.is_cglobal: error(arg.pos, "Deletion of global C variable") elif arg.type.is_ptr and arg.type.base_type.is_cpp_class: self.cpp_check(env) elif arg.type.is_cpp_class: error(arg.pos, 
"Deletion of non-heap C++ object") elif arg.is_subscript and arg.base.type is Builtin.bytearray_type: pass # del ba[i] else: error(arg.pos, "Deletion of non-Python, non-C++ object") #arg.release_target_temp(env) return self def nogil_check(self, env): for arg in self.args: if arg.type.is_pyobject: self.gil_error() gil_message = "Deleting Python object" def generate_execution_code(self, code): code.mark_pos(self.pos) for arg in self.args: if (arg.type.is_pyobject or arg.type.is_memoryviewslice or arg.is_subscript and arg.base.type is Builtin.bytearray_type): arg.generate_deletion_code( code, ignore_nonexisting=self.ignore_nonexisting) elif arg.type.is_ptr and arg.type.base_type.is_cpp_class: arg.generate_result_code(code) code.putln("delete %s;" % arg.result()) # else error reported earlier def annotate(self, code): for arg in self.args: arg.annotate(code) class PassStatNode(StatNode): # pass statement child_attrs = [] def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class IndirectionNode(StatListNode): """ This adds an indirection so that the node can be shared and a subtree can be removed at any time by clearing self.stats. """ def __init__(self, stats): super(IndirectionNode, self).__init__(stats[0].pos, stats=stats) class BreakStatNode(StatNode): child_attrs = [] is_terminator = True def analyse_expressions(self, env): return self def generate_execution_code(self, code): code.mark_pos(self.pos) if not code.break_label: error(self.pos, "break statement not inside loop") else: code.put_goto(code.break_label) class ContinueStatNode(StatNode): child_attrs = [] is_terminator = True def analyse_expressions(self, env): return self def generate_execution_code(self, code): code.mark_pos(self.pos) if code.funcstate.in_try_finally: error(self.pos, "continue statement inside try of try...finally") elif not code.continue_label: error(self.pos, "continue statement not inside loop") else: code.put_goto(code.continue_label) class ReturnStatNode(StatNode): # return statement # # value ExprNode or None # return_type PyrexType # in_generator return inside of generator => raise StopIteration child_attrs = ["value"] is_terminator = True in_generator = False # Whether we are in a parallel section in_parallel = False def analyse_expressions(self, env): return_type = env.return_type self.return_type = return_type if not return_type: error(self.pos, "Return not inside a function body") return self if self.value: self.value = self.value.analyse_types(env) if return_type.is_void or return_type.is_returncode: error(self.value.pos, "Return with value in void function") else: self.value = self.value.coerce_to(env.return_type, env) else: if (not return_type.is_void and not return_type.is_pyobject and not return_type.is_returncode): error(self.pos, "Return value required") return self def nogil_check(self, env): if self.return_type.is_pyobject: self.gil_error() gil_message = "Returning Python object" def generate_execution_code(self, code): code.mark_pos(self.pos) if not self.return_type: # error reported earlier return if self.return_type.is_pyobject: code.put_xdecref(Naming.retval_cname, self.return_type) if self.value: self.value.generate_evaluation_code(code) if self.return_type.is_memoryviewslice: from . 
import MemoryView MemoryView.put_acquire_memoryviewslice( lhs_cname=Naming.retval_cname, lhs_type=self.return_type, lhs_pos=self.value.pos, rhs=self.value, code=code, have_gil=self.in_nogil_context) elif self.in_generator: # return value == raise StopIteration(value), but uncatchable code.globalstate.use_utility_code( UtilityCode.load_cached("ReturnWithStopIteration", "Coroutine.c")) code.putln("%s = NULL; __Pyx_ReturnWithStopIteration(%s);" % ( Naming.retval_cname, self.value.py_result())) self.value.generate_disposal_code(code) else: self.value.make_owned_reference(code) code.putln( "%s = %s;" % ( Naming.retval_cname, self.value.result_as(self.return_type))) self.value.generate_post_assignment_code(code) self.value.free_temps(code) else: if self.return_type.is_pyobject: if self.in_generator: code.putln("%s = NULL;" % Naming.retval_cname) else: code.put_init_to_py_none(Naming.retval_cname, self.return_type) elif self.return_type.is_returncode: self.put_return(code, self.return_type.default_value) for cname, type in code.funcstate.temps_holding_reference(): code.put_decref_clear(cname, type) code.put_goto(code.return_label) def put_return(self, code, value): if self.in_parallel: code.putln_openmp("#pragma omp critical(__pyx_returning)") code.putln("%s = %s;" % (Naming.retval_cname, value)) def generate_function_definitions(self, env, code): if self.value is not None: self.value.generate_function_definitions(env, code) def annotate(self, code): if self.value: self.value.annotate(code) class RaiseStatNode(StatNode): # raise statement # # exc_type ExprNode or None # exc_value ExprNode or None # exc_tb ExprNode or None # cause ExprNode or None child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"] is_terminator = True def analyse_expressions(self, env): if self.exc_type: exc_type = self.exc_type.analyse_types(env) self.exc_type = exc_type.coerce_to_pyobject(env) if self.exc_value: exc_value = self.exc_value.analyse_types(env) self.exc_value = exc_value.coerce_to_pyobject(env) if self.exc_tb: exc_tb = self.exc_tb.analyse_types(env) self.exc_tb = exc_tb.coerce_to_pyobject(env) if self.cause: cause = self.cause.analyse_types(env) self.cause = cause.coerce_to_pyobject(env) # special cases for builtin exceptions self.builtin_exc_name = None if self.exc_type and not self.exc_value and not self.exc_tb: exc = self.exc_type from . 
import ExprNodes if (isinstance(exc, ExprNodes.SimpleCallNode) and not (exc.args or (exc.arg_tuple is not None and exc.arg_tuple.args))): exc = exc.function # extract the exception type if exc.is_name and exc.entry.is_builtin: self.builtin_exc_name = exc.name if self.builtin_exc_name == 'MemoryError': self.exc_type = None # has a separate implementation return self nogil_check = Node.gil_error gil_message = "Raising exception" def generate_execution_code(self, code): code.mark_pos(self.pos) if self.builtin_exc_name == 'MemoryError': code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos)) return if self.exc_type: self.exc_type.generate_evaluation_code(code) type_code = self.exc_type.py_result() else: type_code = "0" if self.exc_value: self.exc_value.generate_evaluation_code(code) value_code = self.exc_value.py_result() else: value_code = "0" if self.exc_tb: self.exc_tb.generate_evaluation_code(code) tb_code = self.exc_tb.py_result() else: tb_code = "0" if self.cause: self.cause.generate_evaluation_code(code) cause_code = self.cause.py_result() else: cause_code = "0" code.globalstate.use_utility_code(raise_utility_code) code.putln( "__Pyx_Raise(%s, %s, %s, %s);" % ( type_code, value_code, tb_code, cause_code)) for obj in (self.exc_type, self.exc_value, self.exc_tb, self.cause): if obj: obj.generate_disposal_code(code) obj.free_temps(code) code.putln( code.error_goto(self.pos)) def generate_function_definitions(self, env, code): if self.exc_type is not None: self.exc_type.generate_function_definitions(env, code) if self.exc_value is not None: self.exc_value.generate_function_definitions(env, code) if self.exc_tb is not None: self.exc_tb.generate_function_definitions(env, code) if self.cause is not None: self.cause.generate_function_definitions(env, code) def annotate(self, code): if self.exc_type: self.exc_type.annotate(code) if self.exc_value: self.exc_value.annotate(code) if self.exc_tb: self.exc_tb.annotate(code) if self.cause: self.cause.annotate(code) class ReraiseStatNode(StatNode): child_attrs = [] is_terminator = True def analyse_expressions(self, env): return self nogil_check = Node.gil_error gil_message = "Raising exception" def generate_execution_code(self, code): code.mark_pos(self.pos) vars = code.funcstate.exc_vars if vars: code.globalstate.use_utility_code(restore_exception_utility_code) code.put_giveref(vars[0]) code.put_giveref(vars[1]) # fresh exceptions may not have a traceback yet (-> finally!) 
code.put_xgiveref(vars[2]) code.putln("__Pyx_ErrRestore(%s, %s, %s);" % tuple(vars)) for varname in vars: code.put("%s = 0; " % varname) code.putln() code.putln(code.error_goto(self.pos)) else: code.globalstate.use_utility_code( UtilityCode.load_cached("ReRaiseException", "Exceptions.c")) code.putln("__Pyx_ReraiseException(); %s" % code.error_goto(self.pos)) class AssertStatNode(StatNode): # assert statement # # cond ExprNode # value ExprNode or None child_attrs = ["cond", "value"] def analyse_expressions(self, env): self.cond = self.cond.analyse_boolean_expression(env) if self.value: value = self.value.analyse_types(env) if value.type is Builtin.tuple_type or not value.type.is_builtin_type: # prevent tuple values from being interpreted as argument value tuples from .ExprNodes import TupleNode value = TupleNode(value.pos, args=[value], slow=True) self.value = value.analyse_types(env, skip_children=True).coerce_to_pyobject(env) else: self.value = value.coerce_to_pyobject(env) return self nogil_check = Node.gil_error gil_message = "Raising exception" def generate_execution_code(self, code): code.putln("#ifndef CYTHON_WITHOUT_ASSERTIONS") code.putln("if (unlikely(!Py_OptimizeFlag)) {") code.mark_pos(self.pos) self.cond.generate_evaluation_code(code) code.putln( "if (unlikely(!%s)) {" % self.cond.result()) if self.value: self.value.generate_evaluation_code(code) code.putln( "PyErr_SetObject(PyExc_AssertionError, %s);" % self.value.py_result()) self.value.generate_disposal_code(code) self.value.free_temps(code) else: code.putln( "PyErr_SetNone(PyExc_AssertionError);") code.putln( code.error_goto(self.pos)) code.putln( "}") self.cond.generate_disposal_code(code) self.cond.free_temps(code) code.putln( "}") code.putln("#endif") def generate_function_definitions(self, env, code): self.cond.generate_function_definitions(env, code) if self.value is not None: self.value.generate_function_definitions(env, code) def annotate(self, code): self.cond.annotate(code) if self.value: self.value.annotate(code) class IfStatNode(StatNode): # if statement # # if_clauses [IfClauseNode] # else_clause StatNode or None child_attrs = ["if_clauses", "else_clause"] def analyse_declarations(self, env): for if_clause in self.if_clauses: if_clause.analyse_declarations(env) if self.else_clause: self.else_clause.analyse_declarations(env) def analyse_expressions(self, env): self.if_clauses = [if_clause.analyse_expressions(env) for if_clause in self.if_clauses] if self.else_clause: self.else_clause = self.else_clause.analyse_expressions(env) return self def generate_execution_code(self, code): code.mark_pos(self.pos) end_label = code.new_label() last = len(self.if_clauses) if not self.else_clause: last -= 1 # avoid redundant goto at end of last if-clause for i, if_clause in enumerate(self.if_clauses): if_clause.generate_execution_code(code, end_label, is_last=i == last) if self.else_clause: code.mark_pos(self.else_clause.pos) code.putln("/*else*/ {") self.else_clause.generate_execution_code(code) code.putln("}") code.put_label(end_label) def generate_function_definitions(self, env, code): for clause in self.if_clauses: clause.generate_function_definitions(env, code) if self.else_clause is not None: self.else_clause.generate_function_definitions(env, code) def annotate(self, code): for if_clause in self.if_clauses: if_clause.annotate(code) if self.else_clause: self.else_clause.annotate(code) class IfClauseNode(Node): # if or elif clause in an if statement # # condition ExprNode # body StatNode child_attrs = ["condition", "body"] 
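    # The enclosing IfStatNode allocates one shared end_label and calls
    # generate_execution_code() below on each clause, setting is_last only
    # for the final clause when there is no else block.  The generated C
    # has roughly this shape (an illustrative sketch, not emitted verbatim):
    #
    #     if (cond1) { body1; goto end; }
    #     if (cond2) { body2; goto end; }
    #     /*else*/ { body3; }
    #     end:;
    #
    # The goto is suppressed for the last clause and for bodies that are
    # already terminators (return/raise), avoiding a redundant jump.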
    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.condition = self.condition.analyse_temp_boolean_expression(env)
        self.body = self.body.analyse_expressions(env)
        return self

    def generate_execution_code(self, code, end_label, is_last):
        self.condition.generate_evaluation_code(code)
        code.mark_pos(self.pos)
        code.putln("if (%s) {" % self.condition.result())
        self.condition.generate_disposal_code(code)
        self.condition.free_temps(code)
        self.body.generate_execution_code(code)
        code.mark_pos(self.pos, trace=False)
        if not (is_last or self.body.is_terminator):
            code.put_goto(end_label)
        code.putln("}")

    def generate_function_definitions(self, env, code):
        self.condition.generate_function_definitions(env, code)
        self.body.generate_function_definitions(env, code)

    def annotate(self, code):
        self.condition.annotate(code)
        self.body.annotate(code)


class SwitchCaseNode(StatNode):
    # Generated in the optimization of an if-elif-else node
    #
    #  conditions    [ExprNode]
    #  body          StatNode

    child_attrs = ['conditions', 'body']

    def generate_execution_code(self, code):
        for cond in self.conditions:
            code.mark_pos(cond.pos)
            cond.generate_evaluation_code(code)
            code.putln("case %s:" % cond.result())
        self.body.generate_execution_code(code)
        code.mark_pos(self.pos, trace=False)
        code.putln("break;")

    def generate_function_definitions(self, env, code):
        for cond in self.conditions:
            cond.generate_function_definitions(env, code)
        self.body.generate_function_definitions(env, code)

    def annotate(self, code):
        for cond in self.conditions:
            cond.annotate(code)
        self.body.annotate(code)


class SwitchStatNode(StatNode):
    # Generated in the optimization of an if-elif-else node
    #
    #  test          ExprNode
    #  cases         [SwitchCaseNode]
    #  else_clause   StatNode or None

    child_attrs = ['test', 'cases', 'else_clause']

    def generate_execution_code(self, code):
        self.test.generate_evaluation_code(code)
        code.mark_pos(self.pos)
        code.putln("switch (%s) {" % self.test.result())
        for case in self.cases:
            case.generate_execution_code(code)
        if self.else_clause is not None:
            code.putln("default:")
            self.else_clause.generate_execution_code(code)
            code.putln("break;")
        else:
            # Always generate a default clause to prevent C compiler warnings
            # about unmatched enum values (it was not the user who decided to
            # generate the switch statement, so shouldn't be bothered).
code.putln("default: break;") code.putln("}") def generate_function_definitions(self, env, code): self.test.generate_function_definitions(env, code) for case in self.cases: case.generate_function_definitions(env, code) if self.else_clause is not None: self.else_clause.generate_function_definitions(env, code) def annotate(self, code): self.test.annotate(code) for case in self.cases: case.annotate(code) if self.else_clause is not None: self.else_clause.annotate(code) class LoopNode(object): pass class WhileStatNode(LoopNode, StatNode): # while statement # # condition ExprNode # body StatNode # else_clause StatNode child_attrs = ["condition", "body", "else_clause"] def analyse_declarations(self, env): self.body.analyse_declarations(env) if self.else_clause: self.else_clause.analyse_declarations(env) def analyse_expressions(self, env): if self.condition: self.condition = self.condition.analyse_temp_boolean_expression(env) self.body = self.body.analyse_expressions(env) if self.else_clause: self.else_clause = self.else_clause.analyse_expressions(env) return self def generate_execution_code(self, code): code.mark_pos(self.pos) old_loop_labels = code.new_loop_labels() code.putln( "while (1) {") if self.condition: self.condition.generate_evaluation_code(code) self.condition.generate_disposal_code(code) code.putln( "if (!%s) break;" % self.condition.result()) self.condition.free_temps(code) self.body.generate_execution_code(code) code.put_label(code.continue_label) code.putln("}") break_label = code.break_label code.set_loop_labels(old_loop_labels) if self.else_clause: code.mark_pos(self.else_clause.pos) code.putln("/*else*/ {") self.else_clause.generate_execution_code(code) code.putln("}") code.put_label(break_label) def generate_function_definitions(self, env, code): if self.condition: self.condition.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code) if self.else_clause is not None: self.else_clause.generate_function_definitions(env, code) def annotate(self, code): if self.condition: self.condition.annotate(code) self.body.annotate(code) if self.else_clause: self.else_clause.annotate(code) class DictIterationNextNode(Node): # Helper node for calling PyDict_Next() inside of a WhileStatNode # and checking the dictionary size for changes. Created in # Optimize.py. child_attrs = ['dict_obj', 'expected_size', 'pos_index_var', 'coerced_key_var', 'coerced_value_var', 'coerced_tuple_var', 'key_target', 'value_target', 'tuple_target', 'is_dict_flag'] coerced_key_var = key_ref = None coerced_value_var = value_ref = None coerced_tuple_var = tuple_ref = None def __init__(self, dict_obj, expected_size, pos_index_var, key_target, value_target, tuple_target, is_dict_flag): Node.__init__( self, dict_obj.pos, dict_obj = dict_obj, expected_size = expected_size, pos_index_var = pos_index_var, key_target = key_target, value_target = value_target, tuple_target = tuple_target, is_dict_flag = is_dict_flag, is_temp = True, type = PyrexTypes.c_bint_type) def analyse_expressions(self, env): from . 
import ExprNodes self.dict_obj = self.dict_obj.analyse_types(env) self.expected_size = self.expected_size.analyse_types(env) if self.pos_index_var: self.pos_index_var = self.pos_index_var.analyse_types(env) if self.key_target: self.key_target = self.key_target.analyse_target_types(env) self.key_ref = ExprNodes.TempNode(self.key_target.pos, PyrexTypes.py_object_type) self.coerced_key_var = self.key_ref.coerce_to(self.key_target.type, env) if self.value_target: self.value_target = self.value_target.analyse_target_types(env) self.value_ref = ExprNodes.TempNode(self.value_target.pos, type=PyrexTypes.py_object_type) self.coerced_value_var = self.value_ref.coerce_to(self.value_target.type, env) if self.tuple_target: self.tuple_target = self.tuple_target.analyse_target_types(env) self.tuple_ref = ExprNodes.TempNode(self.tuple_target.pos, PyrexTypes.py_object_type) self.coerced_tuple_var = self.tuple_ref.coerce_to(self.tuple_target.type, env) self.is_dict_flag = self.is_dict_flag.analyse_types(env) return self def generate_function_definitions(self, env, code): self.dict_obj.generate_function_definitions(env, code) def generate_execution_code(self, code): code.globalstate.use_utility_code(UtilityCode.load_cached("dict_iter", "Optimize.c")) self.dict_obj.generate_evaluation_code(code) assignments = [] temp_addresses = [] for var, result, target in [(self.key_ref, self.coerced_key_var, self.key_target), (self.value_ref, self.coerced_value_var, self.value_target), (self.tuple_ref, self.coerced_tuple_var, self.tuple_target)]: if target is None: addr = 'NULL' else: assignments.append((var, result, target)) var.allocate(code) addr = '&%s' % var.result() temp_addresses.append(addr) result_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, False) code.putln("%s = __Pyx_dict_iter_next(%s, %s, &%s, %s, %s, %s, %s);" % ( result_temp, self.dict_obj.py_result(), self.expected_size.result(), self.pos_index_var.result(), temp_addresses[0], temp_addresses[1], temp_addresses[2], self.is_dict_flag.result() )) code.putln("if (unlikely(%s == 0)) break;" % result_temp) code.putln(code.error_goto_if("%s == -1" % result_temp, self.pos)) code.funcstate.release_temp(result_temp) # evaluate all coercions before the assignments for var, result, target in assignments: code.put_gotref(var.result()) for var, result, target in assignments: result.generate_evaluation_code(code) for var, result, target in assignments: target.generate_assignment_code(result, code) var.release(code) def ForStatNode(pos, **kw): if 'iterator' in kw: if kw['iterator'].is_async: return AsyncForStatNode(pos, **kw) else: return ForInStatNode(pos, **kw) else: return ForFromStatNode(pos, **kw) class _ForInStatNode(LoopNode, StatNode): # Base class of 'for-in' statements. 
# # target ExprNode # iterator IteratorNode | AwaitExprNode(AsyncIteratorNode) # body StatNode # else_clause StatNode # item NextNode | AwaitExprNode(AsyncNextNode) # is_async boolean true for 'async for' statements child_attrs = ["target", "item", "iterator", "body", "else_clause"] item = None is_async = False def _create_item_node(self): raise NotImplementedError("must be implemented by subclasses") def analyse_declarations(self, env): self.target.analyse_target_declaration(env) self.body.analyse_declarations(env) if self.else_clause: self.else_clause.analyse_declarations(env) self._create_item_node() def analyse_expressions(self, env): self.target = self.target.analyse_target_types(env) self.iterator = self.iterator.analyse_expressions(env) self._create_item_node() # must rewrap self.item after analysis self.item = self.item.analyse_expressions(env) if (not self.is_async and (self.iterator.type.is_ptr or self.iterator.type.is_array) and self.target.type.assignable_from(self.iterator.type)): # C array slice optimization. pass else: self.item = self.item.coerce_to(self.target.type, env) self.body = self.body.analyse_expressions(env) if self.else_clause: self.else_clause = self.else_clause.analyse_expressions(env) return self def generate_execution_code(self, code): code.mark_pos(self.pos) old_loop_labels = code.new_loop_labels() self.iterator.generate_evaluation_code(code) code.putln("for (;;) {") self.item.generate_evaluation_code(code) self.target.generate_assignment_code(self.item, code) self.body.generate_execution_code(code) code.mark_pos(self.pos) code.put_label(code.continue_label) code.putln("}") break_label = code.break_label code.set_loop_labels(old_loop_labels) if self.else_clause: # in nested loops, the 'else' block can contain a # 'continue' statement for the outer loop, but we may need # to generate cleanup code before taking that path, so we # intercept it here orig_continue_label = code.continue_label code.continue_label = code.new_label('outer_continue') code.putln("/*else*/ {") self.else_clause.generate_execution_code(code) code.putln("}") if code.label_used(code.continue_label): code.put_goto(break_label) code.mark_pos(self.pos) code.put_label(code.continue_label) self.iterator.generate_disposal_code(code) code.put_goto(orig_continue_label) code.set_loop_labels(old_loop_labels) code.mark_pos(self.pos) if code.label_used(break_label): code.put_label(break_label) self.iterator.generate_disposal_code(code) self.iterator.free_temps(code) def generate_function_definitions(self, env, code): self.target.generate_function_definitions(env, code) self.iterator.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code) if self.else_clause is not None: self.else_clause.generate_function_definitions(env, code) def annotate(self, code): self.target.annotate(code) self.iterator.annotate(code) self.body.annotate(code) if self.else_clause: self.else_clause.annotate(code) self.item.annotate(code) class ForInStatNode(_ForInStatNode): # 'for' statement is_async = False def _create_item_node(self): from .ExprNodes import NextNode self.item = NextNode(self.iterator) class AsyncForStatNode(_ForInStatNode): # 'async for' statement # # iterator AwaitExprNode(AsyncIteratorNode) # item AwaitIterNextExprNode(AsyncIteratorNode) is_async = True def __init__(self, pos, iterator, **kw): assert 'item' not in kw from . 
import ExprNodes # AwaitExprNodes must appear before running MarkClosureVisitor kw['iterator'] = ExprNodes.AwaitExprNode(iterator.pos, arg=iterator) kw['item'] = ExprNodes.AwaitIterNextExprNode(iterator.pos, arg=None) _ForInStatNode.__init__(self, pos, **kw) def _create_item_node(self): from . import ExprNodes self.item.arg = ExprNodes.AsyncNextNode(self.iterator) class ForFromStatNode(LoopNode, StatNode): # for name from expr rel name rel expr # # target NameNode # bound1 ExprNode # relation1 string # relation2 string # bound2 ExprNode # step ExprNode or None # body StatNode # else_clause StatNode or None # # Used internally: # # from_range bool # is_py_target bool # loopvar_node ExprNode (usually a NameNode or temp node) # py_loopvar_node PyTempNode or None child_attrs = ["target", "bound1", "bound2", "step", "body", "else_clause"] is_py_target = False loopvar_node = None py_loopvar_node = None from_range = False gil_message = "For-loop using object bounds or target" def nogil_check(self, env): for x in (self.target, self.bound1, self.bound2): if x.type.is_pyobject: self.gil_error() def analyse_declarations(self, env): self.target.analyse_target_declaration(env) self.body.analyse_declarations(env) if self.else_clause: self.else_clause.analyse_declarations(env) def analyse_expressions(self, env): from . import ExprNodes self.target = self.target.analyse_target_types(env) self.bound1 = self.bound1.analyse_types(env) self.bound2 = self.bound2.analyse_types(env) if self.step is not None: if isinstance(self.step, ExprNodes.UnaryMinusNode): warning(self.step.pos, "Probable infinite loop in for-from-by statement. Consider switching the directions of the relations.", 2) self.step = self.step.analyse_types(env) if self.target.type.is_numeric: loop_type = self.target.type else: loop_type = PyrexTypes.c_int_type if not self.bound1.type.is_pyobject: loop_type = PyrexTypes.widest_numeric_type(loop_type, self.bound1.type) if not self.bound2.type.is_pyobject: loop_type = PyrexTypes.widest_numeric_type(loop_type, self.bound2.type) if self.step is not None and not self.step.type.is_pyobject: loop_type = PyrexTypes.widest_numeric_type(loop_type, self.step.type) self.bound1 = self.bound1.coerce_to(loop_type, env) self.bound2 = self.bound2.coerce_to(loop_type, env) if not self.bound2.is_literal: self.bound2 = self.bound2.coerce_to_temp(env) if self.step is not None: self.step = self.step.coerce_to(loop_type, env) if not self.step.is_literal: self.step = self.step.coerce_to_temp(env) target_type = self.target.type if not (target_type.is_pyobject or target_type.is_numeric): error(self.target.pos, "for-from loop variable must be c numeric type or Python object") if target_type.is_numeric: self.is_py_target = False if isinstance(self.target, ExprNodes.IndexNode) and self.target.is_buffer_access: raise error(self.pos, "Buffer indexing not allowed as for loop target.") self.loopvar_node = self.target self.py_loopvar_node = None else: self.is_py_target = True c_loopvar_node = ExprNodes.TempNode(self.pos, loop_type, env) self.loopvar_node = c_loopvar_node self.py_loopvar_node = \ ExprNodes.CloneNode(c_loopvar_node).coerce_to_pyobject(env) self.body = self.body.analyse_expressions(env) if self.else_clause: self.else_clause = self.else_clause.analyse_expressions(env) return self def generate_execution_code(self, code): code.mark_pos(self.pos) old_loop_labels = code.new_loop_labels() from_range = self.from_range self.bound1.generate_evaluation_code(code) self.bound2.generate_evaluation_code(code) offset, incop = 
self.relation_table[self.relation1] if self.step is not None: self.step.generate_evaluation_code(code) step = self.step.result() incop = "%s=%s" % (incop[0], step) from . import ExprNodes if isinstance(self.loopvar_node, ExprNodes.TempNode): self.loopvar_node.allocate(code) if isinstance(self.py_loopvar_node, ExprNodes.TempNode): self.py_loopvar_node.allocate(code) if from_range: loopvar_name = code.funcstate.allocate_temp(self.target.type, False) else: loopvar_name = self.loopvar_node.result() if self.target.type.is_int and not self.target.type.signed and self.relation2[0] == '>': # Handle the case where the endpoint of an unsigned int iteration # is within step of 0. if not self.step: step = 1 code.putln( "for (%s = %s%s + %s; %s %s %s + %s; ) { %s%s;" % ( loopvar_name, self.bound1.result(), offset, step, loopvar_name, self.relation2, self.bound2.result(), step, loopvar_name, incop)) else: code.putln( "for (%s = %s%s; %s %s %s; %s%s) {" % ( loopvar_name, self.bound1.result(), offset, loopvar_name, self.relation2, self.bound2.result(), loopvar_name, incop)) if self.py_loopvar_node: self.py_loopvar_node.generate_evaluation_code(code) self.target.generate_assignment_code(self.py_loopvar_node, code) elif from_range: code.putln("%s = %s;" % ( self.target.result(), loopvar_name)) self.body.generate_execution_code(code) code.put_label(code.continue_label) if self.py_loopvar_node: # This mess is to make for..from loops with python targets behave # exactly like those with C targets with regards to re-assignment # of the loop variable. if self.target.entry.is_pyglobal: # We know target is a NameNode, this is the only ugly case. target_node = ExprNodes.PyTempNode(self.target.pos, None) target_node.allocate(code) interned_cname = code.intern_identifier(self.target.entry.name) if self.target.entry.scope.is_module_scope: code.globalstate.use_utility_code( UtilityCode.load_cached("GetModuleGlobalName", "ObjectHandling.c")) lookup_func = '__Pyx_GetModuleGlobalName(%s)' else: code.globalstate.use_utility_code( UtilityCode.load_cached("GetNameInClass", "ObjectHandling.c")) lookup_func = '__Pyx_GetNameInClass(%s, %%s)' % ( self.target.entry.scope.namespace_cname) code.putln("%s = %s; %s" % ( target_node.result(), lookup_func % interned_cname, code.error_goto_if_null(target_node.result(), self.target.pos))) code.put_gotref(target_node.result()) else: target_node = self.target from_py_node = ExprNodes.CoerceFromPyTypeNode( self.loopvar_node.type, target_node, self.target.entry.scope) from_py_node.temp_code = loopvar_name from_py_node.generate_result_code(code) if self.target.entry.is_pyglobal: code.put_decref(target_node.result(), target_node.type) target_node.release(code) code.putln("}") if self.py_loopvar_node: # This is potentially wasteful, but we don't want the semantics to # depend on whether or not the loop is a python type. 
self.py_loopvar_node.generate_evaluation_code(code) self.target.generate_assignment_code(self.py_loopvar_node, code) if from_range: code.funcstate.release_temp(loopvar_name) break_label = code.break_label code.set_loop_labels(old_loop_labels) if self.else_clause: code.putln("/*else*/ {") self.else_clause.generate_execution_code(code) code.putln("}") code.put_label(break_label) self.bound1.generate_disposal_code(code) self.bound1.free_temps(code) self.bound2.generate_disposal_code(code) self.bound2.free_temps(code) if isinstance(self.loopvar_node, ExprNodes.TempNode): self.loopvar_node.release(code) if isinstance(self.py_loopvar_node, ExprNodes.TempNode): self.py_loopvar_node.release(code) if self.step is not None: self.step.generate_disposal_code(code) self.step.free_temps(code) relation_table = { # {relop : (initial offset, increment op)} '<=': ("", "++"), '<' : ("+1", "++"), '>=': ("", "--"), '>' : ("-1", "--") } def generate_function_definitions(self, env, code): self.target.generate_function_definitions(env, code) self.bound1.generate_function_definitions(env, code) self.bound2.generate_function_definitions(env, code) if self.step is not None: self.step.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code) if self.else_clause is not None: self.else_clause.generate_function_definitions(env, code) def annotate(self, code): self.target.annotate(code) self.bound1.annotate(code) self.bound2.annotate(code) if self.step: self.step.annotate(code) self.body.annotate(code) if self.else_clause: self.else_clause.annotate(code) class WithStatNode(StatNode): """ Represents a Python with statement. Implemented by the WithTransform as follows: MGR = EXPR EXIT = MGR.__exit__ VALUE = MGR.__enter__() EXC = True try: try: TARGET = VALUE # optional BODY except: EXC = False if not EXIT(*EXCINFO): raise finally: if EXC: EXIT(None, None, None) MGR = EXIT = VALUE = None """ # manager The with statement manager object # target ExprNode the target lhs of the __enter__() call # body StatNode # enter_call ExprNode the call to the __enter__() method # exit_var String the cname of the __exit__() method reference child_attrs = ["manager", "enter_call", "target", "body"] enter_call = None target_temp = None def analyse_declarations(self, env): self.manager.analyse_declarations(env) self.enter_call.analyse_declarations(env) self.body.analyse_declarations(env) def analyse_expressions(self, env): self.manager = self.manager.analyse_types(env) self.enter_call = self.enter_call.analyse_types(env) if self.target: # set up target_temp before descending into body (which uses it) from .ExprNodes import TempNode self.target_temp = TempNode(self.enter_call.pos, self.enter_call.type) self.body = self.body.analyse_expressions(env) return self def generate_function_definitions(self, env, code): self.manager.generate_function_definitions(env, code) self.enter_call.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code) def generate_execution_code(self, code): code.mark_pos(self.pos) code.putln("/*with:*/ {") self.manager.generate_evaluation_code(code) self.exit_var = code.funcstate.allocate_temp(py_object_type, manage_ref=False) code.globalstate.use_utility_code( UtilityCode.load_cached("PyObjectLookupSpecial", "ObjectHandling.c")) code.putln("%s = __Pyx_PyObject_LookupSpecial(%s, %s); %s" % ( self.exit_var, self.manager.py_result(), code.intern_identifier(EncodedString('__aexit__' if self.is_async else '__exit__')), code.error_goto_if_null(self.exit_var, 
self.pos), )) code.put_gotref(self.exit_var) # need to free exit_var in the face of exceptions during setup old_error_label = code.new_error_label() intermediate_error_label = code.error_label self.enter_call.generate_evaluation_code(code) if self.target: # The temp result will be cleaned up by the WithTargetAssignmentStatNode # after assigning its result to the target of the 'with' statement. self.target_temp.allocate(code) self.enter_call.make_owned_reference(code) code.putln("%s = %s;" % (self.target_temp.result(), self.enter_call.result())) self.enter_call.generate_post_assignment_code(code) else: self.enter_call.generate_disposal_code(code) self.enter_call.free_temps(code) self.manager.generate_disposal_code(code) self.manager.free_temps(code) code.error_label = old_error_label self.body.generate_execution_code(code) if code.label_used(intermediate_error_label): step_over_label = code.new_label() code.put_goto(step_over_label) code.put_label(intermediate_error_label) code.put_decref_clear(self.exit_var, py_object_type) code.put_goto(old_error_label) code.put_label(step_over_label) code.funcstate.release_temp(self.exit_var) code.putln('}') class WithTargetAssignmentStatNode(AssignmentNode): # The target assignment of the 'with' statement value (return # value of the __enter__() call). # # This is a special cased assignment that properly cleans up the RHS. # # lhs ExprNode the assignment target # rhs ExprNode a (coerced) TempNode for the rhs (from WithStatNode) # with_node WithStatNode the surrounding with-statement child_attrs = ["rhs", "lhs"] with_node = None rhs = None def analyse_declarations(self, env): self.lhs.analyse_target_declaration(env) def analyse_expressions(self, env): self.lhs = self.lhs.analyse_target_types(env) self.lhs.gil_assignment_check(env) self.rhs = self.with_node.target_temp.coerce_to(self.lhs.type, env) return self def generate_execution_code(self, code): self.rhs.generate_evaluation_code(code) self.lhs.generate_assignment_code(self.rhs, code) self.with_node.target_temp.release(code) def annotate(self, code): self.lhs.annotate(code) self.rhs.annotate(code) class TryExceptStatNode(StatNode): # try .. 
except statement # # body StatNode # except_clauses [ExceptClauseNode] # else_clause StatNode or None child_attrs = ["body", "except_clauses", "else_clause"] def analyse_declarations(self, env): self.body.analyse_declarations(env) for except_clause in self.except_clauses: except_clause.analyse_declarations(env) if self.else_clause: self.else_clause.analyse_declarations(env) def analyse_expressions(self, env): self.body = self.body.analyse_expressions(env) default_clause_seen = 0 for i, except_clause in enumerate(self.except_clauses): except_clause = self.except_clauses[i] = except_clause.analyse_expressions(env) if default_clause_seen: error(except_clause.pos, "default 'except:' must be last") if not except_clause.pattern: default_clause_seen = 1 self.has_default_clause = default_clause_seen if self.else_clause: self.else_clause = self.else_clause.analyse_expressions(env) return self nogil_check = Node.gil_error gil_message = "Try-except statement" def generate_execution_code(self, code): old_return_label = code.return_label old_break_label = code.break_label old_continue_label = code.continue_label old_error_label = code.new_error_label() our_error_label = code.error_label except_end_label = code.new_label('exception_handled') except_error_label = code.new_label('except_error') except_return_label = code.new_label('except_return') try_return_label = code.new_label('try_return') try_break_label = code.new_label('try_break') try_continue_label = code.new_label('try_continue') try_end_label = code.new_label('try_end') exc_save_vars = [code.funcstate.allocate_temp(py_object_type, False) for _ in xrange(3)] code.mark_pos(self.pos) code.putln("{") save_exc = code.insertion_point() code.putln( "/*try:*/ {") code.return_label = try_return_label code.break_label = try_break_label code.continue_label = try_continue_label self.body.generate_execution_code(code) code.mark_pos(self.pos, trace=False) code.putln( "}") temps_to_clean_up = code.funcstate.all_free_managed_temps() can_raise = code.label_used(our_error_label) if can_raise: # inject code before the try block to save away the exception state code.globalstate.use_utility_code(reset_exception_utility_code) save_exc.putln("__Pyx_ExceptionSave(%s);" % ', '.join(['&%s' % var for var in exc_save_vars])) for var in exc_save_vars: save_exc.put_xgotref(var) def restore_saved_exception(): for name in exc_save_vars: code.put_xgiveref(name) code.putln("__Pyx_ExceptionReset(%s);" % ', '.join(exc_save_vars)) else: # try block cannot raise exceptions, but we had to allocate the temps above, # so just keep the C compiler from complaining about them being unused save_exc.putln("if (%s); else {/*mark used*/}" % '||'.join(exc_save_vars)) def restore_saved_exception(): pass code.error_label = except_error_label code.return_label = except_return_label normal_case_terminates = self.body.is_terminator if self.else_clause: code.mark_pos(self.else_clause.pos) code.putln( "/*else:*/ {") self.else_clause.generate_execution_code(code) code.putln( "}") if not normal_case_terminates: normal_case_terminates = self.else_clause.is_terminator if can_raise: if not normal_case_terminates: for var in exc_save_vars: code.put_xdecref_clear(var, py_object_type) code.put_goto(try_end_label) code.put_label(our_error_label) for temp_name, temp_type in temps_to_clean_up: code.put_xdecref_clear(temp_name, temp_type) for except_clause in self.except_clauses: except_clause.generate_handling_code(code, except_end_label) if not self.has_default_clause: code.put_goto(except_error_label) for 
exit_label, old_label in [(except_error_label, old_error_label), (try_break_label, old_break_label), (try_continue_label, old_continue_label), (try_return_label, old_return_label), (except_return_label, old_return_label)]: if code.label_used(exit_label): if not normal_case_terminates and not code.label_used(try_end_label): code.put_goto(try_end_label) code.put_label(exit_label) code.mark_pos(self.pos, trace=False) restore_saved_exception() code.put_goto(old_label) if code.label_used(except_end_label): if not normal_case_terminates and not code.label_used(try_end_label): code.put_goto(try_end_label) code.put_label(except_end_label) restore_saved_exception() if code.label_used(try_end_label): code.put_label(try_end_label) code.putln("}") for cname in exc_save_vars: code.funcstate.release_temp(cname) code.return_label = old_return_label code.break_label = old_break_label code.continue_label = old_continue_label code.error_label = old_error_label def generate_function_definitions(self, env, code): self.body.generate_function_definitions(env, code) for except_clause in self.except_clauses: except_clause.generate_function_definitions(env, code) if self.else_clause is not None: self.else_clause.generate_function_definitions(env, code) def annotate(self, code): self.body.annotate(code) for except_node in self.except_clauses: except_node.annotate(code) if self.else_clause: self.else_clause.annotate(code) class ExceptClauseNode(Node): # Part of try ... except statement. # # pattern [ExprNode] # target ExprNode or None # body StatNode # excinfo_target TupleNode(3*ResultRefNode) or None optional target for exception info (not owned here!) # match_flag string result of exception match # exc_value ExcValueNode used internally # function_name string qualified name of enclosing function # exc_vars (string * 3) local exception variables # is_except_as bool Py3-style "except ... as xyz" # excinfo_target is never set by the parser, but can be set by a transform # in order to extract more extensive information about the exception as a # sys.exc_info()-style tuple into a target variable child_attrs = ["pattern", "target", "body", "exc_value"] exc_value = None excinfo_target = None is_except_as = False def analyse_declarations(self, env): if self.target: self.target.analyse_target_declaration(env) self.body.analyse_declarations(env) def analyse_expressions(self, env): self.function_name = env.qualified_name if self.pattern: # normalise/unpack self.pattern into a list for i, pattern in enumerate(self.pattern): pattern = pattern.analyse_expressions(env) self.pattern[i] = pattern.coerce_to_pyobject(env) if self.target: from . 
import ExprNodes self.exc_value = ExprNodes.ExcValueNode(self.pos) self.target = self.target.analyse_target_expression(env, self.exc_value) self.body = self.body.analyse_expressions(env) return self def generate_handling_code(self, code, end_label): code.mark_pos(self.pos) if self.pattern: exc_tests = [] for pattern in self.pattern: pattern.generate_evaluation_code(code) exc_tests.append("PyErr_ExceptionMatches(%s)" % pattern.py_result()) match_flag = code.funcstate.allocate_temp(PyrexTypes.c_int_type, False) code.putln( "%s = %s;" % (match_flag, ' || '.join(exc_tests))) for pattern in self.pattern: pattern.generate_disposal_code(code) pattern.free_temps(code) code.putln( "if (%s) {" % match_flag) code.funcstate.release_temp(match_flag) else: code.putln("/*except:*/ {") if (not getattr(self.body, 'stats', True) and self.excinfo_target is None and self.target is None): # most simple case: no exception variable, empty body (pass) # => reset the exception state, done code.putln("PyErr_Restore(0,0,0);") code.put_goto(end_label) code.putln("}") return exc_vars = [code.funcstate.allocate_temp(py_object_type, manage_ref=True) for _ in xrange(3)] code.put_add_traceback(self.function_name) # We always have to fetch the exception value even if # there is no target, because this also normalises the # exception and stores it in the thread state. code.globalstate.use_utility_code(get_exception_utility_code) exc_args = "&%s, &%s, &%s" % tuple(exc_vars) code.putln("if (__Pyx_GetException(%s) < 0) %s" % (exc_args, code.error_goto(self.pos))) for x in exc_vars: code.put_gotref(x) if self.target: self.exc_value.set_var(exc_vars[1]) self.exc_value.generate_evaluation_code(code) self.target.generate_assignment_code(self.exc_value, code) if self.excinfo_target is not None: for tempvar, node in zip(exc_vars, self.excinfo_target.args): node.set_var(tempvar) old_break_label, old_continue_label = code.break_label, code.continue_label code.break_label = code.new_label('except_break') code.continue_label = code.new_label('except_continue') old_exc_vars = code.funcstate.exc_vars code.funcstate.exc_vars = exc_vars self.body.generate_execution_code(code) code.funcstate.exc_vars = old_exc_vars if not self.body.is_terminator: for var in exc_vars: code.put_decref_clear(var, py_object_type) code.put_goto(end_label) for new_label, old_label in [(code.break_label, old_break_label), (code.continue_label, old_continue_label)]: if code.label_used(new_label): code.put_label(new_label) for var in exc_vars: code.put_decref_clear(var, py_object_type) code.put_goto(old_label) code.break_label = old_break_label code.continue_label = old_continue_label for temp in exc_vars: code.funcstate.release_temp(temp) code.putln( "}") def generate_function_definitions(self, env, code): if self.target is not None: self.target.generate_function_definitions(env, code) self.body.generate_function_definitions(env, code) def annotate(self, code): if self.pattern: for pattern in self.pattern: pattern.annotate(code) if self.target: self.target.annotate(code) self.body.annotate(code) class TryFinallyStatNode(StatNode): # try ... finally statement # # body StatNode # finally_clause StatNode # finally_except_clause deep-copy of finally_clause for exception case # # The plan is that we funnel all continue, break # return and error gotos into the beginning of the # finally block, setting a variable to remember which # one we're doing. At the end of the finally block, we # switch on the variable to figure out where to go. 
# In addition, if we're doing an error, we save the # exception on entry to the finally block and restore # it on exit. child_attrs = ["body", "finally_clause", "finally_except_clause"] preserve_exception = 1 # handle exception case, in addition to return/break/continue handle_error_case = True func_return_type = None finally_except_clause = None disallow_continue_in_try_finally = 0 # There doesn't seem to be any point in disallowing # continue in the try block, since we have no problem # handling it. is_try_finally_in_nogil = False @staticmethod def create_analysed(pos, env, body, finally_clause): node = TryFinallyStatNode(pos, body=body, finally_clause=finally_clause) return node def analyse_declarations(self, env): self.body.analyse_declarations(env) self.finally_except_clause = copy.deepcopy(self.finally_clause) self.finally_except_clause.analyse_declarations(env) self.finally_clause.analyse_declarations(env) def analyse_expressions(self, env): self.body = self.body.analyse_expressions(env) self.finally_clause = self.finally_clause.analyse_expressions(env) self.finally_except_clause = self.finally_except_clause.analyse_expressions(env) if env.return_type and not env.return_type.is_void: self.func_return_type = env.return_type return self nogil_check = Node.gil_error gil_message = "Try-finally statement" def generate_execution_code(self, code): code.mark_pos(self.pos) old_error_label = code.error_label old_labels = code.all_new_labels() new_labels = code.get_all_labels() new_error_label = code.error_label if not self.handle_error_case: code.error_label = old_error_label catch_label = code.new_label() code.putln("/*try:*/ {") if self.disallow_continue_in_try_finally: was_in_try_finally = code.funcstate.in_try_finally code.funcstate.in_try_finally = 1 self.body.generate_execution_code(code) if self.disallow_continue_in_try_finally: code.funcstate.in_try_finally = was_in_try_finally code.putln("}") code.set_all_labels(old_labels) temps_to_clean_up = code.funcstate.all_free_managed_temps() code.mark_pos(self.finally_clause.pos) code.putln("/*finally:*/ {") def fresh_finally_clause(_next=[self.finally_clause]): # generate the original subtree once and always keep a fresh copy node = _next[0] node_copy = copy.deepcopy(node) if node is self.finally_clause: _next[0] = node_copy else: node = node_copy return node preserve_error = self.preserve_exception and code.label_used(new_error_label) needs_success_cleanup = not self.finally_clause.is_terminator if not self.body.is_terminator: code.putln('/*normal exit:*/{') fresh_finally_clause().generate_execution_code(code) if not self.finally_clause.is_terminator: code.put_goto(catch_label) code.putln('}') if preserve_error: code.putln('/*exception exit:*/{') if self.is_try_finally_in_nogil: code.declare_gilstate() if needs_success_cleanup: exc_lineno_cnames = tuple([ code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) for _ in range(2)]) exc_filename_cname = code.funcstate.allocate_temp( PyrexTypes.CPtrType(PyrexTypes.c_const_type(PyrexTypes.c_char_type)), manage_ref=False) else: exc_lineno_cnames = exc_filename_cname = None exc_vars = tuple([ code.funcstate.allocate_temp(py_object_type, manage_ref=False) for _ in range(6)]) code.put_label(new_error_label) self.put_error_catcher( code, temps_to_clean_up, exc_vars, exc_lineno_cnames, exc_filename_cname) finally_old_labels = code.all_new_labels() code.putln('{') old_exc_vars = code.funcstate.exc_vars code.funcstate.exc_vars = exc_vars[:3] 
self.finally_except_clause.generate_execution_code(code) code.funcstate.exc_vars = old_exc_vars code.putln('}') if needs_success_cleanup: self.put_error_uncatcher(code, exc_vars, exc_lineno_cnames, exc_filename_cname) if exc_lineno_cnames: for cname in exc_lineno_cnames: code.funcstate.release_temp(cname) if exc_filename_cname: code.funcstate.release_temp(exc_filename_cname) code.put_goto(old_error_label) for new_label, old_label in zip(code.get_all_labels(), finally_old_labels): if not code.label_used(new_label): continue code.put_label(new_label) self.put_error_cleaner(code, exc_vars) code.put_goto(old_label) for cname in exc_vars: code.funcstate.release_temp(cname) code.putln('}') code.set_all_labels(old_labels) return_label = code.return_label for i, (new_label, old_label) in enumerate(zip(new_labels, old_labels)): if not code.label_used(new_label): continue if new_label == new_error_label and preserve_error: continue # handled above code.put('%s: ' % new_label) code.putln('{') ret_temp = None if old_label == return_label and not self.finally_clause.is_terminator: # store away return value for later reuse if (self.func_return_type and not self.is_try_finally_in_nogil and not isinstance(self.finally_clause, GILExitNode)): ret_temp = code.funcstate.allocate_temp( self.func_return_type, manage_ref=False) code.putln("%s = %s;" % (ret_temp, Naming.retval_cname)) if self.func_return_type.is_pyobject: code.putln("%s = 0;" % Naming.retval_cname) fresh_finally_clause().generate_execution_code(code) if ret_temp: code.putln("%s = %s;" % (Naming.retval_cname, ret_temp)) if self.func_return_type.is_pyobject: code.putln("%s = 0;" % ret_temp) code.funcstate.release_temp(ret_temp) ret_temp = None if not self.finally_clause.is_terminator: code.put_goto(old_label) code.putln('}') # End finally code.put_label(catch_label) code.putln( "}") def generate_function_definitions(self, env, code): self.body.generate_function_definitions(env, code) self.finally_clause.generate_function_definitions(env, code) def put_error_catcher(self, code, temps_to_clean_up, exc_vars, exc_lineno_cnames, exc_filename_cname): code.globalstate.use_utility_code(restore_exception_utility_code) code.globalstate.use_utility_code(get_exception_utility_code) code.globalstate.use_utility_code(swap_exception_utility_code) code.putln(' '.join(["%s = 0;"]*len(exc_vars)) % exc_vars) if self.is_try_finally_in_nogil: code.put_ensure_gil(declare_gilstate=False) for temp_name, type in temps_to_clean_up: code.put_xdecref_clear(temp_name, type) # not using preprocessor here to avoid warnings about # unused utility functions and/or temps code.putln("if (PY_MAJOR_VERSION >= 3)" " __Pyx_ExceptionSwap(&%s, &%s, &%s);" % exc_vars[3:]) code.putln("if ((PY_MAJOR_VERSION < 3) ||" # if __Pyx_GetException() fails in Py3, # store the newly raised exception instead " unlikely(__Pyx_GetException(&%s, &%s, &%s) < 0)) " "__Pyx_ErrFetch(&%s, &%s, &%s);" % (exc_vars[:3] * 2)) for var in exc_vars: code.put_xgotref(var) if exc_lineno_cnames: code.putln("%s = %s; %s = %s; %s = %s;" % ( exc_lineno_cnames[0], Naming.lineno_cname, exc_lineno_cnames[1], Naming.clineno_cname, exc_filename_cname, Naming.filename_cname)) if self.is_try_finally_in_nogil: code.put_release_ensured_gil() def put_error_uncatcher(self, code, exc_vars, exc_lineno_cnames, exc_filename_cname): code.globalstate.use_utility_code(restore_exception_utility_code) code.globalstate.use_utility_code(reset_exception_utility_code) if self.is_try_finally_in_nogil: code.put_ensure_gil(declare_gilstate=False) # 
not using preprocessor here to avoid warnings about # unused utility functions and/or temps code.putln("if (PY_MAJOR_VERSION >= 3) {") for var in exc_vars[3:]: code.put_xgiveref(var) code.putln("__Pyx_ExceptionReset(%s, %s, %s);" % exc_vars[3:]) code.putln("}") for var in exc_vars[:3]: code.put_xgiveref(var) code.putln("__Pyx_ErrRestore(%s, %s, %s);" % exc_vars[:3]) if self.is_try_finally_in_nogil: code.put_release_ensured_gil() code.putln(' '.join(["%s = 0;"]*len(exc_vars)) % exc_vars) if exc_lineno_cnames: code.putln("%s = %s; %s = %s; %s = %s;" % ( Naming.lineno_cname, exc_lineno_cnames[0], Naming.clineno_cname, exc_lineno_cnames[1], Naming.filename_cname, exc_filename_cname)) def put_error_cleaner(self, code, exc_vars): code.globalstate.use_utility_code(reset_exception_utility_code) if self.is_try_finally_in_nogil: code.put_ensure_gil(declare_gilstate=False) # not using preprocessor here to avoid warnings about # unused utility functions and/or temps code.putln("if (PY_MAJOR_VERSION >= 3) {") for var in exc_vars[3:]: code.put_xgiveref(var) code.putln("__Pyx_ExceptionReset(%s, %s, %s);" % exc_vars[3:]) code.putln("}") for var in exc_vars[:3]: code.put_xdecref_clear(var, py_object_type) if self.is_try_finally_in_nogil: code.put_release_ensured_gil() code.putln(' '.join(["%s = 0;"]*3) % exc_vars[3:]) def annotate(self, code): self.body.annotate(code) self.finally_clause.annotate(code) class NogilTryFinallyStatNode(TryFinallyStatNode): """ A try/finally statement that may be used in nogil code sections. """ preserve_exception = False nogil_check = None class GILStatNode(NogilTryFinallyStatNode): # 'with gil' or 'with nogil' statement # # state string 'gil' or 'nogil' state_temp = None def __init__(self, pos, state, body): self.state = state self.create_state_temp_if_needed(pos, state, body) TryFinallyStatNode.__init__(self, pos, body=body, finally_clause=GILExitNode( pos, state=state, state_temp=self.state_temp)) def create_state_temp_if_needed(self, pos, state, body): from .ParseTreeTransforms import YieldNodeCollector collector = YieldNodeCollector() collector.visitchildren(body) if not collector.yields and not collector.awaits: return if state == 'gil': temp_type = PyrexTypes.c_gilstate_type else: temp_type = PyrexTypes.c_threadstate_ptr_type from . 
import ExprNodes self.state_temp = ExprNodes.TempNode(pos, temp_type) def analyse_declarations(self, env): env._in_with_gil_block = (self.state == 'gil') if self.state == 'gil': env.has_with_gil_block = True return super(GILStatNode, self).analyse_declarations(env) def analyse_expressions(self, env): env.use_utility_code( UtilityCode.load_cached("ForceInitThreads", "ModuleSetupCode.c")) was_nogil = env.nogil env.nogil = self.state == 'nogil' node = TryFinallyStatNode.analyse_expressions(self, env) env.nogil = was_nogil return node def generate_execution_code(self, code): code.mark_pos(self.pos) code.begin_block() if self.state_temp: self.state_temp.allocate(code) variable = self.state_temp.result() else: variable = None old_gil_config = code.funcstate.gil_owned if self.state == 'gil': code.put_ensure_gil(variable=variable) code.funcstate.gil_owned = True else: code.put_release_gil(variable=variable) code.funcstate.gil_owned = False TryFinallyStatNode.generate_execution_code(self, code) if self.state_temp: self.state_temp.release(code) code.funcstate.gil_owned = old_gil_config code.end_block() class GILExitNode(StatNode): """ Used as the 'finally' block in a GILStatNode state string 'gil' or 'nogil' """ child_attrs = [] state_temp = None def analyse_expressions(self, env): return self def generate_execution_code(self, code): if self.state_temp: variable = self.state_temp.result() else: variable = None if self.state == 'gil': code.put_release_ensured_gil(variable) else: code.put_acquire_gil(variable) class EnsureGILNode(GILExitNode): """ Ensure the GIL in nogil functions for cleanup before returning. """ def generate_execution_code(self, code): code.put_ensure_gil(declare_gilstate=False) utility_code_for_cimports = { # utility code (or inlining c) in a pxd (or pyx) file. # TODO: Consider a generic user-level mechanism for importing 'cpython.array' : ("ArrayAPI", "arrayarray.h"), 'cpython.array.array' : ("ArrayAPI", "arrayarray.h"), } utility_code_for_imports = { # utility code used when special modules are imported. # TODO: Consider a generic user-level mechanism for importing 'asyncio': ("__Pyx_patch_asyncio", "PatchAsyncIO", "Coroutine.c"), 'inspect': ("__Pyx_patch_inspect", "PatchInspect", "Coroutine.c"), } class CImportStatNode(StatNode): # cimport statement # # module_name string Qualified name of module being imported # as_name string or None Name specified in "as" clause, if any # is_absolute bool True for absolute imports, False otherwise child_attrs = [] is_absolute = False def analyse_declarations(self, env): if not env.is_module_scope: error(self.pos, "cimport only allowed at module level") return module_scope = env.find_module( self.module_name, self.pos, relative_level=0 if self.is_absolute else -1) if "." 
in self.module_name: names = [EncodedString(name) for name in self.module_name.split(".")] top_name = names[0] top_module_scope = env.context.find_submodule(top_name) module_scope = top_module_scope for name in names[1:]: submodule_scope = module_scope.find_submodule(name) module_scope.declare_module(name, submodule_scope, self.pos) module_scope = submodule_scope if self.as_name: env.declare_module(self.as_name, module_scope, self.pos) else: env.add_imported_module(module_scope) env.declare_module(top_name, top_module_scope, self.pos) else: name = self.as_name or self.module_name env.declare_module(name, module_scope, self.pos) if self.module_name in utility_code_for_cimports: env.use_utility_code(UtilityCode.load_cached( *utility_code_for_cimports[self.module_name])) def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class FromCImportStatNode(StatNode): # from ... cimport statement # # module_name string Qualified name of module # relative_level int or None Relative import: number of dots before module_name # imported_names [(pos, name, as_name, kind)] Names to be imported child_attrs = [] module_name = None relative_level = None imported_names = None def analyse_declarations(self, env): if not env.is_module_scope: error(self.pos, "cimport only allowed at module level") return if self.relative_level and self.relative_level > env.qualified_name.count('.'): error(self.pos, "relative cimport beyond main package is not allowed") return module_scope = env.find_module(self.module_name, self.pos, relative_level=self.relative_level) module_name = module_scope.qualified_name env.add_imported_module(module_scope) for pos, name, as_name, kind in self.imported_names: if name == "*": for local_name, entry in module_scope.entries.items(): env.add_imported_entry(local_name, entry, pos) else: entry = module_scope.lookup(name) if entry: if kind and not self.declaration_matches(entry, kind): entry.redeclared(pos) entry.used = 1 else: if kind == 'struct' or kind == 'union': entry = module_scope.declare_struct_or_union( name, kind=kind, scope=None, typedef_flag=0, pos=pos) elif kind == 'class': entry = module_scope.declare_c_class(name, pos=pos, module_name=module_name) else: submodule_scope = env.context.find_module( name, relative_to=module_scope, pos=self.pos, absolute_fallback=False) if submodule_scope.parent_module is module_scope: env.declare_module(as_name or name, submodule_scope, self.pos) else: error(pos, "Name '%s' not declared in module '%s'" % (name, module_name)) if entry: local_name = as_name or name env.add_imported_entry(local_name, entry, pos) if module_name.startswith('cpython'): # enough for now if module_name in utility_code_for_cimports: env.use_utility_code(UtilityCode.load_cached( *utility_code_for_cimports[module_name])) for _, name, _, _ in self.imported_names: fqname = '%s.%s' % (module_name, name) if fqname in utility_code_for_cimports: env.use_utility_code(UtilityCode.load_cached( *utility_code_for_cimports[fqname])) def declaration_matches(self, entry, kind): if not entry.is_type: return 0 type = entry.type if kind == 'class': if not type.is_extension_type: return 0 else: if not type.is_struct_or_union: return 0 if kind != type.kind: return 0 return 1 def analyse_expressions(self, env): return self def generate_execution_code(self, code): pass class FromImportStatNode(StatNode): # from ... 
import statement # # module ImportNode # items [(string, NameNode)] # interned_items [(string, NameNode, ExprNode)] # item PyTempNode used internally # import_star boolean used internally child_attrs = ["module"] import_star = 0 def analyse_declarations(self, env): for name, target in self.items: if name == "*": if not env.is_module_scope: error(self.pos, "import * only allowed at module level") return env.has_import_star = 1 self.import_star = 1 else: target.analyse_target_declaration(env) def analyse_expressions(self, env): from . import ExprNodes self.module = self.module.analyse_expressions(env) self.item = ExprNodes.RawCNameExprNode(self.pos, py_object_type) self.interned_items = [] for name, target in self.items: if name == '*': for _, entry in env.entries.items(): if not entry.is_type and entry.type.is_extension_type: env.use_utility_code(UtilityCode.load_cached("ExtTypeTest", "ObjectHandling.c")) break else: entry = env.lookup(target.name) # check whether or not entry is already cimported if (entry.is_type and entry.type.name == name and hasattr(entry.type, 'module_name')): if entry.type.module_name == self.module.module_name.value: # cimported with absolute name continue try: # cimported with relative name module = env.find_module(self.module.module_name.value, pos=self.pos, relative_level=self.module.level) if entry.type.module_name == module.qualified_name: continue except AttributeError: pass target = target.analyse_target_expression(env, None) # FIXME? if target.type is py_object_type: coerced_item = None else: coerced_item = self.item.coerce_to(target.type, env) self.interned_items.append((name, target, coerced_item)) return self def generate_execution_code(self, code): code.mark_pos(self.pos) self.module.generate_evaluation_code(code) if self.import_star: code.putln( 'if (%s(%s) < 0) %s;' % ( Naming.import_star, self.module.py_result(), code.error_goto(self.pos))) item_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True) self.item.set_cname(item_temp) if self.interned_items: code.globalstate.use_utility_code( UtilityCode.load_cached("ImportFrom", "ImportExport.c")) for name, target, coerced_item in self.interned_items: code.putln( '%s = __Pyx_ImportFrom(%s, %s); %s' % ( item_temp, self.module.py_result(), code.intern_identifier(name), code.error_goto_if_null(item_temp, self.pos))) code.put_gotref(item_temp) if coerced_item is None: target.generate_assignment_code(self.item, code) else: coerced_item.allocate_temp_result(code) coerced_item.generate_result_code(code) target.generate_assignment_code(coerced_item, code) code.put_decref_clear(item_temp, py_object_type) code.funcstate.release_temp(item_temp) self.module.generate_disposal_code(code) self.module.free_temps(code) class ParallelNode(Node): """ Base class for cython.parallel constructs. """ nogil_check = None class ParallelStatNode(StatNode, ParallelNode): """ Base class for 'with cython.parallel.parallel():' and 'for i in prange():'. assignments { Entry(var) : (var.pos, inplace_operator_or_None) } assignments to variables in this parallel section parent parent ParallelStatNode or None is_parallel indicates whether this node is OpenMP parallel (true for #pragma omp parallel for and #pragma omp parallel) is_parallel is true for: #pragma omp parallel #pragma omp parallel for sections, but NOT for #pragma omp for We need this to determine the sharing attributes. privatization_insertion_point a code insertion point used to make temps private (esp. 
the "nsteps" temp) args tuple the arguments passed to the parallel construct kwargs DictNode the keyword arguments passed to the parallel construct (replaced by its compile time value) """ child_attrs = ['body', 'num_threads'] body = None is_prange = False is_nested_prange = False error_label_used = False num_threads = None chunksize = None parallel_exc = ( Naming.parallel_exc_type, Naming.parallel_exc_value, Naming.parallel_exc_tb, ) parallel_pos_info = ( Naming.parallel_filename, Naming.parallel_lineno, Naming.parallel_clineno, ) pos_info = ( Naming.filename_cname, Naming.lineno_cname, Naming.clineno_cname, ) critical_section_counter = 0 def __init__(self, pos, **kwargs): super(ParallelStatNode, self).__init__(pos, **kwargs) # All assignments in this scope self.assignments = kwargs.get('assignments') or {} # All seen closure cnames and their temporary cnames self.seen_closure_vars = set() # Dict of variables that should be declared (first|last|)private or # reduction { Entry: (op, lastprivate) }. # If op is not None, it's a reduction. self.privates = {} # [NameNode] self.assigned_nodes = [] def analyse_declarations(self, env): self.body.analyse_declarations(env) self.num_threads = None if self.kwargs: # Try to find num_threads and chunksize keyword arguments pairs = [] for dictitem in self.kwargs.key_value_pairs: if dictitem.key.value == 'num_threads': self.num_threads = dictitem.value elif self.is_prange and dictitem.key.value == 'chunksize': self.chunksize = dictitem.value else: pairs.append(dictitem) self.kwargs.key_value_pairs = pairs try: self.kwargs = self.kwargs.compile_time_value(env) except Exception, e: error(self.kwargs.pos, "Only compile-time values may be " "supplied as keyword arguments") else: self.kwargs = {} for kw, val in self.kwargs.iteritems(): if kw not in self.valid_keyword_arguments: error(self.pos, "Invalid keyword argument: %s" % kw) else: setattr(self, kw, val) def analyse_expressions(self, env): if self.num_threads: self.num_threads = self.num_threads.analyse_expressions(env) if self.chunksize: self.chunksize = self.chunksize.analyse_expressions(env) self.body = self.body.analyse_expressions(env) self.analyse_sharing_attributes(env) if self.num_threads is not None: if (self.parent and self.parent.num_threads is not None and not self.parent.is_prange): error(self.pos, "num_threads already declared in outer section") elif self.parent and not self.parent.is_prange: error(self.pos, "num_threads must be declared in the parent parallel section") elif (self.num_threads.type.is_int and self.num_threads.is_literal and self.num_threads.compile_time_value(env) <= 0): error(self.pos, "argument to num_threads must be greater than 0") if not self.num_threads.is_simple(): self.num_threads = self.num_threads.coerce_to( PyrexTypes.c_int_type, env).coerce_to_temp(env) return self def analyse_sharing_attributes(self, env): """ Analyse the privates for this block and set them in self.privates. 
        This should be called in a post-order fashion during the
        analyse_expressions phase
        """
        for entry, (pos, op) in self.assignments.iteritems():
            if self.is_prange and not self.is_parallel:
                # closely nested prange in a with parallel block, disallow
                # assigning to privates in the with parallel block (we
                # consider it too implicit and magicky for users)
                if entry in self.parent.assignments:
                    error(pos, "Cannot assign to private of outer parallel block")
                    continue

            if not self.is_prange and op:
                # Again possible, but considered too magicky
                error(pos, "Reductions not allowed for parallel blocks")
                continue

            # By default all variables should have the same values as if
            # executed sequentially
            lastprivate = True
            self.propagate_var_privatization(entry, pos, op, lastprivate)

    def propagate_var_privatization(self, entry, pos, op, lastprivate):
        """
        Propagate the sharing attributes of a variable. If the privatization
        is determined by a parent scope, don't propagate further.

        If we are a prange, we propagate our sharing attributes outwards to
        other pranges. If we are a prange in a parallel block and the parallel
        block does not determine the variable private, we propagate to the
        parent of the parent. Recursion stops at parallel blocks, as they have
        no concept of lastprivate or reduction.

        So the following cases propagate:

            sum is a reduction for all loops:

                for i in prange(n):
                    for j in prange(n):
                        for k in prange(n):
                            sum += i * j * k

            sum is a reduction for both loops, local_var is private to the
            parallel with block:

                for i in prange(n):
                    with parallel:
                        local_var = ... # private to the parallel
                        for j in prange(n):
                            sum += i * j

        Nested with parallel blocks are disallowed, because they wouldn't
        allow you to propagate lastprivates or reductions:

            #pragma omp parallel for lastprivate(i)
            for i in prange(n):

                sum = 0

                #pragma omp parallel private(j, sum)
                with parallel:

                    #pragma omp parallel
                    with parallel:

                        #pragma omp for lastprivate(j) reduction(+:sum)
                        for j in prange(n):
                            sum += i

                    # sum and j are well-defined here

                # sum and j are undefined here

            # sum and j are undefined here
        """
        self.privates[entry] = (op, lastprivate)

        if entry.type.is_memoryviewslice:
            error(pos, "Memoryview slices can only be shared in parallel sections")
            return

        if self.is_prange:
            if not self.is_parallel and entry not in self.parent.assignments:
                # Parent is a parallel with block
                parent = self.parent.parent
            else:
                parent = self.parent

            # We don't need to propagate privates, only reductions and
            # lastprivates
            if parent and (op or lastprivate):
                parent.propagate_var_privatization(entry, pos, op, lastprivate)

    def _allocate_closure_temp(self, code, entry):
        """
        Helper function that allocates a temporary for a closure variable
        that is assigned to.
""" if self.parent: return self.parent._allocate_closure_temp(code, entry) if entry.cname in self.seen_closure_vars: return entry.cname cname = code.funcstate.allocate_temp(entry.type, True) # Add both the actual cname and the temp cname, as the actual cname # will be replaced with the temp cname on the entry self.seen_closure_vars.add(entry.cname) self.seen_closure_vars.add(cname) self.modified_entries.append((entry, entry.cname)) code.putln("%s = %s;" % (cname, entry.cname)) entry.cname = cname def initialize_privates_to_nan(self, code, exclude=None): first = True for entry, (op, lastprivate) in self.privates.iteritems(): if not op and (not exclude or entry != exclude): invalid_value = entry.type.invalid_value() if invalid_value: if first: code.putln("/* Initialize private variables to " "invalid values */") first = False code.putln("%s = %s;" % (entry.cname, entry.type.cast_code(invalid_value))) def evaluate_before_block(self, code, expr): c = self.begin_of_parallel_control_block_point_after_decls # we need to set the owner to ourselves temporarily, as # allocate_temp may generate a comment in the middle of our pragma # otherwise when DebugFlags.debug_temp_code_comments is in effect owner = c.funcstate.owner c.funcstate.owner = c expr.generate_evaluation_code(c) c.funcstate.owner = owner return expr.result() def put_num_threads(self, code): """ Write self.num_threads if set as the num_threads OpenMP directive """ if self.num_threads is not None: code.put(" num_threads(%s)" % self.evaluate_before_block(code, self.num_threads)) def declare_closure_privates(self, code): """ If a variable is in a scope object, we need to allocate a temp and assign the value from the temp to the variable in the scope object after the parallel section. This kind of copying should be done only in the outermost parallel section. """ self.modified_entries = [] for entry in self.assignments: if entry.from_closure or entry.in_closure: self._allocate_closure_temp(code, entry) def release_closure_privates(self, code): """ Release any temps used for variables in scope objects. As this is the outermost parallel block, we don't need to delete the cnames from self.seen_closure_vars. """ for entry, original_cname in self.modified_entries: code.putln("%s = %s;" % (original_cname, entry.cname)) code.funcstate.release_temp(entry.cname) entry.cname = original_cname def privatize_temps(self, code, exclude_temps=()): """ Make any used temporaries private. Before the relevant code block code.start_collecting_temps() should have been called. 
""" if self.is_parallel: c = self.privatization_insertion_point self.temps = temps = code.funcstate.stop_collecting_temps() privates, firstprivates = [], [] for temp, type in temps: if type.is_pyobject or type.is_memoryviewslice: firstprivates.append(temp) else: privates.append(temp) if privates: c.put(" private(%s)" % ", ".join(privates)) if firstprivates: c.put(" firstprivate(%s)" % ", ".join(firstprivates)) if self.breaking_label_used: shared_vars = [Naming.parallel_why] if self.error_label_used: shared_vars.extend(self.parallel_exc) c.put(" private(%s, %s, %s)" % self.pos_info) c.put(" shared(%s)" % ', '.join(shared_vars)) def cleanup_temps(self, code): # Now clean up any memoryview slice and object temporaries if self.is_parallel and not self.is_nested_prange: code.putln("/* Clean up any temporaries */") for temp, type in self.temps: if type.is_memoryviewslice: code.put_xdecref_memoryviewslice(temp, have_gil=False) elif type.is_pyobject: code.put_xdecref(temp, type) code.putln("%s = NULL;" % temp) def setup_parallel_control_flow_block(self, code): """ Sets up a block that surrounds the parallel block to determine how the parallel section was exited. Any kind of return is trapped (break, continue, return, exceptions). This is the idea: { int why = 0; #pragma omp parallel { return # -> goto new_return_label; goto end_parallel; new_return_label: why = 3; goto end_parallel; end_parallel:; #pragma omp flush(why) # we need to flush for every iteration } if (why == 3) goto old_return_label; } """ self.old_loop_labels = code.new_loop_labels() self.old_error_label = code.new_error_label() self.old_return_label = code.return_label code.return_label = code.new_label(name="return") code.begin_block() # parallel control flow block self.begin_of_parallel_control_block_point = code.insertion_point() self.begin_of_parallel_control_block_point_after_decls = code.insertion_point() self.undef_builtin_expect_apple_gcc_bug(code) def begin_parallel_block(self, code): """ Each OpenMP thread in a parallel section that contains a with gil block must have the thread-state initialized. The call to PyGILState_Release() then deallocates our threadstate. If we wouldn't do this, each with gil block would allocate and deallocate one, thereby losing exception information before it can be saved before leaving the parallel section. """ self.begin_of_parallel_block = code.insertion_point() def end_parallel_block(self, code): """ To ensure all OpenMP threads have thread states, we ensure the GIL in each thread (which creates a thread state if it doesn't exist), after which we release the GIL. On exit, reacquire the GIL and release the thread state. If compiled without OpenMP support (at the C level), then we still have to acquire the GIL to decref any object temporaries. """ if self.error_label_used: begin_code = self.begin_of_parallel_block end_code = code begin_code.putln("#ifdef _OPENMP") begin_code.put_ensure_gil(declare_gilstate=True) begin_code.putln("Py_BEGIN_ALLOW_THREADS") begin_code.putln("#endif /* _OPENMP */") end_code.putln("#ifdef _OPENMP") end_code.putln("Py_END_ALLOW_THREADS") end_code.putln("#else") end_code.put_safe("{\n") end_code.put_ensure_gil() end_code.putln("#endif /* _OPENMP */") self.cleanup_temps(end_code) end_code.put_release_ensured_gil() end_code.putln("#ifndef _OPENMP") end_code.put_safe("}\n") end_code.putln("#endif /* _OPENMP */") def trap_parallel_exit(self, code, should_flush=False): """ Trap any kind of return inside a parallel construct. 
'should_flush' indicates whether the variable should be flushed, which is needed by prange to skip the loop. It also indicates whether we need to register a continue (we need this for parallel blocks, but not for prange loops, as it is a direct jump there). It uses the same mechanism as try/finally: 1 continue 2 break 3 return 4 error """ save_lastprivates_label = code.new_label() dont_return_label = code.new_label() self.any_label_used = False self.breaking_label_used = False self.error_label_used = False self.parallel_private_temps = [] all_labels = code.get_all_labels() # Figure this out before starting to generate any code for label in all_labels: if code.label_used(label): self.breaking_label_used = (self.breaking_label_used or label != code.continue_label) self.any_label_used = True if self.any_label_used: code.put_goto(dont_return_label) for i, label in enumerate(all_labels): if not code.label_used(label): continue is_continue_label = label == code.continue_label code.put_label(label) if not (should_flush and is_continue_label): if label == code.error_label: self.error_label_used = True self.fetch_parallel_exception(code) code.putln("%s = %d;" % (Naming.parallel_why, i + 1)) if (self.breaking_label_used and self.is_prange and not is_continue_label): code.put_goto(save_lastprivates_label) else: code.put_goto(dont_return_label) if self.any_label_used: if self.is_prange and self.breaking_label_used: # Don't rely on lastprivate, save our lastprivates code.put_label(save_lastprivates_label) self.save_parallel_vars(code) code.put_label(dont_return_label) if should_flush and self.breaking_label_used: code.putln_openmp("#pragma omp flush(%s)" % Naming.parallel_why) def save_parallel_vars(self, code): """ The following shenanigans are instated when we break, return or propagate errors from a prange. In this case we cannot rely on lastprivate() to do its job, as no iterations may have executed yet in the last thread, leaving the values undefined. It is most likely that the breaking thread has well-defined values of the lastprivate variables, so we keep those values. """ section_name = ("__pyx_parallel_lastprivates%d" % self.critical_section_counter) code.putln_openmp("#pragma omp critical(%s)" % section_name) ParallelStatNode.critical_section_counter += 1 code.begin_block() # begin critical section c = self.begin_of_parallel_control_block_point temp_count = 0 for entry, (op, lastprivate) in self.privates.iteritems(): if not lastprivate or entry.type.is_pyobject: continue type_decl = entry.type.empty_declaration_code() temp_cname = "__pyx_parallel_temp%d" % temp_count private_cname = entry.cname temp_count += 1 invalid_value = entry.type.invalid_value() if invalid_value: init = ' = ' + invalid_value else: init = '' # Declare the parallel private in the outer block c.putln("%s %s%s;" % (type_decl, temp_cname, init)) # Initialize before escaping code.putln("%s = %s;" % (temp_cname, private_cname)) self.parallel_private_temps.append((temp_cname, private_cname)) code.end_block() # end critical section def fetch_parallel_exception(self, code): """ As each OpenMP thread may raise an exception, we need to fetch that exception from the threadstate and save it for after the parallel section where it can be re-raised in the master thread. 
Although it would seem that __pyx_filename, __pyx_lineno and __pyx_clineno are only assigned to under exception conditions (i.e., when we have the GIL), and thus should be allowed to be shared without any race condition, they are in fact subject to the same race conditions that they were previously when they were global variables and functions were allowed to release the GIL: thread A thread B acquire set lineno release acquire set lineno release acquire fetch exception release skip the fetch deallocate threadstate deallocate threadstate """ code.begin_block() code.put_ensure_gil(declare_gilstate=True) code.putln_openmp("#pragma omp flush(%s)" % Naming.parallel_exc_type) code.putln( "if (!%s) {" % Naming.parallel_exc_type) code.putln("__Pyx_ErrFetch(&%s, &%s, &%s);" % self.parallel_exc) pos_info = chain(*zip(self.parallel_pos_info, self.pos_info)) code.funcstate.uses_error_indicator = True code.putln("%s = %s; %s = %s; %s = %s;" % tuple(pos_info)) code.put_gotref(Naming.parallel_exc_type) code.putln( "}") code.put_release_ensured_gil() code.end_block() def restore_parallel_exception(self, code): "Re-raise a parallel exception" code.begin_block() code.put_ensure_gil(declare_gilstate=True) code.put_giveref(Naming.parallel_exc_type) code.putln("__Pyx_ErrRestore(%s, %s, %s);" % self.parallel_exc) pos_info = chain(*zip(self.pos_info, self.parallel_pos_info)) code.putln("%s = %s; %s = %s; %s = %s;" % tuple(pos_info)) code.put_release_ensured_gil() code.end_block() def restore_labels(self, code): """ Restore all old labels. Call this before the 'else' clause to for loops and always before ending the parallel control flow block. """ code.set_all_labels(self.old_loop_labels + (self.old_return_label, self.old_error_label)) def end_parallel_control_flow_block(self, code, break_=False, continue_=False): """ This ends the parallel control flow block and based on how the parallel section was exited, takes the corresponding action. The break_ and continue_ parameters indicate whether these should be propagated outwards: for i in prange(...): with cython.parallel.parallel(): continue Here break should be trapped in the parallel block, and propagated to the for loop. """ c = self.begin_of_parallel_control_block_point # Firstly, always prefer errors over returning, continue or break if self.error_label_used: c.putln("const char *%s = NULL; int %s = 0, %s = 0;" % self.parallel_pos_info) c.putln("PyObject *%s = NULL, *%s = NULL, *%s = NULL;" % self.parallel_exc) code.putln( "if (%s) {" % Naming.parallel_exc_type) code.putln("/* This may have been overridden by a continue, " "break or return in another thread. Prefer the error. 
*/") code.putln("%s = 4;" % Naming.parallel_why) code.putln( "}") if continue_: any_label_used = self.any_label_used else: any_label_used = self.breaking_label_used if any_label_used: # __pyx_parallel_why is used, declare and initialize c.putln("int %s;" % Naming.parallel_why) c.putln("%s = 0;" % Naming.parallel_why) code.putln( "if (%s) {" % Naming.parallel_why) for temp_cname, private_cname in self.parallel_private_temps: code.putln("%s = %s;" % (private_cname, temp_cname)) code.putln("switch (%s) {" % Naming.parallel_why) if continue_: code.put(" case 1: ") code.put_goto(code.continue_label) if break_: code.put(" case 2: ") code.put_goto(code.break_label) code.put(" case 3: ") code.put_goto(code.return_label) if self.error_label_used: code.globalstate.use_utility_code(restore_exception_utility_code) code.putln(" case 4:") self.restore_parallel_exception(code) code.put_goto(code.error_label) code.putln("}") # end switch code.putln( "}") # end if code.end_block() # end parallel control flow block self.redef_builtin_expect_apple_gcc_bug(code) # FIXME: improve with version number for OS X Lion buggy_platform_macro_condition = "(defined(__APPLE__) || defined(__OSX__))" have_expect_condition = "(defined(__GNUC__) && " \ "(__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))))" redef_condition = "(%s && %s)" % (buggy_platform_macro_condition, have_expect_condition) def undef_builtin_expect_apple_gcc_bug(self, code): """ A bug on OS X Lion disallows __builtin_expect macros. This code avoids them """ if not self.parent: code.undef_builtin_expect(self.redef_condition) def redef_builtin_expect_apple_gcc_bug(self, code): if not self.parent: code.redef_builtin_expect(self.redef_condition) class ParallelWithBlockNode(ParallelStatNode): """ This node represents a 'with cython.parallel.parallel():' block """ valid_keyword_arguments = ['num_threads'] num_threads = None def analyse_declarations(self, env): super(ParallelWithBlockNode, self).analyse_declarations(env) if self.args: error(self.pos, "cython.parallel.parallel() does not take " "positional arguments") def generate_execution_code(self, code): self.declare_closure_privates(code) self.setup_parallel_control_flow_block(code) code.putln("#ifdef _OPENMP") code.put("#pragma omp parallel ") if self.privates: privates = [e.cname for e in self.privates if not e.type.is_pyobject] code.put('private(%s)' % ', '.join(privates)) self.privatization_insertion_point = code.insertion_point() self.put_num_threads(code) code.putln("") code.putln("#endif /* _OPENMP */") code.begin_block() # parallel block self.begin_parallel_block(code) self.initialize_privates_to_nan(code) code.funcstate.start_collecting_temps() self.body.generate_execution_code(code) self.trap_parallel_exit(code) self.privatize_temps(code) self.end_parallel_block(code) code.end_block() # end parallel block continue_ = code.label_used(code.continue_label) break_ = code.label_used(code.break_label) self.restore_labels(code) self.end_parallel_control_flow_block(code, break_=break_, continue_=continue_) self.release_closure_privates(code) class ParallelRangeNode(ParallelStatNode): """ This node represents a 'for i in cython.parallel.prange():' construct. 
target NameNode the target iteration variable else_clause Node or None the else clause of this loop """ child_attrs = ['body', 'target', 'else_clause', 'args', 'num_threads', 'chunksize'] body = target = else_clause = args = None start = stop = step = None is_prange = True nogil = None schedule = None valid_keyword_arguments = ['schedule', 'nogil', 'num_threads', 'chunksize'] def __init__(self, pos, **kwds): super(ParallelRangeNode, self).__init__(pos, **kwds) # Pretend to be a ForInStatNode for control flow analysis self.iterator = PassStatNode(pos) def analyse_declarations(self, env): super(ParallelRangeNode, self).analyse_declarations(env) self.target.analyse_target_declaration(env) if self.else_clause is not None: self.else_clause.analyse_declarations(env) if not self.args or len(self.args) > 3: error(self.pos, "Invalid number of positional arguments to prange") return if len(self.args) == 1: self.stop, = self.args elif len(self.args) == 2: self.start, self.stop = self.args else: self.start, self.stop, self.step = self.args if hasattr(self.schedule, 'decode'): self.schedule = self.schedule.decode('ascii') if self.schedule not in (None, 'static', 'dynamic', 'guided', 'runtime'): error(self.pos, "Invalid schedule argument to prange: %s" % (self.schedule,)) def analyse_expressions(self, env): was_nogil = env.nogil if self.nogil: env.nogil = True if self.target is None: error(self.pos, "prange() can only be used as part of a for loop") return self self.target = self.target.analyse_target_types(env) if not self.target.type.is_numeric: # Not a valid type, assume one for now anyway if not self.target.type.is_pyobject: # nogil_check will catch the is_pyobject case error(self.target.pos, "Must be of numeric type, not %s" % self.target.type) self.index_type = PyrexTypes.c_py_ssize_t_type else: self.index_type = self.target.type if not self.index_type.signed: warning(self.target.pos, "Unsigned index type not allowed before OpenMP 3.0", level=2) # Setup start, stop and step, allocating temps if needed self.names = 'start', 'stop', 'step' start_stop_step = self.start, self.stop, self.step for node, name in zip(start_stop_step, self.names): if node is not None: node.analyse_types(env) if not node.type.is_numeric: error(node.pos, "%s argument must be numeric" % name) continue if not node.is_literal: node = node.coerce_to_temp(env) setattr(self, name, node) # As we range from 0 to nsteps, computing the index along the # way, we need a fitting type for 'i' and 'nsteps' self.index_type = PyrexTypes.widest_numeric_type( self.index_type, node.type) if self.else_clause is not None: self.else_clause = self.else_clause.analyse_expressions(env) # Although not actually an assignment in this scope, it should be # treated as such to ensure it is unpacked if a closure temp, and to # ensure lastprivate behaviour and propagation. 
If the target index is # not a NameNode, it won't have an entry, and an error was issued by # ParallelRangeTransform if hasattr(self.target, 'entry'): self.assignments[self.target.entry] = self.target.pos, None node = super(ParallelRangeNode, self).analyse_expressions(env) if node.chunksize: if not node.schedule: error(node.chunksize.pos, "Must provide schedule with chunksize") elif node.schedule == 'runtime': error(node.chunksize.pos, "Chunksize not valid for the schedule runtime") elif (node.chunksize.type.is_int and node.chunksize.is_literal and node.chunksize.compile_time_value(env) <= 0): error(node.chunksize.pos, "Chunksize must not be negative") node.chunksize = node.chunksize.coerce_to( PyrexTypes.c_int_type, env).coerce_to_temp(env) if node.nogil: env.nogil = was_nogil node.is_nested_prange = node.parent and node.parent.is_prange if node.is_nested_prange: parent = node while parent.parent and parent.parent.is_prange: parent = parent.parent parent.assignments.update(node.assignments) parent.privates.update(node.privates) parent.assigned_nodes.extend(node.assigned_nodes) return node def nogil_check(self, env): names = 'start', 'stop', 'step', 'target' nodes = self.start, self.stop, self.step, self.target for name, node in zip(names, nodes): if node is not None and node.type.is_pyobject: error(node.pos, "%s may not be a Python object " "as we don't have the GIL" % name) def generate_execution_code(self, code): """ Generate code in the following steps 1) copy any closure variables determined thread-private into temporaries 2) allocate temps for start, stop and step 3) generate a loop that calculates the total number of steps, which then computes the target iteration variable for every step: for i in prange(start, stop, step): ... becomes nsteps = (stop - start) / step; i = start; #pragma omp parallel for lastprivate(i) for (temp = 0; temp < nsteps; temp++) { i = start + step * temp; ... } Note that accumulation of 'i' would have a data dependency between iterations. Also, you can't do this for (i = start; i < stop; i += step) ... as the '<' operator should become '>' for descending loops. 'for i from x < i < y:' does not suffer from this problem as the relational operator is known at compile time! 4) release our temps and write back any private closure variables """ self.declare_closure_privates(code) # This can only be a NameNode target_index_cname = self.target.entry.cname # This will be used as the dict to format our code strings, holding # the start, stop , step, temps and target cnames fmt_dict = { 'target': target_index_cname, } # Setup start, stop and step, allocating temps if needed start_stop_step = self.start, self.stop, self.step defaults = '0', '0', '1' for node, name, default in zip(start_stop_step, self.names, defaults): if node is None: result = default elif node.is_literal: result = node.get_constant_c_result_code() else: node.generate_evaluation_code(code) result = node.result() fmt_dict[name] = result fmt_dict['i'] = code.funcstate.allocate_temp(self.index_type, False) fmt_dict['nsteps'] = code.funcstate.allocate_temp(self.index_type, False) # TODO: check if the step is 0 and if so, raise an exception in a # 'with gil' block. 
For now, just abort code.putln("if (%(step)s == 0) abort();" % fmt_dict) self.setup_parallel_control_flow_block(code) # parallel control flow block self.control_flow_var_code_point = code.insertion_point() # Note: nsteps is private in an outer scope if present code.putln("%(nsteps)s = (%(stop)s - %(start)s) / %(step)s;" % fmt_dict) # The target iteration variable might not be initialized, do it only if # we are executing at least 1 iteration, otherwise we should leave the # target unaffected. The target iteration variable is firstprivate to # shut up compiler warnings caused by lastprivate, as the compiler # erroneously believes that nsteps may be <= 0, leaving the private # target index uninitialized code.putln("if (%(nsteps)s > 0)" % fmt_dict) code.begin_block() # if block self.generate_loop(code, fmt_dict) code.end_block() # end if block self.restore_labels(code) if self.else_clause: if self.breaking_label_used: code.put("if (%s < 2)" % Naming.parallel_why) code.begin_block() # else block code.putln("/* else */") self.else_clause.generate_execution_code(code) code.end_block() # end else block # ------ cleanup ------ self.end_parallel_control_flow_block(code) # end parallel control flow block # And finally, release our privates and write back any closure # variables for temp in start_stop_step + (self.chunksize, self.num_threads): if temp is not None: temp.generate_disposal_code(code) temp.free_temps(code) code.funcstate.release_temp(fmt_dict['i']) code.funcstate.release_temp(fmt_dict['nsteps']) self.release_closure_privates(code) def generate_loop(self, code, fmt_dict): if self.is_nested_prange: code.putln("#if 0") else: code.putln("#ifdef _OPENMP") if not self.is_parallel: code.put("#pragma omp for") self.privatization_insertion_point = code.insertion_point() reduction_codepoint = self.parent.privatization_insertion_point else: code.put("#pragma omp parallel") self.privatization_insertion_point = code.insertion_point() reduction_codepoint = self.privatization_insertion_point code.putln("") code.putln("#endif /* _OPENMP */") code.begin_block() # pragma omp parallel begin block # Initialize the GIL if needed for this thread self.begin_parallel_block(code) if self.is_nested_prange: code.putln("#if 0") else: code.putln("#ifdef _OPENMP") code.put("#pragma omp for") for entry, (op, lastprivate) in self.privates.iteritems(): # Don't declare the index variable as a reduction if op and op in "+*-&^|" and entry != self.target.entry: if entry.type.is_pyobject: error(self.pos, "Python objects cannot be reductions") else: #code.put(" reduction(%s:%s)" % (op, entry.cname)) # This is the only way reductions + nesting works in gcc4.5 reduction_codepoint.put( " reduction(%s:%s)" % (op, entry.cname)) else: if entry == self.target.entry: code.put(" firstprivate(%s)" % entry.cname) code.put(" lastprivate(%s)" % entry.cname) continue if not entry.type.is_pyobject: if lastprivate: private = 'lastprivate' else: private = 'private' code.put(" %s(%s)" % (private, entry.cname)) if self.schedule: if self.chunksize: chunksize = ", %s" % self.evaluate_before_block(code, self.chunksize) else: chunksize = "" code.put(" schedule(%s%s)" % (self.schedule, chunksize)) self.put_num_threads(reduction_codepoint) code.putln("") code.putln("#endif /* _OPENMP */") code.put("for (%(i)s = 0; %(i)s < %(nsteps)s; %(i)s++)" % fmt_dict) code.begin_block() # for loop block guard_around_body_codepoint = code.insertion_point() # Start if guard block around the body. 
This may be unnecessary, but # at least it doesn't spoil indentation code.begin_block() code.putln("%(target)s = %(start)s + %(step)s * %(i)s;" % fmt_dict) self.initialize_privates_to_nan(code, exclude=self.target.entry) if self.is_parallel: code.funcstate.start_collecting_temps() self.body.generate_execution_code(code) self.trap_parallel_exit(code, should_flush=True) self.privatize_temps(code) if self.breaking_label_used: # Put a guard around the loop body in case return, break or # exceptions might be used guard_around_body_codepoint.putln("if (%s < 2)" % Naming.parallel_why) code.end_block() # end guard around loop body code.end_block() # end for loop block if self.is_parallel: # Release the GIL and deallocate the thread state self.end_parallel_block(code) code.end_block() # pragma omp parallel end block class CnameDecoratorNode(StatNode): """ This node is for the cname decorator in CythonUtilityCode: @cname('the_cname') cdef func(...): ... In case of a cdef class the cname specifies the objstruct_cname. node the node to which the cname decorator is applied cname the cname the node should get """ child_attrs = ['node'] def analyse_declarations(self, env): self.node.analyse_declarations(env) node = self.node if isinstance(node, CompilerDirectivesNode): node = node.body.stats[0] self.is_function = isinstance(node, FuncDefNode) is_struct_or_enum = isinstance(node, (CStructOrUnionDefNode, CEnumDefNode)) e = node.entry if self.is_function: e.cname = self.cname e.func_cname = self.cname e.used = True if e.pyfunc_cname and '.' in e.pyfunc_cname: e.pyfunc_cname = self.mangle(e.pyfunc_cname) elif is_struct_or_enum: e.cname = e.type.cname = self.cname else: scope = node.scope e.cname = self.cname e.type.objstruct_cname = self.cname + '_obj' e.type.typeobj_cname = Naming.typeobj_prefix + self.cname e.type.typeptr_cname = self.cname + '_type' e.type.scope.namespace_cname = e.type.typeptr_cname e.as_variable.cname = py_object_type.cast_code(e.type.typeptr_cname) scope.scope_prefix = self.cname + "_" for name, entry in scope.entries.iteritems(): if entry.func_cname: entry.func_cname = self.mangle(entry.cname) if entry.pyfunc_cname: entry.pyfunc_cname = self.mangle(entry.pyfunc_cname) def mangle(self, cname): if '.' in cname: # remove __pyx_base from func_cname cname = cname.split('.')[-1] return '%s_%s' % (self.cname, cname) def analyse_expressions(self, env): self.node = self.node.analyse_expressions(env) return self def generate_function_definitions(self, env, code): "Ensure a prototype for every @cname method in the right place" if self.is_function and env.is_c_class_scope: # method in cdef class, generate a prototype in the header h_code = code.globalstate['utility_code_proto'] if isinstance(self.node, DefNode): self.node.generate_function_header( h_code, with_pymethdef=False, proto_only=True) else: from . 
import ModuleNode entry = self.node.entry cname = entry.cname entry.cname = entry.func_cname ModuleNode.generate_cfunction_declaration( entry, env.global_scope(), h_code, definition=True) entry.cname = cname self.node.generate_function_definitions(env, code) def generate_execution_code(self, code): self.node.generate_execution_code(code) #------------------------------------------------------------------------------------ # # Runtime support code # #------------------------------------------------------------------------------------ if Options.gcc_branch_hints: branch_prediction_macros = """ /* Test for GCC > 2.95 */ #if defined(__GNUC__) \ && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ """ else: branch_prediction_macros = """ #define likely(x) (x) #define unlikely(x) (x) """ #------------------------------------------------------------------------------------ printing_utility_code = UtilityCode.load_cached("Print", "Printing.c") printing_one_utility_code = UtilityCode.load_cached("PrintOne", "Printing.c") #------------------------------------------------------------------------------------ # Exception raising code # # Exceptions are raised by __Pyx_Raise() and stored as plain # type/value/tb in PyThreadState->curexc_*. When being caught by an # 'except' statement, curexc_* is moved over to exc_* by # __Pyx_GetException() restore_exception_utility_code = UtilityCode.load_cached("PyErrFetchRestore", "Exceptions.c") raise_utility_code = UtilityCode.load_cached("RaiseException", "Exceptions.c") get_exception_utility_code = UtilityCode.load_cached("GetException", "Exceptions.c") swap_exception_utility_code = UtilityCode.load_cached("SwapException", "Exceptions.c") reset_exception_utility_code = UtilityCode.load_cached("SaveResetException", "Exceptions.c") traceback_utility_code = UtilityCode.load_cached("AddTraceback", "Exceptions.c") #------------------------------------------------------------------------------------ get_exception_tuple_utility_code = UtilityCode(proto=""" static PyObject *__Pyx_GetExceptionTuple(void); /*proto*/ """, # I doubt that calling __Pyx_GetException() here is correct as it moves # the exception from tstate->curexc_* to tstate->exc_*, which prevents # exception handlers later on from receiving it. impl = """ static PyObject *__Pyx_GetExceptionTuple(void) { PyObject *type = NULL, *value = NULL, *tb = NULL; if (__Pyx_GetException(&type, &value, &tb) == 0) { PyObject* exc_info = PyTuple_New(3); if (exc_info) { Py_INCREF(type); Py_INCREF(value); Py_INCREF(tb); PyTuple_SET_ITEM(exc_info, 0, type); PyTuple_SET_ITEM(exc_info, 1, value); PyTuple_SET_ITEM(exc_info, 2, tb); return exc_info; } } return NULL; } """, requires=[get_exception_utility_code])
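# A minimal usage sketch of the prange machinery implemented above, assuming
# only the documented cython.parallel API (the function name and variables are
# illustrative). The in-place `+=` marks `total` as a reduction and the loop
# index is handled as firstprivate/lastprivate, so with OpenMP enabled the
# generated C wraps the loop in roughly
#     #pragma omp parallel for firstprivate(i) lastprivate(i) reduction(+:total)
# iterating over nsteps = (stop - start) / step. Interpreted as plain Python,
# prange falls back to a sequential range, so the sketch runs either way; a
# compiled version would additionally C-type `total` and `i`.

from cython.parallel import prange

def reduced_sum(n):
    total = 0
    for i in prange(n, nogil=True):
        total += i
    return total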
{ "content_hash": "8bfb82ea41a7b0d685aed1c664415f30", "timestamp": "", "source": "github", "line_count": 8851, "max_line_length": 145, "avg_line_length": 40.44616427522314, "alnum_prop": 0.5664224319741668, "repo_name": "encukou/cython", "id": "5b32a2b3d708129d2414b62d0197aa8b23ce1e4d", "size": "357989", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Cython/Compiler/Nodes.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2098" }, { "name": "C", "bytes": "445986" }, { "name": "C++", "bytes": "16585" }, { "name": "CSS", "bytes": "11567" }, { "name": "Emacs Lisp", "bytes": "11931" }, { "name": "HTML", "bytes": "112723" }, { "name": "JavaScript", "bytes": "15703" }, { "name": "Makefile", "bytes": "4740" }, { "name": "PowerShell", "bytes": "3243" }, { "name": "Python", "bytes": "5186118" }, { "name": "Smalltalk", "bytes": "618" } ], "symlink_target": "" }
import sys import yaml def main(): # various smoke tests on an installed PyYAML with extension if not getattr(yaml, '_yaml', None): raise Exception('C extension is not available at `yaml._yaml`') print('embedded libyaml version is {0}'.format(yaml._yaml.get_version_string())) for loader, dumper in [(yaml.CLoader, yaml.CDumper), (yaml.Loader, yaml.Dumper)]: testyaml = 'dude: mar' loaded = yaml.load(testyaml, Loader=loader) dumped = yaml.dump(loaded, Dumper=dumper) if testyaml != dumped.strip(): raise Exception('roundtrip failed with {0}/{1}'.format(loader, dumper)) print('smoke test passed for {0}'.format(sys.executable)) if __name__ == '__main__': main()
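# A companion sketch (hypothetical, not part of the smoke test itself): the
# usual fallback idiom applications use when the libyaml-backed classes may be
# unavailable, relying only on PyYAML's documented public loader names.

try:
    from yaml import CSafeLoader as _SafeLoader
except ImportError:
    from yaml import SafeLoader as _SafeLoader

def load_document(text):
    import yaml
    return yaml.load(text, Loader=_SafeLoader)

assert load_document('dude: mar') == {'dude': 'mar'}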
{ "content_hash": "1b2a905ba905279cdc5c364321729e5c", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 85, "avg_line_length": 33.81818181818182, "alnum_prop": 0.6424731182795699, "repo_name": "yaml/pyyaml", "id": "7d799cea22152bbded142b85366f717a0524f943", "size": "744", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "packaging/build/smoketest.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "311" }, { "name": "Cython", "bytes": "68663" }, { "name": "Makefile", "bytes": "989" }, { "name": "Python", "bytes": "309784" }, { "name": "Shell", "bytes": "652" } ], "symlink_target": "" }
from django.conf import settings as _settings


def settings(request):
    values = {}
    for v in _settings.VIEW_SETTINGS:
        try:
            values[v] = getattr(_settings, v)
        except AttributeError:
            # this name from VIEW_SETTINGS is not defined in the Django settings
            pass
    return {'settings': values}


def base_template(request):
    """
    This context processor sets the base_template context variable that
    templates should extend. Allows for different bases for ajax and
    html requests.
    """
    # Note: the module-level name `settings` is the context processor defined
    # above, so the Django settings must be read through the `_settings` alias.
    base_ajax = getattr(_settings, 'BASE_AJAX_TEMPLATE', 'base_ajax.html')
    base_html = getattr(_settings, 'BASE_HTML_TEMPLATE', 'base.html')
    if request.is_ajax():
        template = base_ajax
    else:
        template = base_html
    return {'base_template': template}
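# Hypothetical wiring sketch (dotted paths assumed from this repo's layout;
# the exact setting name depends on the Django version in use): both
# processors are activated by listing them in the project settings, alongside
# the VIEW_SETTINGS names they expose to templates.
#
#     # settings.py
#     VIEW_SETTINGS = ['DEBUG', 'STATIC_URL']
#     TEMPLATE_CONTEXT_PROCESSORS += (
#         'apps.ff0000.context_processors.settings',
#         'apps.ff0000.context_processors.base_template',
#     )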
{ "content_hash": "3db9626a44580f82e7eb105bdb1bfbcc", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 73, "avg_line_length": 25.677419354838708, "alnum_prop": 0.6256281407035176, "repo_name": "ff0000/generator-red-django", "id": "e11e1f39f3b4ff11c3d4baa2c1475f3459591092", "size": "1012", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "app/templates/project/apps/ff0000/context_processors.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3780" }, { "name": "HTML", "bytes": "3480" }, { "name": "JavaScript", "bytes": "26202" }, { "name": "Python", "bytes": "19046" }, { "name": "Shell", "bytes": "1172" } ], "symlink_target": "" }
import socket from oslo_config import cfg from oslo_log import log as oslo_logging from cloudbaseinit.osutils import factory as osutils_factory from cloudbaseinit.plugins.common import base from cloudbaseinit.utils import dhcp opts = [ cfg.BoolOpt('ntp_use_dhcp_config', default=False, help='Configures NTP client time synchronization using ' 'the NTP servers provided via DHCP'), ] CONF = cfg.CONF CONF.register_opts(opts) LOG = oslo_logging.getLogger(__name__) class NTPClientPlugin(base.BasePlugin): execution_stage = base.PLUGIN_STAGE_PRE_NETWORKING def verify_time_service(self, osutils): """Verify that the time service is up. Implementing this method is optional, it is mostly used by the Windows version of this plugin. """ @staticmethod def _unpack_ntp_hosts(ntp_option_data): chunks = [ntp_option_data[index: index + 4] for index in range(0, len(ntp_option_data), 4)] return list(map(socket.inet_ntoa, chunks)) def execute(self, service, shared_data): if CONF.ntp_use_dhcp_config: osutils = osutils_factory.get_os_utils() dhcp_hosts = osutils.get_dhcp_hosts_in_use() ntp_option_data = None for (_, dhcp_host) in dhcp_hosts: options_data = dhcp.get_dhcp_options(dhcp_host, [dhcp.OPTION_NTP_SERVERS]) if options_data: ntp_option_data = options_data.get(dhcp.OPTION_NTP_SERVERS) if ntp_option_data: break if not ntp_option_data: LOG.debug("Could not obtain the NTP configuration via DHCP") return base.PLUGIN_EXECUTE_ON_NEXT_BOOT, False ntp_hosts = self._unpack_ntp_hosts(ntp_option_data) self.verify_time_service(osutils) osutils.set_ntp_client_config(ntp_hosts) LOG.info('NTP client configured. Server(s): %s' % ntp_hosts) return base.PLUGIN_EXECUTION_DONE, False
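# Worked example (hypothetical byte values) of the option-42 unpacking done by
# _unpack_ntp_hosts() above: DHCP packs NTP servers as concatenated 4-byte
# IPv4 addresses, decoded with one socket.inet_ntoa() call per chunk.
#
#     >>> raw = bytes(bytearray([192, 168, 0, 1, 10, 0, 0, 1]))
#     >>> NTPClientPlugin._unpack_ntp_hosts(raw)
#     ['192.168.0.1', '10.0.0.1']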
{ "content_hash": "90f379e48238c28adf49580e2b5c39b8", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 79, "avg_line_length": 33.171875, "alnum_prop": 0.6081017428167688, "repo_name": "cmin764/cloudbase-init", "id": "0a8b894dc830709cb63b81bc47ad735df87d79f6", "size": "2739", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cloudbaseinit/plugins/common/ntpclient.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "789914" } ], "symlink_target": "" }
import pytest
from supriya import servertools


@pytest.fixture(scope='function')
def server(request):
    def server_teardown():
        server.quit()
    server = servertools.Server().boot()
    request.addfinalizer(server_teardown)
    return server


def test_Bus_set_01(server):

    control_bus = servertools.Bus.control()
    control_bus.allocate()

    result = control_bus.get()
    assert result == 0.0
    assert control_bus.value == result

    control_bus.set(0.5)
    result = control_bus.get()
    assert result == 0.5
    assert control_bus.value == result

    control_bus.set(0.25)
    result = control_bus.get()
    assert result == 0.25
    assert control_bus.value == result
{ "content_hash": "89bd565c4f7579de059af8d3e5d0a9b6", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 43, "avg_line_length": 22.419354838709676, "alnum_prop": 0.6661870503597123, "repo_name": "andrewyoung1991/supriya", "id": "06d663d519a2d4199ad63c3c9cb20c0e9e422ab2", "size": "721", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "supriya/tools/servertools/test/test_Bus_set.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "6712" }, { "name": "CSS", "bytes": "446" }, { "name": "HTML", "bytes": "1083" }, { "name": "JavaScript", "bytes": "6163" }, { "name": "Makefile", "bytes": "6775" }, { "name": "Python", "bytes": "2693776" } ], "symlink_target": "" }
import grpc

from google.cloud.vision_v1p1beta1.proto import image_annotator_pb2 as google_dot_cloud_dot_vision__v1p1beta1_dot_proto_dot_image__annotator__pb2


class ImageAnnotatorStub(object):
  """Service that performs Google Cloud Vision API detection tasks over client
  images, such as face, landmark, logo, label, and text detection. The
  ImageAnnotator service returns detected entities from the images.
  """

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    self.BatchAnnotateImages = channel.unary_unary(
        '/google.cloud.vision.v1p1beta1.ImageAnnotator/BatchAnnotateImages',
        request_serializer=google_dot_cloud_dot_vision__v1p1beta1_dot_proto_dot_image__annotator__pb2.BatchAnnotateImagesRequest.SerializeToString,
        response_deserializer=google_dot_cloud_dot_vision__v1p1beta1_dot_proto_dot_image__annotator__pb2.BatchAnnotateImagesResponse.FromString,
        )


class ImageAnnotatorServicer(object):
  """Service that performs Google Cloud Vision API detection tasks over client
  images, such as face, landmark, logo, label, and text detection. The
  ImageAnnotator service returns detected entities from the images.
  """

  def BatchAnnotateImages(self, request, context):
    """Run image detection and annotation for a batch of images.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')


def add_ImageAnnotatorServicer_to_server(servicer, server):
  rpc_method_handlers = {
      'BatchAnnotateImages': grpc.unary_unary_rpc_method_handler(
          servicer.BatchAnnotateImages,
          request_deserializer=google_dot_cloud_dot_vision__v1p1beta1_dot_proto_dot_image__annotator__pb2.BatchAnnotateImagesRequest.FromString,
          response_serializer=google_dot_cloud_dot_vision__v1p1beta1_dot_proto_dot_image__annotator__pb2.BatchAnnotateImagesResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'google.cloud.vision.v1p1beta1.ImageAnnotator', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
{ "content_hash": "3891f882833960d924a6451cc76a5ec9", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 151, "avg_line_length": 44.755102040816325, "alnum_prop": 0.7555859553123575, "repo_name": "jonparrott/google-cloud-python", "id": "277708bd6e3be22ef9dab49a5a788cc8022678fc", "size": "2263", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "vision/google/cloud/vision_v1p1beta1/proto/image_annotator_pb2_grpc.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3366" }, { "name": "PowerShell", "bytes": "7195" }, { "name": "Protocol Buffer", "bytes": "62009" }, { "name": "Python", "bytes": "3459300" }, { "name": "Shell", "bytes": "7548" } ], "symlink_target": "" }
import numpy as np
import pandas as pd
import xarray as xr
import datetime
import os


def get_mjo(months=None, years=None, reload_data=False):
    '''MJO index from http://www.bom.gov.au/climate/mjo/graphics/rmm.74toRealtime.txt'''
    module_dir, fname = os.path.split(__file__)
    data_file = os.path.join(module_dir, 'data', 'rmm.74toRealtime.txt')
    data_file_exists = os.path.exists(data_file)
    col_names = ['year', 'month', 'day', 'RMM1', 'RMM2', 'phase', 'amplitude',
                 'Missing Value']

    # read data file
    if data_file_exists and not reload_data:
        # load data from local drive
        df = pd.read_csv(data_file, skiprows=2, names=col_names, sep=r'\s+')
    else:
        # download data from the internet
        df = pd.read_csv(
            'http://www.bom.gov.au/climate/mjo/graphics/rmm.74toRealtime.txt',
            skiprows=2, names=col_names, sep=r'\s+')

    # time index
    # year0, month0, day0 = df.loc[0, ['year', 'month', 'day']]
    # time = pd.date_range('{}-{}-{} 12:00:00'.format(year0, month0, day0),
    #                      periods=len(df), freq='D')
    # df.index = time
    years_, months_, days_ = (df.loc[:, 'year'], df.loc[:, 'month'],
                              df.loc[:, 'day'])
    # time = []
    # for year, month, day in zip(years, months, days):
    #     time.append(datetime.datetime(year, month, day, 12, 0, 0))
    time = [datetime.datetime(year, month, day, 12, 0, 0)
            for year, month, day in zip(years_, months_, days_)]
    df.index = time

    # select columns
    df = df.loc[:, ['RMM1', 'RMM2', 'phase', 'amplitude']]

    # mask invalid values
    df[df >= 999] = np.nan

    # convert to xarray Dataset with dimension name "time"
    ds = xr.Dataset.from_dataframe(df)
    ds = ds.rename({'index': 'time'})

    # select months
    if months is not None:
        L = False
        for month in months:
            L = L | (ds['time.month'] == month)
        ds = ds.sel(time=ds['time'][L])

    # select years
    if years is not None:
        L = False
        for year in years:
            L = L | (ds['time.year'] == year)
        ds = ds.sel(time=ds['time'][L])

    return ds
{ "content_hash": "b88d93aad32466b84c02cebcfb9e83ee", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 78, "avg_line_length": 34.12698412698413, "alnum_prop": 0.5693023255813954, "repo_name": "wy2136/climate_index", "id": "de61c146a639dc2256ffbbd90f4f9d7571e7720b", "size": "2289", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mjo.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Jupyter Notebook", "bytes": "1384203" }, { "name": "Python", "bytes": "13832" } ], "symlink_target": "" }
__all__ = ("FlashPolicyProtocol", "FlashPolicyFactory",) import re from twisted.python import log from twisted.internet import reactor from twisted.application.internet import TCPServer from twisted.internet.protocol import Protocol, Factory class FlashPolicyProtocol(Protocol): """ Flash Player 9 (version 9.0.124.0 and above) implements a strict new access policy for Flash applications that make Socket or XMLSocket connections to a remote host. It now requires the presence of a socket policy file on the server. We want this to support the Flash WebSockets bridge which is needed for older browser, in particular MSIE9/8. See: * `Autobahn WebSocket fallbacks example <https://github.com/tavendo/AutobahnPython/tree/master/examples/websocket/echo_wsfallbacks>`_ * `Flash policy files background <http://www.lightsphere.com/dev/articles/flash_socket_policy.html>`_ """ REQUESTPAT = re.compile("^\s*<policy-file-request\s*/>") REQUESTMAXLEN = 200 REQUESTTIMEOUT = 5 POLICYFILE = """<?xml version="1.0"?><cross-domain-policy><allow-access-from domain="*" to-ports="%d" /></cross-domain-policy>""" def __init__(self, allowedPort): """ Ctor. :param allowedPort: The port to which Flash player should be allowed to connect. :type allowedPort: int """ self.allowedPort = allowedPort self.received = "" self.dropConnection = None def connectionMade(self): ## DoS protection ## def dropConnection(): self.transport.abortConnection() self.dropConnection = None self.dropConnection = reactor.callLater(FlashPolicyProtocol.REQUESTTIMEOUT, dropConnection) def connectionLost(self, reason): if self.dropConnection: self.dropConnection.cancel() self.dropConnection = None def dataReceived(self, data): self.received += data if FlashPolicyProtocol.REQUESTPAT.match(self.received): ## got valid request: send policy file ## self.transport.write(FlashPolicyProtocol.POLICYFILE % self.allowedPort) self.transport.loseConnection() elif len(self.received) > FlashPolicyProtocol.REQUESTMAXLEN: ## possible DoS attack ## self.transport.abortConnection() else: ## need more data ## pass class FlashPolicyFactory(Factory): def __init__(self, allowedPort): """ Ctor. :param allowedPort: The port to which Flash player should be allowed to connect. :type allowedPort: int """ self.allowedPort = allowedPort def buildProtocol(self, addr): return FlashPolicyProtocol(self.allowedPort)
{ "content_hash": "6df2725dc9c229fb84b92bb3fdfa3f72", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 139, "avg_line_length": 31.573033707865168, "alnum_prop": 0.6576512455516014, "repo_name": "normanmaurer/autobahntestsuite-maven-plugin", "id": "5f6ca5ac45195bf0f2ccd8453a8152a59371201a", "size": "3592", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/resources/autobahn/flashpolicy.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "106" }, { "name": "C", "bytes": "70690" }, { "name": "C++", "bytes": "1291" }, { "name": "CSS", "bytes": "6075" }, { "name": "GAP", "bytes": "14120" }, { "name": "Gherkin", "bytes": "2218" }, { "name": "HTML", "bytes": "56655" }, { "name": "Java", "bytes": "24931" }, { "name": "JavaScript", "bytes": "9151" }, { "name": "Python", "bytes": "13888733" }, { "name": "Shell", "bytes": "1406" }, { "name": "Smarty", "bytes": "393" } ], "symlink_target": "" }
from neutron.api.rpc.handlers import dvr_rpc from neutron.common import constants as n_const from neutron.common import utils as n_utils from neutron.openstack.common import log as logging from neutron.plugins.openvswitch.common import constants LOG = logging.getLogger(__name__) # A class to represent a DVR-hosted subnet including vif_ports resident on # that subnet class LocalDVRSubnetMapping: def __init__(self, subnet, csnat_ofport=constants.OFPORT_INVALID): # set of commpute ports on on this dvr subnet self.compute_ports = {} self.subnet = subnet self.csnat_ofport = csnat_ofport self.dvr_owned = False def __str__(self): return ("subnet = %s compute_ports = %s csnat_port = %s" " is_dvr_owned = %s" % (self.subnet, self.get_compute_ofports(), self.get_csnat_ofport(), self.is_dvr_owned())) def get_subnet_info(self): return self.subnet def set_dvr_owned(self, owned): self.dvr_owned = owned def is_dvr_owned(self): return self.dvr_owned def add_compute_ofport(self, vif_id, ofport): self.compute_ports[vif_id] = ofport def remove_compute_ofport(self, vif_id): self.compute_ports.pop(vif_id, 0) def remove_all_compute_ofports(self): self.compute_ports.clear() def get_compute_ofports(self): return self.compute_ports def set_csnat_ofport(self, ofport): self.csnat_ofport = ofport def get_csnat_ofport(self): return self.csnat_ofport class OVSPort: def __init__(self, id, ofport, mac, device_owner): self.id = id self.mac = mac self.ofport = ofport self.subnets = set() self.device_owner = device_owner def __str__(self): return ("OVSPort: id = %s, ofport = %s, mac = %s," "device_owner = %s, subnets = %s" % (self.id, self.ofport, self.mac, self.device_owner, self.subnets)) def add_subnet(self, subnet_id): self.subnets.add(subnet_id) def remove_subnet(self, subnet_id): self.subnets.remove(subnet_id) def remove_all_subnets(self): self.subnets.clear() def get_subnets(self): return self.subnets def get_device_owner(self): return self.device_owner def get_mac(self): return self.mac def get_ofport(self): return self.ofport class OVSDVRNeutronAgent(dvr_rpc.DVRAgentRpcApiMixin): ''' Implements OVS-based DVR(Distributed Virtual Router), for overlay networks. 
''' # history # 1.0 Initial version def __init__(self, context, plugin_rpc, integ_br, tun_br, patch_int_ofport=constants.OFPORT_INVALID, patch_tun_ofport=constants.OFPORT_INVALID, host=None, enable_tunneling=False, enable_distributed_routing=False): self.context = context self.plugin_rpc = plugin_rpc self.int_br = integ_br self.tun_br = tun_br self.patch_int_ofport = patch_int_ofport self.patch_tun_ofport = patch_tun_ofport self.host = host self.enable_tunneling = enable_tunneling self.enable_distributed_routing = enable_distributed_routing def reset_ovs_parameters(self, integ_br, tun_br, patch_int_ofport, patch_tun_ofport): '''Reset the openvswitch parameters''' if not (self.enable_tunneling and self.enable_distributed_routing): return self.int_br = integ_br self.tun_br = tun_br self.patch_int_ofport = patch_int_ofport self.patch_tun_ofport = patch_tun_ofport def setup_dvr_flows_on_integ_tun_br(self): '''Setup up initial dvr flows into br-int and br-tun''' if not (self.enable_tunneling and self.enable_distributed_routing): return LOG.debug("L2 Agent operating in DVR Mode") self.dvr_mac_address = None self.local_dvr_map = {} self.local_csnat_map = {} self.local_ports = {} self.registered_dvr_macs = set() # get the local DVR MAC Address try: details = self.plugin_rpc.get_dvr_mac_address_by_host( self.context, self.host) LOG.debug("L2 Agent DVR: Received response for " "get_dvr_mac_address_by_host() from " "plugin: %r", details) self.dvr_mac_address = details['mac_address'] except Exception: LOG.error(_("DVR: Failed to obtain local DVR Mac address")) self.enable_distributed_routing = False # switch all traffic using L2 learning self.int_br.add_flow(table=constants.LOCAL_SWITCHING, priority=1, actions="normal") return # Remove existing flows in integration bridge self.int_br.remove_all_flows() # Add a canary flow to int_br to track OVS restarts self.int_br.add_flow(table=constants.CANARY_TABLE, priority=0, actions="drop") # Insert 'drop' action as the default for Table DVR_TO_SRC_MAC self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=1, actions="drop") # Insert 'normal' action as the default for Table LOCAL_SWITCHING self.int_br.add_flow(table=constants.LOCAL_SWITCHING, priority=1, actions="normal") dvr_macs = self.plugin_rpc.get_dvr_mac_address_list(self.context) LOG.debug("L2 Agent DVR: Received these MACs: %r", dvr_macs) for mac in dvr_macs: if mac['mac_address'] == self.dvr_mac_address: continue # Table 0 (default) will now sort DVR traffic from other # traffic depending on in_port self.int_br.add_flow(table=constants.LOCAL_SWITCHING, priority=2, in_port=self.patch_tun_ofport, dl_src=mac['mac_address'], actions="resubmit(,%s)" % constants.DVR_TO_SRC_MAC) # Table DVR_NOT_LEARN ensures unique dvr macs in the cloud # are not learnt, as they may # result in flow explosions self.tun_br.add_flow(table=constants.DVR_NOT_LEARN, priority=1, dl_src=mac['mac_address'], actions="output:%s" % self.patch_int_ofport) self.registered_dvr_macs.add(mac['mac_address']) self.tun_br.add_flow(priority=1, in_port=self.patch_int_ofport, actions="resubmit(,%s)" % constants.DVR_PROCESS) # table-miss should be sent to learning table self.tun_br.add_flow(table=constants.DVR_NOT_LEARN, priority=0, actions="resubmit(,%s)" % constants.LEARN_FROM_TUN) self.tun_br.add_flow(table=constants.DVR_PROCESS, priority=0, actions="resubmit(,%s)" % constants.PATCH_LV_TO_TUN) def dvr_mac_address_update(self, dvr_macs): if not (self.enable_tunneling and self.enable_distributed_routing): return LOG.debug("DVR Mac address 
update with host-mac: %s", dvr_macs) if not self.dvr_mac_address: LOG.debug("Self mac unknown, ignoring this " "dvr_mac_address_update() ") return dvr_host_macs = set() for entry in dvr_macs: if entry['mac_address'] == self.dvr_mac_address: continue dvr_host_macs.add(entry['mac_address']) if dvr_host_macs == self.registered_dvr_macs: LOG.debug("DVR Mac address already up to date") return dvr_macs_added = dvr_host_macs - self.registered_dvr_macs dvr_macs_removed = self.registered_dvr_macs - dvr_host_macs for oldmac in dvr_macs_removed: self.int_br.delete_flows(table=constants.LOCAL_SWITCHING, in_port=self.patch_tun_ofport, dl_src=oldmac) self.tun_br.delete_flows(table=constants.DVR_NOT_LEARN, dl_src=oldmac) LOG.debug("Removed DVR MAC flow for %s", oldmac) self.registered_dvr_macs.remove(oldmac) for newmac in dvr_macs_added: self.int_br.add_flow(table=constants.LOCAL_SWITCHING, priority=2, in_port=self.patch_tun_ofport, dl_src=newmac, actions="resubmit(,%s)" % constants.DVR_TO_SRC_MAC) self.tun_br.add_flow(table=constants.DVR_NOT_LEARN, priority=1, dl_src=newmac, actions="output:%s" % self.patch_int_ofport) LOG.debug("Added DVR MAC flow for %s", newmac) self.registered_dvr_macs.add(newmac) def is_dvr_router_interface(self, device_owner): return device_owner == n_const.DEVICE_OWNER_DVR_INTERFACE def process_tunneled_network(self, network_type, lvid, segmentation_id): if not (self.enable_tunneling and self.enable_distributed_routing): return self.tun_br.add_flow(table=constants.TUN_TABLE[network_type], priority=1, tun_id=segmentation_id, actions="mod_vlan_vid:%s," "resubmit(,%s)" % (lvid, constants.DVR_NOT_LEARN)) def _bind_distributed_router_interface_port(self, port, fixed_ips, device_owner, local_vlan): # since router port must have only one fixed IP, directly # use fixed_ips[0] subnet_uuid = fixed_ips[0]['subnet_id'] csnat_ofport = constants.OFPORT_INVALID ldm = None if subnet_uuid in self.local_dvr_map: ldm = self.local_dvr_map[subnet_uuid] csnat_ofport = ldm.get_csnat_ofport() if csnat_ofport == constants.OFPORT_INVALID: LOG.error(_("DVR: Duplicate DVR router interface detected " "for subnet %s"), subnet_uuid) return else: # set up LocalDVRSubnetMapping available for this subnet subnet_info = self.plugin_rpc.get_subnet_for_dvr(self.context, subnet_uuid) if not subnet_info: LOG.error(_("DVR: Unable to retrieve subnet information" " for subnet_id %s"), subnet_uuid) return LOG.debug("get_subnet_for_dvr for subnet %s returned with %s" % (subnet_uuid, subnet_info)) ldm = LocalDVRSubnetMapping(subnet_info) self.local_dvr_map[subnet_uuid] = ldm # DVR takes over ldm.set_dvr_owned(True) subnet_info = ldm.get_subnet_info() ip_subnet = subnet_info['cidr'] local_compute_ports = ( self.plugin_rpc.get_ports_on_host_by_subnet( self.context, self.host, subnet_uuid)) LOG.debug("DVR: List of ports received from " "get_ports_on_host_by_subnet %s", local_compute_ports) for prt in local_compute_ports: vif = self.int_br.get_vif_port_by_id(prt['id']) if not vif: continue ldm.add_compute_ofport(vif.vif_id, vif.ofport) if vif.vif_id in self.local_ports: # ensure if a compute port is already on # a different dvr routed subnet # if yes, queue this subnet to that port ovsport = self.local_ports[vif.vif_id] ovsport.add_subnet(subnet_uuid) else: # the compute port is discovered first here that its on # a dvr routed subnet queue this subnet to that port ovsport = OVSPort(vif.vif_id, vif.ofport, vif.vif_mac, prt['device_owner']) ovsport.add_subnet(subnet_uuid) self.local_ports[vif.vif_id] = ovsport # create rule for 
just this vm port self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=4, dl_vlan=local_vlan, dl_dst=ovsport.get_mac(), actions="strip_vlan,mod_dl_src:%s," "output:%s" % (subnet_info['gateway_mac'], ovsport.get_ofport())) # create rule to forward broadcast/multicast frames from dvr # router interface to appropriate local tenant ports ofports = ','.join(map(str, ldm.get_compute_ofports().values())) if csnat_ofport != constants.OFPORT_INVALID: ofports = str(csnat_ofport) + ',' + ofports if ofports: self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," "output:%s" % (subnet_info['gateway_mac'], ofports)) self.tun_br.add_flow(table=constants.DVR_PROCESS, priority=3, dl_vlan=local_vlan, proto='arp', nw_dst=subnet_info['gateway_ip'], actions="drop") self.tun_br.add_flow(table=constants.DVR_PROCESS, priority=2, dl_vlan=local_vlan, dl_dst=port.vif_mac, actions="drop") self.tun_br.add_flow(table=constants.DVR_PROCESS, priority=1, dl_vlan=local_vlan, dl_src=port.vif_mac, actions="mod_dl_src:%s,resubmit(,%s)" % (self.dvr_mac_address, constants.PATCH_LV_TO_TUN)) # the dvr router interface is itself a port, so capture it # queue this subnet to that port. A subnet appears only once as # a router interface on any given router ovsport = OVSPort(port.vif_id, port.ofport, port.vif_mac, device_owner) ovsport.add_subnet(subnet_uuid) self.local_ports[port.vif_id] = ovsport def _bind_port_on_dvr_subnet(self, port, fixed_ips, device_owner, local_vlan): # Handle new compute port added use-case subnet_uuid = None for ips in fixed_ips: if ips['subnet_id'] not in self.local_dvr_map: continue subnet_uuid = ips['subnet_id'] ldm = self.local_dvr_map[subnet_uuid] if not ldm.is_dvr_owned(): # well this is CSNAT stuff, let dvr come in # and do plumbing for this vm later continue # This confirms that this compute port belongs # to a dvr hosted subnet. 
# Accommodate this VM Port into the existing rule in # the integration bridge LOG.debug("DVR: Plumbing compute port %s", port.vif_id) subnet_info = ldm.get_subnet_info() ip_subnet = subnet_info['cidr'] csnat_ofport = ldm.get_csnat_ofport() ldm.add_compute_ofport(port.vif_id, port.ofport) if port.vif_id in self.local_ports: # ensure if a compute port is already on a different # dvr routed subnet # if yes, queue this subnet to that port ovsport = self.local_ports[port.vif_id] ovsport.add_subnet(subnet_uuid) else: # the compute port is discovered first here that its # on a dvr routed subnet, queue this subnet to that port ovsport = OVSPort(port.vif_id, port.ofport, port.vif_mac, device_owner) ovsport.add_subnet(subnet_uuid) self.local_ports[port.vif_id] = ovsport # create a rule for this vm port self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=4, dl_vlan=local_vlan, dl_dst=ovsport.get_mac(), actions="strip_vlan,mod_dl_src:%s," "output:%s" % (subnet_info['gateway_mac'], ovsport.get_ofport())) ofports = ','.join(map(str, ldm.get_compute_ofports().values())) if csnat_ofport != constants.OFPORT_INVALID: ofports = str(csnat_ofport) + ',' + ofports self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ofports)) def _bind_centralized_snat_port_on_dvr_subnet(self, port, fixed_ips, device_owner, local_vlan): if port.vif_id in self.local_ports: # throw an error if CSNAT port is already on a different # dvr routed subnet ovsport = self.local_ports[port.vif_id] subs = list(ovsport.get_subnets()) LOG.error(_("Centralized-SNAT port %s already seen on "), port.vif_id) LOG.error(_("a different subnet %s"), subs[0]) return # since centralized-SNAT (CSNAT) port must have only one fixed # IP, directly use fixed_ips[0] subnet_uuid = fixed_ips[0]['subnet_id'] ldm = None subnet_info = None if subnet_uuid not in self.local_dvr_map: # no csnat ports seen on this subnet - create csnat state # for this subnet subnet_info = self.plugin_rpc.get_subnet_for_dvr(self.context, subnet_uuid) ldm = LocalDVRSubnetMapping(subnet_info, port.ofport) self.local_dvr_map[subnet_uuid] = ldm else: ldm = self.local_dvr_map[subnet_uuid] subnet_info = ldm.get_subnet_info() # Store csnat OF Port in the existing DVRSubnetMap ldm.set_csnat_ofport(port.ofport) # create ovsPort footprint for csnat port ovsport = OVSPort(port.vif_id, port.ofport, port.vif_mac, device_owner) ovsport.add_subnet(subnet_uuid) self.local_ports[port.vif_id] = ovsport self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=4, dl_vlan=local_vlan, dl_dst=ovsport.get_mac(), actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ovsport.get_ofport())) ofports = ','.join(map(str, ldm.get_compute_ofports().values())) ofports = str(ldm.get_csnat_ofport()) + ',' + ofports ip_subnet = subnet_info['cidr'] self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ofports)) def bind_port_to_dvr(self, port, network_type, fixed_ips, device_owner, local_vlan_id): # a port coming up as distributed router interface if not (self.enable_tunneling and self.enable_distributed_routing): return if network_type not in constants.TUNNEL_NETWORK_TYPES: return if device_owner == n_const.DEVICE_OWNER_DVR_INTERFACE: self._bind_distributed_router_interface_port(port, fixed_ips, 
device_owner, local_vlan_id) if device_owner and n_utils.is_dvr_serviced(device_owner): self._bind_port_on_dvr_subnet(port, fixed_ips, device_owner, local_vlan_id) if device_owner == n_const.DEVICE_OWNER_ROUTER_SNAT: self._bind_centralized_snat_port_on_dvr_subnet(port, fixed_ips, device_owner, local_vlan_id) def _unbind_distributed_router_interface_port(self, port, local_vlan): ovsport = self.local_ports[port.vif_id] # removal of distributed router interface subnet_ids = ovsport.get_subnets() subnet_set = set(subnet_ids) # ensure we process for all the subnets laid on this removed port for sub_uuid in subnet_set: if sub_uuid not in self.local_dvr_map: continue ldm = self.local_dvr_map[sub_uuid] subnet_info = ldm.get_subnet_info() ip_subnet = subnet_info['cidr'] # DVR is no more owner ldm.set_dvr_owned(False) # remove all vm rules for this dvr subnet # clear of compute_ports altogether compute_ports = ldm.get_compute_ofports() for vif_id in compute_ports: ovsport = self.local_ports[vif_id] self.int_br.delete_flows(table=constants.DVR_TO_SRC_MAC, dl_vlan=local_vlan, dl_dst=ovsport.get_mac()) ldm.remove_all_compute_ofports() if ldm.get_csnat_ofport() != -1: # If there is a csnat port on this agent, preserve # the local_dvr_map state ofports = str(ldm.get_csnat_ofport()) self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ofports)) else: # removed port is a distributed router interface self.int_br.delete_flows(table=constants.DVR_TO_SRC_MAC, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet) # remove subnet from local_dvr_map as no dvr (or) csnat # ports available on this agent anymore self.local_dvr_map.pop(sub_uuid, None) self.tun_br.delete_flows(table=constants.DVR_PROCESS, dl_vlan=local_vlan, proto='arp', nw_dst=subnet_info['gateway_ip']) ovsport.remove_subnet(sub_uuid) self.tun_br.delete_flows(table=constants.DVR_PROCESS, dl_vlan=local_vlan, dl_dst=port.vif_mac) self.tun_br.delete_flows(table=constants.DVR_PROCESS, dl_vlan=local_vlan, dl_src=port.vif_mac) # release port state self.local_ports.pop(port.vif_id, None) def _unbind_port_on_dvr_subnet(self, port, local_vlan): ovsport = self.local_ports[port.vif_id] # This confirms that this compute port being removed belonged # to a dvr hosted subnet. 
# Accommodate this VM Port into the existing rule in # the integration bridge LOG.debug("DVR: Removing plumbing for compute port %s", port) subnet_ids = ovsport.get_subnets() # ensure we process for all the subnets laid on this port for sub_uuid in subnet_ids: if sub_uuid not in self.local_dvr_map: continue ldm = self.local_dvr_map[sub_uuid] subnet_info = ldm.get_subnet_info() ldm.remove_compute_ofport(port.vif_id) ofports = ','.join(map(str, ldm.get_compute_ofports().values())) ip_subnet = subnet_info['cidr'] # first remove this vm port rule self.int_br.delete_flows(table=constants.DVR_TO_SRC_MAC, dl_vlan=local_vlan, dl_dst=ovsport.get_mac()) if ldm.get_csnat_ofport() != -1: # If there is a csnat port on this agent, preserve # the local_dvr_map state ofports = str(ldm.get_csnat_ofport()) + ',' + ofports self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ofports)) else: if ofports: self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ofports)) else: # remove the flow altogether, as no ports (both csnat/ # compute) are available on this subnet in this # agent self.int_br.delete_flows(table=constants.DVR_TO_SRC_MAC, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet) # release port state self.local_ports.pop(port.vif_id, None) def _unbind_centralized_snat_port_on_dvr_subnet(self, port, local_vlan): ovsport = self.local_ports[port.vif_id] # This confirms that this compute port being removed belonged # to a dvr hosted subnet. # Accommodate this VM Port into the existing rule in # the integration bridge LOG.debug("DVR: Removing plumbing for csnat port %s", port) sub_uuid = list(ovsport.get_subnets())[0] # ensure we process for all the subnets laid on this port if sub_uuid not in self.local_dvr_map: return ldm = self.local_dvr_map[sub_uuid] subnet_info = ldm.get_subnet_info() ip_subnet = subnet_info['cidr'] ldm.set_csnat_ofport(constants.OFPORT_INVALID) # then remove csnat port rule self.int_br.delete_flows(table=constants.DVR_TO_SRC_MAC, dl_vlan=local_vlan, dl_dst=ovsport.get_mac()) ofports = ','.join(map(str, ldm.get_compute_ofports().values())) if ofports: self.int_br.add_flow(table=constants.DVR_TO_SRC_MAC, priority=2, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet, actions="strip_vlan,mod_dl_src:%s," " output:%s" % (subnet_info['gateway_mac'], ofports)) else: self.int_br.delete_flows(table=constants.DVR_TO_SRC_MAC, proto='ip', dl_vlan=local_vlan, nw_dst=ip_subnet) if not ldm.is_dvr_owned(): # if not owned by DVR (only used for csnat), remove this # subnet state altogether self.local_dvr_map.pop(sub_uuid, None) # release port state self.local_ports.pop(port.vif_id, None) def unbind_port_from_dvr(self, vif_port, local_vlan_id): if not (self.enable_tunneling and self.enable_distributed_routing): return # Handle port removed use-case if vif_port and vif_port.vif_id not in self.local_ports: LOG.debug("DVR: Non distributed port, ignoring %s", vif_port) return ovsport = self.local_ports[vif_port.vif_id] device_owner = ovsport.get_device_owner() if device_owner == n_const.DEVICE_OWNER_DVR_INTERFACE: self._unbind_distributed_router_interface_port(vif_port, local_vlan_id) if device_owner and n_utils.is_dvr_serviced(device_owner): self._unbind_port_on_dvr_subnet(vif_port, local_vlan_id) if device_owner == 
n_const.DEVICE_OWNER_ROUTER_SNAT: self._unbind_centralized_snat_port_on_dvr_subnet(vif_port, local_vlan_id)
{ "content_hash": "b790ea4622ba3c709cb097e2b10a8175", "timestamp": "", "source": "github", "line_count": 702, "max_line_length": 79, "avg_line_length": 44.396011396011396, "alnum_prop": 0.49903741256497464, "repo_name": "cboling/SDNdbg", "id": "3d89fe0b199f3631550c6adb34a6bf805c3373cf", "size": "31825", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "docs/old-stuff/pydzcvr/doc/neutron/plugins/openvswitch/agent/ovs_dvr_neutron_agent.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "6099" }, { "name": "HTML", "bytes": "567814" }, { "name": "JavaScript", "bytes": "545293" }, { "name": "Makefile", "bytes": "11370" }, { "name": "PHP", "bytes": "328" }, { "name": "Python", "bytes": "295132" }, { "name": "Shell", "bytes": "10978" } ], "symlink_target": "" }
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('web', '0003_remove_question_order'),
    ]

    operations = [
        migrations.AlterOrderWithRespectTo(
            name='question',
            order_with_respect_to='questionnaire',
        ),
        migrations.AlterOrderWithRespectTo(
            name='selectiveanswer',
            order_with_respect_to='question',
        ),
    ]
{ "content_hash": "07aae07586973294d29bfa042c0b1fd5", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 50, "avg_line_length": 23.61904761904762, "alnum_prop": 0.6048387096774194, "repo_name": "otoyo/satisfactory-mobile", "id": "165435effce7f5db21c9edbb3518a5e7a3550b69", "size": "520", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/migrations/0004_auto_20150305_2117.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "948" }, { "name": "HTML", "bytes": "62525" }, { "name": "JavaScript", "bytes": "4438" }, { "name": "Python", "bytes": "70916" } ], "symlink_target": "" }
"""A helper request handler for request handlers that receive data.""" import logging from dashboard import request_handler from dashboard import utils class PostDataHandler(request_handler.RequestHandler): """Helper class to handle common functionality for dealing with slaves.""" def post(self): """Checks the IP of the request against the white list. Real sub-class handlers should override this and use _CheckIpAgainstWhitelist; this is provided here for convenience in tests. """ self._CheckIpAgainstWhitelist() def _CheckIpAgainstWhitelist(self): """Checks the remote address of the request against the IP whitelist. Returns: True if whitelisted, False otherwise. """ whitelist = utils.GetIpWhitelist() if not whitelist or self.request.remote_addr in whitelist: return True # Try to log some info about the post data that is not whitelisted. # This could be totally bogus data, so ignore huge postdata and swallow # exceptions. try: data_param = self.request.get('data') if data_param and len(data_param) < 10000: # Log the start of the data; it may give clues about who is sending # the data and who to contact. logging.warn('Received data: %s...', data_param[:200]) except Exception: # pylint: disable=broad-except pass self.ReportError( 'IP address %s not in IP whitelist!' % self.request.remote_addr, 403) return False
{ "content_hash": "8da5adcb1edbb16ca441d4a84ec29096", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 77, "avg_line_length": 35.095238095238095, "alnum_prop": 0.7008141112618724, "repo_name": "scottmcmaster/catapult", "id": "486045a59d95996469f51c84486f426531593696", "size": "1637", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "dashboard/dashboard/post_data_handler.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "18609" }, { "name": "HTML", "bytes": "23194439" }, { "name": "JavaScript", "bytes": "35437" }, { "name": "Python", "bytes": "1291102" } ], "symlink_target": "" }
import telnetlib
import time
import socket
import sys

TELNET_PORT = 23
TELNET_TIMEOUT = 6


def send_command(remote_conn, cmd):
    cmd = cmd.rstrip()
    remote_conn.write(cmd + '\n')
    time.sleep(1)
    return remote_conn.read_very_eager()


def login(remote_conn, username, password):
    output = remote_conn.read_until("sername:", TELNET_TIMEOUT)
    remote_conn.write(username + '\n')
    output += remote_conn.read_until("ssword:", TELNET_TIMEOUT)
    remote_conn.write(password + '\n')
    return output


def telnet_connect(ip_addr):
    try:
        return telnetlib.Telnet(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
    except socket.timeout:
        sys.exit("Connection timed-out")


def main():
    ip_addr = '50.76.53.27'
    username = 'pyclass'
    password = '88newclass'

    remote_conn = telnet_connect(ip_addr)
    login(remote_conn, username, password)

    time.sleep(1)
    output = remote_conn.read_very_eager()

    output = send_command(remote_conn, 'term length 0')
    output = send_command(remote_conn, 'show version')
    print output

    remote_conn.close()


if __name__ == "__main__":
    main()
{ "content_hash": "cc72a716fd2d86b0ca814686a79cd1b2", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 69, "avg_line_length": 23.5, "alnum_prop": 0.6560283687943262, "repo_name": "cdieken/python_class", "id": "9d68ca956452e0e61831deeb2467f0bd9c7226a4", "size": "1171", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "class2/telnetTest.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Groff", "bytes": "199" }, { "name": "Python", "bytes": "368123" } ], "symlink_target": "" }
import datetime
import json
import re

import yaml
from django import template
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.template.defaultfilters import date
from django.utils.html import escape, strip_tags
from django.utils.safestring import mark_safe
from markdown import markdown as md

register = template.Library()


@register.filter()
def boolean_as_icon(value):
    html = '<i class="fas fa-check text-success"></i>'
    if not value:
        html = '<i class="fas fa-times text-danger"></i>'
    return mark_safe(html)


@register.simple_tag
def get_status(text):
    text = text.lower()
    if text in ("delete", "deleted", "remove", "removed"):
        return "danger"
    if text in ("change", "changed", "update", "updated"):
        return "warning"
    if text in ("add", "added", "create", "created"):
        return "success"
    return "info"


@register.filter()
def as_link(value):
    if not hasattr(value, "get_absolute_url"):
        return value
    return mark_safe(f'<a href="{value.get_absolute_url()}">{value}</a>')


@register.filter()
def render_bandwidth_speed(speed):
    """
    Renders speeds given in Mbps.
    """
    if not speed:
        return ""
    if speed >= 1000000 and speed % 1000000 == 0:
        return f"{int(speed / 1000000)} Tbps"
    elif speed >= 1000 and speed % 1000 == 0:
        return f"{int(speed / 1000)} Gbps"
    elif speed >= 1000:
        return f"{float(speed) / 1000} Gbps"
    else:
        return f"{speed} Mbps"


@register.filter()
def render_none(value):
    if value is None or value == "":
        return mark_safe('<span class="text-muted">&mdash;</span>')
    return as_link(value)


@register.filter()
def contains(value, arg):
    """
    Test whether a value contains any of a given set of strings.

    `arg` should be a comma-separated list of strings.
    """
    return any(s in value for s in arg.split(","))


@register.filter()
def notcontains(value, arg):
    """
    Test whether a value does not contain any of a given set of strings.

    `arg` should be a comma-separated list of strings.
    """
    for s in arg.split(","):
        if s in value:
            return False
    return True


@register.filter(is_safe=True)
def markdown(value, escape_html=False):
    """
    Render text as Markdown.
    """
    # Strip HTML tags and render Markdown
    html = md(strip_tags(value), extensions=["fenced_code", "tables"])

    if escape_html:
        html = escape(html)

    return mark_safe(html)


@register.simple_tag()
def querystring(request, **kwargs):
    """
    Append or update the page number in a querystring.
    """
    querydict = request.GET.copy()
    for k, v in kwargs.items():
        if v is not None:
            querydict[k] = v
        elif k in querydict:
            querydict.pop(k)

    querystring = querydict.urlencode(safe="/")
    return "?" + querystring if querystring else ""


@register.filter()
def render_json(value):
    """
    Render a dictionary as formatted JSON.
    """
    return json.dumps(value, indent=4, sort_keys=True)


@register.filter()
def render_yaml(value):
    """
    Render a dictionary as formatted YAML.
    """
    return yaml.dump(
        json.loads(json.dumps(value)), explicit_start=True,
        explicit_end=False, indent=2
    )


@register.filter()
def title_with_uppers(value):
    """
    Render a title without touching letters that are already uppercased.
    """
    if not isinstance(value, str):
        value = str(value)
    return " ".join([word[0].upper() + word[1:] for word in value.split()])


@register.filter()
def meta(instance, attr):
    """
    Returns the specified `Meta` attribute of a model (Django does not allow
    that by default).
    """
    return getattr(instance._meta, attr, "")


@register.filter()
def content_type(instance):
    """
    Returns the `ContentType` for the given object.
""" return ContentType.objects.get_for_model(instance) @register.filter() def content_type_id(instance): """ Return the `ContentType` ID for the given object. """ content_type = ContentType.objects.get_for_model(instance) if content_type: return content_type.pk else: return None @register.inclusion_tag("utils/templatetags/tag.html") def tag(tag, url_name=None): """ Render a tag and a URL to filter by it if the base URL is provided. """ return {"tag": tag, "url_name": url_name} @register.filter() def foreground_color(value): """ Return black (#000000) or white (#ffffff) given a background color in RRGGBB format. """ value = value.lower().strip("#") if not re.match("^[0-9a-f]{6}$", value): return "" r, g, b = [int(value[c : c + 2], 16) for c in (0, 2, 4)] if r * 0.299 + g * 0.587 + b * 0.114 > 186: return "#000000" else: return "#ffffff" @register.filter() def get_docs(model): """ Render and return documentation for the given model. """ path = f"{settings.DOCS_DIR}/models/{model._meta.app_label}/{model._meta.model_name}.md" try: with open(path, encoding="utf-8") as docfile: content = docfile.read() except FileNotFoundError: return f"Unable to load documentation, file not found: {path}" except IOError: return f"Unable to load documentation, error reading file: {path}" return mark_safe(markdown(content)) @register.filter(expects_localtime=True) def date_span(date_value): """ Returns the date in a HTML span formatted as short date with a long date format as the title. """ if not date_value: return "" if type(date_value) is str: date_value = datetime.datetime.strptime(date_value, "%Y-%m-%d").date() if type(date_value) is datetime.date: long = date(date_value, settings.DATE_FORMAT) short = date(date_value, settings.SHORT_DATE_FORMAT) else: long = date(date_value, settings.DATETIME_FORMAT) short = date(date_value, settings.SHORT_DATETIME_FORMAT) return mark_safe(f'<span title="{long}">{short}</span>') @register.simple_tag(takes_context=True) def missing_sessions(context, autonomous_system): if "context_as" not in context: return False ix = autonomous_system.get_shared_internet_exchange_points(context["context_as"]) for i in ix: if autonomous_system.get_missing_peering_sessions(context["context_as"], i): return True return False @register.filter def doc_version(version): if "-dev" in version: return "latest" else: return version
{ "content_hash": "036bcc59d0b9ea590331f2e6b631469b", "timestamp": "", "source": "github", "line_count": 259, "max_line_length": 92, "avg_line_length": 25.714285714285715, "alnum_prop": 0.633033033033033, "repo_name": "respawner/peering-manager", "id": "c3278a9114f79779debd9e216e655a2f5e47c723", "size": "6660", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "utils/templatetags/helpers.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "147540" }, { "name": "HTML", "bytes": "199981" }, { "name": "JavaScript", "bytes": "691695" }, { "name": "Python", "bytes": "1075126" }, { "name": "Shell", "bytes": "3446" } ], "symlink_target": "" }
from unittest import TestCase

import numpy as np
import pytest

from diffprivlib.validation import clip_to_norm


class TestClipToNorm(TestCase):
    def test_incorrect_parameterisation(self):
        with self.assertRaises(TypeError):
            clip_to_norm([1, 2, 3], 1)

        with self.assertRaises(ValueError):
            clip_to_norm(np.array([1, 2, 3]), 1)

        with self.assertRaises(TypeError):
            clip_to_norm(np.ones((5, 1)), complex(1, 2))

        with self.assertRaises(TypeError):
            clip_to_norm(np.ones((5, 1)), "1")

        with self.assertRaises(ValueError):
            clip_to_norm(np.ones((5, 1)), 0)

        with self.assertRaises(ValueError):
            clip_to_norm(np.ones((5, 1)), -1)

    def test_simple(self):
        X = np.ones(shape=(5, 1))

        X2 = clip_to_norm(X, 0.5)
        self.assertTrue(np.all(X2 == X / 2))

        X3 = clip_to_norm(X, 2)
        self.assertTrue(np.all(X3 == X))

        X4 = X.copy()
        X4[0, 0] = 2
        X5 = clip_to_norm(X4, 1)
        self.assertTrue(np.all(X5 == X))

    @pytest.mark.filterwarnings('ignore: numpy.ufunc size changed')
    def test_iris(self):
        from sklearn import datasets

        dataset = datasets.load_iris()
        X_train, y_train = dataset.data, dataset.target

        norms = np.linalg.norm(X_train, axis=1)
        clip = (norms[0] + norms[1]) / 2

        X_clipped = clip_to_norm(X_train, clip)
        clipped_norms = np.linalg.norm(X_clipped, axis=1)
        self.assertLessEqual(clipped_norms[0], norms[0])
        self.assertLessEqual(clipped_norms[1], norms[1])
        self.assertTrue(np.isclose(clipped_norms[0], clip) or
                        np.isclose(clipped_norms[1], clip))
{ "content_hash": "1b3a98ea23555d061b8f5d67607fd9ed", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 97, "avg_line_length": 31.072727272727274, "alnum_prop": 0.5915740198946753, "repo_name": "IBM/differential-privacy-library", "id": "c309df0e1e09960c40977511ea69342406b9c93d", "size": "1709", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "tests/test_clip_to_norm.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "580374" } ], "symlink_target": "" }
def parse_environment(raw_environment):
    environment = {}
    for line in raw_environment.split('\n'):
        line = line.strip()

        if not line:
            continue

        if line.startswith('#'):
            continue

        try:
            key, value = line.split(':', 1)
        except ValueError:
            raise ValueError('Environment must be in key: value format')

        environment[key] = value.strip()
    return environment
{ "content_hash": "c172983425454ba918a1f4460fd33cf8", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 72, "avg_line_length": 26.705882352941178, "alnum_prop": 0.5572687224669604, "repo_name": "mhahn/stacker", "id": "bce73e9ec0d06c8b95b4477eba117ab6e049d3b7", "size": "456", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "stacker/environment.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Makefile", "bytes": "187" }, { "name": "Python", "bytes": "250329" }, { "name": "Shell", "bytes": "216" } ], "symlink_target": "" }
__author__ = 'Daniel Dittenhafer'
__version__ = '0.1'
__date__ = '2015-03-23'

import pandas as pd
import gviz_data_table as gv
import numpy
import os
from datetime import datetime


def ToGvizDataTable(dataframe):
    table = gv.Table()
    dt = dataframe.dtypes
    # Loop to add the columns from the dataframe to the gviz datatable
    for col in dataframe.columns.values:
        gvdt = dt[col]
        # Map pandas/numpy dtypes to the Python types gviz expects.
        if dt[col] == object:
            gvdt = str
        elif dt[col] == "float64":
            gvdt = float
        elif dt[col] == "datetime64[ns]":
            gvdt = datetime
        elif dt[col] == "int64":
            gvdt = int

        # If a datatype was mapped, then add the column
        if gvdt is not None:
            table.add_column(col, gvdt)

    for row in dataframe.iterrows():
        vals = row[1].values
        newVals = []
        for v in vals:
            nv = v
            if (type(v) is float) and numpy.isnan(v):
                nv = None
            elif type(v) is long:
                nv = int(v)
            elif type(v) is pd.tslib.Timestamp:
                nv = datetime(v.year, v.month, v.day)
            newVals.append(nv)

        table.append(newVals)

    return table


def parseDateYearMonth(year, month):
    """
    Helper method for converting individual year and month columns into a
    first of the month date.

    :param year:
    :param month:
    :return: the first of the month, year datetime object
    """
    if type(year) is str:
        year = int(year)

    if type(month) is str:
        month = int(month)

    if numpy.isnan(year) or numpy.isnan(month):
        dt = None
    else:
        dt = datetime(year, month, 1)

    return dt


def main():
    """Our cheap unit test main function."""
    # dataFile = "C:\Code\R\IS608-VizAnalytics\FinalProject\Data\Natality, 2007-2013-StateCounty.txt"
    # dataFile = "C:\Code\R\IS608-VizAnalytics\FinalProject\Data\LA-Natality-Combined.csv"
    # dataFile = "C:\Code\R\IS608-VizAnalytics\FinalProject\Data\LA-Natality-Census-Combined.csv"
    # dataFile = "C:\Code\R\IS608-VizAnalytics\FinalProject\Data\LA-Natality-Census-Age-Combined.csv"
    fileFieldMap = {
        "C:\Code\R\IS608-VizAnalytics\FinalProject\Data\LA-Natality-Census-Combined.csv":
            ["State", "Date", "UnemploymentRate", "BirthsPer1000Pop"],
        "C:\Code\R\IS608-VizAnalytics\FinalProject\Data\LA-Natality-Census-Age-Combined.csv":
            ["Age.of.Mother", "Date", "UnemploymentRate", "BirthsPer1000Pop"]}
    # "StateAgeOfMother", "State"

    for k, v in fileFieldMap.iteritems():
        # Load the data and prep the Date column
        data = pd.read_table(k, sep=",",
                             parse_dates={'Date': ["Year.Code", "Month.Code"]},
                             date_parser=parseDateYearMonth)
        data["Date"] = pd.to_datetime(data["Date"])

        # Aggregate the births
        dataStateSum = data.groupby(v)["Births"].sum().reset_index()
        # dataStateSum = data.reset_index()
        print(dataStateSum.head())

        # Call our helper function
        dt = ToGvizDataTable(dataStateSum)

        # Convert to the JSON encoding
        dtJson = dt.encode()

        # Save to a file
        with open(os.path.splitext(k)[0] + ".json", "w") as text_file:
            text_file.write(dtJson)


# This is the main of the program.
if __name__ == "__main__":
    main()
{ "content_hash": "25f393eb2d42427f39850e8e4e686e8c", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 195, "avg_line_length": 33, "alnum_prop": 0.5971479500891266, "repo_name": "dwdii/PanGviz", "id": "1766c94cc33901350be21b7d247cf13214eae410", "size": "3366", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pangviz.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "3366" } ], "symlink_target": "" }
class Output(object):
    """Represents a generic output for a solver"""

    def __init__(self, output=None, errors=None):
        self._output = output  # Variable in which results are stored
        self._errors = errors  # The errors thrown by the solver

    def get_errors(self):
        """Get error string"""
        return self._errors

    def get_output(self):
        """Get output string"""
        return self._output

    def set_errors(self, errors):
        """Set error string"""
        self._errors = errors

    def set_output(self, output):
        """Set output string"""
        self._output = output

    def _parse(self):
        """This method has to be implemented by subclasses to parse a solver output"""
        pass
{ "content_hash": "e5033d32719e093f8bac0fcf249bce26", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 87, "avg_line_length": 29.53846153846154, "alnum_prop": 0.5807291666666666, "repo_name": "SimoneLucia/EmbASP-Python", "id": "11549abc24e35eab43aa3384d4ba7b8a1187d681", "size": "768", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "base/Output.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "85601" } ], "symlink_target": "" }
"""Tests mnist batchnormalization used as learned scale factor.""" # to run, THRESHOLD=0.05 WITH_BN=1 EPOCHS=5 TRAIN=1 python example_mnist_bn.py from __future__ import absolute_import from __future__ import division from __future__ import print_function from collections import defaultdict import os import numpy as np from six.moves import zip from tensorflow.keras import callbacks import tensorflow.keras.backend as K from tensorflow.keras.datasets import mnist from tensorflow.keras.layers import * from tensorflow.keras.models import Model from tensorflow.keras.optimizers import * from tensorflow.keras.utils import to_categorical from qkeras import * np.random.seed(42) TRAIN = 1 NB_EPOCH = 2 BATCH_SIZE = 64 VERBOSE = 1 NB_CLASSES = 10 OPTIMIZER = Adam(lr=0.0001) VALIDATION_SPLIT = 0.1 WITH_BN = 1 THRESHOLD = 0.1 class LearningRateAdjuster(callbacks.Callback): def __init__(self): self.learning_rate_factor = 1.0 pass def on_epoch_end(self, epochs, logs): max_variance = -1 for layer in self.model.layers: if layer.__class__.__name__ in [ "BatchNormalization", "QBatchNormalization" ]: variance = np.max(layer.get_weights()[-1]) if variance > max_variance: max_variance = variance if max_variance > 32 and self.learning_rate_factor < 100: learning_rate = K.get_value(self.model.optimizer.learning_rate) self.learning_rate_factor /= 2.0 print("***** max_variance is {} / lr is {} *****".format( max_variance, learning_rate)) K.eval(K.update( self.model.optimizer.learning_rate, learning_rate / 2.0 )) lra = LearningRateAdjuster() (x_train, y_train), (x_test, y_test) = mnist.load_data() x_train = x_train.reshape(x_train.shape + (1,)).astype("float32") x_test = x_test.reshape(x_test.shape + (1,)).astype("float32") x_train /= 256.0 x_test /= 256.0 print(x_train.shape[0], "train samples") print(x_test.shape[0], "test samples") print(y_train[0:10]) y_train = to_categorical(y_train, NB_CLASSES) y_test = to_categorical(y_test, NB_CLASSES) x = x_in = Input(x_train.shape[1:], name="input") #x = QActivation("quantized_relu_po2(4,1)", name="acti")(x) x = QConv2D( 128, (3, 3), strides=1, kernel_quantizer=ternary(threshold=THRESHOLD), #quantized_po2(4, 1), bias_quantizer=quantized_bits(4,2,0) if not WITH_BN else None, bias_range=4 if not WITH_BN else None, use_bias=not WITH_BN, name="conv2d_0_m")(x) if WITH_BN: x = QBatchNormalization( gamma_quantizer=quantized_relu_po2(4,8), variance_quantizer=quantized_relu_po2(6), beta_quantizer=quantized_po2(4, 4), gamma_range=8, beta_range=4, name="bn0")(x) x = QActivation("quantized_relu(3,1)", name="act0_m")(x) x = MaxPooling2D(2, 2, name="mp_0")(x) x = QConv2D( 256, (3, 3), strides=1, kernel_quantizer=ternary(threshold=THRESHOLD), #quantized_bits(2,0,1), bias_quantizer=quantized_bits(4,2,1) if not WITH_BN else None, bias_range=4 if not WITH_BN else None, use_bias=not WITH_BN, name="conv2d_1_m")(x) if WITH_BN: x = QBatchNormalization( gamma_quantizer=quantized_relu_po2(4,8), variance_quantizer=quantized_relu_po2(6), beta_quantizer=quantized_po2(4, 4), gamma_range=8, beta_range=4, name="bn1")(x) x = QActivation("quantized_relu(3,1)", name="act1_m")(x) x = MaxPooling2D(2, 2, name="mp_1")(x) x = QConv2D( 128, (3, 3), strides=1, kernel_quantizer=ternary(threshold=THRESHOLD), #quantized_bits(2,0,1), bias_quantizer=quantized_bits(4,2,1) if not WITH_BN else None, bias_range=4 if not WITH_BN else None, use_bias=not WITH_BN, name="conv2d_2_m")(x) if WITH_BN: x = QBatchNormalization( gamma_quantizer=quantized_relu_po2(4,8), variance_quantizer=quantized_relu_po2(6), 
      beta_quantizer=quantized_po2(4, 4),
      gamma_range=8,
      beta_range=4,
      name="bn2")(x)
x = QActivation("quantized_relu(3,1)", name="act2_m")(x)
x = MaxPooling2D(2, 2, name="mp_2")(x)
x = Flatten()(x)
x = QDense(
    NB_CLASSES,
    kernel_quantizer=quantized_ulaw(4, 0, 1),
    bias_quantizer=quantized_bits(4, 0, 1),
    name="dense")(x)
x = Activation("softmax", name="softmax")(x)

model = Model(inputs=[x_in], outputs=[x])
model.summary()

model.compile(
    loss="categorical_crossentropy", optimizer=OPTIMIZER, metrics=["accuracy"])

if TRAIN:
  history = model.fit(
      x_train, y_train, batch_size=BATCH_SIZE,
      epochs=NB_EPOCH, initial_epoch=1, verbose=VERBOSE,
      validation_split=VALIDATION_SPLIT,
      callbacks=[])  # lra])

outputs = []
output_names = []

for layer in model.layers:
  if layer.__class__.__name__ in [
      "QActivation", "QBatchNormalization", "Activation", "QDense",
      "QConv2D", "QDepthwiseConv2D"
  ]:
    output_names.append(layer.name)
    outputs.append(layer.output)

model_debug = Model(inputs=[x_in], outputs=outputs)

outputs = model_debug.predict(x_train)

print("{:30} {: 8.4f} {: 8.4f}".format(
    "input", np.min(x_train), np.max(x_train)))

for n, p in zip(output_names, outputs):
  print("{:30} {: 8.4f} {: 8.4f}".format(n, np.min(p), np.max(p)), end="")
  layer = model.get_layer(n)
  for i, weights in enumerate(layer.get_weights()):
    if layer.get_quantizers()[i]:
      weights = K.eval(layer.get_quantizers()[i](K.constant(weights)))
    print(" ({: 8.4f} {: 8.4f})".format(np.min(weights), np.max(weights)),
          end="")
  print("")

score = model.evaluate(x_test, y_test, verbose=False)
print("Test score:", score[0])
print("Test accuracy:", score[1])

print_qstats(model)
{ "content_hash": "b4992a88f254ff9765eb4215dc97b575", "timestamp": "", "source": "github", "line_count": 192, "max_line_length": 79, "avg_line_length": 29.598958333333332, "alnum_prop": 0.6475453105754003, "repo_name": "google/qkeras", "id": "48fd53df44e2a7343a30001b1b571e1517bb80ec", "size": "6340", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/example_mnist_bn.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jupyter Notebook", "bytes": "129705" }, { "name": "Python", "bytes": "1004364" } ], "symlink_target": "" }
import pandas as pd
import pytest
from woodwork.column_schema import ColumnSchema
from woodwork.logical_types import Boolean, Datetime, Timedelta

from featuretools.feature_base import Feature
from featuretools.primitives.base.transform_primitive_base import (
    make_trans_primitive
)


# Check the custom trans primitives description
def test_description_make_trans_primitives():
    def pd_time_since(array, moment):
        return (moment - pd.DatetimeIndex(array)).values

    TimeSince = make_trans_primitive(
        function=pd_time_since,
        input_types=[
            [ColumnSchema(logical_type=Datetime, semantic_tags={'time_index'})],
            [ColumnSchema(logical_type=Datetime)]
        ],
        return_type=ColumnSchema(logical_type=Timedelta),
        uses_calc_time=True,
        name="time_since"
    )

    def pd_time_since(array, moment):
        """Calculates time since the cutoff time."""
        return (moment - pd.DatetimeIndex(array)).values

    TimeSince2 = make_trans_primitive(
        function=pd_time_since,
        input_types=[
            [ColumnSchema(logical_type=Datetime, semantic_tags={'time_index'})],
            [ColumnSchema(logical_type=Datetime)]
        ],
        return_type=ColumnSchema(logical_type=Timedelta),
        uses_calc_time=True,
        name="time_since"
    )

    TimeSince3 = make_trans_primitive(
        function=pd_time_since,
        input_types=[
            [ColumnSchema(logical_type=Datetime, semantic_tags={'time_index'})],
            [ColumnSchema(logical_type=Datetime)]
        ],
        return_type=ColumnSchema(logical_type=Timedelta),
        description="Calculates time since the cutoff time.",
        name="time_since"
    )

    assert TimeSince.__doc__ != TimeSince2.__doc__
    assert TimeSince2.__doc__ == TimeSince3.__doc__


def test_make_transform_restricts_time_keyword():
    make_trans_primitive(
        lambda x, time=False: x,
        [ColumnSchema(logical_type=Datetime)],
        ColumnSchema(semantic_tags={'numeric'}),
        name="AllowedPrimitive",
        description="This primitive should be accepted",
        uses_calc_time=True)

    error_text = "'time' is a restricted keyword. Please use a different keyword."
    with pytest.raises(ValueError, match=error_text):
        make_trans_primitive(
            lambda x, time=False: x,
            [ColumnSchema(logical_type=Datetime)],
            ColumnSchema(semantic_tags={'numeric'}),
            name="BadPrimitive",
            description="This primitive should error")


def test_make_transform_restricts_time_arg():
    make_trans_primitive(
        lambda time: time,
        [ColumnSchema(logical_type=Datetime)],
        ColumnSchema(semantic_tags={'numeric'}),
        name="AllowedPrimitive",
        description="This primitive should be accepted",
        uses_calc_time=True)

    error_text = "'time' is a restricted keyword. Please use a different keyword."
    with pytest.raises(ValueError, match=error_text):
        make_trans_primitive(
            lambda time: time,
            [ColumnSchema(logical_type=Datetime)],
            ColumnSchema(semantic_tags={'numeric'}),
            name="BadPrimitive",
            description="This primitive should error")


def test_make_transform_sets_kwargs_correctly(es):
    def pd_is_in(array, list_of_outputs=None):
        if list_of_outputs is None:
            list_of_outputs = []
        return pd.Series(array).isin(list_of_outputs)

    def isin_generate_name(self, base_feature_names):
        return u"%s.isin(%s)" % (base_feature_names[0],
                                 str(self.kwargs['list_of_outputs']))

    IsIn = make_trans_primitive(
        pd_is_in,
        [ColumnSchema()],
        ColumnSchema(logical_type=Boolean),
        name="is_in",
        description="For each value of the base feature, checks whether it is "
                    "in a list that is provided.",
        cls_attributes={"generate_name": isin_generate_name})

    isin_1_list = ["toothpaste", "coke_zero"]
    isin_1_base_f = Feature(es['log'].ww['product_id'])
    isin_1 = Feature(isin_1_base_f, primitive=IsIn(list_of_outputs=isin_1_list))

    isin_2_list = ["coke_zero"]
    isin_2_base_f = Feature(es['log'].ww['session_id'])
    isin_2 = Feature(isin_2_base_f, primitive=IsIn(list_of_outputs=isin_2_list))

    assert isin_1_base_f == isin_1.base_features[0]
    assert isin_1_list == isin_1.primitive.kwargs['list_of_outputs']
    assert isin_2_base_f == isin_2.base_features[0]
    assert isin_2_list == isin_2.primitive.kwargs['list_of_outputs']
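# An added sketch (hypothetical primitive, mirroring the pattern tested
# above): the callable handed to make_trans_primitive remains an ordinary
# function, so its behaviour can be checked directly before featuretools
# wraps it into a primitive class.
def test_make_trans_primitive_sketch():
    def pd_absolute(array):
        """Takes the absolute value of each element."""
        return pd.Series(array).abs()

    Absolute = make_trans_primitive(
        function=pd_absolute,
        input_types=[ColumnSchema(semantic_tags={'numeric'})],
        return_type=ColumnSchema(semantic_tags={'numeric'}),
        name="absolute")

    assert Absolute.name == "absolute"
    assert list(pd_absolute([-1, 2, -3])) == [1, 2, 3]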
{ "content_hash": "b8be7e98f8f88062fe15ad695b2b43e4", "timestamp": "", "source": "github", "line_count": 124, "max_line_length": 83, "avg_line_length": 36.975806451612904, "alnum_prop": 0.6386041439476554, "repo_name": "Featuretools/featuretools", "id": "77a5ecbbcd6a3089a5b01cde135d3237c82dc3ce", "size": "4585", "binary": false, "copies": "1", "ref": "refs/heads/latest-dep-update-03d11f0", "path": "featuretools/tests/primitive_tests/test_make_trans_primitives.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Jupyter Notebook", "bytes": "3340" }, { "name": "Makefile", "bytes": "736" }, { "name": "Python", "bytes": "921333" }, { "name": "Shell", "bytes": "511" } ], "symlink_target": "" }
"""Read and write notebook files as XML. Authors: * Brian Granger """ #----------------------------------------------------------------------------- # Copyright (C) 2008-2011 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from base64 import encodestring, decodestring import warnings from xml.etree import ElementTree as ET from IPython.utils.py3compat import unicode_type from .rwbase import NotebookReader, NotebookWriter from .nbbase import ( new_code_cell, new_text_cell, new_worksheet, new_notebook, new_output, new_metadata ) #----------------------------------------------------------------------------- # Code #----------------------------------------------------------------------------- def indent(elem, level=0): i = "\n" + level*" " if len(elem): if not elem.text or not elem.text.strip(): elem.text = i + " " if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: indent(elem, level+1) if not elem.tail or not elem.tail.strip(): elem.tail = i else: if level and (not elem.tail or not elem.tail.strip()): elem.tail = i def _get_text(e, tag): sub_e = e.find(tag) if sub_e is None: return None else: return sub_e.text def _set_text(nbnode, attr, parent, tag): if attr in nbnode: e = ET.SubElement(parent, tag) e.text = nbnode[attr] def _get_int(e, tag): sub_e = e.find(tag) if sub_e is None: return None else: return int(sub_e.text) def _set_int(nbnode, attr, parent, tag): if attr in nbnode: e = ET.SubElement(parent, tag) e.text = unicode_type(nbnode[attr]) def _get_bool(e, tag): sub_e = e.find(tag) if sub_e is None: return None else: return bool(int(sub_e.text)) def _set_bool(nbnode, attr, parent, tag): if attr in nbnode: e = ET.SubElement(parent, tag) if nbnode[attr]: e.text = u'1' else: e.text = u'0' def _get_binary(e, tag): sub_e = e.find(tag) if sub_e is None: return None else: return decodestring(sub_e.text) def _set_binary(nbnode, attr, parent, tag): if attr in nbnode: e = ET.SubElement(parent, tag) e.text = encodestring(nbnode[attr]) class XMLReader(NotebookReader): def reads(self, s, **kwargs): root = ET.fromstring(s) return self.to_notebook(root, **kwargs) def to_notebook(self, root, **kwargs): warnings.warn('The XML notebook format is no longer supported, ' 'please convert your notebooks to JSON.', DeprecationWarning) nbname = _get_text(root,u'name') nbauthor = _get_text(root,u'author') nbemail = _get_text(root,u'email') nblicense = _get_text(root,u'license') nbcreated = _get_text(root,u'created') nbsaved = _get_text(root,u'saved') worksheets = [] for ws_e in root.find(u'worksheets').getiterator(u'worksheet'): wsname = _get_text(ws_e,u'name') cells = [] for cell_e in ws_e.find(u'cells').getiterator(): if cell_e.tag == u'codecell': input = _get_text(cell_e,u'input') prompt_number = _get_int(cell_e,u'prompt_number') collapsed = _get_bool(cell_e,u'collapsed') language = _get_text(cell_e,u'language') outputs = [] for output_e in cell_e.find(u'outputs').getiterator(u'output'): output_type = _get_text(output_e,u'output_type') output_text = _get_text(output_e,u'text') output_png = _get_binary(output_e,u'png') output_jpeg = _get_binary(output_e,u'jpeg') output_svg = _get_text(output_e,u'svg') output_html = 
_get_text(output_e,u'html') output_latex = _get_text(output_e,u'latex') output_json = _get_text(output_e,u'json') output_javascript = _get_text(output_e,u'javascript') out_prompt_number = _get_int(output_e,u'prompt_number') etype = _get_text(output_e,u'etype') evalue = _get_text(output_e,u'evalue') traceback = [] traceback_e = output_e.find(u'traceback') if traceback_e is not None: for frame_e in traceback_e.getiterator(u'frame'): traceback.append(frame_e.text) if len(traceback) == 0: traceback = None output = new_output(output_type=output_type,output_png=output_png, output_text=output_text, output_svg=output_svg, output_html=output_html, output_latex=output_latex, output_json=output_json, output_javascript=output_javascript, output_jpeg=output_jpeg, prompt_number=out_prompt_number, etype=etype, evalue=evalue, traceback=traceback ) outputs.append(output) cc = new_code_cell(input=input,prompt_number=prompt_number, language=language,outputs=outputs,collapsed=collapsed) cells.append(cc) if cell_e.tag == u'htmlcell': source = _get_text(cell_e,u'source') rendered = _get_text(cell_e,u'rendered') cells.append(new_text_cell(u'html', source=source, rendered=rendered)) if cell_e.tag == u'markdowncell': source = _get_text(cell_e,u'source') rendered = _get_text(cell_e,u'rendered') cells.append(new_text_cell(u'markdown', source=source, rendered=rendered)) ws = new_worksheet(name=wsname,cells=cells) worksheets.append(ws) md = new_metadata(name=nbname) nb = new_notebook(metadata=md,worksheets=worksheets) return nb _reader = XMLReader() reads = _reader.reads read = _reader.read to_notebook = _reader.to_notebook
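# A small self-contained sketch (added, not part of the original module) of
# the indent() helper defined above: it mutates an ElementTree in place so
# that tostring() emits line-broken, readable XML instead of one run-on line.
if __name__ == '__main__':
    root = ET.Element(u'notebook')
    ET.SubElement(root, u'name').text = u'demo'
    ET.SubElement(root, u'author').text = u'someone'
    indent(root)
    print(ET.tostring(root))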
{ "content_hash": "15ec5992b1ec6375e6db1f76b671e956", "timestamp": "", "source": "github", "line_count": 189, "max_line_length": 94, "avg_line_length": 36.07936507936508, "alnum_prop": 0.5002199736031676, "repo_name": "omni5cience/django-inlineformfield", "id": "f9ca12496f8581e700fdad50980a9e5002289958", "size": "6819", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": ".tox/py27/lib/python2.7/site-packages/IPython/nbformat/v2/nbxml.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "43622" }, { "name": "Groff", "bytes": "3667" }, { "name": "HTML", "bytes": "108126" }, { "name": "JavaScript", "bytes": "853457" }, { "name": "Python", "bytes": "10506732" }, { "name": "Shell", "bytes": "3801" }, { "name": "Smarty", "bytes": "21023" } ], "symlink_target": "" }
""" Exceptions and handlers for tools. FIXME: These are used by tool scripts, not the framework, and should not live in this package. """ class UCSCLimitException( Exception ): pass class UCSCOutWrapper( object ): """File-like object that throws an exception if it encounters the UCSC limit error lines""" def __init__( self, other ): self.other = iter( other ) # Need one line of lookahead to be sure we are hitting the limit message self.lookahead = None def __iter__( self ): return self def next( self ): if self.lookahead is None: line = self.other.next() else: line = self.lookahead self.lookahead = None if line.startswith( "----------" ): next_line = self.other.next() if next_line.startswith( "Reached output limit" ): raise UCSCLimitException( next_line.strip() ) else: self.lookahead = next_line return line def readline(self): return self.next()
{ "content_hash": "8d049344fea268bcaf4f13d78758ed78", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 95, "avg_line_length": 32.39393939393939, "alnum_prop": 0.5855940130963517, "repo_name": "dbcls/dbcls-galaxy", "id": "81479a60bc2504b07668101db34139126434859e", "size": "1069", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/galaxy/tools/exception_handling.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "646729" }, { "name": "Perl", "bytes": "40970" }, { "name": "Python", "bytes": "2646651" }, { "name": "Ruby", "bytes": "145028" }, { "name": "Shell", "bytes": "21457" } ], "symlink_target": "" }
from test_framework.test_framework import InfinitumTestFramework from test_framework.util import * class InvalidateTest(InfinitumTestFramework): def __init__(self): super().__init__() self.setup_clean_chain = True self.num_nodes = 3 def setup_network(self): self.nodes = [] self.is_network_split = False self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"])) self.nodes.append(start_node(1, self.options.tmpdir, ["-debug"])) self.nodes.append(start_node(2, self.options.tmpdir, ["-debug"])) def run_test(self): print("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:") print("Mine 4 blocks on Node 0") self.nodes[0].generate(4) assert(self.nodes[0].getblockcount() == 4) besthash = self.nodes[0].getbestblockhash() print("Mine competing 6 blocks on Node 1") self.nodes[1].generate(6) assert(self.nodes[1].getblockcount() == 6) print("Connect nodes to force a reorg") connect_nodes_bi(self.nodes,0,1) sync_blocks(self.nodes[0:2]) assert(self.nodes[0].getblockcount() == 6) badhash = self.nodes[1].getblockhash(2) print("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain") self.nodes[0].invalidateblock(badhash) newheight = self.nodes[0].getblockcount() newhash = self.nodes[0].getbestblockhash() if (newheight != 4 or newhash != besthash): raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight)) print("\nMake sure we won't reorg to a lower work chain:") connect_nodes_bi(self.nodes,1,2) print("Sync node 2 to node 1 so both have 6 blocks") sync_blocks(self.nodes[1:3]) assert(self.nodes[2].getblockcount() == 6) print("Invalidate block 5 on node 1 so its tip is now at 4") self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5)) assert(self.nodes[1].getblockcount() == 4) print("Invalidate block 3 on node 2, so its tip is now 2") self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3)) assert(self.nodes[2].getblockcount() == 2) print("..and then mine a block") self.nodes[2].generate(1) print("Verify all nodes are at the right height") time.sleep(5) for i in range(3): print(i,self.nodes[i].getblockcount()) assert(self.nodes[2].getblockcount() == 3) assert(self.nodes[0].getblockcount() == 4) node1height = self.nodes[1].getblockcount() if node1height < 4: raise AssertionError("Node 1 reorged to a lower height: %d"%node1height) if __name__ == '__main__': InvalidateTest().main()
{ "content_hash": "fd2c6b796f282f98cc5e43f1b058fa21", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 95, "avg_line_length": 42.28358208955224, "alnum_prop": 0.6191316625485351, "repo_name": "fcecin/infinitum", "id": "76877528d3eedd2625eb7735bb04539050af7e5a", "size": "3081", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "qa/rpc-tests/invalidateblock.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "679983" }, { "name": "C++", "bytes": "4544909" }, { "name": "CSS", "bytes": "1127" }, { "name": "Groff", "bytes": "3870" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2102" }, { "name": "M4", "bytes": "175841" }, { "name": "Makefile", "bytes": "96055" }, { "name": "Objective-C", "bytes": "3783" }, { "name": "Objective-C++", "bytes": "7244" }, { "name": "Protocol Buffer", "bytes": "2316" }, { "name": "Python", "bytes": "882788" }, { "name": "QMake", "bytes": "2022" }, { "name": "Shell", "bytes": "34286" } ], "symlink_target": "" }
''' NVVM is not supported in the simulator, but stubs are provided to allow tests to import correctly. ''' class NvvmSupportError(ImportError): pass class NVVM(object): def __init__(self): raise NvvmSupportError('NVVM not supported in the simulator') CompilationUnit = None llvm_to_ptx = None set_cuda_kernel = None fix_data_layout = None get_arch_option = None SUPPORTED_CC = None LibDevice = None NvvmError = None def is_available(): return False
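# A tiny usage sketch (added): under the simulator these stubs let imports
# succeed while failing loudly on any actual NVVM use.
if __name__ == '__main__':
    assert not is_available()
    try:
        NVVM()
    except NvvmSupportError as e:
        print(e)  # "NVVM not supported in the simulator"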
{ "content_hash": "33686df09116a16b7873f1ccec2a2445", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 77, "avg_line_length": 20.565217391304348, "alnum_prop": 0.7230443974630021, "repo_name": "stefanseefeld/numba", "id": "ce1b3897ba2f822352bc6e330acac6215ff70a9c", "size": "473", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "numba/cuda/simulator/cudadrv/nvvm.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "5535" }, { "name": "C", "bytes": "303376" }, { "name": "C++", "bytes": "17024" }, { "name": "Cuda", "bytes": "214" }, { "name": "HTML", "bytes": "98846" }, { "name": "Jupyter Notebook", "bytes": "110325" }, { "name": "Python", "bytes": "3946372" }, { "name": "Shell", "bytes": "2414" } ], "symlink_target": "" }
""" Peter Norvig's Python implementation http://norvig.com/spell-correct.html """ import re import collections def words(text): return re.findall('[a-z]+', text.lower()) def train(features): model = collections.defaultdict(lambda: 1) for f in features: model[f] += 1 return model NWORDS = train(words(file('big.txt').read())) alphabet = 'abcdefghijklmnopqrstuvwxyz' def edits1(word): splits = [(word[:i], word[i:]) for i in range(len(word) + 1)] deletes = [a + b[1:] for a, b in splits if b] transposes = [a + b[1] + b[0] + b[2:] for a, b in splits if len(b) > 1] replaces = [a + c + b[1:] for a, b in splits for c in alphabet if b] inserts = [a + c + b for a, b in splits for c in alphabet] return set(deletes + transposes + replaces + inserts) def known_edits2(word): return set(e2 for e1 in edits1(word) for e2 in edits1(e1) if e2 in NWORDS) def known(words): return set(w for w in words if w in NWORDS) def correct(word): candidates = (known([word]) or known(edits1(word)) or known_edits2(word) or [word]) return max(candidates, key=NWORDS.get)
{ "content_hash": "0fc9e533c6dbdb2cb7d97ec9aa9760cb", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 78, "avg_line_length": 25.148936170212767, "alnum_prop": 0.6116751269035533, "repo_name": "danielcnorris/haskell-spelling-corrector", "id": "8c2d8f82b8a23c25076c0330b8c8bd5f8b8dfb90", "size": "1182", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "speller.py", "mode": "33188", "license": "mit", "language": [ { "name": "Haskell", "bytes": "6172" }, { "name": "Python", "bytes": "1182" } ], "symlink_target": "" }
"""django_rest_framework_tutorial URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from rest_framework.routers import DefaultRouter from musics.views import MusicViewSet from shares.views import ShareViewSet router = DefaultRouter() router.register(r'music', MusicViewSet, base_name='music') router.register(r'shares', ShareViewSet, base_name='share') urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^api/', include(router.urls, namespace='api'), name='api'), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ]
{ "content_hash": "8b79d687cb08318435e77c7090718b74", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 82, "avg_line_length": 39.354838709677416, "alnum_prop": 0.7221311475409836, "repo_name": "twtrubiks/django-rest-framework-tutorial", "id": "1eb84ce9d5898bb5a58cb67a3cea86c45f4099e9", "size": "1220", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "django_rest_framework_tutorial/urls.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "17094" } ], "symlink_target": "" }
"""API renderers for accessing AFF4 objects.""" # pylint: disable=unused-import,g-bad-import-order from grr.lib import server_plugins # pylint: enable=unused-import,g-bad-import-order from grr.gui import api_aff4_object_renderers from grr.gui import api_call_renderers from grr.gui import api_value_renderers from grr.lib import aff4 from grr.lib import data_store from grr.lib import rdfvalue from grr.proto import api_pb2 class ApiAff4RendererArgs(rdfvalue.RDFProtoStruct): protobuf = api_pb2.ApiAff4RendererArgs class ApiAff4Renderer(api_call_renderers.ApiCallRenderer): """Renders AFF4 objects in JSON format. Query parameters interpretation depends on the type of the AFF4 object that's being fetched. See documentation on AFF4 object renderers for details. """ args_type = ApiAff4RendererArgs @classmethod def GetAdditionalArgsTypes(cls): results = {} for aff4_renderer_cls in (api_aff4_object_renderers. ApiAFF4ObjectRendererBase.classes.values()): results[aff4_renderer_cls.aff4_type] = aff4_renderer_cls.args_type return results additional_args_types = GetAdditionalArgsTypes def Render(self, args, token=None): aff4_object = aff4.FACTORY.Open(args.aff4_path, token=token) rendered_data = api_aff4_object_renderers.RenderAFF4Object( aff4_object, [x.args for x in args.additional_args]) return rendered_data class ApiAff4IndexRendererArgs(rdfvalue.RDFProtoStruct): protobuf = api_pb2.ApiAff4IndexRendererArgs class ApiAff4IndexRenderer(api_call_renderers.ApiCallRenderer): """Returns list of children objects for the object with a given path.""" args_type = ApiAff4IndexRendererArgs def Render(self, args, token=None): encoded_urns = [] aff4_path = rdfvalue.RDFURN(args.aff4_path) index_prefix = "index:dir/" for predicate, _, timestamp in data_store.DB.ResolveRegex( aff4_path, index_prefix + ".+", token=token, timestamp=data_store.DB.NEWEST_TIMESTAMP, limit=1000000): urn = aff4_path.Add(predicate[len(index_prefix):]) encoded_urns.append([api_value_renderers.RenderValue(urn), timestamp]) return encoded_urns
{ "content_hash": "a552a3d5569a2bdda80dd159c4feb924", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 74, "avg_line_length": 30.791666666666668, "alnum_prop": 0.7293640054127198, "repo_name": "bgalehouse/grr", "id": "a5ccccd93e3862da15d565412cc1d62c29a49f67", "size": "2239", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "gui/api_plugins/aff4.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "227" }, { "name": "Batchfile", "bytes": "14993" }, { "name": "C", "bytes": "9062" }, { "name": "C++", "bytes": "55149" }, { "name": "CSS", "bytes": "12047" }, { "name": "Groff", "bytes": "444" }, { "name": "HTML", "bytes": "48624" }, { "name": "JavaScript", "bytes": "230351" }, { "name": "Makefile", "bytes": "5863" }, { "name": "Protocol Buffer", "bytes": "181723" }, { "name": "Python", "bytes": "4855590" }, { "name": "Ruby", "bytes": "4931" }, { "name": "Shell", "bytes": "45459" } ], "symlink_target": "" }
class Solution(object): def canJump(self, nums): """ :type nums: List[int] :rtype: bool """ # greedy # https://leetcode.com/articles/jump-game/ length = len(nums) begin = length - 1 for i in reversed(range(length - 1)): if i + nums[i] >= begin: begin = i return not begin
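# A short worked example of the greedy scan above (added, not part of the
# original solution file): ``begin`` walks left toward index 0 whenever
# position i can reach the current goal.
if __name__ == '__main__':
    s = Solution()
    print(s.canJump([2, 3, 1, 1, 4]))  # True: jump 0 -> 1 -> 4
    print(s.canJump([3, 2, 1, 0, 4]))  # False: every path stalls at index 3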
{ "content_hash": "1ce097765f1b27037541c88d5d4d30d6", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 50, "avg_line_length": 27.357142857142858, "alnum_prop": 0.47780678851174935, "repo_name": "qiyuangong/leetcode", "id": "0e8b9ec3aef9df65071e8d96dddf424763f5e505", "size": "383", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "python/055_Jump_Game.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "12220" }, { "name": "Java", "bytes": "128259" }, { "name": "Python", "bytes": "439314" } ], "symlink_target": "" }
import unittest2 as unittest import ast import math import itertools from stencil_kernel import * from stencil_python_front_end import * from stencil_unroll_neighbor_iter import * from stencil_convert import * from asp.util import * class BasicTests(unittest.TestCase): def test1(self): class TestKernel(StencilKernel): def kernel(self, in_grid, out_grid): for x in out_grid.interior_points(): for y in in_grid.neighbors(x, 1): out_grid[x] = out_grid[x] + in_grid[y] kernel = TestKernel() in_grid = StencilGrid([10,10]) out_grid = StencilGrid([10,10]) in_grid[2,3] = 1 in_grid[4,3] = 2 in_grid[3,2] = 3 in_grid[3,4] = 4 # SPECIALIZE AND RUN KERNEL kernel.kernel(in_grid, out_grid) # CHECK SOME CONDITION ON OUTPUT self.assertEqual(out_grid[3,3], 10) if __name__ == '__main__': unittest.main()
{ "content_hash": "5db818526fe49ca2bfce809420d6752c", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 62, "avg_line_length": 28.647058823529413, "alnum_prop": 0.5903490759753593, "repo_name": "richardxia/asp-multilevel-debug", "id": "5cacbb263f6ecb71950d647bb6e69f3c2c3ea8f1", "size": "974", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "demo/demo_backups/test1_1.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "102604" }, { "name": "Objective-C", "bytes": "6902" }, { "name": "Python", "bytes": "551438" }, { "name": "Shell", "bytes": "7110" } ], "symlink_target": "" }
from connector import channel from google3.cloud.graphite.mmv2.services.google.vmwareengine import private_cloud_pb2 from google3.cloud.graphite.mmv2.services.google.vmwareengine import ( private_cloud_pb2_grpc, ) from typing import List class PrivateCloud(object): def __init__( self, name: str = None, create_time: str = None, update_time: str = None, delete_time: str = None, expire_time: str = None, labels: dict = None, state: str = None, network_config: dict = None, management_cluster: dict = None, description: str = None, conditions: list = None, hcx: dict = None, nsx: dict = None, vcenter: dict = None, project: str = None, location: str = None, service_account_file: str = "", ): channel.initialize() self.name = name self.labels = labels self.network_config = network_config self.management_cluster = management_cluster self.description = description self.project = project self.location = location self.service_account_file = service_account_file def apply(self): stub = private_cloud_pb2_grpc.VmwareengineAlphaPrivateCloudServiceStub( channel.Channel() ) request = private_cloud_pb2.ApplyVmwareengineAlphaPrivateCloudRequest() if Primitive.to_proto(self.name): request.resource.name = Primitive.to_proto(self.name) if Primitive.to_proto(self.labels): request.resource.labels = Primitive.to_proto(self.labels) if PrivateCloudNetworkConfig.to_proto(self.network_config): request.resource.network_config.CopyFrom( PrivateCloudNetworkConfig.to_proto(self.network_config) ) else: request.resource.ClearField("network_config") if PrivateCloudManagementCluster.to_proto(self.management_cluster): request.resource.management_cluster.CopyFrom( PrivateCloudManagementCluster.to_proto(self.management_cluster) ) else: request.resource.ClearField("management_cluster") if Primitive.to_proto(self.description): request.resource.description = Primitive.to_proto(self.description) if Primitive.to_proto(self.project): request.resource.project = Primitive.to_proto(self.project) if Primitive.to_proto(self.location): request.resource.location = Primitive.to_proto(self.location) request.service_account_file = self.service_account_file response = stub.ApplyVmwareengineAlphaPrivateCloud(request) self.name = Primitive.from_proto(response.name) self.create_time = Primitive.from_proto(response.create_time) self.update_time = Primitive.from_proto(response.update_time) self.delete_time = Primitive.from_proto(response.delete_time) self.expire_time = Primitive.from_proto(response.expire_time) self.labels = Primitive.from_proto(response.labels) self.state = PrivateCloudStateEnum.from_proto(response.state) self.network_config = PrivateCloudNetworkConfig.from_proto( response.network_config ) self.management_cluster = PrivateCloudManagementCluster.from_proto( response.management_cluster ) self.description = Primitive.from_proto(response.description) self.conditions = PrivateCloudConditionsArray.from_proto(response.conditions) self.hcx = PrivateCloudHcx.from_proto(response.hcx) self.nsx = PrivateCloudNsx.from_proto(response.nsx) self.vcenter = PrivateCloudVcenter.from_proto(response.vcenter) self.project = Primitive.from_proto(response.project) self.location = Primitive.from_proto(response.location) def delete(self): stub = private_cloud_pb2_grpc.VmwareengineAlphaPrivateCloudServiceStub( channel.Channel() ) request = private_cloud_pb2.DeleteVmwareengineAlphaPrivateCloudRequest() request.service_account_file = self.service_account_file if Primitive.to_proto(self.name): request.resource.name = Primitive.to_proto(self.name) if 
Primitive.to_proto(self.labels): request.resource.labels = Primitive.to_proto(self.labels) if PrivateCloudNetworkConfig.to_proto(self.network_config): request.resource.network_config.CopyFrom( PrivateCloudNetworkConfig.to_proto(self.network_config) ) else: request.resource.ClearField("network_config") if PrivateCloudManagementCluster.to_proto(self.management_cluster): request.resource.management_cluster.CopyFrom( PrivateCloudManagementCluster.to_proto(self.management_cluster) ) else: request.resource.ClearField("management_cluster") if Primitive.to_proto(self.description): request.resource.description = Primitive.to_proto(self.description) if Primitive.to_proto(self.project): request.resource.project = Primitive.to_proto(self.project) if Primitive.to_proto(self.location): request.resource.location = Primitive.to_proto(self.location) response = stub.DeleteVmwareengineAlphaPrivateCloud(request) @classmethod def list(self, project, location, service_account_file=""): stub = private_cloud_pb2_grpc.VmwareengineAlphaPrivateCloudServiceStub( channel.Channel() ) request = private_cloud_pb2.ListVmwareengineAlphaPrivateCloudRequest() request.service_account_file = service_account_file request.Project = project request.Location = location return stub.ListVmwareengineAlphaPrivateCloud(request).items def to_proto(self): resource = private_cloud_pb2.VmwareengineAlphaPrivateCloud() if Primitive.to_proto(self.name): resource.name = Primitive.to_proto(self.name) if Primitive.to_proto(self.labels): resource.labels = Primitive.to_proto(self.labels) if PrivateCloudNetworkConfig.to_proto(self.network_config): resource.network_config.CopyFrom( PrivateCloudNetworkConfig.to_proto(self.network_config) ) else: resource.ClearField("network_config") if PrivateCloudManagementCluster.to_proto(self.management_cluster): resource.management_cluster.CopyFrom( PrivateCloudManagementCluster.to_proto(self.management_cluster) ) else: resource.ClearField("management_cluster") if Primitive.to_proto(self.description): resource.description = Primitive.to_proto(self.description) if Primitive.to_proto(self.project): resource.project = Primitive.to_proto(self.project) if Primitive.to_proto(self.location): resource.location = Primitive.to_proto(self.location) return resource class PrivateCloudNetworkConfig(object): def __init__( self, network: str = None, service_network: str = None, management_cidr: str = None, ): self.network = network self.service_network = service_network self.management_cidr = management_cidr @classmethod def to_proto(self, resource): if not resource: return None res = private_cloud_pb2.VmwareengineAlphaPrivateCloudNetworkConfig() if Primitive.to_proto(resource.network): res.network = Primitive.to_proto(resource.network) if Primitive.to_proto(resource.service_network): res.service_network = Primitive.to_proto(resource.service_network) if Primitive.to_proto(resource.management_cidr): res.management_cidr = Primitive.to_proto(resource.management_cidr) return res @classmethod def from_proto(self, resource): if not resource: return None return PrivateCloudNetworkConfig( network=Primitive.from_proto(resource.network), service_network=Primitive.from_proto(resource.service_network), management_cidr=Primitive.from_proto(resource.management_cidr), ) class PrivateCloudNetworkConfigArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [PrivateCloudNetworkConfig.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return 
[PrivateCloudNetworkConfig.from_proto(i) for i in resources] class PrivateCloudManagementCluster(object): def __init__( self, cluster_id: str = None, node_type_id: str = None, node_count: int = None ): self.cluster_id = cluster_id self.node_type_id = node_type_id self.node_count = node_count @classmethod def to_proto(self, resource): if not resource: return None res = private_cloud_pb2.VmwareengineAlphaPrivateCloudManagementCluster() if Primitive.to_proto(resource.cluster_id): res.cluster_id = Primitive.to_proto(resource.cluster_id) if Primitive.to_proto(resource.node_type_id): res.node_type_id = Primitive.to_proto(resource.node_type_id) if Primitive.to_proto(resource.node_count): res.node_count = Primitive.to_proto(resource.node_count) return res @classmethod def from_proto(self, resource): if not resource: return None return PrivateCloudManagementCluster( cluster_id=Primitive.from_proto(resource.cluster_id), node_type_id=Primitive.from_proto(resource.node_type_id), node_count=Primitive.from_proto(resource.node_count), ) class PrivateCloudManagementClusterArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [PrivateCloudManagementCluster.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return [PrivateCloudManagementCluster.from_proto(i) for i in resources] class PrivateCloudConditions(object): def __init__(self, code: str = None, message: str = None): self.code = code self.message = message @classmethod def to_proto(self, resource): if not resource: return None res = private_cloud_pb2.VmwareengineAlphaPrivateCloudConditions() if Primitive.to_proto(resource.code): res.code = Primitive.to_proto(resource.code) if Primitive.to_proto(resource.message): res.message = Primitive.to_proto(resource.message) return res @classmethod def from_proto(self, resource): if not resource: return None return PrivateCloudConditions( code=Primitive.from_proto(resource.code), message=Primitive.from_proto(resource.message), ) class PrivateCloudConditionsArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [PrivateCloudConditions.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return [PrivateCloudConditions.from_proto(i) for i in resources] class PrivateCloudHcx(object): def __init__( self, fdqn: str = None, internal_ip: str = None, external_ip: str = None, version: str = None, ): self.fdqn = fdqn self.internal_ip = internal_ip self.external_ip = external_ip self.version = version @classmethod def to_proto(self, resource): if not resource: return None res = private_cloud_pb2.VmwareengineAlphaPrivateCloudHcx() if Primitive.to_proto(resource.fdqn): res.fdqn = Primitive.to_proto(resource.fdqn) if Primitive.to_proto(resource.internal_ip): res.internal_ip = Primitive.to_proto(resource.internal_ip) if Primitive.to_proto(resource.external_ip): res.external_ip = Primitive.to_proto(resource.external_ip) if Primitive.to_proto(resource.version): res.version = Primitive.to_proto(resource.version) return res @classmethod def from_proto(self, resource): if not resource: return None return PrivateCloudHcx( fdqn=Primitive.from_proto(resource.fdqn), internal_ip=Primitive.from_proto(resource.internal_ip), external_ip=Primitive.from_proto(resource.external_ip), version=Primitive.from_proto(resource.version), ) class PrivateCloudHcxArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [PrivateCloudHcx.to_proto(i) for i in resources] 
@classmethod def from_proto(self, resources): return [PrivateCloudHcx.from_proto(i) for i in resources] class PrivateCloudNsx(object): def __init__( self, fdqn: str = None, internal_ip: str = None, external_ip: str = None, version: str = None, ): self.fdqn = fdqn self.internal_ip = internal_ip self.external_ip = external_ip self.version = version @classmethod def to_proto(self, resource): if not resource: return None res = private_cloud_pb2.VmwareengineAlphaPrivateCloudNsx() if Primitive.to_proto(resource.fdqn): res.fdqn = Primitive.to_proto(resource.fdqn) if Primitive.to_proto(resource.internal_ip): res.internal_ip = Primitive.to_proto(resource.internal_ip) if Primitive.to_proto(resource.external_ip): res.external_ip = Primitive.to_proto(resource.external_ip) if Primitive.to_proto(resource.version): res.version = Primitive.to_proto(resource.version) return res @classmethod def from_proto(self, resource): if not resource: return None return PrivateCloudNsx( fdqn=Primitive.from_proto(resource.fdqn), internal_ip=Primitive.from_proto(resource.internal_ip), external_ip=Primitive.from_proto(resource.external_ip), version=Primitive.from_proto(resource.version), ) class PrivateCloudNsxArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [PrivateCloudNsx.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return [PrivateCloudNsx.from_proto(i) for i in resources] class PrivateCloudVcenter(object): def __init__( self, fdqn: str = None, internal_ip: str = None, external_ip: str = None, version: str = None, ): self.fdqn = fdqn self.internal_ip = internal_ip self.external_ip = external_ip self.version = version @classmethod def to_proto(self, resource): if not resource: return None res = private_cloud_pb2.VmwareengineAlphaPrivateCloudVcenter() if Primitive.to_proto(resource.fdqn): res.fdqn = Primitive.to_proto(resource.fdqn) if Primitive.to_proto(resource.internal_ip): res.internal_ip = Primitive.to_proto(resource.internal_ip) if Primitive.to_proto(resource.external_ip): res.external_ip = Primitive.to_proto(resource.external_ip) if Primitive.to_proto(resource.version): res.version = Primitive.to_proto(resource.version) return res @classmethod def from_proto(self, resource): if not resource: return None return PrivateCloudVcenter( fdqn=Primitive.from_proto(resource.fdqn), internal_ip=Primitive.from_proto(resource.internal_ip), external_ip=Primitive.from_proto(resource.external_ip), version=Primitive.from_proto(resource.version), ) class PrivateCloudVcenterArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [PrivateCloudVcenter.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return [PrivateCloudVcenter.from_proto(i) for i in resources] class PrivateCloudStateEnum(object): @classmethod def to_proto(self, resource): if not resource: return resource return private_cloud_pb2.VmwareengineAlphaPrivateCloudStateEnum.Value( "VmwareengineAlphaPrivateCloudStateEnum%s" % resource ) @classmethod def from_proto(self, resource): if not resource: return resource return private_cloud_pb2.VmwareengineAlphaPrivateCloudStateEnum.Name(resource)[ len("VmwareengineAlphaPrivateCloudStateEnum") : ] class Primitive(object): @classmethod def to_proto(self, s): if not s: return "" return s @classmethod def from_proto(self, s): return s
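# An added sketch (left commented out because it needs the generated
# ``private_cloud_pb2`` protos at runtime and is not part of the generated
# file) of the to_proto / from_proto round trip the helper classes above
# implement:
#
#   cfg = PrivateCloudNetworkConfig(
#       network="projects/p/global/networks/default",
#       management_cidr="192.168.0.0/24")
#   msg = PrivateCloudNetworkConfig.to_proto(cfg)     # pb2 message
#   cfg2 = PrivateCloudNetworkConfig.from_proto(msg)  # back to the wrapper
#   assert cfg2.management_cidr == cfg.management_cidr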
{ "content_hash": "93dd744bdbff9777c5bef39d0c5e7103", "timestamp": "", "source": "github", "line_count": 497, "max_line_length": 87, "avg_line_length": 35.04627766599597, "alnum_prop": 0.6469743943047422, "repo_name": "GoogleCloudPlatform/declarative-resource-client-library", "id": "0da58420d94be110965ea2d01512076208f26131", "size": "18017", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "python/services/vmwareengine/alpha/private_cloud.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2560" }, { "name": "C++", "bytes": "3947" }, { "name": "Go", "bytes": "116489733" }, { "name": "Python", "bytes": "17240408" }, { "name": "Starlark", "bytes": "319733" } ], "symlink_target": "" }
import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.githubpages', 'sphinx.ext.napoleon' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'LightFM' copyright = u'2016, Lyst (Maciej Kula)' author = u'Lyst (Maciej Kula)' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'1.9' # The full version, including alpha/beta/rc tags. release = u'1.9' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
#html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. # "<project> v<release> documentation" by default. #html_title = u'LightFM v1.8' # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. #html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'LightFMdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. 
#'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'LightFM.tex', u'LightFM Documentation', u'Lyst (Maciej Kula)', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'lightfm', u'LightFM Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'LightFM', u'LightFM Documentation', author, 'LightFM', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Compact attribute lists napoleon_use_ivar = True
{ "content_hash": "8034f58cd035ce0257cdb0971199d197", "timestamp": "", "source": "github", "line_count": 280, "max_line_length": 80, "avg_line_length": 32.57142857142857, "alnum_prop": 0.7041666666666667, "repo_name": "paoloRais/lightfm", "id": "7ae74bacb13559dc0c6b190fa7398e69379f6c74", "size": "9540", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/conf.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Makefile", "bytes": "1013" }, { "name": "Python", "bytes": "162085" } ], "symlink_target": "" }
""" WSGI config for dj17_testproject project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dj17_testproject.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
{ "content_hash": "41edde2ca7839565cd89608b1ea9219a", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 78, "avg_line_length": 29.071428571428573, "alnum_prop": 0.7813267813267813, "repo_name": "anentropic/kombu-django-json-serializer", "id": "ff1fca02eb3a603d2a14b51e57ee3113e52ecb97", "size": "407", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "testing/tests/py2-dj17_testproject/py2-dj17_testproject/wsgi.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "45049" } ], "symlink_target": "" }
r"""Generate python docs for tf.lite. # How to run ``` python build_docs.py --output_dir=/path/to/output ``` """ import pathlib from absl import app from absl import flags import tensorflow as tf from tensorflow_docs.api_generator import generate_lib flags.DEFINE_string('output_dir', '/tmp/lite_api/', 'The path to output the files to') flags.DEFINE_string('code_url_prefix', 'https://github.com/tensorflow/tensorflow/blob/master/', 'The url prefix for links to code.') flags.DEFINE_bool('search_hints', True, 'Include metadata search hints in the generated files') flags.DEFINE_string('site_path', 'lite/api_docs/python', 'Path prefix in the _toc.yaml') FLAGS = flags.FLAGS def main(_): doc_generator = generate_lib.DocGenerator( root_title='TensorFlow Lite', py_modules=[('tf.lite', tf.lite)], base_dir=str(pathlib.Path(tf.__file__).parent), code_url_prefix=FLAGS.code_url_prefix, search_hints=FLAGS.search_hints, site_path=FLAGS.site_path, callbacks=[]) doc_generator.build(output_dir=FLAGS.output_dir) if __name__ == '__main__': app.run(main)
{ "content_hash": "c0960169e85c90c8b39048a10558be2d", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 76, "avg_line_length": 24.22, "alnum_prop": 0.6374896779521056, "repo_name": "tensorflow/tensorflow-pywrap_saved_model", "id": "e019a791888a5bbabaab6306e8d6771356ef8ae1", "size": "1900", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": "tensorflow/lite/g3doc/tools/build_py_api_docs.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "36962" }, { "name": "C", "bytes": "1392153" }, { "name": "C#", "bytes": "13584" }, { "name": "C++", "bytes": "125860957" }, { "name": "CMake", "bytes": "182324" }, { "name": "Cython", "bytes": "5003" }, { "name": "Dockerfile", "bytes": "416133" }, { "name": "Go", "bytes": "2123155" }, { "name": "HTML", "bytes": "4686483" }, { "name": "Java", "bytes": "1074438" }, { "name": "Jupyter Notebook", "bytes": "792906" }, { "name": "LLVM", "bytes": "6536" }, { "name": "MLIR", "bytes": "11347297" }, { "name": "Makefile", "bytes": "2760" }, { "name": "Objective-C", "bytes": "172666" }, { "name": "Objective-C++", "bytes": "300208" }, { "name": "Pawn", "bytes": "5552" }, { "name": "Perl", "bytes": "7536" }, { "name": "Python", "bytes": "42738981" }, { "name": "Roff", "bytes": "5034" }, { "name": "Ruby", "bytes": "9214" }, { "name": "Shell", "bytes": "621427" }, { "name": "Smarty", "bytes": "89545" }, { "name": "SourcePawn", "bytes": "14625" }, { "name": "Starlark", "bytes": "7720442" }, { "name": "Swift", "bytes": "78435" }, { "name": "Vim Snippet", "bytes": "58" } ], "symlink_target": "" }
from toontown.coghq.SpecImports import * GlobalEntities = {1000: {'type': 'levelMgr', 'name': 'LevelMgr', 'comment': '', 'parentEntId': 0, 'cogLevel': 0, 'farPlaneDistance': 1500, 'modelFilename': 'phase_10/models/lawbotHQ/LawbotCourtroom3', 'wantDoors': 1}, 1001: {'type': 'editMgr', 'name': 'EditMgr', 'parentEntId': 0, 'insertEntity': None, 'removeEntity': None, 'requestNewEntity': None, 'requestSave': None}, 0: {'type': 'zone', 'name': 'UberZone', 'comment': '', 'parentEntId': 0, 'scale': 1, 'description': '', 'visibility': []}} Scenario0 = {} levelSpec = {'globalEntities': GlobalEntities, 'scenarios': [Scenario0]}
{ "content_hash": "d9057a389115394b1facba14bc625380", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 69, "avg_line_length": 29.307692307692307, "alnum_prop": 0.5511811023622047, "repo_name": "linktlh/Toontown-journey", "id": "6cbc91f0e05b41f5469bd79987fb68505152a870", "size": "762", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "toontown/coghq/LawbotLegFactorySpec.py", "mode": "33261", "license": "apache-2.0", "language": [], "symlink_target": "" }
import pygame, sys, os import CONST, SWSmenu, GameTime from pygame.locals import * from CONST import * from SWSmenu import * from GameTime import * """ IDEAS: -make bools such as isInMenu, startedGame/isInAction """ pygame.init() clock = pygame.time.Clock() screen = pygame.display.set_mode(WINDOW_SIZE) while True: clock.tick(60) for event in pygame.event.get(): if event.type == QUIT or (event.type == KEYDOWN and event.key == K_q): pygame.quit() sys.exit() print "Welcome to the Main Menu" menu = Menu() menu.titleMenu(screen) print "GAME HAS STARTED!!" gametime = GameTime() gametime.playGame(screen) pygame.display.update()
{ "content_hash": "e5bc3cef1428742374aad3e290cf0cae", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 78, "avg_line_length": 18.743589743589745, "alnum_prop": 0.6361149110807114, "repo_name": "iPatso/PyGameProjs", "id": "7c476401e128fb7cdbcfc727541b791a23a53083", "size": "731", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PyGames/SWSpace/main.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "433468" } ], "symlink_target": "" }
import functools

__all__ = ['partial', 'Layout']


def partial(handler):
    """ Mark the handler as a view partial (set response Content-Type).
    """

    @functools.wraps(handler)
    def _handler(*args, **kwargs):
        response = handler(*args, **kwargs)
        response.content_type = 'x-application/partial'

        return response

    return _handler


class Layout:
    """ The layout component

        The layout component provides a two-step view rendering pattern
        implementation on top of the services provided by a `views`
        component or a compatible one.

        This component eases the task of setting a default look and feel
        for the entire application by letting all content generated by
        the application share a default HTML page template.
    """

    content_type = 'text/html'
    template_name = 'layout.html'
    status_codes = [
        200,
    ]

    def __init__(self, render):
        """ Setup the component.

            It takes as its only argument the
            :func:`aurora.webcomponents.views.render` service
            implementation used by the :func:`after_handle` service.

            :param render: The render service
        """
        self.render = render

    #
    # stubs for component dependencies
    #

    def render(self, template_name: str, **context) -> str:
        """ Render a template to a string using the given context.

            :param template_name: The relative template name without the last
                extension.
            :param context: The context mapping.
            :return: The rendered content.
        """
        raise NotImplementedError()

    #
    # services provided by the component
    #

    def post_dispatch(self, response):
        """ Wrap the response body using a layout template.

            This service is intended to be registered as a
            :func:`aurora.webapp.infrastructure.Application.after_handle`
            event listener, and its behaviour is only activated if the
            response content type is the `x-application/partial` string.

            :param response: The Web response object.
        """
        if 'x-application/partial' in response.content_type and \
                response.status_int in self.status_codes:
            response.text = self.render(self.template_name,
                                        content=response.text)
            response.content_type = self.content_type
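# A minimal usage sketch (hypothetical response object, added for
# illustration) showing the two steps working together: ``partial`` tags
# the handler's response, and ``post_dispatch`` later wraps it in the
# site-wide template via the injected render service.
if __name__ == '__main__':
    class FakeResponse:
        content_type = 'text/html'
        status_int = 200
        text = '<p>hello</p>'

    @partial
    def handler():
        return FakeResponse()

    response = handler()
    assert response.content_type == 'x-application/partial'

    layout = Layout(lambda name, **context: '<html>%s</html>'
                                            % context['content'])
    layout.post_dispatch(response)
    assert response.text == '<html><p>hello</p></html>'
    assert response.content_type == 'text/html'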
{ "content_hash": "d9f289910794337f52032de0ba7dc22d", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 79, "avg_line_length": 31.051948051948052, "alnum_prop": 0.6394813885403596, "repo_name": "yeiniel/aurora", "id": "22669962029fd9929c15f941d4ad8722b606f725", "size": "3964", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aurora/webcomponents/layout.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "150299" } ], "symlink_target": "" }
''' (c) 2011, 2012 Georgia Tech Research Corporation This source code is released under the New BSD license. Please see http://wiki.quantsoftware.org/index.php?title=QSTK_License for license details. Created on May 14, 2012 @author: John Cornwell @contact: John@lucenaresearch.com @summary: ''' # Python imports import unittest # 3rd party imports # QSTK imports class Test(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_import(self): # Silly example to test current error in loading utils import qstkutil.utils as utils self.assertTrue(True) if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] unittest.main()
{ "content_hash": "de95d3f68e9e92bce35f16e8eaa0b45d", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 67, "avg_line_length": 17, "alnum_prop": 0.6644385026737968, "repo_name": "wogsland/QSTK", "id": "6e11ed35fa09fbf999fb4ed728dcebb48a93702e", "size": "748", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "build/lib.linux-x86_64-2.7/QSTK/qstkutil/tests/test_utils.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "46134" }, { "name": "Python", "bytes": "668253" }, { "name": "Shell", "bytes": "2768" } ], "symlink_target": "" }
import unittest

from TASSELpy.TASSELbridge import TASSELbridge
from TASSELpy.test.net.maizegenetics.trait.TraitTest import TraitTest
from TASSELpy.test.net.maizegenetics.trait.PhenotypeTest import PhenotypeTest
from TASSELpy.test.net.maizegenetics.trait.MarkerPhenotypeTest import MarkerPhenotypeTest


class traitTestSuite(unittest.TestSuite):
    def __init__(self):
        super(traitTestSuite, self).__init__()
        self.addTest(unittest.makeSuite(TraitTest))
        self.addTest(unittest.makeSuite(PhenotypeTest))
        self.addTest(unittest.makeSuite(MarkerPhenotypeTest))


if __name__ == "__main__":
    runner = unittest.TextTestRunner()
    runner.run(traitTestSuite())
    TASSELbridge.stop()
{ "content_hash": "0d4f720778f754f3bc78296d2997a7ab", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 89, "avg_line_length": 41.705882352941174, "alnum_prop": 0.765867418899859, "repo_name": "er432/TASSELpy", "id": "6c2a188f3c17b40d14a279481267fc340b7cc566", "size": "709", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "TASSELpy/test/net/maizegenetics/trait/traitTestSuite.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "947691" }, { "name": "Shell", "bytes": "6705" } ], "symlink_target": "" }
"""empty message

Revision ID: 37f460a1f4d2
Revises: 1584139036cb
Create Date: 2019-02-28 14:53:48.334389

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB

# revision identifiers, used by Alembic.
revision = '37f460a1f4d2'
down_revision = '1584139036cb'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('file',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('filename', sa.String(length=50), nullable=True),
    sa.Column('uri', sa.String(length=512), nullable=True),
    sa.Column('filemime', sa.String(length=255), nullable=True),
    sa.Column('filesize', sa.Integer(), nullable=True),
    sa.Column('filename_original', sa.String(length=200), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('theme',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('title', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('slug', sa.String(length=100), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.Column('author_id', sa.Integer(), nullable=False),
    sa.Column('status', sa.String(length=15), nullable=False),
    sa.Column('config', JSONB(), nullable=True),
    sa.ForeignKeyConstraint(['author_id'], ['userprofile.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('slug'),
    sa.UniqueConstraint('title')
    )
    op.create_table('project',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('title', sa.String(length=100), nullable=False),
    sa.Column('description', sa.Text(), nullable=False),
    sa.Column('slug', sa.String(length=100), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.Column('author_id', sa.Integer(), nullable=False),
    sa.Column('status', sa.String(length=15), nullable=False),
    sa.Column('theme_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['author_id'], ['userprofile.id'], ),
    sa.ForeignKeyConstraint(['theme_id'], ['theme.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('slug')
    )
    op.create_table('field',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('label', sa.String(length=100), nullable=True),
    sa.Column('type', sa.String(length=50), nullable=True),
    sa.Column('project_id', sa.Integer(), nullable=False),
    sa.Column('parent_id', sa.Integer(), nullable=True),
    sa.Column('author_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['parent_id'], ['field.id'], ),
    sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
    sa.ForeignKeyConstraint(['author_id'], ['userprofile.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('audio',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('file_id', sa.Integer(), nullable=False),
    sa.Column('duration', sa.Integer(), nullable=True),
    sa.Column('audio_format', sa.String(length=50), nullable=True),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('image',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('file_id', sa.Integer(), nullable=False),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('number',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('value', sa.Numeric(), nullable=True),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('photosphere',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('file_id', sa.Integer(), nullable=False),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('position',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('x', sa.Numeric(), nullable=True),
    sa.Column('y', sa.Numeric(), nullable=True),
    sa.Column('z', sa.Numeric(), nullable=True),
    sa.Column('w', sa.Numeric(), nullable=True),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('text',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('value', sa.String(length=200), nullable=True),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('video',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('file_id', sa.Integer(), nullable=False),
    sa.Column('duration', sa.Integer(), nullable=True),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('videosphere',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('file_id', sa.Integer(), nullable=False),
    sa.Column('duration', sa.Integer(), nullable=True),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
    sa.ForeignKeyConstraint(['id'], ['field.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('tags',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('tagname', sa.String(length=100), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('favoritor_assoc',
    sa.Column('favoriter', sa.Integer(), nullable=True),
    sa.Column('favorited_project', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['favorited_project'], ['project.id'], ),
    sa.ForeignKeyConstraint(['favoriter'], ['userprofile.id'], )
    )
    op.create_table('tag_assoc',
    sa.Column('tag', sa.Integer(), nullable=True),
    sa.Column('project', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['project'], ['project.id'], ),
    sa.ForeignKeyConstraint(['tag'], ['tags.id'], )
    )
    op.drop_table('projects')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('projects',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('project_name', sa.VARCHAR(length=80), nullable=False),
    sa.Column('created_by', sa.INTEGER(), nullable=False),
    sa.Column('parent_id', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['created_by'], ['userprofile.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('project_name')
    )
    op.add_column('tag_assoc', sa.Column('article', sa.INTEGER(), nullable=True))
    op.drop_table('videosphere')
    op.drop_table('video')
    op.drop_table('text')
    op.drop_table('position')
    op.drop_table('photosphere')
    op.drop_table('number')
    op.drop_table('image')
    op.drop_table('audio')
    op.drop_table('field')
    op.drop_table('theme')
    op.drop_table('file')
    op.drop_table('tags')
    op.drop_table('tag_assoc')
    op.drop_table('favoritor_assoc')
    op.drop_table('project')
    # ### end Alembic commands ###
{ "content_hash": "a98a78feed0442e5dbc37e5a1b0a1de0", "timestamp": "", "source": "github", "line_count": 190, "max_line_length": 81, "avg_line_length": 41.136842105263156, "alnum_prop": 0.6443193449334698, "repo_name": "viewportvr/daysinvr", "id": "fbcb129fcbad8bc2ced74fcffd6596e2409f9dba", "size": "7816", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backend/migrations/versions/37f460a1f4d2_.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "64842" }, { "name": "JavaScript", "bytes": "408410" } ], "symlink_target": "" }
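For context, a short sketch of how a revision like the one above is typically applied or reverted; it assumes a standard Alembic setup whose alembic.ini points at this migrations directory.

# Assumes alembic.ini in the working directory is configured for
# backend/migrations; alembic.config.main mirrors the `alembic` CLI.
from alembic.config import main

main(argv=['upgrade', '37f460a1f4d2'])    # runs upgrade() above
main(argv=['downgrade', '1584139036cb'])  # runs downgrade(), back to the parent revision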
import json
import os
import sys

import traitlets

# imported so their model classes can register themselves in generate.registry
from . import sklearn
from . import generate
from . import catboost
from . import lightgbm
from . import xgboost


def lmap(f, values):
    return list(map(f, values))


def lmapstar(f, values):
    return [f(*k) for k in values]


def to_trait(name, trait):
    return dict(
        name=name,
        has_default=trait.default_value is traitlets.Undefined,
        default=None if trait.default_value is traitlets.Undefined else trait.default_value,
        type=str(type(trait).__name__),
        help=trait.help,
    )


def to_cls(cls):
    return dict(
        classname=cls.__name__,
        snake_name=cls.__dict__.get(
            "snake_name", generate.camel_to_underscore(cls.__name__)
        ),
        version=cls.__dict__.get("_version", "1.0.0"),
        module=cls.__module__,
        traits=lmapstar(to_trait, cls.class_traits().items()),
        doc=cls.__doc__
    )


def main(args=sys.argv):
    spec = lmap(to_cls, generate.registry)
    json_data = json.dumps(spec, indent=4, sort_keys=True)
    path = os.path.join(os.path.dirname(__file__), "spec.json")
    if len(args) > 1:
        path = args[1]
    with open(path, "w") as f:
        f.write(json_data)


if __name__ == "__main__":
    main()
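A small illustration of the trait introspection above; the `Scaler` class is a hypothetical stand-in, not a vaex model, and `to_trait` is the function from this file.

# Hypothetical HasTraits class, only to show what to_trait() extracts:
import traitlets

class Scaler(traitlets.HasTraits):
    prefix = traitlets.Unicode('scaled_', help='Output column prefix')

print(to_trait('prefix', Scaler.class_traits()['prefix']))
# {'name': 'prefix', 'has_default': False, 'default': 'scaled_',
#  'type': 'Unicode', 'help': 'Output column prefix'}
# note: per the code above, has_default is True only when the trait's
# default is traitlets.Undefined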
{ "content_hash": "029ed6784c2a54d3d5cc927c8e3d28bd", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 68, "avg_line_length": 22.396551724137932, "alnum_prop": 0.5950731331793687, "repo_name": "maartenbreddels/vaex", "id": "7cd5fa595f4daa473274be89655e941f94e9b0b1", "size": "1299", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/vaex-ml/vaex/ml/spec.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1888" }, { "name": "C++", "bytes": "81166" }, { "name": "CSS", "bytes": "6604" }, { "name": "GLSL", "bytes": "6204" }, { "name": "HTML", "bytes": "177613" }, { "name": "JavaScript", "bytes": "1489136" }, { "name": "Makefile", "bytes": "432" }, { "name": "PHP", "bytes": "33807" }, { "name": "Python", "bytes": "1893232" }, { "name": "Shell", "bytes": "4639" } ], "symlink_target": "" }
"""Adapted from https://stackoverflow.com/a/21912744/812183"""
from __future__ import absolute_import
from __future__ import unicode_literals

import sys
from collections import OrderedDict

import yaml

MAP_TYPE = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG


def load_map(loader, node):
    loader.flatten_mapping(node)
    return OrderedDict(loader.construct_pairs(node))


class OrderedLoader(getattr(yaml, 'CSafeLoader', yaml.SafeLoader)):
    pass


OrderedLoader.add_constructor(MAP_TYPE, load_map)


def dump_map(dumper, data):
    return dumper.represent_mapping(MAP_TYPE, data.items())


class OrderedDumper(getattr(yaml, 'CSafeDumper', yaml.SafeDumper)):
    pass


OrderedDumper.add_representer(OrderedDict, dump_map)

# in python3.6+, dicts have order by default
if sys.version_info >= (3, 6):  # pragma: no cover (py36+)
    OrderedDumper.add_representer(dict, dump_map)


def ordered_load(stream):
    """yaml.load which respects order for dictionaries in the yaml file.

    :param stream: string or streamlike object.
    """
    return yaml.load(stream, Loader=OrderedLoader)


def ordered_dump(obj, **kwargs):
    """yaml.dump which respects order for dictionaries in the yaml object.

    :param obj: Yaml dumpable object
    """
    return yaml.dump(obj, Dumper=OrderedDumper, **kwargs)
{ "content_hash": "64fbfd5738759ed3c34e82792e9bb6c0", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 74, "avg_line_length": 24.735849056603772, "alnum_prop": 0.7315026697177727, "repo_name": "asottile/aspy.yaml", "id": "b97a815098cba00d23a7549244c33990ca5bcc65", "size": "1311", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aspy/yaml/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "2906" } ], "symlink_target": "" }
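A quick round-trip example for the two helpers above: key order from the source document survives both load and dump (`default_flow_style` is a standard yaml.dump keyword forwarded through **kwargs).

from aspy.yaml import ordered_load, ordered_dump

doc = 'b: 1\na: 2\nc: 3\n'
data = ordered_load(doc)                 # OrderedDict, source order kept
assert list(data) == ['b', 'a', 'c']
assert ordered_dump(data, default_flow_style=False) == doc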
import unittest

from pants.base.generator import TemplateData


class TemplateDataTest(unittest.TestCase):
    def setUp(self):
        self.data = TemplateData(foo="bar", baz=42)

    def test_member_access(self):
        try:
            self.data.bip
            self.fail("Access to undefined template data slots should raise")
        except AttributeError:
            # expected
            pass

    def test_member_mutation(self):
        try:
            self.data.baz = 1 / 137
            self.fail("Mutation of a template data's slots should not be allowed")
        except AttributeError:
            # expected
            pass

    def test_extend(self):
        self.assertEqual(self.data.extend(jake=0.3),
                         TemplateData(baz=42, foo="bar", jake=0.3))

    def test_equals(self):
        self.assertEqual(self.data, TemplateData(baz=42).extend(foo="bar"))
{ "content_hash": "72ab09c5018ee5147e41f9822e61e1ac", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 95, "avg_line_length": 29.066666666666666, "alnum_prop": 0.6146788990825688, "repo_name": "tdyas/pants", "id": "dc52d2dbe95ca603efdc3040bf2ff4b6e7647792", "size": "1004", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/python/pants_test/base/test_generator.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "655" }, { "name": "C++", "bytes": "2010" }, { "name": "CSS", "bytes": "9444" }, { "name": "Dockerfile", "bytes": "5596" }, { "name": "GAP", "bytes": "1283" }, { "name": "Gherkin", "bytes": "919" }, { "name": "Go", "bytes": "2765" }, { "name": "HTML", "bytes": "44381" }, { "name": "Java", "bytes": "518180" }, { "name": "JavaScript", "bytes": "22906" }, { "name": "Python", "bytes": "7955590" }, { "name": "Rust", "bytes": "1031208" }, { "name": "Scala", "bytes": "106520" }, { "name": "Shell", "bytes": "109904" }, { "name": "Starlark", "bytes": "502255" }, { "name": "Thrift", "bytes": "2953" } ], "symlink_target": "" }
import sys, os, inspect
from abc import ABCMeta, abstractmethod
from ctypes import *
import math

MY_DIR = os.path.dirname(os.path.abspath(inspect.getframeinfo(inspect.currentframe())[0]))
HELPER_DIR = os.path.abspath(os.path.join(MY_DIR, '..', 'helpers'))
sys.path.append(HELPER_DIR)

import dhlog
from dhcore import *
from dhapp import *


class _API:
    is_init = False

    @staticmethod
    def init(debug=False):
        if _API.is_init:
            return

        postfix = ''
        if debug:
            postfix = '-dbg'
        if sys.platform == 'win32':
            shlib = 'dheng' + postfix + '.dll'
        elif sys.platform == 'linux':
            shlib = 'libdheng' + postfix + '.so'

        # load library
        try:
            dhenglib = cdll.LoadLibrary(shlib)
        except:
            dhlog.Log.warn(str(sys.exc_info()[1]))
            dhlog.Log.fatal('could not load dynamic library %s' % shlib)
            sys.exit(-1)

        dhlog.Log.msgline('module "%s" loaded' % shlib, dhlog.TERM_GREEN)

        # engine.h
        _API.eng_init = dhenglib.eng_init
        _API.eng_init.restype = c_int
        _API.eng_init.argtypes = [POINTER(InitParams)]
        _API.eng_release = dhenglib.eng_release
        _API.eng_update = dhenglib.eng_update
        _API.eng_send_guimsgs = dhenglib.eng_send_guimsgs
        _API.eng_send_guimsgs.argtypes = [c_byte, c_uint]
        _API.eng_get_frametime = dhenglib.eng_get_frametime
        _API.eng_get_frametime.restype = c_float
        _API.eng_get_sharedir = dhenglib.eng_get_sharedir
        _API.eng_get_sharedir.restype = c_char_p

        # scene-mgr.h
        _API.scn_create_scene = dhenglib.scn_create_scene
        _API.scn_create_scene.restype = c_uint
        _API.scn_create_scene.argtypes = [c_char_p]
        _API.scn_destroy_scene = dhenglib.scn_destroy_scene
        _API.scn_destroy_scene.argtypes = [c_uint]
        _API.scn_findscene = dhenglib.scn_findscene
        _API.scn_findscene.restype = c_uint
        _API.scn_findscene.argtypes = [c_char_p]
        _API.scn_create_obj = dhenglib.scn_create_obj
        _API.scn_create_obj.restype = c_void_p
        _API.scn_create_obj.argtypes = [c_uint, c_char_p, c_uint]
        _API.scn_destroy_obj = dhenglib.scn_destroy_obj
        _API.scn_destroy_obj.argtypes = [c_void_p]
        _API.scn_findobj = dhenglib.scn_findobj
        _API.scn_findobj.restype = c_uint
        _API.scn_findobj.argtypes = [c_uint, c_char_p]
        _API.scn_getobj = dhenglib.scn_getobj
        _API.scn_getobj.restype = c_void_p
        _API.scn_getobj.argtypes = [c_uint, c_uint]
        _API.scn_clear = dhenglib.scn_clear
        _API.scn_clear.argtypes = [c_uint]
        _API.scn_setactive = dhenglib.scn_setactive
        _API.scn_setactive.argtypes = [c_uint]
        _API.scn_getactive = dhenglib.scn_getactive
        _API.scn_getactive.restype = c_uint
        _API.scn_setsize = dhenglib.scn_setsize
        _API.scn_setsize.argtypes = [c_uint, POINTER(Vec3), POINTER(Vec3)]
        _API.scn_getsize = dhenglib.scn_getsize
        _API.scn_getsize.argtypes = [c_uint, POINTER(Vec3), POINTER(Vec3)]

        # gfx.h
        _API.gfx_set_gridcallback = dhenglib.gfx_set_gridcallback
        _API.gfx_set_gridcallback.argtypes = [c_int]
        _API.gfx_resize = dhenglib.gfx_resize
        _API.gfx_resize.argtypes = [c_uint, c_uint]

        # cmp-mgr.h
        _API.cmp_findtype = dhenglib.cmp_findtype
        _API.cmp_findtype.restype = c_void_p
        _API.cmp_findtype.argtypes = [c_ushort]
        _API.cmp_getname = dhenglib.cmp_getname
        _API.cmp_getname.restype = c_char_p
        _API.cmp_getname.argtypes = [c_void_p]
        _API.cmp_create_instance = dhenglib.cmp_create_instance
        _API.cmp_create_instance.restype = c_ulonglong
        _API.cmp_create_instance.argtypes = [c_void_p, c_void_p, c_uint, c_ulonglong, c_uint]
        _API.cmp_destroy_instance = dhenglib.cmp_destroy_instance
        _API.cmp_destroy_instance.argtypes = [c_ulonglong]
        _API.cmp_findinstance_bytype_inobj = dhenglib.cmp_findinstance_bytype_inobj
        _API.cmp_findinstance_bytype_inobj.restype = c_ulonglong
        _API.cmp_findinstance_bytype_inobj.argtypes = [c_void_p, c_ushort]
        _API.cmp_debug_add = dhenglib.cmp_debug_add
        _API.cmp_debug_add.argtypes = [c_ulonglong]
        _API.cmp_debug_remove = dhenglib.cmp_debug_remove
        _API.cmp_debug_remove.argtypes = [c_ulonglong]
        _API.cmp_value_set4f = dhenglib.cmp_value_set4f
        _API.cmp_value_set4f.restype = c_int
        _API.cmp_value_set4f.argtypes = [c_ulonglong, c_char_p, POINTER(c_float)]
        _API.cmp_value_get4f = dhenglib.cmp_value_get4f
        _API.cmp_value_get4f.restype = c_int
        _API.cmp_value_get4f.argtypes = [POINTER(c_float), c_ulonglong, c_char_p]
        _API.cmp_value_setf = dhenglib.cmp_value_setf
        _API.cmp_value_setf.restype = c_int
        _API.cmp_value_setf.argtypes = [c_ulonglong, c_char_p, c_float]
        _API.cmp_value_getf = dhenglib.cmp_value_getf
        _API.cmp_value_getf.restype = c_int
        _API.cmp_value_getf.argtypes = [POINTER(c_float), c_ulonglong, c_char_p]
        _API.cmp_value_setb = dhenglib.cmp_value_setb
        _API.cmp_value_setb.restype = c_int
        _API.cmp_value_setb.argtypes = [c_ulonglong, c_char_p, c_int]
        _API.cmp_value_getb = dhenglib.cmp_value_getb
        _API.cmp_value_getb.restype = c_int
        _API.cmp_value_getb.argtypes = [POINTER(c_int), c_ulonglong, c_char_p]
        _API.cmp_value_setui = dhenglib.cmp_value_setui
        _API.cmp_value_setui.restype = c_int
        _API.cmp_value_setui.argtypes = [c_ulonglong, c_char_p, c_uint]
        _API.cmp_value_getui = dhenglib.cmp_value_getui
        _API.cmp_value_getui.restype = c_int
        _API.cmp_value_getui.argtypes = [POINTER(c_uint), c_ulonglong, c_char_p]
        _API.cmp_value_set3f = dhenglib.cmp_value_set3f
        _API.cmp_value_set3f.restype = c_int
        _API.cmp_value_set3f.argtypes = [c_ulonglong, c_char_p, POINTER(c_float)]
        _API.cmp_value_get3f = dhenglib.cmp_value_get3f
        _API.cmp_value_get3f.restype = c_int
        _API.cmp_value_get3f.argtypes = [POINTER(c_float), c_ulonglong, c_char_p]
        _API.cmp_value_set2f = dhenglib.cmp_value_set2f
        _API.cmp_value_set2f.restype = c_int
        _API.cmp_value_set2f.argtypes = [c_ulonglong, c_char_p, POINTER(c_float)]
        _API.cmp_value_get2f = dhenglib.cmp_value_get2f
        _API.cmp_value_get2f.restype = c_int
        _API.cmp_value_get2f.argtypes = [POINTER(c_float), c_ulonglong, c_char_p]
        _API.cmp_value_sets = dhenglib.cmp_value_sets
        _API.cmp_value_sets.restype = c_int
        _API.cmp_value_sets.argtypes = [c_ulonglong, c_char_p, c_char_p]
        _API.cmp_value_gets = dhenglib.cmp_value_gets
        _API.cmp_value_gets.restype = c_int
        _API.cmp_value_gets.argtypes = [c_char_p, c_uint, c_ulonglong, c_char_p]

        # cmp-xform.h
        _API.cmp_xform_setpos = dhenglib.cmp_xform_setpos
        _API.cmp_xform_setpos.argtypes = [c_void_p, POINTER(Vec3)]
        _API.cmp_xform_setrot_quat = dhenglib.cmp_xform_setrot_quat
        _API.cmp_xform_setrot_quat.argtypes = [c_void_p, POINTER(Quat)]
        _API.cmp_xform_getpos = dhenglib.cmp_xform_getpos
        _API.cmp_xform_getpos.restype = POINTER(Vec3)
        _API.cmp_xform_getpos.argtypes = [c_void_p, POINTER(Vec3)]
        _API.cmp_xform_getrot = dhenglib.cmp_xform_getrot
        _API.cmp_xform_getrot.restype = POINTER(Quat)
        _API.cmp_xform_getrot.argtypes = [c_void_p, POINTER(Quat)]

        # cmp-anim.h
        _API.cmp_anim_getclipname = dhenglib.cmp_anim_getclipname
        _API.cmp_anim_getclipname.restype = c_char_p
        _API.cmp_anim_getclipname.argtypes = [c_ulonglong, c_uint]
        _API.cmp_anim_isplaying = dhenglib.cmp_anim_isplaying
        _API.cmp_anim_isplaying.restype = c_uint
        _API.cmp_anim_isplaying.argtypes = [c_ulonglong]
        _API.cmp_anim_getclipcnt = dhenglib.cmp_anim_getclipcnt
        _API.cmp_anim_getclipcnt.restype = c_uint
        _API.cmp_anim_getclipcnt.argtypes = [c_ulonglong]
        _API.cmp_anim_getframecnt = dhenglib.cmp_anim_getframecnt
        _API.cmp_anim_getframecnt.restype = c_uint
        _API.cmp_anim_getframecnt.argtypes = [c_ulonglong]
        _API.cmp_anim_getfps = dhenglib.cmp_anim_getfps
        _API.cmp_anim_getfps.restype = c_uint
        _API.cmp_anim_getfps.argtypes = [c_ulonglong]
        _API.cmp_anim_getcurframe = dhenglib.cmp_anim_getcurframe
        _API.cmp_anim_getcurframe.restype = c_uint
        _API.cmp_anim_getcurframe.argtypes = [c_ulonglong]
        _API.cmp_anim_getbonecnt = dhenglib.cmp_anim_getbonecnt
        _API.cmp_anim_getbonecnt.restype = c_uint
        _API.cmp_anim_getbonecnt.argtypes = [c_ulonglong]
        _API.cmp_anim_getbonename = dhenglib.cmp_anim_getbonename
        _API.cmp_anim_getbonename.restype = c_char_p
        _API.cmp_anim_getbonename.argtypes = [c_ulonglong, c_uint]

        # cmp-animchar.h
        _API.cmp_animchar_getparamtype = dhenglib.cmp_animchar_getparamtype
        _API.cmp_animchar_getparamtype.restype = c_uint
        _API.cmp_animchar_getparamtype.argtypes = [c_ulonglong, c_char_p]
        _API.cmp_animchar_getparamb = dhenglib.cmp_animchar_getparamb
        _API.cmp_animchar_getparamb.restype = c_int
        _API.cmp_animchar_getparamb.argtypes = [c_ulonglong, c_char_p]
        _API.cmp_animchar_getparami = dhenglib.cmp_animchar_getparami
        _API.cmp_animchar_getparami.restype = c_int
        _API.cmp_animchar_getparami.argtypes = [c_ulonglong, c_char_p]
        _API.cmp_animchar_getparamf = dhenglib.cmp_animchar_getparamf
        _API.cmp_animchar_getparamf.restype = c_float
        _API.cmp_animchar_getparamf.argtypes = [c_ulonglong, c_char_p]
        _API.cmp_animchar_setparamb = dhenglib.cmp_animchar_setparamb
        _API.cmp_animchar_setparamb.argtypes = [c_ulonglong, c_char_p, c_int]
        _API.cmp_animchar_setparami = dhenglib.cmp_animchar_setparami
        _API.cmp_animchar_setparami.argtypes = [c_ulonglong, c_char_p, c_int]
        _API.cmp_animchar_setparamf = dhenglib.cmp_animchar_setparamf
        _API.cmp_animchar_setparamf.argtypes = [c_ulonglong, c_char_p, c_float]

        # world-mgr.h
        _API.wld_set_var = dhenglib.wld_set_var
        _API.wld_set_var.argtypes = [c_uint, c_uint, POINTER(Variant)]
        _API.wld_get_var = dhenglib.wld_get_var
        _API.wld_get_var.restype = POINTER(Variant)
        _API.wld_get_var.argtypes = [c_uint, c_uint]
        _API.wld_find_var = dhenglib.wld_find_var
        _API.wld_find_var.restype = c_uint
        _API.wld_find_var.argtypes = [c_uint, c_char_p]
        _API.wld_find_section = dhenglib.wld_find_section
        _API.wld_find_section.restype = c_uint
        _API.wld_find_section.argtypes = [c_char_p]

        _API.is_init = True


class Engine:
    is_init = False
    __active_scene = None

    @staticmethod
    def send_keys(ch, vkey):
        _API.eng_send_guimsgs(ch, vkey)

    @staticmethod
    def update():
        if Engine.is_init:
            ft = _API.eng_get_frametime()
            Input.update(ft)
            World.update_objects(ft)
            if Engine.__active_scene != None:
                Engine.__active_scene.update_objects(ft)
            _API.eng_update()

    @staticmethod
    def init(conf):
        r = _API.eng_init(conf.params)
        if IS_FAIL(r):
            raise Exception(Errors.last_error())
        _API.gfx_set_gridcallback(c_int(True))

        # register components
        Component.register('transform', 0x7887, Transform)
        Component.register('camera', 0x8b72, Camera)
        Component.register('bounds', 0x8bbd, Bounds)
        Component.register('model', 0x4e9b, Model)
        Component.register('animation', 0x068b, Animation)
        Component.register('animator', 0x99e4, Animator)
        Component.register('rigidbody', 0xbc2d, RigidBody)
        Component.register('light', 0x4e0e, Light)

        Engine.is_init = True

    @staticmethod
    def release():
        _API.eng_release()
        Engine.is_init = False

    @staticmethod
    def set_active_scene(scene, caller_scene=False):
        if not caller_scene:
            scene.activate()
        else:
            Engine.__active_scene = scene

    @staticmethod
    def get_share_dir():
        return _API.eng_get_sharedir().decode()

    @staticmethod
    def resize_view(width, height):
        if Engine.is_init:
            _API.gfx_resize(c_uint(width), c_uint(height))


class Component:
    __cmps = dict()
    def __init__(self, name, cmp_type, owner_obj):
        self._name = name
        self._type = cmp_type
        self._owner_obj = owner_obj

        c = _API.cmp_findtype(c_ushort(cmp_type))
        if c == None:
            raise Exception('specified component "%s" does not exist' % name)

        self._cmp = _API.cmp_findinstance_bytype_inobj(owner_obj.objptr,
                                                       c_ushort(cmp_type))
        if self._cmp == INVALID_HANDLE:
            self._cmp = _API.cmp_create_instance(c, owner_obj.objptr, c_uint(0),
                                                 c_ulonglong(INVALID_HANDLE), c_uint(0))
        if self._cmp == INVALID_HANDLE:
            raise Exception('could not create component "%s"' % name)

    def __get_internalname(self):
        # look up the registered component name by its internal type id
        c = _API.cmp_findtype(c_ushort(self._type))
        if c != None:
            return _API.cmp_getname(c)
    internal_name = property(__get_internalname)

    def __get_name(self):
        return self._name
    name = property(__get_name)

    def __get_internaltype(self):
        return self._type
    internal_type = property(__get_internaltype)

    def __get_ownerobj(self):
        return self._owner_obj
    owner_obj = property(__get_ownerobj)

    def destroy(self):
        if self._cmp != INVALID_HANDLE:
            _API.cmp_destroy_instance(self._cmp)
            self._cmp = INVALID_HANDLE

    def debug(self, dbg=True):
        if dbg:
            _API.cmp_debug_add(self._cmp)
        else:
            _API.cmp_debug_remove(self._cmp)

    @staticmethod
    def register(name, cmp_type, cls_type):
        Component.__cmps[name] = (cmp_type, cls_type)

    @staticmethod
    def create(name, owner_obj):
        if name in Component.__cmps:
            citem = Component.__cmps[name]
            return citem[1](name, citem[0], owner_obj)
        else:
            raise Exception('component by name "%s" is not registered' % name)


class Transform(Component):
    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __set_position(self, pos):
        _API.cmp_xform_setpos(self._owner_obj.objptr, byref(pos))
    def __get_position(self):
        pos = Vec3()
        _API.cmp_xform_getpos(self._owner_obj.objptr, byref(pos))
        return pos
    position = property(__get_position, __set_position)

    def __set_rotation(self, quat):
        _API.cmp_xform_setrot_quat(self._owner_obj.objptr, byref(quat))
    def __get_rotation(self):
        quat = Quat()
        _API.cmp_xform_getrot(self._owner_obj.objptr, byref(quat))
        return quat
    rotation = property(__get_rotation, __set_rotation)


class Bounds(Component):
    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __set_sphere(self, s):
        # elements of a ctypes float array are assigned directly,
        # not through a .value attribute
        sf = (c_float*4)()
        sf[0] = s.x
        sf[1] = s.y
        sf[2] = s.z
        sf[3] = s.w
        _API.cmp_value_set4f(self._cmp, to_cstr('sphere'), sf)
    def __get_sphere(self):
        sf = (c_float*4)()
        _API.cmp_value_get4f(sf, self._cmp, to_cstr('sphere'))
        return Vec4(sf[0], sf[1], sf[2], sf[3])
    sphere = property(__get_sphere, __set_sphere)


class Camera(Component):
    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __get_fov(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('fov'))
        return f.value
    def __set_fov(self, fov):
        _API.cmp_value_setf(self._cmp, to_cstr('fov'), c_float(fov))
    fov = property(__get_fov, __set_fov)

    def __get_nearclip(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('near_distance'))
        return f.value
    def __set_nearclip(self, d):
        _API.cmp_value_setf(self._cmp, to_cstr('near_distance'), c_float(d))
    near_clip = property(__get_nearclip, __set_nearclip)

    def __get_farclip(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('far_distance'))
        return f.value
    def __set_farclip(self, d):
        _API.cmp_value_setf(self._cmp, to_cstr('far_distance'), c_float(d))
    far_clip = property(__get_farclip, __set_farclip)

    def __get_maxpitch(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('max_pitch'))
        return f.value
    def __set_maxpitch(self, pitch):
        _API.cmp_value_setf(self._cmp, to_cstr('max_pitch'), c_float(pitch))
    max_pitch = property(__get_maxpitch, __set_maxpitch)

    def __get_minpitch(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('min_pitch'))
        return f.value
    def __set_minpitch(self, pitch):
        _API.cmp_value_setf(self._cmp, to_cstr('min_pitch'), c_float(pitch))
    min_pitch = property(__get_minpitch, __set_minpitch)

    def __get_active(self):
        b = c_int(0)
        _API.cmp_value_getb(byref(b), self._cmp, to_cstr('active'))
        return bool(b.value)
    def __set_active(self, value):
        _API.cmp_value_setb(self._cmp, to_cstr('active'), c_int(value))
    active = property(__get_active, __set_active)


class Model(Component):
    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __get_filepath(self):
        s = create_string_buffer(128)
        _API.cmp_value_gets(s, c_uint(128), self._cmp, to_cstr('filepath'))
        return s.value.decode()
    def __set_filepath(self, fpath):
        r = _API.cmp_value_sets(self._cmp, to_cstr('filepath'), to_cstr(fpath))
        if IS_FAIL(r):
            raise Exception(Errors.last_error())
    filepath = property(__get_filepath, __set_filepath)

    def __get_excludeshadows(self):
        b = c_int(0)
        _API.cmp_value_getb(byref(b), self._cmp, to_cstr('exclude_shadows'))
        return bool(b.value)
    def __set_excludeshadows(self, excl):
        _API.cmp_value_setb(self._cmp, to_cstr('exclude_shadows'), c_int(excl))
    exclude_shadows = property(__get_excludeshadows, __set_excludeshadows)


class Animation(Component):
    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __get_filepath(self):
        s = create_string_buffer(128)
        _API.cmp_value_gets(s, c_uint(128), self._cmp, to_cstr('filepath'))
        return s.value.decode()
    def __set_filepath(self, fpath):
        r = _API.cmp_value_sets(self._cmp, to_cstr('filepath'), to_cstr(fpath))
        if IS_FAIL(r):
            raise Exception(Errors.last_error())
    filepath = property(__get_filepath, __set_filepath)

    def __get_playrate(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('play_rate'))
        return f.value
    def __set_playrate(self, rate):
        _API.cmp_value_setf(self._cmp, to_cstr('play_rate'), c_float(rate))
    play_rate = property(__get_playrate, __set_playrate)

    def __get_clipname(self):
        s = create_string_buffer(128)
        _API.cmp_value_gets(s, c_uint(128), self._cmp, to_cstr('clip_name'))
        return s.value.decode()
    def __set_clipname(self, clip_name):
        _API.cmp_value_sets(self._cmp, to_cstr('clip_name'), to_cstr(clip_name))
    clip_name = property(__get_clipname, __set_clipname)

    def __get_frame(self):
        return _API.cmp_anim_getcurframe(self._cmp)
    def __set_frame(self, value):
        _API.cmp_value_setui(self._cmp, to_cstr('frame_idx'), c_uint(value))
    frame = property(__get_frame, __set_frame)

    def __get_isplaying(self):
        return bool(_API.cmp_anim_isplaying(self._cmp))
    is_playing = property(__get_isplaying)

    def __get_clips(self):
        clip_cnt = _API.cmp_anim_getclipcnt(self._cmp)
        clips = []
        for i in range(0, clip_cnt):
            clips.append(_API.cmp_anim_getclipname(self._cmp, c_uint(i)).decode())
        return clips
    clips = property(__get_clips)

    def __get_bones(self):
        bone_cnt = _API.cmp_anim_getbonecnt(self._cmp)
        bones = []
        for i in range(0, bone_cnt):
            bones.append(_API.cmp_anim_getbonename(self._cmp, c_uint(i)).decode())
        return bones
    bones = property(__get_bones)

    def __get_fps(self):
        return _API.cmp_anim_getfps(self._cmp)
    fps = property(__get_fps)

    def __get_framecnt(self):
        return _API.cmp_anim_getframecnt(self._cmp)
    frame_count = property(__get_framecnt)
class Animator(Component):
    class ParamType:
        UNKNOWN = 0
        INT = 1
        FLOAT = 2
        BOOLEAN = 3

    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __get_filepath(self):
        s = create_string_buffer(128)
        _API.cmp_value_gets(s, c_uint(128), self._cmp, to_cstr('filepath'))
        return s.value.decode()
    def __set_filepath(self, fpath):
        r = _API.cmp_value_sets(self._cmp, to_cstr('filepath'), to_cstr(fpath))
        if IS_FAIL(r):
            raise Exception(Errors.last_error())
    filepath = property(__get_filepath, __set_filepath)

    def get_param(self, name):
        cname = to_cstr(name)
        t = _API.cmp_animchar_getparamtype(self._cmp, cname)
        if t == Animator.ParamType.UNKNOWN:
            raise Exception('unknown parameter "%s"' % name)
        if t == Animator.ParamType.INT:
            return _API.cmp_animchar_getparami(self._cmp, cname)
        elif t == Animator.ParamType.FLOAT:
            return _API.cmp_animchar_getparamf(self._cmp, cname)
        elif t == Animator.ParamType.BOOLEAN:
            return bool(_API.cmp_animchar_getparamb(self._cmp, cname))

    def set_param(self, name, value):
        cname = to_cstr(name)
        t = _API.cmp_animchar_getparamtype(self._cmp, cname)
        if t == Animator.ParamType.UNKNOWN:
            raise Exception('unknown parameter "%s"' % name)
        if t == Animator.ParamType.INT:
            _API.cmp_animchar_setparami(self._cmp, cname, c_int(value))
        elif t == Animator.ParamType.FLOAT:
            _API.cmp_animchar_setparamf(self._cmp, cname, c_float(value))
        elif t == Animator.ParamType.BOOLEAN:
            _API.cmp_animchar_setparamb(self._cmp, cname, c_int(value))


class RigidBody(Component):
    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __get_filepath(self):
        s = create_string_buffer(128)
        _API.cmp_value_gets(s, c_uint(128), self._cmp, to_cstr('filepath'))
        return s.value.decode()
    def __set_filepath(self, fpath):
        r = _API.cmp_value_sets(self._cmp, to_cstr('filepath'), to_cstr(fpath))
        if IS_FAIL(r):
            raise Exception(Errors.last_error())
    filepath = property(__get_filepath, __set_filepath)

    def __get_kinematic(self):
        b = c_int()
        _API.cmp_value_getb(byref(b), self._cmp, to_cstr('kinematic'))
        return bool(b.value)
    def __set_kinematic(self, value):
        _API.cmp_value_setb(self._cmp, to_cstr('kinematic'), c_int(value))
    kinematic = property(__get_kinematic, __set_kinematic)

    def __get_disablegravity(self):
        b = c_int()
        _API.cmp_value_getb(byref(b), self._cmp, to_cstr('disablegravity'))
        return bool(b.value)
    def __set_disablegravity(self, value):
        _API.cmp_value_setb(self._cmp, to_cstr('disablegravity'), c_int(value))
    disable_gravity = property(__get_disablegravity, __set_disablegravity)


class Light(Component):
    class Type:
        POINT = 2
        SPOT = 3

    def __init__(self, name, cmp_type, owner_obj):
        super().__init__(name, cmp_type, owner_obj)

    def __get_type(self):
        n = c_uint()
        _API.cmp_value_getui(byref(n), self._cmp, to_cstr('type'))
        return n.value
    def __set_type(self, t):
        _API.cmp_value_setui(self._cmp, to_cstr('type'), c_uint(t))
    type = property(__get_type, __set_type)

    def __get_color(self):
        fv = (c_float*4)()
        _API.cmp_value_get4f(fv, self._cmp, to_cstr('color'))
        return Color(fv[0], fv[1], fv[2], fv[3])
    def __set_color(self, c):
        fv = (c_float*4)()
        fv[0] = c.r
        fv[1] = c.g
        fv[2] = c.b
        fv[3] = c.a
        _API.cmp_value_set4f(self._cmp, to_cstr('color'), fv)
    color = property(__get_color, __set_color)

    def __get_intensity(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('intensity'))
        return f.value
    def __set_intensity(self, f):
        _API.cmp_value_setf(self._cmp, to_cstr('intensity'), c_float(f))
    intensity = property(__get_intensity, __set_intensity)
    def __get_attennear(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('atten_near'))
        return f.value
    def __set_attennear(self, n):
        _API.cmp_value_setf(self._cmp, to_cstr('atten_near'), c_float(n))
    atten_near = property(__get_attennear, __set_attennear)

    def __get_attenfar(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('atten_far'))
        return f.value
    def __set_attenfar(self, n):
        _API.cmp_value_setf(self._cmp, to_cstr('atten_far'), c_float(n))
    atten_far = property(__get_attenfar, __set_attenfar)

    def __get_attennarrow(self):
        f = c_float()
        _API.cmp_value_getf(byref(f), self._cmp, to_cstr('atten_narrow'))
        return f.value
    def __set_attennarrow(self, n):
        _API.cmp_value_setf(self._cmp, to_cstr('atten_narrow'), c_float(n))
    atten_narrow = property(__get_attennarrow, __set_attennarrow)

    def __get_lodscheme(self):
        s = create_string_buffer(128)
        _API.cmp_value_gets(s, c_uint(128), self._cmp, to_cstr('lod_scheme'))
        return s.value.decode()
    def __set_lodscheme(self, fpath):
        r = _API.cmp_value_sets(self._cmp, to_cstr('lod_scheme'), to_cstr(fpath))
        if IS_FAIL(r):
            raise Exception(Errors.last_error())
    lod_scheme = property(__get_lodscheme, __set_lodscheme)


class Behavior(metaclass=ABCMeta):
    @abstractmethod
    def init(self, game_obj):
        pass

    @abstractmethod
    def update(self, dt):
        pass


class OrbitCam(Behavior):
    def init(self, game_obj):
        self._obj = game_obj
        self._xform = game_obj.transform
        self.target = Vec3()
        self.sensivity = 0.2
        self._distance = 10
        self._x = 0
        self._y = 0
        self._lockpos = Vec2()
        self._leftbtn_dwn = False
        self._rightbtn_dwn = False
        return True

    def update(self, dt):
        if Input.is_mousedown(MouseKey.LEFT):
            mpos = Input.get_mousepos()
            if not self._leftbtn_dwn:
                self._leftbtn_dwn = True
                self._lockpos = mpos.copy()
                Input.lock_cursor()
            delta_pos = (mpos - self._lockpos)*self.sensivity
            self._x += delta_pos.x
            self._y += delta_pos.y
            self._lockpos = mpos.copy()
        else:
            self._leftbtn_dwn = False

        if Input.is_mousedown(MouseKey.RIGHT):
            mpos = Input.get_mousepos()
            if not self._rightbtn_dwn:
                self._rightbtn_dwn = True
                self._lockpos = mpos.copy()
                Input.lock_cursor()
            delta_pos = (mpos - self._lockpos)*self.sensivity
            self._distance += delta_pos.y
            self._lockpos = mpos.copy()
        else:
            self._rightbtn_dwn = False

        if (not self._rightbtn_dwn) and (not self._leftbtn_dwn):
            Input.unlock_cursor()

        q1 = Quat()
        q1.from_axis(Vec3(0, 1, 0), Math.to_rad(self._x))
        q2 = Quat()
        q2.from_axis(Vec3(1, 0, 0), Math.to_rad(self._y))
        q = q2*q1
        self._xform.rotation = q
        m = Matrix3()
        m.rotate_quat(q)
        self._xform.position = Vec3(0, 0, -self._distance)*m + self.target


class GameObject:
    class Type:
        MODEL = (1 << 0)
        PARTICLE = (1 << 1)
        LIGHT = (1 << 2)
        DECAL = (1 << 3)
        CAMERA = (1 << 4)
        TRIGGER = (1 << 5)
        ENV = (1 << 6)

    def __init__(self, scene, obj_name, obj_type):
        self.__name = obj_name
        self.__cmps = dict()
        self.__behaviors = dict()
        self.__scene = scene
        if scene != None:
            self.__obj = _API.scn_create_obj(c_uint(scene.ID), to_cstr(obj_name),
                                             c_uint(obj_type))
        else:
            self.__obj = _API.scn_create_obj(c_uint(INVALID_INDEX), to_cstr(obj_name),
                                             c_uint(obj_type))
        if self.__obj == None:
            raise Exception('creating object failed')
        self.__create_components(obj_type)

    def __create_components(self, obj_type):
        self.add_component('transform')
        self.add_component('bounds')
        if obj_type == GameObject.Type.CAMERA:
            self.add_component('camera')
        elif obj_type == GameObject.Type.MODEL:
            self.add_component('model')
        elif obj_type == GameObject.Type.LIGHT:
            self.add_component('light')

    def destroy(self, scene_caller=False):
        if Engine.is_init and self.__obj != None:
            if scene_caller:
                _API.scn_destroy_obj(self.__obj)
                self.__obj = None
            elif self.__scene != None:
                self.__scene.destroy_object(self)
            elif self.__scene == None:
                World.destroy_object(self)

    def update_behaviors(self, dt):
        for b in self.__behaviors.values():
            b.update(dt)

    def add_component(self, name):
        if self.__obj == None:
            raise Exception('object is NULL')
        self.__cmps[name] = Component.create(name, self)

    def add_behavior(self, behavior, name):
        if behavior.init(self):
            self.__behaviors[name] = behavior

    def get_behavior(self, name):
        try:
            return self.__behaviors[name]
        except KeyError:
            raise

    def __getattr__(self, name):
        if self.__obj == None:
            raise Exception('object is NULL')
        try:
            return self.__cmps[name]
        except KeyError:
            raise AttributeError('component "%s" does not exist in GameObject "%s"' %
                                 (name, self.__name))

    def __get_name(self):
        if self.__obj == None:
            raise Exception('object is NULL')
        return self.__name
    name = property(__get_name)

    def __get_objptr(self):
        return self.__obj
    objptr = property(__get_objptr)

    def __get_scene(self):
        return self.__scene
    scene = property(__get_scene)


class _WorldMeta(type):
    _vars = dict()

    def _find_var(self, section, name):
        fullname = str.join('.', (section, name))
        if fullname in self._vars:
            item = self._vars[fullname]
            v = _API.wld_get_var(c_uint(item[0]), c_uint(item[1]))
        else:
            sec_id = _API.wld_find_section(to_cstr(section))
            if sec_id == 0:
                raise Exception('section "%s" does not exist' % section)
            var_id = _API.wld_find_var(c_uint(sec_id), to_cstr(name))
            if var_id == 0:
                raise Exception('variable "%s" does not exist' % fullname)
            self._vars[fullname] = (sec_id, var_id)
            v = _API.wld_get_var(c_uint(sec_id), c_uint(var_id))
        return v.contents

    def _get_lightdir(self):
        return self._find_var('light', 'dir').get_value()
    def _set_lightdir(self, v):
        self._find_var('light', 'dir').set_value(v)
    light_dir = property(_get_lightdir, _set_lightdir)

    def _get_lightcolor(self):
        return self._find_var('light', 'color').get_value()
    def _set_lightcolor(self, c):
        self._find_var('light', 'color').set_value(c)
    light_color = property(_get_lightcolor, _set_lightcolor)

    def _get_lightintensity(self):
        return self._find_var('light', 'intensity').get_value()
    def _set_lightintensity(self, i):
        self._find_var('light', 'intensity').set_value(i)
    light_intensity = property(_get_lightintensity, _set_lightintensity)

    def _get_ambient_groundcolor(self):
        return self._find_var('ambient', 'ground-color').get_value()
    def _set_ambient_groundcolor(self, v):
        self._find_var('ambient', 'ground-color').set_value(v)
    ambient_groundcolor = property(_get_ambient_groundcolor, _set_ambient_groundcolor)

    def _get_ambient_skycolor(self):
        return self._find_var('ambient', 'sky-color').get_value()
    def _set_ambient_skycolor(self, v):
        self._find_var('ambient', 'sky-color').set_value(v)
    ambient_skycolor = property(_get_ambient_skycolor, _set_ambient_skycolor)

    def _get_ambient_skyvector(self):
        return self._find_var('ambient', 'sky-vector').get_value()
    def _set_ambient_skyvector(self, v):
        self._find_var('ambient', 'sky-vector').set_value(v)
    ambient_skyvector = property(_get_ambient_skyvector, _set_ambient_skyvector)

    def _get_ambient_intensity(self):
        return self._find_var('ambient', 'intensity').get_value()
    def _set_ambient_intensity(self, v):
        self._find_var('ambient', 'intensity').set_value(v)
    ambient_intensity = property(_get_ambient_intensity, _set_ambient_intensity)

    def _get_physics_gravity(self):
        return self._find_var('physics', 'gravity-vector').get_value()
    def _set_physics_gravity(self, v):
        self._find_var('physics', 'gravity-vector').set_value(v)
    physics_gravity = property(_get_physics_gravity, _set_physics_gravity)


class World(metaclass=_WorldMeta):
    __objs = dict()

    @staticmethod
    def create_object(name, obj_type):
        try:
            if name in World.__objs:
                raise Exception('object already exists')
            obj = GameObject(None, name, obj_type)
        except:
            raise
        else:
            World.__objs[name] = obj
            return obj

    @staticmethod
    def update_objects(dt):
        for obj in World.__objs.values():
            obj.update_behaviors(dt)

    @staticmethod
    def destroy_object(obj):
        if Engine.is_init:
            if type(obj) is GameObject:
                if obj.name in World.__objs:
                    World.__objs[obj.name].destroy(scene_caller=True)
                    del World.__objs[obj.name]
            else:
                raise Exception('not a valid object type')

    @staticmethod
    def clear():
        _API.scn_clear(INVALID_INDEX)


class Scene:
    __scenes = dict()

    def __init__(self, name=None):
        # create the named scene (raises if it already exists)
        if name in Scene.__scenes:
            raise Exception('scene already exists')
        self.__id = _API.scn_create_scene(to_cstr(name))
        if self.__id == 0:
            raise Exception('could not create scene "%s"' % name)
        self.__objs = dict()
        self.__name = name
        Scene.__scenes[name] = self

    def destroy(self):
        if Engine.is_init and self.__id != 0:
            _API.scn_destroy_scene(c_uint(self.__id))
            self.__id = 0

    def create_object(self, name, obj_type):
        if self.__id == 0:
            raise Exception('scene is not valid')
        try:
            if name in self.__objs:
                raise Exception('object already exists')
            obj = GameObject(self, name, obj_type)
        except:
            raise
        else:
            self.__objs[name] = obj
            return obj

    def create_model(self, name):
        if self.__id == 0:
            raise Exception('scene is not valid')
        return self.create_object(name, GameObject.Type.MODEL)

    def update_objects(self, dt):
        for obj in self.__objs.values():
            obj.update_behaviors(dt)

    def destroy_object(self, obj):
        if self.__id == 0:
            raise Exception('scene is not valid')
        if Engine.is_init:
            if type(obj) is GameObject:
                if obj.name in self.__objs:
                    self.__objs[obj.name].destroy(scene_caller=True)
                    del self.__objs[obj.name]
            else:
                raise Exception('not a valid object type')

    def clear(self):
        if self.__id == 0:
            raise Exception('scene is not valid')
        _API.scn_clear(self.__id)

    def activate(self):
        if self.__id == 0:
            raise Exception('scene is not valid')
        _API.scn_setactive(self.__id)
        Engine.set_active_scene(self, caller_scene=True)

    def __get_active(self):
        if self.__id == 0:
            raise Exception('scene is not valid')
        return _API.scn_getactive(self.__id) == self.__id
    active = property(__get_active)

    def __get_id(self):
        if self.__id == 0:
            raise Exception('scene is not valid')
        return self.__id
    ID = property(__get_id)

    def find_object(self, name):
        if self.__id == 0:
            raise Exception('scene is not valid')
        return self.__objs[name]

    @staticmethod
    def find(name):
        return Scene.__scenes[name]


_API.init(debug=('--debug' in sys.argv))
{ "content_hash": "b5145ef60dbb2db445e10b2a6d87c986", "timestamp": "", "source": "github", "line_count": 1076, "max_line_length": 103, "avg_line_length": 35.44516728624535, "alnum_prop": 0.5872466504103411, "repo_name": "UPO33/darkhammer", "id": "e2afbd27a812f3c4fa06c36a5f178502cc72a97e", "size": "38139", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/pytools/dheng/dheng.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "119" }, { "name": "C", "bytes": "1509849" }, { "name": "C++", "bytes": "682318" }, { "name": "CSS", "bytes": "91" }, { "name": "GLSL", "bytes": "77443" }, { "name": "HTML", "bytes": "183677" }, { "name": "JavaScript", "bytes": "254826" }, { "name": "Lua", "bytes": "1028" }, { "name": "Makefile", "bytes": "234" }, { "name": "Objective-C", "bytes": "9548" }, { "name": "Python", "bytes": "199409" }, { "name": "Shell", "bytes": "143" } ], "symlink_target": "" }
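A minimal scene-setup sketch against the bindings above. The `conf` object and the asset path are assumptions about the host application; everything else uses classes defined in dheng.py.

from dheng import Engine, Scene, GameObject, OrbitCam

Engine.init(conf)   # `conf` is assumed: any object exposing a `.params` InitParams
scene = Scene('main')
cam = scene.create_object('cam', GameObject.Type.CAMERA)
cam.add_behavior(OrbitCam(), 'orbit')
barrel = scene.create_model('barrel')
barrel.model.filepath = 'barrel.h3dm'   # hypothetical asset path
scene.activate()
Engine.update()     # the host application would call this once per frame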
import os, sys
import setuptools
import pkg_resources
from setuptools import setup, Command

classifiers = ['Development Status :: 6 - Mature',
               'Intended Audience :: Developers',
               'License :: OSI Approved :: MIT License',
               'Operating System :: POSIX',
               'Operating System :: Microsoft :: Windows',
               'Operating System :: MacOS :: MacOS X',
               'Topic :: Software Development :: Testing',
               'Topic :: Software Development :: Libraries',
               'Topic :: Utilities'] + [
    ('Programming Language :: Python :: %s' % x)
    for x in '2 2.6 2.7 3 3.2 3.3 3.4'.split()]

with open('README.rst') as fd:
    long_description = fd.read()


def get_version():
    p = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     "_pytest", "__init__.py")
    with open(p) as f:
        for line in f.readlines():
            if "__version__" in line:
                return line.strip().split("=")[-1].strip(" '")
    raise ValueError("could not read version")


def has_environment_marker_support():
    """
    Tests that setuptools has support for PEP-426 environment marker support.

    The first known release to support it is 0.7 (and the earliest on PyPI
    seems to be 0.7.2 so we're using that), see:
    http://pythonhosted.org/setuptools/history.html#id142

    References:

    * https://wheel.readthedocs.org/en/latest/index.html#defining-conditional-dependencies
    * https://www.python.org/dev/peps/pep-0426/#environment-markers
    """
    try:
        return pkg_resources.parse_version(setuptools.__version__) >= \
            pkg_resources.parse_version('0.7.2')
    except Exception as exc:
        sys.stderr.write("Could not test setuptool's version: %s\n" % exc)
        return False


def main():
    install_requires = ['py>=1.4.29', 'pluggy>=0.3.0,<0.4.0']
    extras_require = {}
    if has_environment_marker_support():
        extras_require[':python_version=="2.6" or python_version=="3.0" or python_version=="3.1"'] = ['argparse']
        extras_require[':sys_platform=="win32"'] = ['colorama']
    else:
        if sys.version_info < (2, 7) or (3,) <= sys.version_info < (3, 2):
            install_requires.append('argparse')
        if sys.platform == 'win32':
            install_requires.append('colorama')

    setup(
        name='pytest',
        description='pytest: simple powerful testing with Python',
        long_description=long_description,
        use_scm_version={'write_to': '_pytest/__init__.py'},
        url='http://pytest.org',
        license='MIT license',
        platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
        author='Holger Krekel, Benjamin Peterson, Ronny Pfannschmidt, Floris Bruynooghe and others',
        author_email='holger at merlinux.eu',
        entry_points=make_entry_points(),
        classifiers=classifiers,
        cmdclass={'test': PyTest},
        # the following should be enabled for release
        install_requires=install_requires,
        extras_require=extras_require,
        setup_requires=['setuptools_scm'],
        packages=['_pytest', '_pytest.assertion'],
        py_modules=['pytest'],
        zip_safe=False,
    )


def cmdline_entrypoints(versioninfo, platform, basename):
    target = 'pytest:main'
    if platform.startswith('java'):
        points = {'py.test-jython': target}
    else:
        if basename.startswith('pypy'):
            points = {'py.test-%s' % basename: target}
        else:  # cpython
            points = {'py.test-%s.%s' % versioninfo[:2]: target}
        points['py.test'] = target
    return points


def make_entry_points():
    basename = os.path.basename(sys.executable)
    points = cmdline_entrypoints(sys.version_info, sys.platform, basename)
    keys = list(points.keys())
    keys.sort()
    l = ['%s = %s' % (x, points[x]) for x in keys]
    return {'console_scripts': l}


class PyTest(Command):
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        import subprocess
        PPATH = [x for x in os.environ.get('PYTHONPATH', '').split(':') if x]
        PPATH.insert(0, os.getcwd())
        os.environ['PYTHONPATH'] = ':'.join(PPATH)
        errno = subprocess.call([sys.executable, 'pytest.py', '--ignore=doc'])
        raise SystemExit(errno)


if __name__ == '__main__':
    main()
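The entry-point helper above is easiest to see with concrete inputs; this mirrors what make_entry_points() computes for two interpreters (only functions defined in this setup.py are used).

points = cmdline_entrypoints((2, 7, 10, 'final', 0), 'linux2', 'python')
assert points == {'py.test-2.7': 'pytest:main', 'py.test': 'pytest:main'}

# jython gets only the versioned script, no plain 'py.test'
jython = cmdline_entrypoints((2, 7, 0), 'java1.7', 'jython')
assert jython == {'py.test-jython': 'pytest:main'}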
{ "content_hash": "4a9ac8322855aecc42cac6149c658edc", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 113, "avg_line_length": 36.10569105691057, "alnum_prop": 0.5935600090069804, "repo_name": "doordash/pytest", "id": "69b6f5e5c3c01043fbed99ab391a1b83afe5de0b", "size": "4441", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "916" }, { "name": "PowerShell", "bytes": "5987" }, { "name": "Python", "bytes": "1075350" }, { "name": "Shell", "bytes": "282" } ], "symlink_target": "" }
"""Tests for datastore helper."""
from __future__ import absolute_import

import errno
import random
import sys
import unittest
from builtins import map
from socket import error as SocketError

from mock import MagicMock

# Protect against environments where apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
  from apache_beam.testing.test_utils import patch_retry
  from apache_beam.io.gcp.datastore.v1 import fake_datastore
  from apache_beam.io.gcp.datastore.v1 import helper
  from google.cloud.proto.datastore.v1 import datastore_pb2
  from google.cloud.proto.datastore.v1 import entity_pb2
  from google.cloud.proto.datastore.v1 import query_pb2
  from google.cloud.proto.datastore.v1.entity_pb2 import Key
  from google.rpc import code_pb2
  from googledatastore.connection import RPCError
  from googledatastore import helper as datastore_helper
except (ImportError, TypeError):
  datastore_helper = None
# pylint: enable=wrong-import-order, wrong-import-position


@unittest.skipIf(sys.version_info[0] == 3,
                 'v1/helper does not support Python 3 TODO: BEAM-4543')
@unittest.skipIf(datastore_helper is None, 'GCP dependencies are not installed')
class HelperTest(unittest.TestCase):

  def setUp(self):
    self._mock_datastore = MagicMock()
    self._query = query_pb2.Query()
    self._query.kind.add().name = 'dummy_kind'
    patch_retry(self, helper)
    self._retriable_errors = [
        RPCError("dummy", code_pb2.INTERNAL, "failed"),
        SocketError(errno.ECONNRESET, "Connection Reset"),
        SocketError(errno.ETIMEDOUT, "Timed out")
    ]
    self._non_retriable_errors = [
        RPCError("dummy", code_pb2.UNAUTHENTICATED, "failed"),
        SocketError(errno.EADDRNOTAVAIL, "Address not available")
    ]

  def permanent_retriable_datastore_failure(self, req):
    raise RPCError("dummy", code_pb2.UNAVAILABLE, "failed")

  def transient_retriable_datastore_failure(self, req):
    if self._transient_fail_count:
      self._transient_fail_count -= 1
      raise random.choice(self._retriable_errors)
    else:
      return datastore_pb2.RunQueryResponse()

  def non_retriable_datastore_failure(self, req):
    raise random.choice(self._non_retriable_errors)

  def test_query_iterator(self):
    self._mock_datastore.run_query.side_effect = (
        self.permanent_retriable_datastore_failure)
    query_iterator = helper.QueryIterator("project", None, self._query,
                                          self._mock_datastore)
    self.assertRaises(RPCError, iter(query_iterator).next)
    self.assertEqual(6, len(self._mock_datastore.run_query.call_args_list))

  def test_query_iterator_with_transient_failures(self):
    self._mock_datastore.run_query.side_effect = (
        self.transient_retriable_datastore_failure)
    query_iterator = helper.QueryIterator("project", None, self._query,
                                          self._mock_datastore)
    fail_count = 5
    self._transient_fail_count = fail_count
    for _ in query_iterator:
      pass
    self.assertEqual(fail_count + 1,
                     len(self._mock_datastore.run_query.call_args_list))

  def test_query_iterator_with_non_retriable_failures(self):
    self._mock_datastore.run_query.side_effect = (
        self.non_retriable_datastore_failure)
    query_iterator = helper.QueryIterator("project", None, self._query,
                                          self._mock_datastore)
    self.assertRaises(tuple(map(type, self._non_retriable_errors)),
                      iter(query_iterator).next)
    self.assertEqual(1, len(self._mock_datastore.run_query.call_args_list))

  def test_query_iterator_with_single_batch(self):
    num_entities = 100
    batch_size = 500
    self.check_query_iterator(num_entities, batch_size, self._query)

  def test_query_iterator_with_multiple_batches(self):
    num_entities = 1098
    batch_size = 500
    self.check_query_iterator(num_entities, batch_size, self._query)

  def
  test_query_iterator_with_exact_batch_multiple(self):
    num_entities = 1000
    batch_size = 500
    self.check_query_iterator(num_entities, batch_size, self._query)

  def test_query_iterator_with_query_limit(self):
    num_entities = 1098
    batch_size = 500
    self._query.limit.value = 1004
    self.check_query_iterator(num_entities, batch_size, self._query)

  def test_query_iterator_with_large_query_limit(self):
    num_entities = 1098
    batch_size = 500
    self._query.limit.value = 10000
    self.check_query_iterator(num_entities, batch_size, self._query)

  def check_query_iterator(self, num_entities, batch_size, query):
    """A helper method to test the QueryIterator.

    Args:
      num_entities: number of entities contained in the fake datastore.
      batch_size: the number of entities returned by fake datastore in one req.
      query: the query to be executed
    """
    entities = fake_datastore.create_entities(num_entities)
    self._mock_datastore.run_query.side_effect = \
        fake_datastore.create_run_query(entities, batch_size)
    query_iterator = helper.QueryIterator("project", None, self._query,
                                          self._mock_datastore)
    i = 0
    for entity in query_iterator:
      self.assertEqual(entity, entities[i].entity)
      i += 1
    limit = query.limit.value if query.HasField('limit') else sys.maxsize
    self.assertEqual(i, min(num_entities, limit))

  def test_is_key_valid(self):
    key = entity_pb2.Key()
    # Complete with name, no ancestor
    datastore_helper.add_key_path(key, 'kind', 'name')
    self.assertTrue(helper.is_key_valid(key))

    key = entity_pb2.Key()
    # Complete with id, no ancestor
    datastore_helper.add_key_path(key, 'kind', 12)
    self.assertTrue(helper.is_key_valid(key))

    key = entity_pb2.Key()
    # Incomplete, no ancestor
    datastore_helper.add_key_path(key, 'kind')
    self.assertFalse(helper.is_key_valid(key))

    key = entity_pb2.Key()
    # Complete with name and ancestor
    datastore_helper.add_key_path(key, 'kind', 'name', 'kind2', 'name2')
    self.assertTrue(helper.is_key_valid(key))

    key = entity_pb2.Key()
    # Complete with id and ancestor
    datastore_helper.add_key_path(key, 'kind', 'name', 'kind2', 123)
    self.assertTrue(helper.is_key_valid(key))

    key = entity_pb2.Key()
    # Incomplete with ancestor
    datastore_helper.add_key_path(key, 'kind', 'name', 'kind2')
    self.assertFalse(helper.is_key_valid(key))

    key = entity_pb2.Key()
    self.assertFalse(helper.is_key_valid(key))

  def test_compare_path_with_different_kind(self):
    p1 = Key.PathElement()
    p1.kind = 'dummy1'
    p2 = Key.PathElement()
    p2.kind = 'dummy2'
    self.assertLess(helper.compare_path(p1, p2), 0)

  def test_compare_path_with_different_id(self):
    p1 = Key.PathElement()
    p1.kind = 'dummy'
    p1.id = 10
    p2 = Key.PathElement()
    p2.kind = 'dummy'
    p2.id = 15
    self.assertLess(helper.compare_path(p1, p2), 0)

  def test_compare_path_with_different_name(self):
    p1 = Key.PathElement()
    p1.kind = 'dummy'
    p1.name = "dummy1"
    p2 = Key.PathElement()
    p2.kind = 'dummy'
    p2.name = 'dummy2'
    self.assertLess(helper.compare_path(p1, p2), 0)

  def test_compare_path_of_different_type(self):
    p1 = Key.PathElement()
    p1.kind = 'dummy'
    p1.id = 10
    p2 = Key.PathElement()
    p2.kind = 'dummy'
    p2.name = 'dummy'
    self.assertLess(helper.compare_path(p1, p2), 0)

  def test_key_comparator_with_different_partition(self):
    k1 = Key()
    k1.partition_id.namespace_id = 'dummy1'
    k2 = Key()
    k2.partition_id.namespace_id = 'dummy2'
    self.assertRaises(ValueError, helper.key_comparator, k1, k2)

  def test_key_comparator_with_single_path(self):
    k1 = Key()
    k2 = Key()
    p1 = k1.path.add()
    p2 = k2.path.add()
    p1.kind = p2.kind = 'dummy'
    self.assertEqual(helper.key_comparator(k1, k2), 0)

  def test_key_comparator_with_multiple_paths_1(self):
    k1 = Key()
    k2 = Key()
Key() p11 = k1.path.add() p12 = k1.path.add() p21 = k2.path.add() p11.kind = p12.kind = p21.kind = 'dummy' self.assertGreater(helper.key_comparator(k1, k2), 0) def test_key_comparator_with_multiple_paths_2(self): k1 = Key() k2 = Key() p11 = k1.path.add() p21 = k2.path.add() p22 = k2.path.add() p11.kind = p21.kind = p22.kind = 'dummy' self.assertLess(helper.key_comparator(k1, k2), 0) def test_key_comparator_with_multiple_paths_3(self): k1 = Key() k2 = Key() p11 = k1.path.add() p12 = k1.path.add() p21 = k2.path.add() p22 = k2.path.add() p11.kind = p12.kind = p21.kind = p22.kind = 'dummy' self.assertEqual(helper.key_comparator(k1, k2), 0) def test_key_comparator_with_multiple_paths_4(self): k1 = Key() k2 = Key() p11 = k1.path.add() p12 = k2.path.add() p21 = k2.path.add() p11.kind = p12.kind = 'dummy' # make path2 greater than path1 p21.kind = 'dummy1' self.assertLess(helper.key_comparator(k1, k2), 0) if __name__ == '__main__': unittest.main()
{ "content_hash": "301e804ff9417ca7a43aa83c55775ab1", "timestamp": "", "source": "github", "line_count": 277, "max_line_length": 80, "avg_line_length": 32.92057761732852, "alnum_prop": 0.6641079065687027, "repo_name": "markflyhigh/incubator-beam", "id": "e71255a7159e9d54206b040607a6efb0f7c9337c", "size": "9904", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sdks/python/apache_beam/io/gcp/datastore/v1/helper_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "1596" }, { "name": "CSS", "bytes": "40964" }, { "name": "Dockerfile", "bytes": "22983" }, { "name": "FreeMarker", "bytes": "7428" }, { "name": "Go", "bytes": "2508482" }, { "name": "Groovy", "bytes": "300669" }, { "name": "HTML", "bytes": "54277" }, { "name": "Java", "bytes": "24796055" }, { "name": "JavaScript", "bytes": "16472" }, { "name": "Jupyter Notebook", "bytes": "54182" }, { "name": "Python", "bytes": "4544133" }, { "name": "Ruby", "bytes": "4099" }, { "name": "Shell", "bytes": "180209" } ], "symlink_target": "" }
import re, shutil, traceback
from optparse import OptionParser
from trac.env import Environment

parser = OptionParser("""usage: [options] trac-install

  trac-install <- The path to your trac install.""")

def p(question):
    return raw_input('%s\nT&E-uninstall> ' % question)

def cast_bool(s):
    return len(re.findall('(?i)(y|t|1)', s)) > 0

def p_bool(question):
    return cast_bool(raw_input('%s\nT&E-uninstall> ' % question))

field_list = ['estimatedhours', 'hours', 'billable', 'totalhours', 'internal']
fields = "'estimatedhours', 'hours', 'billable', 'totalhours', 'internal'"

class Script(object):
    def __init__(self):
        while self.find_and_remove_installs():
            pass
        (options, args) = parser.parse_args()
        if len(args) == 0:
            self.trac = p('Please type your trac path (or run this script passing it the path):')
        else:
            self.trac = args[0]
        print "Opening trac environment"
        self.env = Environment(self.trac)
        self.env.with_transaction()
        print "Removing T&E from trac env"
        self.find_and_remove_custom_vars()
        self.find_and_remove_ticket_change()
        self.find_and_remove_reports()
        self.remove_configuration()
        self.remove_system_keys()
        print "Done uninstalling"

    def execute_in_trans(self, *args):
        result = True
        c_sql = [None]
        c_params = [None]
        @self.env.with_transaction()
        def fn(db):
            try:
                cur = db.cursor()
                for sql, params in args:
                    c_sql[0] = sql
                    c_params[0] = params
                    cur.execute(sql, params)
            except Exception, e:
                print 'There was a problem executing sql:%s\nwith parameters:%s\nException:%s' \
                      % (c_sql[0], c_params[0], e)
                raise e
        return result

    def execute(self, sql, *params):
        """Executes the query on the given project"""
        self.execute_in_trans((sql, params))

    def get_first_row(self, sql, *params):
        """Returns the first row of the query results as a tuple of values (or None)"""
        db = self.env.get_read_db()
        cur = db.cursor()
        data = None
        try:
            cur.execute(sql, params)
            data = cur.fetchone()
        except Exception, e:
            print 'There was a problem executing sql:%s\nwith parameters:%s\nException:%s' \
                  % (sql, params, e)
        return data

    def find_and_remove_installs(self):
        try:
            import timingandestimationplugin
            path = timingandestimationplugin.__file__
            install_path = re.findall('^.*egg', path)
            if len(install_path) > 0:
                install_path = install_path[0]
            else:
                print "Cant remove this install: %s" % install_path
                return False
            if p_bool('Remove: %s (y/n)' % install_path):
                shutil.rmtree(install_path)
                return True  # keep returning True so __init__ can loop over stale installs
            return False
        except ImportError:
            return False

    def find_and_remove_custom_vars(self):
        try:
            if self.get_first_row("SELECT * FROM ticket_custom WHERE name in (%s)" % fields):
                if p_bool("Remove custom fields (%s) (y/n)" % fields):
                    self.execute('DELETE FROM ticket_custom WHERE name in (%s)' % fields)
        except Exception, e:
            print "Failed to remove ticket_custom", e
            traceback.print_exc()

    def find_and_remove_ticket_change(self):
        try:
            if self.get_first_row("SELECT * FROM ticket_change WHERE field in (%s)" % fields):
                if p_bool("Remove ticket changes (%s) (y/n)" % fields):
                    self.execute('DELETE FROM ticket_change WHERE field in (%s)' % fields)
        except Exception, e:
            print "Failed to remove ticket_changes", e
            traceback.print_exc()

    def find_and_remove_reports(self):
        try:
            if self.get_first_row(
                    "SELECT * FROM report WHERE id in ("
                    "SELECT id FROM custom_report WHERE "
                    "maingroup='Timing and Estimation Plugin')"):
                if p_bool("Remove T&E reports (y/n)"):
                    self.execute("DELETE FROM report WHERE id in ("
                                 "SELECT id FROM custom_report WHERE "
                                 "maingroup='Timing and Estimation Plugin')")
                    self.execute("DELETE FROM custom_report WHERE "
                                 "maingroup='Timing and Estimation Plugin'")
        except Exception, e:
            print "Failed to remove reports", e
            traceback.print_exc()

    def remove_configuration(self):
        if not p_bool('Remove T&E configuration (y/n)'):
            return
        for k, v in self.env.config.options('ticket-custom'):
            if any(re.search('(?i)' + f, k) for f in field_list):
                self.env.config.remove('ticket-custom', k)
        for k, v in self.env.config.options('field settings'):
            self.env.config.remove('field settings', k)
        for k, v in self.env.config.options('components'):
            if re.search('timingandestimationplugin', k):
                self.env.config.remove('components', k)
        if re.search('InternalTicketsPolicy',
                     self.env.config.get('trac', 'permission_policies', '')):
            print "Please remove InternalTicketsPolicy from your trac.ini [trac] permission_policies"
        self.env.config.save()

    def remove_system_keys(self):
        if not p_bool('Remove T&E system keys (y/n)'):
            return
        self.execute("DELETE FROM system WHERE name in "
                     "('TimingAndEstimationPlugin_Db_Version','T&E-statuses');")

if __name__ == '__main__':
    Script()
{ "content_hash": "2af39bb674cf2499963aee07619e6dae", "timestamp": "", "source": "github", "line_count": 157, "max_line_length": 101, "avg_line_length": 38.36305732484077, "alnum_prop": 0.5621783164535945, "repo_name": "lexqt/EduTracTimingAndEstimation", "id": "24249e837b6b5e0e448c6b818d5ab607f0e39f56", "size": "6162", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "scripts/uninstall.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "12335" }, { "name": "Python", "bytes": "66180" }, { "name": "Shell", "bytes": "697" } ], "symlink_target": "" }
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'User.phone'
        db.add_column('auth_user', 'phone',
                      self.gf('localflavor.us.models.PhoneNumberField')(max_length=20, default='555-555-5555'),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'User.phone'
        db.delete_column('auth_user', 'phone')

    models = {
        'accounts.user': {
            'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'db_index': 'True', 'unique': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Group']", 'related_name': "'user_set'", 'symmetrical': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'phone': ('localflavor.us.models.PhoneNumberField', [], {'max_length': '20'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'related_name': "'user_set'", 'symmetrical': 'False'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'ordering': "('name',)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['accounts']
{ "content_hash": "992c8a112d024d277634303db266cbe9", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 192, "avg_line_length": 62.220338983050844, "alnum_prop": 0.5625170253336965, "repo_name": "DArtagan/teetimer", "id": "5e05200aefd15a0c2ebf75e47818726ee1467e01", "size": "3695", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "accounts/migrations/0002_auto__add_field_user_phone.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "487" }, { "name": "Python", "bytes": "61670" } ], "symlink_target": "" }
import oauth2 as oauth
import httplib2
import time, os, simplejson

# Fill the keys and secrets you retrieved after registering your app
consumer_key = '77t43u3loazvrz'
consumer_secret = 'b95o3WkJj7vbNWMj'
user_token = 'd77d38f5-6f3e-422a-9a6a-6fe985ae8c54'
user_secret = '01027166-7044-4c73-988a-61896e12dd82'

# Use your API key and secret to instantiate consumer object
consumer = oauth.Consumer(consumer_key, consumer_secret)

# Use the consumer object to initialize the client object
client = oauth.Client(consumer)

# Use your developer token and secret to instantiate access token object
access_token = oauth.Token(key=user_token, secret=user_secret)

client = oauth.Client(consumer, access_token)

#---------examples--------#
## Make call to LinkedIn to retrieve your own profile
#resp,content = client.request("https://api.linkedin.com/v1/people/~", "GET", "")

## By default, the LinkedIn API responses are in XML format. If you prefer JSON, simply specify the format in your call
## resp,content = client.request("https://api.linkedin.com/v1/people/~?format=json", "GET", "")
#--------end of example---------#

joburl = "https://api.linkedin.com/v1/job-search?job-title=Software+Engineer"
resp, content = client.request(joburl, "GET", "")

pplurl = "https://api.linkedin.com/v1/people-search?school-name=Shermer%20High%20School&current-school=false"
resp, content = client.request(pplurl, "GET", "")
{ "content_hash": "eb93a961c1b827922853f009129a350a", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 119, "avg_line_length": 39.24324324324324, "alnum_prop": 0.721763085399449, "repo_name": "DistrictDataLabs/02-labormatch", "id": "a0f99426e0a0bcc975b8393439900669aaa2e5c7", "size": "1452", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "labormatch/peoplesearch.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "6718" }, { "name": "HTML", "bytes": "9049" }, { "name": "Makefile", "bytes": "6787" }, { "name": "Python", "bytes": "24861" } ], "symlink_target": "" }
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _

from horizon import tabs


class OverviewTab(tabs.Tab):
    name = _("Overview")
    slug = "overview"
    template_name = "project/cg_snapshots/_detail_overview.html"

    def get_context_data(self, request):
        cg_snapshot = self.tab_group.kwargs['cg_snapshot']
        return {"cg_snapshot": cg_snapshot}

    def get_redirect_url(self):
        return reverse('horizon:project:cg_snapshots:index')


class CGSnapshotsDetailTabs(tabs.TabGroup):
    slug = "cg_snapshots_details"
    tabs = (OverviewTab,)
{ "content_hash": "75f689b50d8c7b8eee7db32bfdbb3e17", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 64, "avg_line_length": 27.363636363636363, "alnum_prop": 0.6976744186046512, "repo_name": "noironetworks/horizon", "id": "4c9cd997b45f9c28a172197fdac97ca2dee33d15", "size": "1175", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "openstack_dashboard/dashboards/project/cg_snapshots/tabs.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "129247" }, { "name": "HTML", "bytes": "581169" }, { "name": "JavaScript", "bytes": "2455930" }, { "name": "Python", "bytes": "5190295" }, { "name": "Shell", "bytes": "7108" } ], "symlink_target": "" }
import requests
import logging
import sys
from apiwrapper import APIWrapper

API_URL = 'https://api.sandbox.amadeus.com/v1.2'


def configure_logger(log_level=logging.WARN):
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    try:
        sa = logging.StreamHandler(stream=sys.stdout)
    except TypeError:
        sa = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s - %(filename)s:%(lineno)d - %(levelname)s - %(message)s')
    sa.setFormatter(formatter)
    logger.addHandler(sa)
    return logger

log = configure_logger()

STRICT, GRACEFUL, IGNORE = 'strict', 'graceful', 'ignore'


class Transport(APIWrapper):
    def __init__(self, api_key, api_url=API_URL):
        if not api_key:
            # Fall back to the bundled sandbox key instead of silently
            # discarding the caller's key.
            log.warning("API Key is not set.")
            api_key = '0rVN20m0UoLP9Ur9dyKg8MindOEAEtOk'
        self.api_key = api_key
        self.api_url = api_url

    def make_request(self, service_url, method='get', headers=None, data=None,
                     callback=None, errors=STRICT, **params):
        params.update({'apikey': self.api_key})
        request = getattr(requests, method.lower())
        r = request(service_url, headers=headers, data=data, params=params)
        return r.json()

    def get_location(self, code='BKK'):
        path = "location/{code}".format(code=code)
        service_url = "{url}/{path}".format(url=self.api_url, path=path)
        loc_data = self.make_request(service_url)
        return loc_data

    def search_airport(self, **params):
        service_url = "{url}/{path}".format(url=self.api_url, path='search-airport')
        return self.make_request(service_url, **params)

    def search_circle(self, **params):
        service_url = "{url}/{path}".format(url=self.api_url, path='search-circle')
        return self.make_request(service_url, **params)

    def extensive_search(self, **params):
        service_url = "{url}/{path}".format(url=self.api_url, path='extensive-search')
        return self.make_request(service_url, **params)


class Flights(Transport):
    def __init__(self, api_key, api_url=API_URL):
        super(Flights, self).__init__(api_key=api_key, api_url=api_url)
        self.api_url = "{api_url}/flights".format(api_url=api_url)

    def low_fare_search(self, **params):
        service_url = "{url}/{path}".format(url=self.api_url, path='low-fare-search')
        return self.make_request(service_url, **params)


class Hotels(Transport):
    def __init__(self, api_key, api_url=API_URL):
        super(Hotels, self).__init__(api_key=api_key, api_url=api_url)
        self.api_url = "{api_url}/hotels".format(api_url=api_url)

    def search_property_code(self, **params):
        service_url = "{url}/{path}".format(url=self.api_url,
                                            path=params['property_code'])
        return self.make_request(service_url, **params)


class TravelIntelligence(Transport):
    def __init__(self, api_key, api_url=API_URL):
        super(TravelIntelligence, self).__init__(api_key=api_key, api_url=api_url)
        self.api_url = "{api_url}/travel-intelligence".format(api_url=api_url)

    def top_destinations(self, **params):
        service_url = "{url}/{path}".format(url=self.api_url, path="top-destinations")
        return self.make_request(service_url, **params)
{ "content_hash": "fa7c152378a50a6900680d6a8c405a69", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 115, "avg_line_length": 32.11, "alnum_prop": 0.6658361881033946, "repo_name": "jpmunic/udest", "id": "5f15c3902935ad7af6dbb86af23f9280349962ea", "size": "3211", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "amadeus.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "77582" }, { "name": "JavaScript", "bytes": "43347" }, { "name": "PHP", "bytes": "17162" }, { "name": "Python", "bytes": "62795" } ], "symlink_target": "" }
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models


def _update(obj, model_contact_old, model_contact_new):
    contact_old = model_contact_old.objects.get(pk=obj.contact_id)
    contact_new = model_contact_new.objects.get(slug=contact_old.slug)
    obj.new_contact = contact_new
    obj.save()


def transfer_to_new_contact_app(apps, schema_editor):
    model_ticket = apps.get_model('crm', 'Ticket')
    model_contact_new = apps.get_model(settings.CONTACT_MODEL)
    model_contact_old = apps.get_model('crm', 'Contact')
    pks = [obj.pk for obj in model_ticket.objects.all().order_by('pk')]
    for pk in pks:
        ticket = model_ticket.objects.get(pk=pk)
        _update(ticket, model_contact_old, model_contact_new)


class Migration(migrations.Migration):

    dependencies = [
        ('crm', '0005_ticket_new_contact'),
        ('invoice', '0007_invoice_new_contact'),
        migrations.swappable_dependency(settings.CONTACT_MODEL),
    ]

    operations = [
        migrations.RunPython(transfer_to_new_contact_app),
    ]
{ "content_hash": "c929122577491e9d49598a9de574461d", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 71, "avg_line_length": 32.38235294117647, "alnum_prop": 0.6821071752951862, "repo_name": "pkimber/crm", "id": "199d59d1921e8466435fce04cc437064abd50903", "size": "1125", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "crm/migrations/0006_auto_20160125_1153.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "52" }, { "name": "HTML", "bytes": "30655" }, { "name": "Python", "bytes": "74552" }, { "name": "Shell", "bytes": "802" } ], "symlink_target": "" }
__author__ = 'bakl'

from .sn_eve import PreSN
from .snec import Snec
from .stella import Stella
# from .SneSpace import SneSpace
{ "content_hash": "12371604850225f9b705e61d457e3731", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 32, "avg_line_length": 21.666666666666668, "alnum_prop": 0.7384615384615385, "repo_name": "baklanovp/pystella", "id": "435a90c9d209c06a3c6ad158b2e5f9b4c3ec046b", "size": "130", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pystella/model/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Fortran", "bytes": "378" }, { "name": "Jupyter Notebook", "bytes": "32344" }, { "name": "Perl", "bytes": "8638492" }, { "name": "Python", "bytes": "965333" }, { "name": "ReScript", "bytes": "5700682" }, { "name": "Roff", "bytes": "19642" } ], "symlink_target": "" }
from __future__ import absolute_import

from django.apps import apps
from django.db import models
from django.db.models import Sum
from django.test import TestCase
from django.utils import timezone

from analytics.lib.counts import CountStat, COUNT_STATS, process_count_stat, \
    zerver_count_user_by_realm, zerver_count_message_by_user, \
    zerver_count_message_by_stream, zerver_count_stream_by_realm, \
    do_fill_count_stat_at_hour, do_increment_logging_stat, ZerverCountQuery, \
    LoggingCountStat, do_aggregate_to_summary_table, \
    do_drop_all_analytics_tables
from analytics.models import BaseCount, InstallationCount, RealmCount, \
    UserCount, StreamCount, FillState, Anomaly, installation_epoch
from zerver.lib.actions import do_create_user, do_deactivate_user, \
    do_activate_user, do_reactivate_user
from zerver.models import Realm, UserProfile, Message, Stream, Recipient, \
    Huddle, Client, get_user_profile_by_email, get_client

from datetime import datetime, timedelta

from six.moves import range
from typing import Any, Dict, List, Optional, Text, Tuple, Type, Union


class AnalyticsTestCase(TestCase):
    MINUTE = timedelta(seconds = 60)
    HOUR = MINUTE * 60
    DAY = HOUR * 24
    TIME_ZERO = datetime(1988, 3, 14).replace(tzinfo=timezone.utc)
    TIME_LAST_HOUR = TIME_ZERO - HOUR

    def setUp(self):
        # type: () -> None
        self.default_realm = Realm.objects.create(
            string_id='realmtest', name='Realm Test', domain='test.analytics',
            date_created=self.TIME_ZERO - 2*self.DAY)
        # used to generate unique names in self.create_*
        self.name_counter = 100
        # used as defaults in self.assertCountEquals
        self.current_property = None  # type: Optional[str]

    # Lightweight creation of users, streams, and messages
    def create_user(self, **kwargs):
        # type: (**Any) -> UserProfile
        self.name_counter += 1
        defaults = {
            'email': 'user%s@domain.tld' % (self.name_counter,),
            'date_joined': self.TIME_LAST_HOUR,
            'full_name': 'full_name',
            'short_name': 'short_name',
            'pointer': -1,
            'last_pointer_updater': 'seems unused?',
            'realm': self.default_realm,
            'api_key': '42'}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        return UserProfile.objects.create(**kwargs)

    def create_stream_with_recipient(self, **kwargs):
        # type: (**Any) -> Tuple[Stream, Recipient]
        self.name_counter += 1
        defaults = {'name': 'stream name %s' % (self.name_counter,),
                    'realm': self.default_realm,
                    'date_created': self.TIME_LAST_HOUR}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        stream = Stream.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        return stream, recipient

    def create_huddle_with_recipient(self, **kwargs):
        # type: (**Any) -> Tuple[Huddle, Recipient]
        self.name_counter += 1
        defaults = {'huddle_hash': 'hash%s' % (self.name_counter,)}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        huddle = Huddle.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
        return huddle, recipient

    def create_message(self, sender, recipient, **kwargs):
        # type: (UserProfile, Recipient, **Any) -> Message
        defaults = {
            'sender': sender,
            'recipient': recipient,
            'subject': 'subject',
            'content': 'hi',
            'pub_date': self.TIME_LAST_HOUR,
            'sending_client': get_client("website")}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        return Message.objects.create(**kwargs)

    # kwargs should only ever be a UserProfile or Stream.
    def assertCountEquals(self, table, value, property=None, subgroup=None,
                          end_time=TIME_ZERO, realm=None, **kwargs):
        # type: (Type[BaseCount], int, Optional[Text], Optional[Text], datetime, Optional[Realm], **models.Model) -> None
        if property is None:
            property = self.current_property
        queryset = table.objects.filter(property=property, end_time=end_time) \
                                .filter(**kwargs)
        if table is not InstallationCount:
            if realm is None:
                realm = self.default_realm
            queryset = queryset.filter(realm=realm)
        if subgroup is not None:
            queryset = queryset.filter(subgroup=subgroup)
        self.assertEqual(queryset.values_list('value', flat=True)[0], value)

    def assertTableState(self, table, arg_keys, arg_values):
        # type: (Type[BaseCount], List[str], List[List[Union[int, str, bool, datetime, Realm, UserProfile, Stream]]]) -> None
        """Assert that the state of a *Count table is what it should be.

        Example usage:
            self.assertTableState(RealmCount, ['property', 'subgroup', 'realm'],
                                  [['p1', 4], ['p2', 10, self.alt_realm]])

        table -- A *Count table.
        arg_keys -- List of columns of <table>.
        arg_values -- List of "rows" of <table>. Each entry of arg_values
            (e.g. ['p1', 4]) represents a row of <table>. The i'th value of
            the entry corresponds to the i'th arg_key, so e.g. the first
            arg_values entry here corresponds to a row of RealmCount with
            property='p1' and subgroup=4.

            Any columns not specified (in this case, every column of
            RealmCount other than property and subgroup) are either set to
            default values, or are ignored.

            The function checks that every entry of arg_values matches
            exactly one row of <table>, and that no additional rows exist.
            Note that this means checking a table with duplicate rows is
            not supported.
        """
        defaults = {
            'property': self.current_property,
            'subgroup': None,
            'end_time': self.TIME_ZERO}
        for values in arg_values:
            kwargs = {}  # type: Dict[str, Any]
            for i in range(len(values)):
                kwargs[arg_keys[i]] = values[i]
            for key, value in defaults.items():
                kwargs[key] = kwargs.get(key, value)
            if table is not InstallationCount:
                if 'realm' not in kwargs:
                    if 'user' in kwargs:
                        kwargs['realm'] = kwargs['user'].realm
                    elif 'stream' in kwargs:
                        kwargs['realm'] = kwargs['stream'].realm
                    else:
                        kwargs['realm'] = self.default_realm
            self.assertEqual(table.objects.filter(**kwargs).count(), 1)
        self.assertEqual(table.objects.count(), len(arg_values))


class TestProcessCountStat(AnalyticsTestCase):
    def make_dummy_count_stat(self, current_time):
        # type: (datetime) -> CountStat
        dummy_query = """INSERT INTO analytics_realmcount (realm_id, property, end_time, value)
                         VALUES (1, 'test stat', '%(end_time)s', 22)""" % {'end_time': current_time}
        stat = CountStat('test stat', ZerverCountQuery(Recipient, UserCount, dummy_query),
                         {}, None, CountStat.HOUR, False)
        return stat

    def assertFillStateEquals(self, end_time, state=FillState.DONE, property=None):
        # type: (datetime, int, Optional[Text]) -> None
        stat = self.make_dummy_count_stat(end_time)
        if property is None:
            property = stat.property
        fill_state = FillState.objects.filter(property=property).first()
        self.assertEqual(fill_state.end_time, end_time)
        self.assertEqual(fill_state.state, state)

    def test_process_stat(self):
        # type: () -> None
        # process new stat
        current_time = installation_epoch() + self.HOUR
        stat = self.make_dummy_count_stat(current_time)
        property = stat.property
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # dirty stat
        FillState.objects.filter(property=property).update(state=FillState.STARTED)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # clean stat, no update
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 1)

        # clean stat, with update
        current_time = current_time + self.HOUR
        stat = self.make_dummy_count_stat(current_time)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(current_time)
        self.assertEqual(InstallationCount.objects.filter(property=property).count(), 2)

    # This tests the is_logging branch of the code in do_delete_counts_at_hour.
    # It is important that do_delete_counts_at_hour not delete any of the
    # collected logging data!
    def test_process_logging_stat(self):
        # type: () -> None
        end_time = self.TIME_ZERO

        user_stat = LoggingCountStat('user stat', UserCount, CountStat.DAY)
        stream_stat = LoggingCountStat('stream stat', StreamCount, CountStat.DAY)
        realm_stat = LoggingCountStat('realm stat', RealmCount, CountStat.DAY)
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        realm = self.default_realm
        UserCount.objects.create(
            user=user, realm=realm, property=user_stat.property,
            end_time=end_time, value=5)
        StreamCount.objects.create(
            stream=stream, realm=realm, property=stream_stat.property,
            end_time=end_time, value=5)
        RealmCount.objects.create(
            realm=realm, property=realm_stat.property, end_time=end_time, value=5)

        # Normal run of process_count_stat
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ['property', 'value'], [[user_stat.property, 5]])
        self.assertTableState(StreamCount, ['property', 'value'], [[stream_stat.property, 5]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [[user_stat.property, 5], [stream_stat.property, 5],
                               [realm_stat.property, 5]])
        self.assertTableState(InstallationCount, ['property', 'value'],
                              [[user_stat.property, 5], [stream_stat.property, 5],
                               [realm_stat.property, 5]])

        # Change the logged data and mark FillState as dirty
        UserCount.objects.update(value=6)
        StreamCount.objects.update(value=6)
        RealmCount.objects.filter(property=realm_stat.property).update(value=6)
        FillState.objects.update(state=FillState.STARTED)

        # Check that the change propagated (and the collected data wasn't deleted)
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ['property', 'value'], [[user_stat.property, 6]])
        self.assertTableState(StreamCount, ['property', 'value'], [[stream_stat.property, 6]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [[user_stat.property, 6], [stream_stat.property, 6],
                               [realm_stat.property, 6]])
        self.assertTableState(InstallationCount, ['property', 'value'],
                              [[user_stat.property, 6], [stream_stat.property, 6],
                               [realm_stat.property, 6]])


class TestCountStats(AnalyticsTestCase):
    def setUp(self):
        # type: () -> None
        super(TestCountStats, self).setUp()
        # This tests two things for each of the queries/CountStats: Handling
        # more than 1 realm, and the time bounds (time_start and time_end in
        # the queries).
        self.second_realm = Realm.objects.create(
            string_id='second-realm', name='Second Realm', domain='second.analytics',
            date_created=self.TIME_ZERO-2*self.DAY)
        for minutes_ago in [0, 1, 61, 60*24+1]:
            creation_time = self.TIME_ZERO - minutes_ago*self.MINUTE
            user = self.create_user(email='user-%s@second.analytics' % (minutes_ago,),
                                    realm=self.second_realm, date_joined=creation_time)
            recipient = self.create_stream_with_recipient(
                name='stream %s' % (minutes_ago,), realm=self.second_realm,
                date_created=creation_time)[1]
            self.create_message(user, recipient, pub_date=creation_time)
        self.hourly_user = UserProfile.objects.get(email='user-1@second.analytics')
        self.daily_user = UserProfile.objects.get(email='user-61@second.analytics')

        # This realm should not show up in the *Count tables for any of the
        # messages_* CountStats
        self.no_message_realm = Realm.objects.create(
            string_id='no-message-realm', name='No Message Realm', domain='no.message',
            date_created=self.TIME_ZERO-2*self.DAY)
        self.create_user(realm=self.no_message_realm)
        self.create_stream_with_recipient(realm=self.no_message_realm)
        # This huddle should not show up anywhere
        self.create_huddle_with_recipient()

    def test_active_users_by_is_bot(self):
        # type: () -> None
        stat = COUNT_STATS['active_users:is_bot:day']
        self.current_property = stat.property

        # To be included
        self.create_user(is_bot=True)
        self.create_user(is_bot=True, date_joined=self.TIME_ZERO-25*self.HOUR)
        self.create_user(is_bot=False)

        # To be excluded
        self.create_user(is_active=False)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, 'true'], [1, 'false'],
                               [3, 'false', self.second_realm],
                               [1, 'false', self.no_message_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[2, 'true'], [5, 'false']])
        self.assertTableState(UserCount, [], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_is_bot(self):
        # type: () -> None
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.create(type_id=human1.id,
                                                    type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        self.create_message(bot, recipient_human1)
        self.create_message(bot, recipient_stream)
        self.create_message(bot, recipient_huddle)
        self.create_message(human1, recipient_human1)
        self.create_message(human2, recipient_human1)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
                              [[1, 'false', human1], [1, 'false', human2],
                               [3, 'true', bot], [1, 'false', self.hourly_user]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, 'false'], [3, 'true'],
                               [1, 'false', self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[3, 'false'], [3, 'true']])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_message_type(self):
        # type: () -> None
        stat = COUNT_STATS['messages_sent:message_type:day']
        self.current_property = stat.property

        # Nothing currently in this stat that is bot related, but so many of
        # the rest of our stats make the human/bot distinction that one can
        # imagine a later refactoring that will intentionally or
        # unintentionally change this. So make one of our users a bot.
        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        user3 = self.create_user()

        # private streams
        recipient_stream1 = self.create_stream_with_recipient(invite_only=True)[1]
        recipient_stream2 = self.create_stream_with_recipient(invite_only=True)[1]
        self.create_message(user1, recipient_stream1)
        self.create_message(user2, recipient_stream1)
        self.create_message(user2, recipient_stream2)

        # public streams
        recipient_stream3 = self.create_stream_with_recipient()[1]
        recipient_stream4 = self.create_stream_with_recipient()[1]
        self.create_message(user1, recipient_stream3)
        self.create_message(user1, recipient_stream4)
        self.create_message(user2, recipient_stream3)

        # huddles
        recipient_huddle1 = self.create_huddle_with_recipient()[1]
        recipient_huddle2 = self.create_huddle_with_recipient()[1]
        self.create_message(user1, recipient_huddle1)
        self.create_message(user2, recipient_huddle2)

        # private messages
        recipient_user1 = Recipient.objects.create(type_id=user1.id, type=Recipient.PERSONAL)
        recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)
        recipient_user3 = Recipient.objects.create(type_id=user3.id, type=Recipient.PERSONAL)
        self.create_message(user1, recipient_user2)
        self.create_message(user2, recipient_user1)
        self.create_message(user3, recipient_user3)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
                              [[1, 'private_stream', user1],
                               [2, 'private_stream', user2],
                               [2, 'public_stream', user1],
                               [1, 'public_stream', user2],
                               [2, 'private_message', user1],
                               [2, 'private_message', user2],
                               [1, 'private_message', user3],
                               [1, 'public_stream', self.hourly_user],
                               [1, 'public_stream', self.daily_user]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[3, 'private_stream'], [3, 'public_stream'],
                               [5, 'private_message'],
                               [2, 'public_stream', self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[3, 'private_stream'], [5, 'public_stream'],
                               [5, 'private_message']])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_to_recipients_with_same_id(self):
        # type: () -> None
        stat = COUNT_STATS['messages_sent:message_type:day']
        self.current_property = stat.property

        user = self.create_user(id=1000)
        user_recipient = Recipient.objects.create(type_id=user.id, type=Recipient.PERSONAL)
        stream_recipient = self.create_stream_with_recipient(id=1000)[1]
        huddle_recipient = self.create_huddle_with_recipient(id=1000)[1]

        self.create_message(user, user_recipient)
        self.create_message(user, stream_recipient)
        self.create_message(user, huddle_recipient)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertCountEquals(UserCount, 2, subgroup='private_message')
        self.assertCountEquals(UserCount, 1, subgroup='public_stream')

    def test_messages_sent_by_client(self):
        # type: () -> None
        stat = COUNT_STATS['messages_sent:client:day']
        self.current_property = stat.property

        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        client2 = Client.objects.create(name='client2')

        self.create_message(user1, recipient_user2, sending_client=client2)
        self.create_message(user1, recipient_stream)
        self.create_message(user1, recipient_huddle)
        self.create_message(user2, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2, sending_client=client2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        client2_id = str(client2.id)
        website_client_id = str(get_client('website').id)  # default for self.create_message
        self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
                              [[2, website_client_id, user1],
                               [1, client2_id, user1], [2, client2_id, user2],
                               [1, website_client_id, self.hourly_user],
                               [1, website_client_id, self.daily_user]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, website_client_id], [3, client2_id],
                               [2, website_client_id, self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[4, website_client_id], [3, client2_id]])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_to_stream_by_is_bot(self):
        # type: () -> None
        stat = COUNT_STATS['messages_in_stream:is_bot:day']
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.create(type_id=human1.id, type=Recipient.PERSONAL)

        stream1, recipient_stream1 = self.create_stream_with_recipient()
        stream2, recipient_stream2 = self.create_stream_with_recipient()

        # To be included
        self.create_message(human1, recipient_stream1)
        self.create_message(human2, recipient_stream1)
        self.create_message(human1, recipient_stream2)
        self.create_message(bot, recipient_stream2)
        self.create_message(bot, recipient_stream2)

        # To be excluded
        self.create_message(human2, recipient_human1)
        self.create_message(bot, recipient_human1)
        recipient_huddle = self.create_huddle_with_recipient()[1]
        self.create_message(human1, recipient_huddle)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(StreamCount, ['value', 'subgroup', 'stream'],
                              [[2, 'false', stream1],
                               [1, 'false', stream2], [2, 'true', stream2],
                               # "hourly" and "daily" stream, from TestCountStats.setUp
                               [1, 'false', Stream.objects.get(name='stream 1')],
                               [1, 'false', Stream.objects.get(name='stream 61')]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[3, 'false'], [2, 'true'], [2, 'false', self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[5, 'false'], [2, 'true']])
        self.assertTableState(UserCount, [], [])


class TestDoAggregateToSummaryTable(AnalyticsTestCase):
    # do_aggregate_to_summary_table is mostly tested by the end to end
    # nature of the tests in TestCountStats. But want to highlight one
    # feature important for keeping the size of the analytics tables small,
    # which is that if there is no relevant data in the table being
    # aggregated, the aggregation table doesn't get a row with value 0.
    def test_no_aggregated_zeros(self):
        # type: () -> None
        stat = LoggingCountStat('test stat', UserCount, CountStat.HOUR)
        do_aggregate_to_summary_table(stat, self.TIME_ZERO)
        self.assertFalse(RealmCount.objects.exists())
        self.assertFalse(InstallationCount.objects.exists())


class TestDoIncrementLoggingStat(AnalyticsTestCase):
    def test_table_and_id_args(self):
        # type: () -> None
        # For realms, streams, and users, tests that the new rows are going to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created
        self.current_property = 'test'
        second_realm = Realm.objects.create(string_id='moo', name='moo', domain='moo')
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['realm'], [[self.default_realm], [second_realm]])

        user1 = self.create_user()
        user2 = self.create_user()
        stat = LoggingCountStat('test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
        self.assertTableState(UserCount, ['user'], [[user1], [user2]])

        stream1 = self.create_stream_with_recipient()[0]
        stream2 = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
        self.assertTableState(StreamCount, ['stream'], [[stream1], [stream2]])

    def test_frequency(self):
        # type: () -> None
        times = [self.TIME_ZERO - self.MINUTE*i for i in [0, 1, 61, 24*60+1]]

        stat = LoggingCountStat('day test', RealmCount, CountStat.DAY)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)
        stat = LoggingCountStat('hour test', RealmCount, CountStat.HOUR)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)

        self.assertTableState(RealmCount, ['value', 'property', 'end_time'],
                              [[3, 'day test', self.TIME_ZERO],
                               [1, 'day test', self.TIME_ZERO - self.DAY],
                               [2, 'hour test', self.TIME_ZERO],
                               [1, 'hour test', self.TIME_LAST_HOUR],
                               [1, 'hour test', self.TIME_ZERO - self.DAY]])

    def test_get_or_create(self):
        # type: () -> None
        stat = LoggingCountStat('test', RealmCount, CountStat.HOUR)
        # All these should trigger the create part of get_or_create.
        # property is tested in test_frequency, and id_args are tested in
        # test_table_and_id_args, so this only tests a new subgroup and end_time
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, 'subgroup2', self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_LAST_HOUR)
        self.current_property = 'test'
        self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
                              [[1, 'subgroup1', self.TIME_ZERO],
                               [1, 'subgroup2', self.TIME_ZERO],
                               [1, 'subgroup1', self.TIME_LAST_HOUR]])
        # This should trigger the get part of get_or_create
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
        self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
                              [[2, 'subgroup1', self.TIME_ZERO],
                               [1, 'subgroup2', self.TIME_ZERO],
                               [1, 'subgroup1', self.TIME_LAST_HOUR]])

    def test_increment(self):
        # type: () -> None
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        self.current_property = 'test'
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=-1)
        self.assertTableState(RealmCount, ['value'], [[-1]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=3)
        self.assertTableState(RealmCount, ['value'], [[2]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['value'], [[3]])


class TestLoggingCountStats(AnalyticsTestCase):
    def test_aggregation(self):
        # type: () -> None
        stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        user = self.create_user()
        stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        stream = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        self.assertTableState(InstallationCount, ['property', 'value'],
                              [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(UserCount, ['property', 'value'], [['user test', 1]])
        self.assertTableState(StreamCount, ['property', 'value'], [['stream test', 1]])

    def test_active_users_log_by_is_bot(self):
        # type: () -> None
        property = 'active_users_log:is_bot:day'
        user = do_create_user('email', 'password', self.default_realm, 'full_name', 'short_name')
        self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_deactivate_user(user)
        self.assertEqual(0, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_activate_user(user)
        self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_deactivate_user(user)
        self.assertEqual(0, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_reactivate_user(user)
        self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])


class TestDeleteStats(AnalyticsTestCase):
    def test_do_drop_all_analytics_tables(self):
        # type: () -> None
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args = {'property': 'test', 'end_time': self.TIME_ZERO, 'value': 10}

        UserCount.objects.create(user=user, realm=user.realm, **count_args)
        StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
        RealmCount.objects.create(realm=user.realm, **count_args)
        InstallationCount.objects.create(**count_args)
        FillState.objects.create(property='test', end_time=self.TIME_ZERO, state=FillState.DONE)
        Anomaly.objects.create(info='test anomaly')

        analytics = apps.get_app_config('analytics')
        for table in list(analytics.models.values()):
            self.assertTrue(table.objects.exists())

        do_drop_all_analytics_tables()
        for table in list(analytics.models.values()):
            self.assertFalse(table.objects.exists())
{ "content_hash": "d02c4d02a5ca868243d83a7d25a82d0f", "timestamp": "", "source": "github", "line_count": 634, "max_line_length": 125, "avg_line_length": 50.93375394321767, "alnum_prop": 0.6151988108509847, "repo_name": "JPJPJPOPOP/zulip", "id": "6a85342c81c4492d9edfad656dcc6e25bd20b1d8", "size": "32292", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "analytics/tests/test_counts.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "298959" }, { "name": "Emacs Lisp", "bytes": "158" }, { "name": "GCC Machine Description", "bytes": "142" }, { "name": "Groovy", "bytes": "5509" }, { "name": "HTML", "bytes": "545905" }, { "name": "JavaScript", "bytes": "1628361" }, { "name": "Nginx", "bytes": "1280" }, { "name": "Pascal", "bytes": "1113" }, { "name": "Perl", "bytes": "401825" }, { "name": "Puppet", "bytes": "86990" }, { "name": "Python", "bytes": "3548114" }, { "name": "Ruby", "bytes": "249744" }, { "name": "Shell", "bytes": "37821" } ], "symlink_target": "" }
import xml.etree.ElementTree as ET


class brocade_ras(object):
    """Auto generated class.
    """
    def __init__(self, **kwargs):
        self._callback = kwargs.pop('callback')

    def logging_raslog_message_msgId_msgId(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        raslog = ET.SubElement(logging, "raslog")
        message = ET.SubElement(raslog, "message")
        msgId = ET.SubElement(message, "msgId")
        msgId = ET.SubElement(msgId, "msgId")
        msgId.text = kwargs.pop('msgId')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_raslog_message_msgId_severity(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        raslog = ET.SubElement(logging, "raslog")
        message = ET.SubElement(raslog, "message")
        msgId = ET.SubElement(message, "msgId")
        msgId_key = ET.SubElement(msgId, "msgId")
        msgId_key.text = kwargs.pop('msgId')
        severity = ET.SubElement(msgId, "severity")
        severity.text = kwargs.pop('severity')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_raslog_message_msgId_suppress(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        raslog = ET.SubElement(logging, "raslog")
        message = ET.SubElement(raslog, "message")
        msgId = ET.SubElement(message, "msgId")
        msgId_key = ET.SubElement(msgId, "msgId")
        msgId_key.text = kwargs.pop('msgId')
        suppress = ET.SubElement(msgId, "suppress")
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_raslog_message_msgId_syslog(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        raslog = ET.SubElement(logging, "raslog")
        message = ET.SubElement(raslog, "message")
        msgId = ET.SubElement(message, "msgId")
        msgId_key = ET.SubElement(msgId, "msgId")
        msgId_key.text = kwargs.pop('msgId')
        syslog = ET.SubElement(msgId, "syslog")
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_raslog_module_modId_modId(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        raslog = ET.SubElement(logging, "raslog")
        module = ET.SubElement(raslog, "module")
        modId = ET.SubElement(module, "modId")
        modId = ET.SubElement(modId, "modId")
        modId.text = kwargs.pop('modId')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_raslog_console(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        raslog = ET.SubElement(logging, "raslog")
        console = ET.SubElement(raslog, "console")
        console.text = kwargs.pop('console')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_syslog_server_syslogip(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        syslog_server = ET.SubElement(logging, "syslog-server")
        syslogip = ET.SubElement(syslog_server, "syslogip")
        syslogip.text = kwargs.pop('syslogip')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_syslog_server_secure(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        syslog_server = ET.SubElement(logging, "syslog-server")
        syslogip_key = ET.SubElement(syslog_server, "syslogip")
        syslogip_key.text = kwargs.pop('syslogip')
        secure = ET.SubElement(syslog_server, "secure")
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_syslog_server_port(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        syslog_server = ET.SubElement(logging, "syslog-server")
        syslogip_key = ET.SubElement(syslog_server, "syslogip")
        syslogip_key.text = kwargs.pop('syslogip')
        port = ET.SubElement(syslog_server, "port")
        port.text = kwargs.pop('port')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_auditlog_clss_clss(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        auditlog = ET.SubElement(logging, "auditlog")
        clss = ET.SubElement(auditlog, "class")
        clss = ET.SubElement(clss, "class")
        clss.text = kwargs.pop('clss')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_syslog_facility_local(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        syslog_facility = ET.SubElement(logging, "syslog-facility")
        local = ET.SubElement(syslog_facility, "local")
        local.text = kwargs.pop('local')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def logging_syslog_client_localip(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras")
        syslog_client = ET.SubElement(logging, "syslog-client")
        localip = ET.SubElement(syslog_client, "localip")
        localip.text = kwargs.pop('localip')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def system_switch_attributes_rbridge_id_rbridge_id(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras")
        switch_attributes = ET.SubElement(system, "switch-attributes")
        rbridge_id = ET.SubElement(switch_attributes, "rbridge-id")
        rbridge_id = ET.SubElement(rbridge_id, "rbridge-id")
        rbridge_id.text = kwargs.pop('rbridge_id')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def system_switch_attributes_rbridge_id_chassis_name(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras")
        switch_attributes = ET.SubElement(system, "switch-attributes")
        rbridge_id = ET.SubElement(switch_attributes, "rbridge-id")
        rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id")
        rbridge_id_key.text = kwargs.pop('rbridge_id')
        chassis_name = ET.SubElement(rbridge_id, "chassis-name")
        chassis_name.text = kwargs.pop('chassis_name')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def system_switch_attributes_rbridge_id_host_name(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras")
        switch_attributes = ET.SubElement(system, "switch-attributes")
        rbridge_id = ET.SubElement(switch_attributes, "rbridge-id")
        rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id")
        rbridge_id_key.text = kwargs.pop('rbridge_id')
        host_name = ET.SubElement(rbridge_id, "host-name")
        host_name.text = kwargs.pop('host_name')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def system_switch_attributes_chassis_name(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras")
        switch_attributes = ET.SubElement(system, "switch-attributes")
        chassis_name = ET.SubElement(switch_attributes, "chassis-name")
        chassis_name.text = kwargs.pop('chassis_name')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def system_switch_attributes_host_name(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras")
        switch_attributes = ET.SubElement(system, "switch-attributes")
        host_name = ET.SubElement(switch_attributes, "host-name")
        host_name.text = kwargs.pop('host_name')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_input_src(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd = ET.Element("bna_config_cmd")
        config = bna_config_cmd
        input = ET.SubElement(bna_config_cmd, "input")
        src = ET.SubElement(input, "src")
        src.text = kwargs.pop('src')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_input_dest(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd = ET.Element("bna_config_cmd")
        config = bna_config_cmd
        input = ET.SubElement(bna_config_cmd, "input")
        dest = ET.SubElement(input, "dest")
        dest.text = kwargs.pop('dest')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_output_session_id(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd = ET.Element("bna_config_cmd")
        config = bna_config_cmd
        output = ET.SubElement(bna_config_cmd, "output")
        session_id = ET.SubElement(output, "session-id")
        session_id.text = kwargs.pop('session_id')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_output_status(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd = ET.Element("bna_config_cmd")
        config = bna_config_cmd
        output = ET.SubElement(bna_config_cmd, "output")
        status = ET.SubElement(output, "status")
        status.text = kwargs.pop('status')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_output_status_string(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd = ET.Element("bna_config_cmd")
        config = bna_config_cmd
        output = ET.SubElement(bna_config_cmd, "output")
        status_string = ET.SubElement(output, "status-string")
        status_string.text = kwargs.pop('status_string')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_status_input_session_id(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd_status = ET.Element("bna_config_cmd_status")
        config = bna_config_cmd_status
        input = ET.SubElement(bna_config_cmd_status, "input")
        session_id = ET.SubElement(input, "session-id")
        session_id.text = kwargs.pop('session_id')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_status_output_status(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd_status = ET.Element("bna_config_cmd_status")
        config = bna_config_cmd_status
        output = ET.SubElement(bna_config_cmd_status, "output")
        status = ET.SubElement(output, "status")
        status.text = kwargs.pop('status')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def bna_config_cmd_status_output_status_string(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        bna_config_cmd_status = ET.Element("bna_config_cmd_status")
        config = bna_config_cmd_status
        output = ET.SubElement(bna_config_cmd_status, "output")
        status_string = ET.SubElement(output, "status-string")
        status_string.text = kwargs.pop('status_string')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_autoupload_param_hostip(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        autoupload_param = ET.SubElement(support, "autoupload-param")
        hostip = ET.SubElement(autoupload_param, "hostip")
        hostip.text = kwargs.pop('hostip')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_autoupload_param_username(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        autoupload_param = ET.SubElement(support, "autoupload-param")
        username = ET.SubElement(autoupload_param, "username")
        username.text = kwargs.pop('username')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_autoupload_param_directory(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        autoupload_param = ET.SubElement(support, "autoupload-param")
        directory = ET.SubElement(autoupload_param, "directory")
        directory.text = kwargs.pop('directory')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_autoupload_param_protocol(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        autoupload_param = ET.SubElement(support, "autoupload-param")
        protocol = ET.SubElement(autoupload_param, "protocol")
        protocol.text = kwargs.pop('protocol')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_autoupload_param_password(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        autoupload_param = ET.SubElement(support, "autoupload-param")
        password = ET.SubElement(autoupload_param, "password")
        password.text = kwargs.pop('password')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_support_param_hostip(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        support_param = ET.SubElement(support, "support-param")
        hostip = ET.SubElement(support_param, "hostip")
        hostip.text = kwargs.pop('hostip')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_support_param_username(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        support_param = ET.SubElement(support, "support-param")
        username = ET.SubElement(support_param, "username")
        username.text = kwargs.pop('username')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_support_param_directory(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        support_param = ET.SubElement(support, "support-param")
        directory = ET.SubElement(support_param, "directory")
        directory.text = kwargs.pop('directory')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_support_param_protocol(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        support_param = ET.SubElement(support, "support-param")
        protocol = ET.SubElement(support_param, "protocol")
        protocol.text = kwargs.pop('protocol')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_support_param_password(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        support_param = ET.SubElement(support, "support-param")
        password = ET.SubElement(support_param, "password")
        password.text = kwargs.pop('password')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_autoupload_enable(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        autoupload = ET.SubElement(support, "autoupload")
        enable = ET.SubElement(autoupload, "enable")
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

    def support_ffdc(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
        ffdc = ET.SubElement(support, "ffdc")
        callback = kwargs.pop('callback', self._callback)
        return callback(config)
"msgId") msgId_key.text = kwargs.pop('msgId') suppress = ET.SubElement(msgId, "suppress") callback = kwargs.pop('callback', self._callback) return callback(config) def logging_raslog_message_msgId_syslog(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") raslog = ET.SubElement(logging, "raslog") message = ET.SubElement(raslog, "message") msgId = ET.SubElement(message, "msgId") msgId_key = ET.SubElement(msgId, "msgId") msgId_key.text = kwargs.pop('msgId') syslog = ET.SubElement(msgId, "syslog") callback = kwargs.pop('callback', self._callback) return callback(config) def logging_raslog_module_modId_modId(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") raslog = ET.SubElement(logging, "raslog") module = ET.SubElement(raslog, "module") modId = ET.SubElement(module, "modId") modId = ET.SubElement(modId, "modId") modId.text = kwargs.pop('modId') callback = kwargs.pop('callback', self._callback) return callback(config) def logging_raslog_console(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") raslog = ET.SubElement(logging, "raslog") console = ET.SubElement(raslog, "console") console.text = kwargs.pop('console') callback = kwargs.pop('callback', self._callback) return callback(config) def logging_syslog_server_syslogip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") syslog_server = ET.SubElement(logging, "syslog-server") syslogip = ET.SubElement(syslog_server, "syslogip") syslogip.text = kwargs.pop('syslogip') callback = kwargs.pop('callback', self._callback) return callback(config) def logging_syslog_server_secure(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") syslog_server = ET.SubElement(logging, "syslog-server") syslogip_key = ET.SubElement(syslog_server, "syslogip") syslogip_key.text = kwargs.pop('syslogip') secure = ET.SubElement(syslog_server, "secure") callback = kwargs.pop('callback', self._callback) return callback(config) def logging_syslog_server_port(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") syslog_server = ET.SubElement(logging, "syslog-server") syslogip_key = ET.SubElement(syslog_server, "syslogip") syslogip_key.text = kwargs.pop('syslogip') port = ET.SubElement(syslog_server, "port") port.text = kwargs.pop('port') callback = kwargs.pop('callback', self._callback) return callback(config) def logging_auditlog_clss_clss(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") auditlog = ET.SubElement(logging, "auditlog") clss = ET.SubElement(auditlog, "class") clss = ET.SubElement(clss, "class") clss.text = kwargs.pop('clss') callback = kwargs.pop('callback', self._callback) return callback(config) def logging_syslog_facility_local(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") syslog_facility = 
ET.SubElement(logging, "syslog-facility") local = ET.SubElement(syslog_facility, "local") local.text = kwargs.pop('local') callback = kwargs.pop('callback', self._callback) return callback(config) def logging_syslog_client_localip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logging = ET.SubElement(config, "logging", xmlns="urn:brocade.com:mgmt:brocade-ras") syslog_client = ET.SubElement(logging, "syslog-client") localip = ET.SubElement(syslog_client, "localip") localip.text = kwargs.pop('localip') callback = kwargs.pop('callback', self._callback) return callback(config) def system_switch_attributes_rbridge_id_rbridge_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras") switch_attributes = ET.SubElement(system, "switch-attributes") rbridge_id = ET.SubElement(switch_attributes, "rbridge-id") rbridge_id = ET.SubElement(rbridge_id, "rbridge-id") rbridge_id.text = kwargs.pop('rbridge_id') callback = kwargs.pop('callback', self._callback) return callback(config) def system_switch_attributes_rbridge_id_chassis_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras") switch_attributes = ET.SubElement(system, "switch-attributes") rbridge_id = ET.SubElement(switch_attributes, "rbridge-id") rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id") rbridge_id_key.text = kwargs.pop('rbridge_id') chassis_name = ET.SubElement(rbridge_id, "chassis-name") chassis_name.text = kwargs.pop('chassis_name') callback = kwargs.pop('callback', self._callback) return callback(config) def system_switch_attributes_rbridge_id_host_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras") switch_attributes = ET.SubElement(system, "switch-attributes") rbridge_id = ET.SubElement(switch_attributes, "rbridge-id") rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id") rbridge_id_key.text = kwargs.pop('rbridge_id') host_name = ET.SubElement(rbridge_id, "host-name") host_name.text = kwargs.pop('host_name') callback = kwargs.pop('callback', self._callback) return callback(config) def system_switch_attributes_chassis_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras") switch_attributes = ET.SubElement(system, "switch-attributes") chassis_name = ET.SubElement(switch_attributes, "chassis-name") chassis_name.text = kwargs.pop('chassis_name') callback = kwargs.pop('callback', self._callback) return callback(config) def system_switch_attributes_host_name(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") system = ET.SubElement(config, "system", xmlns="urn:brocade.com:mgmt:brocade-ras") switch_attributes = ET.SubElement(system, "switch-attributes") host_name = ET.SubElement(switch_attributes, "host-name") host_name.text = kwargs.pop('host_name') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_input_src(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd = ET.Element("bna_config_cmd") config = bna_config_cmd input = ET.SubElement(bna_config_cmd, "input") src = ET.SubElement(input, "src") src.text = kwargs.pop('src') callback = kwargs.pop('callback', self._callback) return 
callback(config) def bna_config_cmd_input_dest(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd = ET.Element("bna_config_cmd") config = bna_config_cmd input = ET.SubElement(bna_config_cmd, "input") dest = ET.SubElement(input, "dest") dest.text = kwargs.pop('dest') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_output_session_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd = ET.Element("bna_config_cmd") config = bna_config_cmd output = ET.SubElement(bna_config_cmd, "output") session_id = ET.SubElement(output, "session-id") session_id.text = kwargs.pop('session_id') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_output_status(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd = ET.Element("bna_config_cmd") config = bna_config_cmd output = ET.SubElement(bna_config_cmd, "output") status = ET.SubElement(output, "status") status.text = kwargs.pop('status') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_output_status_string(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd = ET.Element("bna_config_cmd") config = bna_config_cmd output = ET.SubElement(bna_config_cmd, "output") status_string = ET.SubElement(output, "status-string") status_string.text = kwargs.pop('status_string') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_status_input_session_id(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd_status = ET.Element("bna_config_cmd_status") config = bna_config_cmd_status input = ET.SubElement(bna_config_cmd_status, "input") session_id = ET.SubElement(input, "session-id") session_id.text = kwargs.pop('session_id') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_status_output_status(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd_status = ET.Element("bna_config_cmd_status") config = bna_config_cmd_status output = ET.SubElement(bna_config_cmd_status, "output") status = ET.SubElement(output, "status") status.text = kwargs.pop('status') callback = kwargs.pop('callback', self._callback) return callback(config) def bna_config_cmd_status_output_status_string(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") bna_config_cmd_status = ET.Element("bna_config_cmd_status") config = bna_config_cmd_status output = ET.SubElement(bna_config_cmd_status, "output") status_string = ET.SubElement(output, "status-string") status_string.text = kwargs.pop('status_string') callback = kwargs.pop('callback', self._callback) return callback(config) def support_autoupload_param_hostip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload_param = ET.SubElement(support, "autoupload-param") hostip = ET.SubElement(autoupload_param, "hostip") hostip.text = kwargs.pop('hostip') callback = kwargs.pop('callback', self._callback) return callback(config) def support_autoupload_param_username(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload_param = ET.SubElement(support, "autoupload-param") username = 
ET.SubElement(autoupload_param, "username") username.text = kwargs.pop('username') callback = kwargs.pop('callback', self._callback) return callback(config) def support_autoupload_param_directory(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload_param = ET.SubElement(support, "autoupload-param") directory = ET.SubElement(autoupload_param, "directory") directory.text = kwargs.pop('directory') callback = kwargs.pop('callback', self._callback) return callback(config) def support_autoupload_param_protocol(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload_param = ET.SubElement(support, "autoupload-param") protocol = ET.SubElement(autoupload_param, "protocol") protocol.text = kwargs.pop('protocol') callback = kwargs.pop('callback', self._callback) return callback(config) def support_autoupload_param_password(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload_param = ET.SubElement(support, "autoupload-param") password = ET.SubElement(autoupload_param, "password") password.text = kwargs.pop('password') callback = kwargs.pop('callback', self._callback) return callback(config) def support_support_param_hostip(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") support_param = ET.SubElement(support, "support-param") hostip = ET.SubElement(support_param, "hostip") hostip.text = kwargs.pop('hostip') callback = kwargs.pop('callback', self._callback) return callback(config) def support_support_param_username(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") support_param = ET.SubElement(support, "support-param") username = ET.SubElement(support_param, "username") username.text = kwargs.pop('username') callback = kwargs.pop('callback', self._callback) return callback(config) def support_support_param_directory(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") support_param = ET.SubElement(support, "support-param") directory = ET.SubElement(support_param, "directory") directory.text = kwargs.pop('directory') callback = kwargs.pop('callback', self._callback) return callback(config) def support_support_param_protocol(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") support_param = ET.SubElement(support, "support-param") protocol = ET.SubElement(support_param, "protocol") protocol.text = kwargs.pop('protocol') callback = kwargs.pop('callback', self._callback) return callback(config) def support_support_param_password(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") support_param = ET.SubElement(support, "support-param") password = ET.SubElement(support_param, "password") password.text = kwargs.pop('password') callback = kwargs.pop('callback', self._callback) return callback(config) def support_autoupload_enable(self, 
**kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") autoupload = ET.SubElement(support, "autoupload") enable = ET.SubElement(autoupload, "enable") callback = kwargs.pop('callback', self._callback) return callback(config) def support_ffdc(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") support = ET.SubElement(config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras") ffdc = ET.SubElement(support, "ffdc") callback = kwargs.pop('callback', self._callback) return callback(config)
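    # -----------------------------------------------------------------
    # Usage sketch (not part of the generated module): each helper above
    # builds a NETCONF <config> subtree with ElementTree and hands it to
    # a callback (by default self._callback, which pynos wires to the
    # NETCONF session). The `show` callback below is a hypothetical
    # stand-in that only renders the XML, for illustration.
    def _usage_sketch(self):
        def show(config):
            # Serialize the generated subtree instead of sending it.
            return ET.tostring(config)

        # E.g. point syslog at 10.0.0.5:514 (values are made up):
        return self.logging_syslog_server_port(
            syslogip='10.0.0.5', port='514', callback=show)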
{ "content_hash": "cc1c139d9ac99b44141d4c7ff283146c", "timestamp": "", "source": "github", "line_count": 958, "max_line_length": 92, "avg_line_length": 41.07724425887265, "alnum_prop": 0.6132343972352104, "repo_name": "SivagnanamCiena/pynos", "id": "aa6179f1be72a96af8c3374d636921b331591488", "size": "39374", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "pynos/versions/base/yang/brocade_ras.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "20665905" } ], "symlink_target": "" }
import h5py
import matplotlib.pyplot as plt
import numpy as np
import os
import os.path
import tensorflow as tf

from keras.backend import floatx
from keras.layers import Conv1D, Conv2D, Dense
from keras.layers.core import Flatten, Reshape
from keras.models import load_model, Sequential
from keras import optimizers
from scipy import io, signal
from sys import argv, exit

tf.logging.set_verbosity(tf.logging.WARN)
tf.logging.set_verbosity(tf.logging.INFO)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'


######################################################
def main():
    # files
    model_file = 'model_ball_rawA.h5'
    sets_file = 'ball_data2_sets.h5'

    if not os.path.isfile(model_file):
        print "building model..."
        path = os.getcwd() + '/'
        with h5py.File(path + sets_file, 'r') as sets:
            x_train = sets['train_da'][:] / 32000
            y_train = np.log(1 + sets['train_depths'][:].reshape(-1, 192))
            x_test = sets['test_da'][:] / 32000
            y_test = np.log(1 + sets['test_depths'][:].reshape(-1, 192))
        model = build_and_train_model(x_train, y_train, model_file)
    else:
        print "loading model..."
        path = os.getcwd() + '/'
        with h5py.File(path + sets_file, 'r') as sets:
            x_test = sets['test_da'][:] / 32000
            y_test = np.log(1 + sets['test_depths'][:].reshape(-1, 192))
        model = load_model(model_file,
                           custom_objects={'adjusted_mse': adjusted_mse})

    loss = run_model(model, x_test, y_test)
######################################################


######################################################
def build_and_train_model(x_train, y_train, model_file):
    net = Sequential()
    net.add(Conv1D(32, (256), strides=(26), activation='relu',
                   input_shape=x_train.shape[1:]))
    conv_output_size = net.layers[0].compute_output_shape(x_train.shape)[1]
    net.add(Reshape((conv_output_size, 32, 1)))
    net.add(Conv2D(128, (5, 5), activation='relu'))
    net.add(Conv2D(128, (5, 5), strides=(1, 1), activation='relu'))
    net.add(Conv2D(32, (5, 5), strides=(2, 2), activation='relu'))
    net.add(Flatten())
    net.add(Dense(600, activation='relu'))
    net.add(Dense(600, activation='relu'))
    net.add(Dense(300, activation='relu'))
    net.add(Dense(192, activation='linear'))

    net.compile(optimizer='adam', loss=adjusted_mse)
    print "finished compiling"

    hist = net.fit(x_train, y_train, validation_split=0.0, epochs=1,
                   batch_size=32)

    with h5py.File(model_file[:-3] + '_loss_history.h5', 'w') as lh:
        lh.create_dataset('losses', data=hist.history['loss'])
    print "loss history saved as '" + model_file[:-3] + "_loss_history.h5'"

    net.save(model_file)
    print "model saved as '%s'" % model_file
    return net
######################################################


def run_model(net, x_test, y_test):
    predictions = net.predict(x_test)
    loss = net.evaluate(x_test, y_test)
    print "\nTEST LOSS:", loss
    view_average_error(np.exp(y_test) - 1, np.exp(predictions) - 1)
    for i in range(100, 2000, 110):
        view_depth_maps(i, np.exp(y_test) - 1, np.exp(predictions) - 1)


#####################################################
def adjusted_mse(y_true, y_pred):
    ok_entries = np.all(y_true)
    ok_entries = tf.cast(ok_entries, bool)
    safe_targets = tf.where(ok_entries, y_true, y_pred)
    sqr = tf.square(y_pred - safe_targets)
    valid = tf.cast(ok_entries, floatx())
    num_ok = tf.reduce_sum(valid, axis=-1)  # count OK entries
    num_ok = tf.maximum(num_ok, tf.ones_like(num_ok))  # avoid divide by zero
    return tf.reduce_sum(sqr, axis=-1) / num_ok


#####################################################
def view_average_error(ytrue, ypred):
    error = np.reshape(ypred - ytrue, (-1, 12, 16))
    avg_error = np.mean(error, axis=0)
    stdev = np.std(avg_error)
    avg_val = np.mean(avg_error)
    rng = (avg_val - (3 * stdev), avg_val + (3 * stdev))
    error_map = plt.imshow(avg_error, clim=rng, cmap="Greys",
                           interpolation='none')
    plt.title("Absolute Average Error")
    plt.show()


#####################################################
def view_depth_maps(index, ytrue, ypred):
    all_error = ypred - ytrue
    avg_error = np.mean(all_error)
    stdev = np.std(all_error)
    rng = (avg_error - (3 * stdev), avg_error + (3 * stdev))
    for i in range(0, ytrue.shape[0], 50):
        for j in range(10):
            index = i + j
            true = np.reshape(ytrue[index], (12, 16))
            pred = np.reshape(ypred[index], (12, 16))
            error = pred - true

            ax1 = plt.subplot(10, 3, j * 3 + 1)
            true_map = plt.imshow(true, clim=(500, 2000), interpolation='none')
            ax1.set_title("True Depth")

            ax2 = plt.subplot(10, 3, j * 3 + 2)
            pred_map = plt.imshow(pred, clim=(500, 2000), interpolation='none')
            ax2.set_title("Predicted Depth")

            ax3 = plt.subplot(10, 3, j * 3 + 3)
            error_map = plt.imshow(error, clim=rng, cmap="Greys",
                                   interpolation='none')
            ax3.set_title("Squared Error Map")
        plt.show()


#####################################################
main()
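
#####################################################
# Round-trip sketch (illustration only, not part of the original script):
# depths are trained in log space as log(1 + d) and decoded back with
# exp(y) - 1 before plotting; the uncalled check below verifies that the
# encoding is invertible.
def _log_depth_roundtrip_sketch():
    d = np.array([500.0, 1000.0, 2000.0])
    y = np.log(1 + d)
    assert np.allclose(np.exp(y) - 1, d)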
{ "content_hash": "38a520bdeba73a23ccaad6c15598caeb", "timestamp": "", "source": "github", "line_count": 140, "max_line_length": 80, "avg_line_length": 33.857142857142854, "alnum_prop": 0.6092827004219409, "repo_name": "spragunr/echolocation", "id": "3287d30571cfab8198bd9cbc2a5bc658ddc96c76", "size": "4740", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "stereo/network_raw.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "116040" }, { "name": "Shell", "bytes": "1537" } ], "symlink_target": "" }
from msrest.serialization import Model


class SystemService(Model):
    """Information about a system service deployed in the cluster.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param system_service_type: The system service type. Possible values
     include: 'None', 'ScoringFrontEnd', 'BatchFrontEnd'
    :type system_service_type: str or
     ~azure.mgmt.machinelearningcompute.models.SystemServiceType
    :ivar public_ip_address: The public IP address of the system service
    :vartype public_ip_address: str
    :ivar version: The state of the system service
    :vartype version: str
    """

    _validation = {
        'system_service_type': {'required': True},
        'public_ip_address': {'readonly': True},
        'version': {'readonly': True},
    }

    _attribute_map = {
        'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, system_service_type):
        super(SystemService, self).__init__()
        self.system_service_type = system_service_type
        self.public_ip_address = None
        self.version = None
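
# Usage sketch (illustration only, not part of the generated model): callers
# set the single required, writable field; the read-only fields remain None
# until msrest deserializes a server response into them.
def _system_service_sketch():
    svc = SystemService(system_service_type='ScoringFrontEnd')
    assert svc.public_ip_address is None
    assert svc.version is None
    return svc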
{ "content_hash": "448e5b74421d07675cb4130b041c7bec", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 75, "avg_line_length": 35.333333333333336, "alnum_prop": 0.6462264150943396, "repo_name": "AutorestCI/azure-sdk-for-python", "id": "a6d2975738e714400f26abe4b83b94afadae7e7d", "size": "1746", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/models/system_service.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "34619070" } ], "symlink_target": "" }
"""Tests for the Outlook Windows Registry plugins.""" import unittest from dfdatetime import filetime as dfdatetime_filetime from dfwinreg import definitions as dfwinreg_definitions from dfwinreg import fake as dfwinreg_fake from plaso.parsers.winreg_plugins import outlook from tests.parsers.winreg_plugins import test_lib class MSOutlook2013SearchMRUPluginTest(test_lib.RegistryPluginTestCase): """Tests for the Outlook Search MRU Windows Registry plugin.""" def _CreateTestKey(self, key_path, time_string): """Creates Registry keys and values for testing. Args: key_path (str): Windows Registry key path. time_string (str): key last written date and time. Returns: dfwinreg.WinRegistryKey: a Windows Registry key. """ filetime = dfdatetime_filetime.Filetime() filetime.CopyFromDateTimeString(time_string) registry_key = dfwinreg_fake.FakeWinRegistryKey( 'Search', key_path=key_path, last_written_time=filetime.timestamp, offset=1456) value_name = ( 'C:\\Users\\username\\AppData\\Local\\Microsoft\\Outlook\\' 'username@example.com.ost') value_data = b'\xcf\x2b\x37\x00' registry_value = dfwinreg_fake.FakeWinRegistryValue( value_name, data=value_data, data_type=dfwinreg_definitions.REG_DWORD, offset=1892) registry_key.AddValue(registry_value) return registry_key def testFilters(self): """Tests the FILTERS class attribute.""" plugin = outlook.OutlookSearchMRUPlugin() key_path = ( 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\14.0\\Outlook\\' 'Search') self._AssertFiltersOnKeyPath(plugin, key_path) key_path = ( 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\15.0\\Outlook\\' 'Search') self._AssertFiltersOnKeyPath(plugin, key_path) self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus') def testProcess(self): """Tests the Process function.""" key_path = ( 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\15.0\\Outlook\\' 'Search') time_string = '2012-08-28 09:23:49.002031' registry_key = self._CreateTestKey(key_path, time_string) plugin = outlook.OutlookSearchMRUPlugin() storage_writer = self._ParseKeyWithPlugin(registry_key, plugin) number_of_event_data = storage_writer.GetNumberOfAttributeContainers( 'event_data') self.assertEqual(number_of_event_data, 1) number_of_events = storage_writer.GetNumberOfAttributeContainers('event') self.assertEqual(number_of_events, 1) number_of_warnings = storage_writer.GetNumberOfAttributeContainers( 'extraction_warning') self.assertEqual(number_of_warnings, 0) number_of_warnings = storage_writer.GetNumberOfAttributeContainers( 'recovery_warning') self.assertEqual(number_of_warnings, 0) expected_event_values = { 'data_type': 'windows:registry:outlook_search_mru', 'entries': ( 'C:\\Users\\username\\AppData\\Local\\Microsoft\\Outlook\\' 'username@example.com.ost: 0x00372bcf'), 'key_path': key_path, 'last_written_time': '2012-08-28T09:23:49.0020310+00:00'} event_data = storage_writer.GetAttributeContainerByIndex('event_data', 0) self.CheckEventData(event_data, expected_event_values) # TODO: The catalog for Office 2013 (15.0) contains binary values not # dword values. Check if Office 2007 and 2010 have the same. Re-enable the # plug-ins once confirmed and OutlookSearchMRUPlugin has been extended to # handle the binary data or create a OutlookSearchCatalogMRUPlugin. 
# class MSOutlook2013SearchCatalogMRUPluginTest(unittest.TestCase): # """Tests for the Outlook Search Catalog MRU Windows Registry plugin.""" # # def testProcess(self): # """Tests the Process function.""" # key_path = ( # 'HKEY_CURRENT_USER\\Software\\Microsoft\\Office\\15.0\\Outlook\\' # 'Search\\Catalog') # time_string = '2012-08-28 09:23:49.002031' # # filetime = dfdatetime_filetime.Filetime() # filetime.CopyFromDateTimeString(time_string) # registry_key = dfwinreg_fake.FakeWinRegistryKey( # 'Catalog', key_path=key_path, last_written_time=filetime.timestamp, # offset=3421) # # value_name = ( # 'C:\\Users\\username\\AppData\\Local\\Microsoft\\Outlook\\' # 'username@example.com.ost') # value_data = b'\x94\x01\x00\x00\x00\x00' # registry_value = dfwinreg_fake.FakeWinRegistryValue( # value_name, data=value_data, # data_type=dfwinreg_definitions.REG_BINARY, offset=827) # registry_key.AddValue(registry_value) # # plugin = outlook.MSOutlook2013SearchCatalogMRUPlugin() # # # TODO: add test for Catalog key. if __name__ == '__main__': unittest.main()
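
# FILETIME sketch (illustration only, not part of the test suite): dfdatetime
# represents the key's last-written time as a FILETIME timestamp, a count of
# 100-nanosecond intervals since 1601-01-01, which is the form that
# FakeWinRegistryKey expects above:
#
#   filetime = dfdatetime_filetime.Filetime()
#   filetime.CopyFromDateTimeString('2012-08-28 09:23:49.002031')
#   filetime.timestamp  # integer count of 100ns intervals since 1601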
{ "content_hash": "cc8493bcebbe12ae58242aa12eae685e", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 78, "avg_line_length": 35.850746268656714, "alnum_prop": 0.6917152373022482, "repo_name": "joachimmetz/plaso", "id": "bc3c9d1eb5cc5acf75a1d2bd68931e5b0f9f975f", "size": "4851", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "tests/parsers/winreg_plugins/outlook.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "4301" }, { "name": "Makefile", "bytes": "122" }, { "name": "PowerShell", "bytes": "1305" }, { "name": "Python", "bytes": "5345755" }, { "name": "Shell", "bytes": "27279" }, { "name": "YARA", "bytes": "507" } ], "symlink_target": "" }
import unittest

import jmespath
from parameterized import parameterized

from tests.helm_template_generator import render_chart


class GitSyncWebserverTest(unittest.TestCase):
    def test_should_add_dags_volume_to_the_webserver_if_git_sync_and_persistence_is_enabled(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}},
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0])

    def test_should_add_dags_volume_to_the_webserver_if_git_sync_is_enabled_and_persistence_is_disabled(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": False}},
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0])

    def test_should_add_git_sync_container_to_webserver_if_persistence_is_not_enabled_but_git_sync_is(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {
                    "gitSync": {"enabled": True, "containerName": "git-sync"},
                    "persistence": {"enabled": False},
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "git-sync" == jmespath.search("spec.template.spec.containers[1].name", docs[0])

    def test_should_have_service_account_defined(self):
        docs = render_chart(
            values={"dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}},
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "RELEASE-NAME-airflow-webserver" == jmespath.search(
            "spec.template.spec.serviceAccountName", docs[0]
        )

    @parameterized.expand(
        [
            ("2.0.0", True),
            ("2.0.2", True),
            ("1.10.14", False),
            ("1.9.0", False),
            ("2.1.0", True),
        ],
    )
    def test_git_sync_with_different_airflow_versions(self, airflow_version, exclude_webserver):
        """
        If Airflow >= 2.0.0 - git sync related containers, volume mounts & volumes
        are not created.
        """
        docs = render_chart(
            values={
                "airflowVersion": airflow_version,
                "dags": {
                    "gitSync": {
                        "enabled": True,
                    },
                    "persistence": {"enabled": False},
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        containers_names = [
            container["name"] for container in jmespath.search("spec.template.spec.containers", docs[0])
        ]

        volume_mount_names = [
            vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
        ]

        volume_names = [volume["name"] for volume in jmespath.search("spec.template.spec.volumes", docs[0])]

        if exclude_webserver:
            assert "git-sync" not in containers_names
            assert "dags" not in volume_mount_names
            assert "dags" not in volume_names
        else:
            assert "git-sync" in containers_names
            assert "dags" in volume_mount_names
            assert "dags" in volume_names

    def test_should_add_env(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {
                    "gitSync": {
                        "enabled": True,
                        "env": [{"name": "FOO", "value": "bar"}],
                    }
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert {"name": "FOO", "value": "bar"} in jmespath.search(
            "spec.template.spec.containers[1].env", docs[0]
        )

    def test_resources_are_configurable(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {
                    "gitSync": {
                        "enabled": True,
                        "resources": {
                            "limits": {"cpu": "200m", 'memory': "128Mi"},
                            "requests": {"cpu": "300m", 'memory': "169Mi"},
                        },
                    },
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )
        assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0])
        assert "169Mi" == jmespath.search(
            "spec.template.spec.containers[1].resources.requests.memory", docs[0]
        )
        assert "300m" == jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0])
{ "content_hash": "2b5d09fcac25277feae3b1829ebc6aff", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 110, "avg_line_length": 35.15483870967742, "alnum_prop": 0.4931180033033584, "repo_name": "mistercrunch/airflow", "id": "506452f9e2ba0407a8b551a8fe1cb779b92ae43c", "size": "6235", "binary": false, "copies": "5", "ref": "refs/heads/main", "path": "chart/tests/test_git_sync_webserver.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "36341" }, { "name": "HTML", "bytes": "99243" }, { "name": "JavaScript", "bytes": "891460" }, { "name": "Mako", "bytes": "494" }, { "name": "Python", "bytes": "773270" }, { "name": "Shell", "bytes": "5659" } ], "symlink_target": "" }
"""Simple firmware for a temperature logger""" from dht import DHT22 import machine import network import time import urequests # turn off the access point ap_if = network.WLAN(network.AP_IF) ap_if.active(False) # join existing hotspot sta_if = network.WLAN(network.STA_IF) sta_if.active(True) sta_if.connect("G2_8802", "manchester") while not sta_if.isconnected(): time.sleep(0.5) print('network config:', sta_if.ifconfig()) led = machine.Pin(0, machine.Pin.OUT) led.high() dht_pin = machine.Pin(4) dht = DHT22(dht_pin) observations = [] N = 0 while True: time.sleep(5) led.low() time.sleep(0.05) led.high() dht.measure() t = time.time() payload = {'time': t, 'temp': dht.temperature(), 'humidity': dht.humidity(), } observations.append(payload) print('observing', payload) if N%4 == 0 and sta_if.isconnected(): while observations: payload = observations[-1] try: time.sleep(0.5) r = urequests.post("http://192.168.43.87:5000/sensor", json=payload) r.close() print('sent', payload) except Exception as e: print(e) break _ = observations.pop() N += 1
{ "content_hash": "4f3e50f0a46bd554780e731d8f79ea81", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 70, "avg_line_length": 20.723076923076924, "alnum_prop": 0.5575352635486266, "repo_name": "wildtreetech/CRUMBS", "id": "832055bd1fc714ab3eff36d356d3b39651daf84d", "size": "1347", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "board/temperatur-logger.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "6438" } ], "symlink_target": "" }
"""prompt-toolkit utilities Everything in this module is a private API, not to be used outside IPython. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import unicodedata from wcwidth import wcwidth from IPython.core.completer import ( provisionalcompleter, cursor_to_position, _deduplicate_completions) from prompt_toolkit.completion import Completer, Completion from prompt_toolkit.layout.lexers import Lexer from prompt_toolkit.layout.lexers import PygmentsLexer import pygments.lexers as pygments_lexers _completion_sentinel = object() def _elide(string, *, min_elide=30): """ If a string is long enough, and has at least 2 dots, replace the middle part with ellipses. If three consecutive dots, or two consecutive dots are encountered these are replaced by the equivalents HORIZONTAL ELLIPSIS or TWO DOT LEADER unicode equivalents """ string = string.replace('...','\N{HORIZONTAL ELLIPSIS}') string = string.replace('..','\N{TWO DOT LEADER}') if len(string) < min_elide: return string parts = string.split('.') if len(parts) <= 3: return string return '{}.{}\N{HORIZONTAL ELLIPSIS}{}.{}'.format(parts[0], parts[1][0], parts[-2][-1], parts[-1]) def _adjust_completion_text_based_on_context(text, body, offset): if text.endswith('=') and len(body) > offset and body[offset] is '=': return text[:-1] else: return text class IPythonPTCompleter(Completer): """Adaptor to provide IPython completions to prompt_toolkit""" def __init__(self, ipy_completer=None, shell=None, patch_stdout=None): if shell is None and ipy_completer is None: raise TypeError("Please pass shell=an InteractiveShell instance.") self._ipy_completer = ipy_completer self.shell = shell if patch_stdout is None: raise TypeError("Please pass patch_stdout") self.patch_stdout = patch_stdout @property def ipy_completer(self): if self._ipy_completer: return self._ipy_completer else: return self.shell.Completer def get_completions(self, document, complete_event): if not document.current_line.strip(): return # Some bits of our completion system may print stuff (e.g. if a module # is imported). This context manager ensures that doesn't interfere with # the prompt. with self.patch_stdout(), provisionalcompleter(): body = document.text cursor_row = document.cursor_position_row cursor_col = document.cursor_position_col cursor_position = document.cursor_position offset = cursor_to_position(body, cursor_row, cursor_col) yield from self._get_completions(body, offset, cursor_position, self.ipy_completer) @staticmethod def _get_completions(body, offset, cursor_position, ipyc): """ Private equivalent of get_completions() use only for unit_testing. """ debug = getattr(ipyc, 'debug', False) completions = _deduplicate_completions( body, ipyc.completions(body, offset)) for c in completions: if not c.text: # Guard against completion machinery giving us an empty string. continue text = unicodedata.normalize('NFC', c.text) # When the first character of the completion has a zero length, # then it's probably a decomposed unicode character. E.g. caused by # the "\dot" completion. Try to compose again with the previous # character. if wcwidth(text[0]) == 0: if cursor_position + c.start > 0: char_before = body[c.start - 1] fixed_text = unicodedata.normalize( 'NFC', char_before + text) # Yield the modified completion instead, if this worked. 
if wcwidth(text[0:1]) == 1: yield Completion(fixed_text, start_position=c.start - offset - 1) continue # TODO: Use Jedi to determine meta_text # (Jedi currently has a bug that results in incorrect information.) # meta_text = '' # yield Completion(m, start_position=start_pos, # display_meta=meta_text) display_text = c.text adjusted_text = _adjust_completion_text_based_on_context(c.text, body, offset) if c.type == 'function': display_text = display_text + '()' yield Completion(adjusted_text, start_position=c.start - offset, display=_elide(display_text), display_meta=c.type) class IPythonPTLexer(Lexer): """ Wrapper around PythonLexer and BashLexer. """ def __init__(self): l = pygments_lexers self.python_lexer = PygmentsLexer(l.Python3Lexer) self.shell_lexer = PygmentsLexer(l.BashLexer) self.magic_lexers = { 'HTML': PygmentsLexer(l.HtmlLexer), 'html': PygmentsLexer(l.HtmlLexer), 'javascript': PygmentsLexer(l.JavascriptLexer), 'js': PygmentsLexer(l.JavascriptLexer), 'perl': PygmentsLexer(l.PerlLexer), 'ruby': PygmentsLexer(l.RubyLexer), 'latex': PygmentsLexer(l.TexLexer), } def lex_document(self, cli, document): text = document.text.lstrip() lexer = self.python_lexer if text.startswith('!') or text.startswith('%%bash'): lexer = self.shell_lexer elif text.startswith('%%'): for magic, l in self.magic_lexers.items(): if text.startswith('%%' + magic): lexer = l break return lexer.lex_document(cli, document)
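
# Behaviour sketch for _elide (illustration only, not part of the module):
def _elide_sketch():
    # Short strings, or strings with three or fewer dotted parts, pass
    # through unchanged.
    assert _elide('os.path') == 'os.path'
    # Long dotted names keep the outermost segments and compress the middle
    # into a single HORIZONTAL ELLIPSIS character.
    elided = _elide('aaaa.bbbb.cccc.dddd.eeee.ffff.gggg')
    assert '\N{HORIZONTAL ELLIPSIS}' in elided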
{ "content_hash": "d2ccebaa0af9664c8679f38a090a6570", "timestamp": "", "source": "github", "line_count": 160, "max_line_length": 127, "avg_line_length": 37.05, "alnum_prop": 0.6111673414304993, "repo_name": "nitin-cherian/LifeLongLearning", "id": "70140b4c612bab49de5424806eefc93551750355", "size": "5928", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Python/PythonProgrammingLanguage/Encapsulation/encap_env/lib/python3.5/site-packages/IPython/terminal/ptutils.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "32365" }, { "name": "CSS", "bytes": "10259" }, { "name": "HTML", "bytes": "55977" }, { "name": "JavaScript", "bytes": "7368910" }, { "name": "Jupyter Notebook", "bytes": "768879" }, { "name": "Mako", "bytes": "494" }, { "name": "Python", "bytes": "17502534" }, { "name": "Shell", "bytes": "7751" }, { "name": "Smarty", "bytes": "30663" } ], "symlink_target": "" }
"""Ops for GPU collective operations implemented using NVIDIA nccl.""" import threading from tensorflow.python.eager import context from tensorflow.python.eager import def_function from tensorflow.python.framework import device from tensorflow.python.framework import ops from tensorflow.python.ops import gen_nccl_ops _module_lock = threading.Lock() _shared_name_counter = 0 def all_sum(tensors): """Returns a list of tensors with the all-reduce sum across `tensors`. The computation is done with an all-reduce operation, so if only some of the returned tensors are evaluated then the computation will hang. Args: tensors: The input tensors across which to sum; must be assigned to GPU devices. Returns: List of tensors, each with the sum of the input tensors, where tensor i has the same device as `tensors[i]`. """ return _apply_all_reduce('sum', tensors) @ops.RegisterGradient('NcclAllReduce') def _all_sum_grad(op, grad): """The gradients for `all_sum`. Args: op: The `all_sum` `Operation` that we are differentiating. grad: Gradient with respect to the output of the `all_sum` op. Returns: The gradient with respect to the output of `all_sum`. Raises: LookupError: If `reduction` is not `sum`. """ if op.get_attr('reduction') != b'sum': raise LookupError('No gradient defined for NcclAllReduce except for ' 'reduction="sum".') _check_device(grad, expected=op.device) num_devices = op.get_attr('num_devices') shared_name = op.get_attr('shared_name') + b'_grad' with ops.device(op.device): return gen_nccl_ops.nccl_all_reduce( input=grad, reduction='sum', num_devices=num_devices, shared_name=shared_name) def all_prod(tensors): """Returns a list of tensors with the all-reduce product across `tensors`. The computation is done with an all-reduce operation, so if only some of the returned tensors are evaluated then the computation will hang. Args: tensors: The input tensors across which to multiply; must be assigned to GPU devices. Returns: List of tensors, each with the product of the input tensors, where tensor i has the same device as `tensors[i]`. """ return _apply_all_reduce('prod', tensors) def all_min(tensors): """Returns a list of tensors with the all-reduce min across `tensors`. The computation is done with an all-reduce operation, so if only some of the returned tensors are evaluated then the computation will hang. Args: tensors: The input tensors across which to reduce; must be assigned to GPU devices. Returns: List of tensors, each with the minimum of the input tensors, where tensor i has the same device as `tensors[i]`. """ return _apply_all_reduce('min', tensors) def all_max(tensors): """Returns a list of tensors with the all-reduce max across `tensors`. The computation is done with an all-reduce operation, so if only some of the returned tensors are evaluated then the computation will hang. Args: tensors: The input tensors across which to reduce; must be assigned to GPU devices. Returns: List of tensors, each with the maximum of the input tensors, where tensor i has the same device as `tensors[i]`. """ return _apply_all_reduce('max', tensors) def reduce_sum(tensors): """Returns a tensor with the reduce sum across `tensors`. The computation is done with a reduce operation, so only one tensor is returned. Args: tensors: The input tensors across which to sum; must be assigned to GPU devices. Returns: A tensor containing the sum of the input tensors. Raises: LookupError: If context is not currently using a GPU device. 
""" return _apply_reduce('sum', tensors) @ops.RegisterGradient('NcclReduce') def _reduce_sum_grad(op, grad): """The gradients for input `Operation` of `reduce_sum`. Args: op: The `sum send` `Operation` that we are differentiating. grad: Gradient with respect to the output of the `reduce_sum` op. Returns: The gradient with respect to the input of `reduce_sum` op. Raises: LookupError: If the reduction attribute of op is not `sum`. """ if op.get_attr('reduction') != b'sum': raise LookupError('No gradient defined for NcclAllReduce except for ' 'reduction="sum".') _check_device(grad, expected=op.device) with ops.device(op.device): result = gen_nccl_ops.nccl_broadcast(input=grad, shape=grad.shape) return [result] * len(op.inputs) def broadcast(tensor): """Returns a tensor that can be efficiently transferred to other devices. Args: tensor: The tensor to send; must be assigned to a GPU device. Returns: A tensor with the value of `src_tensor`, which can be used as input to ops on other GPU devices. """ _check_device(tensor) with ops.device(tensor.device): return gen_nccl_ops.nccl_broadcast(input=tensor, shape=tensor.shape) @ops.RegisterGradient('NcclBroadcast') def _broadcast_grad(op, accumulated_grad): """The gradients for input `Operation` of `broadcast`. Args: op: The `broadcast send` `Operation` that we are differentiating. accumulated_grad: Accumulated gradients with respect to the output of the `broadcast` op. Returns: Gradients with respect to the input of `broadcast`. """ # Grab inputs of accumulated_grad and replace accumulation with reduce_sum. grads = [t for t in accumulated_grad.op.inputs] for t in grads: _check_device(t) with ops.device(op.device): return gen_nccl_ops.nccl_reduce(input=grads, reduction='sum') def _apply_all_reduce(reduction, tensors): """Helper function for all_* functions.""" if not tensors: raise ValueError('Must pass >0 tensors to all reduce operations') shared_name = _get_shared_name() def _all_reduce(): """Call nccl allreduce.""" res = [] for t in tensors: _check_device(t) with ops.device(t.device): res.append( gen_nccl_ops.nccl_all_reduce( input=t, reduction=reduction, num_devices=len(tensors), shared_name=shared_name)) return res if context.executing_eagerly(): # Nccl ops will block unless they are executed concurrently such as in a # graph or a defun. return def_function.function(_all_reduce)() else: return _all_reduce() def _apply_reduce(reduction, tensors): """Helper function for reduce_* functions.""" if not tensors: raise ValueError('Must pass >0 tensors to reduce operations') for t in tensors: _check_device(t) result = gen_nccl_ops.nccl_reduce(input=tensors, reduction=reduction) try: next(t for t in tensors if t.device == result.device) except StopIteration: raise ValueError('One input tensor must be assigned to current device') return result def _get_shared_name(): global _shared_name_counter with _module_lock: val = _shared_name_counter _shared_name_counter += 1 return 'c%s' % val def _check_device(tensor, expected=None): if not device.canonical_name(tensor.device): raise ValueError(f'Device assignment for tensor={tensor} required for nccl ' 'collective ops') if expected and expected != tensor.device: raise ValueError(f'Expected device {expected}, got {tensor.device} for ' f'tensor={tensor}.')
{ "content_hash": "b843c996a292111d989686f3442fc4e9", "timestamp": "", "source": "github", "line_count": 253, "max_line_length": 80, "avg_line_length": 29.47826086956522, "alnum_prop": 0.6898632341110217, "repo_name": "tensorflow/tensorflow", "id": "4b4d847acecda917bb96ee79e383915dbed84d33", "size": "8147", "binary": false, "copies": "12", "ref": "refs/heads/master", "path": "tensorflow/python/ops/nccl_ops.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "36962" }, { "name": "C", "bytes": "1400913" }, { "name": "C#", "bytes": "13584" }, { "name": "C++", "bytes": "126099822" }, { "name": "CMake", "bytes": "182430" }, { "name": "Cython", "bytes": "5003" }, { "name": "Dockerfile", "bytes": "416133" }, { "name": "Go", "bytes": "2129888" }, { "name": "HTML", "bytes": "4686483" }, { "name": "Java", "bytes": "1074438" }, { "name": "Jupyter Notebook", "bytes": "792906" }, { "name": "LLVM", "bytes": "6536" }, { "name": "MLIR", "bytes": "11447433" }, { "name": "Makefile", "bytes": "2760" }, { "name": "Objective-C", "bytes": "172666" }, { "name": "Objective-C++", "bytes": "300213" }, { "name": "Pawn", "bytes": "5552" }, { "name": "Perl", "bytes": "7536" }, { "name": "Python", "bytes": "42782002" }, { "name": "Roff", "bytes": "5034" }, { "name": "Ruby", "bytes": "9199" }, { "name": "Shell", "bytes": "621854" }, { "name": "Smarty", "bytes": "89538" }, { "name": "SourcePawn", "bytes": "14625" }, { "name": "Starlark", "bytes": "7738020" }, { "name": "Swift", "bytes": "78435" }, { "name": "Vim Snippet", "bytes": "58" } ], "symlink_target": "" }
import importlib
import os
import pickle
import sys
import traceback


def main():
    script_dict = pickle.load(sys.stdin.buffer)

    req = script_dict['request']
    inst = script_dict['vnf_instance']
    grant_req = script_dict['grant_request']
    grant = script_dict['grant_response']
    tmp_csar_dir = script_dict['tmp_csar_dir']

    additional_params = req['additionalParams']
    userdata_path = additional_params['lcm-operation-user-data']
    userdata_class = additional_params['lcm-operation-user-data-class']

    sys.path.append(tmp_csar_dir)
    class_module = os.path.splitext(
        userdata_path.lstrip('./'))[0].replace('/', '.')
    module = importlib.import_module(class_module)
    klass = getattr(module, userdata_class)
    method = getattr(klass, grant_req['operation'].lower())

    stack_dict = method(req, inst, grant_req, grant, tmp_csar_dir)

    pickle.dump(stack_dict, sys.stdout.buffer)
    sys.stdout.flush()


if __name__ == "__main__":
    try:
        main()
        os._exit(0)
    except Exception:
        sys.stderr.write(traceback.format_exc())
        sys.stderr.flush()
        os._exit(1)
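
# Caller-side sketch (illustration only, not part of this script): the parent
# process pickles the request dictionary onto this script's stdin and
# unpickles the resulting stack dict from its stdout. The key names mirror
# the ones read in main(); the harness itself is hypothetical.
#
#   import pickle
#   import subprocess
#
#   script_dict = {
#       'request': req, 'vnf_instance': inst, 'grant_request': grant_req,
#       'grant_response': grant, 'tmp_csar_dir': '/tmp/csar',
#   }
#   proc = subprocess.Popen(['python3', 'userdata_main.py'],
#                           stdin=subprocess.PIPE, stdout=subprocess.PIPE)
#   out, _ = proc.communicate(pickle.dumps(script_dict))
#   stack_dict = pickle.loads(out)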
{ "content_hash": "13fade5e94272dde0e843a2d6c688f7e", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 71, "avg_line_length": 27.75609756097561, "alnum_prop": 0.648506151142355, "repo_name": "stackforge/tacker", "id": "76acb4317cd025acf5bec5889fcb96007881da91", "size": "1800", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tacker/sol_refactored/infra_drivers/openstack/userdata_main.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "1142" }, { "name": "Python", "bytes": "1143026" }, { "name": "Shell", "bytes": "26584" } ], "symlink_target": "" }
"""BC learner implementation.""" import time from typing import Any, Dict, Iterator, List, NamedTuple, Optional, Tuple from collections import OrderedDict import acme from acme import types from acme.jax import networks as networks_lib from acme.jax import utils from acme.utils import counting from acme.utils import loggers import haiku as hk import jax import jax.numpy as jnp from jax.scipy.special import logsumexp as jax_logsumexp import optax from jrl.agents.bc import networks as bc_networks class TrainingState(NamedTuple): """Contains training state for the learner.""" policy_optimizer_state: optax.OptState policy_params: networks_lib.Params key: networks_lib.PRNGKey img_encoder_params: Optional[networks_lib.Params] = {} class BCLearner(acme.Learner): """BC learner.""" _state: TrainingState def __init__( self, networks, rng, iterator, policy_lr = 1e-4, loss_type = 'MLE', # or MSE regularize_entropy = False, entropy_regularization_weight = 1.0, use_img_encoder = False, img_encoder_params_ckpt_path = '', counter = None, logger = None, num_sgd_steps_per_step = 1): """Initialize the BC learner. Args: networks: BC networks rng: a key for random number generation. iterator: an iterator over training data. policy_lr: learning rate for the policy regularize_entropy: whether to regularize the entropy of the policy. entropy_regularization_weight: weight for entropy regularization. use_img_encoder: whether to preprocess the image part of the observation using a pretrained encoder. img_encoder_params_ckpt_path: path to checkpoint for image encoder params counter: counter object used to keep track of steps. logger: logger object to be used by learner. num_sgd_steps_per_step: number of sgd steps to perform per learner 'step'. """ assert loss_type in ['MLE', 'MSE'], 'Invalid BC loss type!' num_devices = len(jax.devices()) self._num_sgd_steps_per_step = num_sgd_steps_per_step self._use_img_encoder = use_img_encoder policy_optimizer = optax.adam(learning_rate=policy_lr) def actor_loss( policy_params, transitions, key, img_encoder_params): obs = transitions.observation acts = transitions.action if use_img_encoder: img = obs['state_image'] dense = obs['state_dense'] obs = dict( state_image=networks.img_encoder.apply(img_encoder_params, img), state_dense=dense,) dist = networks.policy_network.apply(policy_params, obs) if loss_type == 'MLE': log_probs = networks.log_prob(dist, acts) loss = -1. * jnp.mean(log_probs) else: acts_mode = dist.mode() mse = jnp.sum((acts_mode - acts)**2, axis=-1) loss = 0.5 * jnp.mean(mse) total_loss = loss entropy_term = 0. 
if regularize_entropy: sample_acts = networks.sample(dist, key) sample_log_probs = networks.log_prob(dist, sample_acts) entropy_term = jnp.mean(sample_log_probs) total_loss = total_loss + entropy_regularization_weight * entropy_term return total_loss, (loss, entropy_term) actor_loss_and_grad = jax.value_and_grad(actor_loss, has_aux=True) def actor_update_step( policy_params, optim_state, transitions, key, img_encoder_params): (total_loss, (bc_loss_term, entropy_term)), actor_grad = actor_loss_and_grad( policy_params, transitions, key, img_encoder_params) actor_grad = jax.lax.pmean(actor_grad, 'across_devices') policy_update, optim_state = policy_optimizer.update(actor_grad, optim_state) policy_params = optax.apply_updates(policy_params, policy_update) return policy_params, optim_state, total_loss, bc_loss_term, entropy_term pmapped_actor_update_step = jax.pmap( actor_update_step, axis_name='across_devices', in_axes=0, out_axes=0) def _full_update_step( state, transitions, ): """The unjitted version of the full update step.""" metrics = OrderedDict() key = state.key # actor update step def reshape_for_devices(t): rest_t_shape = list(t.shape[1:]) new_shape = [num_devices, t.shape[0]//num_devices,] + rest_t_shape return jnp.reshape(t, new_shape) transitions = jax.tree_map(reshape_for_devices, transitions) sub_keys = jax.random.split(key, num_devices + 1) key = sub_keys[0] sub_keys = sub_keys[1:] new_policy_params, new_policy_optimizer_state, total_loss, bc_loss_term, entropy_term = pmapped_actor_update_step( state.policy_params, state.policy_optimizer_state, transitions, sub_keys, state.img_encoder_params) metrics['total_actor_loss'] = jnp.mean(total_loss) metrics['BC_loss'] = jnp.mean(bc_loss_term) metrics['entropy_loss'] = jnp.mean(entropy_term) # create new state new_state = TrainingState( policy_optimizer_state=new_policy_optimizer_state, policy_params=new_policy_params, key=key, img_encoder_params=state.img_encoder_params) return new_state, metrics # General learner book-keeping and loggers. self._counter = counter or counting.Counter() self._logger = logger or loggers.make_default_logger( 'learner', asynchronous=True, serialize_fn=utils.fetch_devicearray) # Iterator on demonstration transitions. self._iterator = iterator self._update_step = utils.process_multiple_batches( _full_update_step, num_sgd_steps_per_step) def make_initial_state(key): """""" # policy stuff key, sub_key = jax.random.split(key) policy_params = networks.policy_network.init(sub_key) policy_optimizer_state = policy_optimizer.init(policy_params) devices = jax.local_devices() replicated_policy_params = jax.device_put_replicated( policy_params, devices) replicated_optim_state = jax.device_put_replicated( policy_optimizer_state, devices) if use_img_encoder: """ Load pretrained img_encoder_params and do: replicated_img_encoder_params = jax.device_put_replicated( img_encoder_params, devices) """ class EncoderTrainingState(NamedTuple): encoder_params: hk.Params img_encoder_params = {} replicated_img_encoder_params = img_encoder_params raise NotImplementedError('Need to load a checkpoint.') else: img_encoder_params = {} replicated_img_encoder_params = img_encoder_params state = TrainingState( policy_optimizer_state=replicated_optim_state, policy_params=replicated_policy_params, key=key, img_encoder_params=replicated_img_encoder_params) return state # Create initial state. self._state = make_initial_state(rng) # Do not record timestamps until after the first learning step is done. 
# This is to avoid including the time it takes for actors to come online and # fill the replay buffer. self._timestamp = None def step(self): with jax.profiler.StepTraceAnnotation('sampling batch'): sample = next(self._iterator) transitions = types.Transition(*sample.data) with jax.profiler.StepTraceAnnotation('train step'): self._state, metrics = self._update_step(self._state, transitions) # Compute elapsed time. timestamp = time.time() elapsed_time = timestamp - self._timestamp if self._timestamp else 0 self._timestamp = timestamp # Increment counts and record the current time counts = self._counter.increment( steps=self._num_sgd_steps_per_step, walltime=elapsed_time) # Attempts to write the logs. self._logger.write({**metrics, **counts}) def get_variables(self, names): variables = { 'policy': jax.tree_map(lambda x: x[0], self._state.policy_params), } if self._use_img_encoder: img_encoder_params = jax.tree_map( lambda x: x[0], self._state.img_encoder_params) variables['img_encoder'] = img_encoder_params return [variables[name] for name in names] def save(self): return self._state def restore(self, state): self._state = state
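# The learner above shards each batch across local devices and averages
# gradients with jax.lax.pmean inside a jax.pmap. A minimal self-contained
# sketch of that data-parallel pattern, with a toy quadratic loss standing
# in for the BC actor loss:
import jax
import jax.numpy as jnp

num_devices = jax.local_device_count()


def loss_fn(params, batch):
  # Toy quadratic loss; the real learner uses the policy log-prob or MSE.
  return jnp.mean((batch * params) ** 2)


def update(params, batch):
  grads = jax.grad(loss_fn)(params, batch)
  # Average gradients across devices, like the learner's 'across_devices'
  # axis.
  grads = jax.lax.pmean(grads, axis_name='devices')
  return params - 0.1 * grads


p_update = jax.pmap(update, axis_name='devices')

# Replicate the params; shard the batch along the leading device axis.
params = jax.device_put_replicated(jnp.ones(()), jax.local_devices())
batch = jnp.arange(num_devices * 4, dtype=jnp.float32).reshape(num_devices, 4)
params = p_update(params, batch)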
{ "content_hash": "2ef2ff84dece200f783a9363beb0653e", "timestamp": "", "source": "github", "line_count": 263, "max_line_length": 120, "avg_line_length": 32.40684410646388, "alnum_prop": 0.6482459227971371, "repo_name": "google-research/google-research", "id": "2b2e23b5f28125b12f2c51cf060ba593c6cd1b75", "size": "9131", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jrl/agents/bc/learning.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "9817" }, { "name": "C++", "bytes": "4166670" }, { "name": "CMake", "bytes": "6412" }, { "name": "CSS", "bytes": "27092" }, { "name": "Cuda", "bytes": "1431" }, { "name": "Dockerfile", "bytes": "7145" }, { "name": "Gnuplot", "bytes": "11125" }, { "name": "HTML", "bytes": "77599" }, { "name": "ImageJ Macro", "bytes": "50488" }, { "name": "Java", "bytes": "487585" }, { "name": "JavaScript", "bytes": "896512" }, { "name": "Julia", "bytes": "67986" }, { "name": "Jupyter Notebook", "bytes": "71290299" }, { "name": "Lua", "bytes": "29905" }, { "name": "MATLAB", "bytes": "103813" }, { "name": "Makefile", "bytes": "5636" }, { "name": "NASL", "bytes": "63883" }, { "name": "Perl", "bytes": "8590" }, { "name": "Python", "bytes": "53790200" }, { "name": "R", "bytes": "101058" }, { "name": "Roff", "bytes": "1208" }, { "name": "Rust", "bytes": "2389" }, { "name": "Shell", "bytes": "730444" }, { "name": "Smarty", "bytes": "5966" }, { "name": "Starlark", "bytes": "245038" } ], "symlink_target": "" }
import logging, functools
from bottle import Bottle, auth_basic, request, HTTPError
from sqlalchemy.orm.exc import NoResultFound
from enum import IntEnum, unique

from .types import PropertySet, Property, strip
from .database import get_db, password_hash
from . import api, User


BASE = '/user'
app = Bottle()
api.register(BASE, app)


def authenticate(username, password):
    """
    Bottle-compatible simple-checker that stores the user descriptor
    of the currently logged in user onto the request.
    """
    with get_db().transaction() as t:
        try:
            user = (t.query(User)
                     .filter(User.name == username)
                     .filter(User.password == password_hash(password))
                     .filter(User.status == User.Status.enabled)
                     .one())
            request.user = UserDescriptor(
                id=user.id,
                status=User.Status(user.status),
                name=user.name,
                fullname=user.fullname,
                user_class=User.Class(user.user_class)
            )
            logging.debug("Logged in as %s", user.name)
            return True
        except NoResultFound:
            return False


def require_admin(realm="private"):
    """
    Bottle callback decorator that requires the current user to be an
    admin, or raises a permission-denied error.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*a, **ka):
            if not current_is_admin():
                err = HTTPError(401, "Admin permission required")
                err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
                return err
            return func(*a, **ka)
        return wrapper
    return decorator


def no_guests():
    """
    Bottle callback decorator that requires the current user to be of a
    higher class than guest.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*a, **ka):
            if request.user.user_class is User.Class.guest:
                err = HTTPError(401, "Guests not allowed")
                return err
            return func(*a, **ka)
        return wrapper
    return decorator


@app.get('/me')
@auth_basic(authenticate)
def rest_me():
    json = request.user.to_json()
    logging.info("Me\n%s", json)
    return json


# require_admin is a decorator factory, so it must be applied with
# parentheses; the original bare "@require_admin" passed the handler in as
# the realm argument and never actually wrapped it.
@app.get('/<user_id:int>')
@auth_basic(authenticate)
@require_admin()
def rest_get_user_by_id(user_id):
    json = get_user_by_id(user_id).to_json()
    logging.info("Outgoing User\n%s", json)
    return json


@app.get('/name/<name>')
@auth_basic(authenticate)
@require_admin()
def rest_get_user_by_name(name):
    # Renamed from a second rest_get_user_by_id, which shadowed the
    # handler above.
    json = get_user_by_name(name).to_json()
    logging.info("Outgoing User\n%s", json)
    return json


class UserDescriptor(PropertySet):
    id = Property(int)
    status = Property(enum=User.Status)
    name = Property()
    fullname = Property()
    user_class = Property(enum=User.Class)

    @classmethod
    def map_in(cls, user):
        return cls(
            id=user.id,
            status=User.Status(user.status),
            name=user.name,
            fullname=user.fullname,
            user_class=User.Class(user.user_class)
        )


def current_user_id():
    """
    Shorthand for retrieving the currently logged in user, if any.
    """
    try:
        return request.user.id
    except AttributeError:
        return None


def current_is_user():
    return request.user.user_class is not User.Class.guest


def current_is_admin():
    return request.user.user_class is User.Class.admin


def current_is_guest():
    return request.user.user_class is User.Class.guest


def require_user_id(user_id):
    """
    Shorthand for requiring a certain user, or raising a 401.
    """
    if user_id != request.user.id:
        raise HTTPError(401, "Access denied")


def get_user_by_id(user_id):
    with get_db().transaction() as t:
        user = (t.query(User)
                 .filter(User.id == user_id)
                 .one())
        return UserDescriptor.map_in(user)


def get_user_by_name(name):
    with get_db().transaction() as t:
        user = (t.query(User)
                 .filter(User.name == name)
                 .one())
        return UserDescriptor.map_in(user)
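# As a usage sketch (a hypothetical route, not part of the module): the
# decorators compose so that auth_basic populates request.user first, and
# the guard then inspects it.
@app.get('/members-only')
@auth_basic(authenticate)
@no_guests()
def rest_members_only():
    # authenticate() stored a UserDescriptor on the request above.
    return {'hello': request.user.name}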
{ "content_hash": "eb334dd33381380c4045e576511b8ce1", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 78, "avg_line_length": 25.11764705882353, "alnum_prop": 0.593208430913349, "repo_name": "eblade/images4", "id": "1f784e434956af9249874546c4dabc1eeb0512d8", "size": "4271", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "images/user.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3114" }, { "name": "Cucumber", "bytes": "8644" }, { "name": "HTML", "bytes": "26093" }, { "name": "JavaScript", "bytes": "19102" }, { "name": "Python", "bytes": "225920" } ], "symlink_target": "" }
"""Test that the set of gen-* files is the same as the generated files.""" import fnmatch import os import sys import generate UPDATE_TIP = ( 'To update the generated tests, run:\n' '$ python third_party/blink/web_tests/wpt_internal/bluetooth/generate.py') def main(): generated_files = set() # Tests data in gen-* files is the same as the data generated. for generated_test in generate.GetGeneratedTests(): generated_files.add(generated_test.path) try: with open(generated_test.path, 'rb') as f: data = f.read().decode('utf-8') if data != generated_test.data: print(generated_test.path + ' does not match template.') print(UPDATE_TIP) return -1 except IOError as e: if e.errno == 2: print('Missing generated test:\n{}\nFor template:\n{}'.format( generated_test.path, generated_test.template)) print(UPDATE_TIP) return -1 # Tests that there are no obsolete generated files. previous_generated_files = set() current_path = os.path.dirname(os.path.realpath(__file__)) for root, _, filenames in os.walk(current_path): for filename in fnmatch.filter(filenames, 'gen-*.html'): previous_generated_files.add(os.path.join(root, filename)) if previous_generated_files != generated_files: print('There are extra generated tests. Please remove them.') for test_path in previous_generated_files - generated_files: print(test_path) return -1 if __name__ == '__main__': sys.exit(main())
{ "content_hash": "51a139080f675e0af8a34e8398c54251", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 78, "avg_line_length": 35.93617021276596, "alnum_prop": 0.6009473060982831, "repo_name": "nwjs/chromium.src", "id": "41c57a9baa37ec33b644f5418e3adfeb2b7b85d4", "size": "2069", "binary": false, "copies": "1", "ref": "refs/heads/nw70", "path": "third_party/blink/web_tests/wpt_internal/bluetooth/generate_test.py", "mode": "33261", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
""" III. Create a program that converts the following uptime strings to a time in seconds. uptime1 = 'twb-sf-881 uptime is 6 weeks, 4 days, 2 hours, 25 minutes' uptime2 = '3750RJ uptime is 1 hour, 29 minutes' uptime3 = 'CATS3560 uptime is 8 weeks, 4 days, 18 hours, 16 minutes' uptime4 = 'rtr1 uptime is 5 years, 18 weeks, 8hours, 23 minutes' For each of these strings store the uptime in a dictionary using the device name as the key. During this conversion process, you will have to convert strings to integers. For these string to integer conversions use try/except to catch any string to integer conversion exceptions. For example: int('5') works fine int('5 years') generates a ValueError exception. Print the dictionary to standard output. """ uptime1 = 'twb-sf-881 uptime is 6 weeks, 4 days, 2 hours, 25 minutes' uptime2 = '3750RJ uptime is 1 hour, 29 minutes' uptime3 = 'CATS3560 uptime is 8 weeks, 4 days, 18 hours, 16 minutes' uptime4 = 'rtr1 uptime is 5 years, 18 weeks, 8hours, 23 minutes' d = {} for line in (uptime1, uptime2,uptime3,uptime4): # split each line and assign router name and uptime variables line_split = line.split(' ', 3) router = line_split[0] uptime = line_split[3] # split uptime with comma uptime_split = uptime.split(',') # we will store no of sec in this variable: uptime_in_sec = 0 for t in uptime_split: # strip leading space t.strip() if 'minutes' in t: try: uptime_in_sec += 60 * int( t.split(' minutes')[0] ) except ValueError as e: print "Bad Translation - " + str(e) elif 'hours' in t: try: uptime_in_sec += 60 * 60 * int( t.split(' hours')[0] ) except ValueError as e: print "Bad Translation - " + str(e) elif 'days' in t: try: uptime_in_sec += 24 * 60 * 60 * int( t.split(' days')[0] ) except ValueError as e: print "Bad Translation - " + str(e) elif 'weeks' in t: try: uptime_in_sec += 7 * 24 * 60 * 60 * int( t.split(' weeks')[0] ) except ValueError as e: print "Bad Translation - " + str(e) elif 'years' in t: try: uptime_in_sec += 52 * 7 * 24 * 60 * 60 * int( t.split(' years')[0] ) except ValueError as e: print "Bad Translation - " + str(e) d [router] = uptime_in_sec print import pprint pprint.pprint(d) print
{ "content_hash": "0981d6f0f92069d4ef0ef40f596e327f", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 85, "avg_line_length": 30.61904761904762, "alnum_prop": 0.5898133748055988, "repo_name": "blahu/pynet-mat", "id": "b5d882e7b86fca8eb1b3c33f8b9cd2a7c409f2ef", "size": "2645", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "LearnPythonCourse/class4/ex3.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "115949" }, { "name": "Shell", "bytes": "75" } ], "symlink_target": "" }
from .dev import *
{ "content_hash": "ac4e636700467f20a3e45fa0685958dd", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 19, "avg_line_length": 20, "alnum_prop": 0.65, "repo_name": "manuelnaranjo/django-template", "id": "80733c7546c3940aa8485ebe01ccd74a43f9c477", "size": "20", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "project/settings/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Emacs Lisp", "bytes": "691" }, { "name": "Python", "bytes": "10592" }, { "name": "Shell", "bytes": "7069" } ], "symlink_target": "" }
from django.conf.urls import patterns, url

from horizon_contrib.forms.views import CreateView, UpdateView

from .views import GenericIndexView, DataView

urlpatterns = patterns(
    '',
    url(r'^models/(?P<cls_name>[\w\.\-]+)/create/$',
        CreateView.as_view(), name='create'),
    url(r'^models/(?P<cls_name>[\w\.\-]+)/(?P<id>[\w\.\-]+)/update/$',
        UpdateView.as_view(), name='update'),
    # tables
    url(r'^models/(?P<cls_name>[\w\.\-]+)/index/$',
        GenericIndexView.as_view(), name='index'),
    url(r'^models/(?P<cls_name>[\w\.\-]+)/index/json$',
        DataView.as_view(), name='data'),
    url(r'^models/(?P<cls_name>[\w\.\-]+)/index/(?P<table>[\w\.\-]+)/$',
        GenericIndexView.as_view(), name='index'),
    url(r'^models/(?P<cls_name>[\w\.\-]+)/index/(?P<table>[\w\.\-]+)/json$',
        DataView.as_view(), name='data'),
)
{ "content_hash": "cb93059a52de1c0c258b1c601f0e830e", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 115, "avg_line_length": 57.714285714285715, "alnum_prop": 0.5977722772277227, "repo_name": "michaelkuty/horizon-contrib", "id": "a7f766f4f9d443532c0449fa9bb8eb3d4e61f18d", "size": "809", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "horizon_contrib/generic/urls.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "1165" }, { "name": "HTML", "bytes": "10438" }, { "name": "JavaScript", "bytes": "43768" }, { "name": "Python", "bytes": "83941" } ], "symlink_target": "" }
""" This test checks the activation logic of OP_REVERSEBYTES. Derived from both abc-schnorrmultisig-activation.py (see https://reviews.bitcoinabc.org/D3736) and abc-schnorrmultisig.py """ import time from test_framework.blocktools import ( create_block, create_coinbase, make_conform_to_ctor, create_tx_with_script, pad_tx, ) from test_framework.key import CECKey from test_framework.nodemessages import ( CBlock, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex, ) from test_framework.mininode import ( P2PDataStore, NodeConn, NetworkThread, ) from test_framework import schnorr from test_framework.script import ( CScript, OP_EQUAL, OP_REVERSEBYTES, OP_RETURN, OP_TRUE, SIGHASH_ALL, SIGHASH_FORKID, SignatureHashForkId, ) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error, p2p_port, waitFor import logging # Blocks with invalid scripts give this error: BAD_INPUTS_ERROR = 'bad-blk-signatures' # Pre-upgrade, we get a BAD_OPCODE error PRE_UPGRADE_BAD_OPCODE_ERROR = 'upgrade-conditional-script-failure (Opcode missing or not understood)' class P2PNode(P2PDataStore): pass class OpReversebytesActivationTest(BitcoinTestFramework): def __init__(self): super().__init__() self.set_test_params() def set_test_params(self): self.num_nodes = 1 self.block_heights = {} self.extra_args = [["-debug=mempoolrej, mempool",]] def bootstrap_p2p(self, *, num_connections=1): """Add a P2P connection to the node. Helper to connect and wait for version handshake.""" self.p2p = P2PNode() self.connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.p2p) self.p2p.add_connection(self.connection) NetworkThread().start() self.p2p.wait_for_verack() assert(self.p2p.connection.state == "connected") def get_best_block(self, node): """Get the best block. Register its height so we can use build_block.""" block_height = node.getblockcount() blockhash = node.getblockhash(block_height) block = FromHex(CBlock(), node.getblock(blockhash, 0)) block.calc_sha256() self.block_heights[block.sha256] = block_height return block def build_block(self, parent, transactions=(), n_time=None): """Make a new block with an OP_1 coinbase output. Requires parent to have its height registered.""" parent.calc_sha256() block_height = self.block_heights[parent.sha256] + 1 block_time = (parent.nTime + 1) if n_time is None else n_time # the script in create_coinbase differs for BU and ABC # you need to let coinbase script be CScript([OP_TRUE]) block = create_block( parent.sha256, create_coinbase(block_height, scriptPubKey = CScript([OP_TRUE])), block_time) block.vtx.extend(transactions) make_conform_to_ctor(block) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.block_heights[block.sha256] = block_height return block def check_for_no_ban_on_rejected_tx(self, tx, reject_reason): """Check we are not disconnected when sending a txn that the node rejects.""" self.p2p.send_txs_and_test( [tx], self.nodes[0], success=False, reject_reason=reject_reason) def check_for_ban_on_rejected_tx(self, tx, reject_reason=None): """Check we are disconnected when sending a txn that the node rejects. (Can't actually get banned, since bitcoind won't ban local peers.)""" self.p2p.send_txs_and_test( [tx], self.nodes[0], success=False, expect_disconnect=True, reject_reason=reject_reason) def check_for_ban_on_rejected_block(self, block, reject_reason=None): """Check we are disconnected when sending a block that the node rejects. 
(Can't actually get banned, since bitcoind won't ban local peers.)""" self.p2p.send_blocks_and_test([block], self.nodes[0], success=False, reject_reason=reject_reason, expect_ban=True) def run_test(self): logging.info("Initializing test directory "+self.options.tmpdir) node = self.nodes[0] self.bootstrap_p2p() tip = self.get_best_block(node) logging.info("Create some blocks with OP_1 coinbase for spending.") blocks = [] for _ in range(10): tip = self.build_block(tip) blocks.append(tip) self.p2p.send_blocks_and_test(blocks, node, success=True) spendable_outputs = [block.vtx[0] for block in blocks] logging.info("Mature the blocks and get out of IBD.") node.generate(100) tip = self.get_best_block(node) logging.info( "Set up spending transactions to test and mine the funding transactions.") # Generate a key pair privkeybytes = b"xyzxyzhh" * 4 private_key = CECKey() private_key.set_secretbytes(privkeybytes) # get uncompressed public key serialization public_key = private_key.get_pubkey() # Create funding/spending transaction pair spend_from = spendable_outputs.pop() value = spend_from.vout[0].nValue # Reversed data data = bytes.fromhex('0123456789abcdef') rev_data = bytes(reversed(data)) # Lockscript: provide a bytestring that reverses to X script = CScript([OP_REVERSEBYTES, rev_data, OP_EQUAL]) # Fund transaction: REVERSEBYTES <reversed(x)> EQUAL tx_reversebytes_fund = create_tx_with_script(spend_from, 0, b'', value, script) tx_reversebytes_fund.rehash() # Spend transaction: <x> tx_reversebytes_spend = CTransaction() tx_reversebytes_spend.vout.append(CTxOut(value - 1000, CScript([b'x' * 100, OP_RETURN]))) tx_reversebytes_spend.vin.append(CTxIn(COutPoint(tx_reversebytes_fund.sha256, 0), b'')) tx_reversebytes_spend.vin[0].scriptSig = CScript([data]) tx_reversebytes_spend.rehash() # Mine funding transaction into block. Pre-upgrade output scripts can have # OP_REVERSEBYTES and still be fully valid, but they cannot spend it. tip = self.build_block(tip, [tx_reversebytes_fund]) self.p2p.send_blocks_and_test([tip], node) logging.info( "Submitting a new OP_REVERSEBYTES tx via net, and mining it in a block") # Send OP_REVERSEBYTES tx self.p2p.send_txs_and_test([tx_reversebytes_spend], node) # Verify OP_REVERSEBYTES tx is in mempool waitFor(10, lambda: set(node.getrawmempool()) == {tx_reversebytes_spend.hash}) # Mine OP_REVERSEBYTES tx into block tip = self.build_block(tip, [tx_reversebytes_spend]) self.p2p.send_blocks_and_test([tip], node) if __name__ == '__main__': OpReversebytesActivationTest().main()
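# For reference, the lock/unlock pair exercised above reduces to a short
# stack trace; a plain-Python sketch of the script semantics (illustrative
# only, not consensus code):
data = bytes.fromhex('0123456789abcdef')
rev_data = bytes(reversed(data))

stack = [data]                              # scriptSig pushed <x>
stack.append(bytes(reversed(stack.pop())))  # OP_REVERSEBYTES
stack.append(rev_data)                      # push <reversed(x)>
assert stack.pop() == stack.pop()           # OP_EQUAL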
{ "content_hash": "f1f637e6b83cf9d4e2fa3c9bbc62c3cd", "timestamp": "", "source": "github", "line_count": 196, "max_line_length": 104, "avg_line_length": 36.045918367346935, "alnum_prop": 0.6508138711960368, "repo_name": "BitcoinUnlimited/BitcoinUnlimited", "id": "f18c6c38c2949234b86173a9a4e2c1560695e3ec", "size": "7269", "binary": false, "copies": "1", "ref": "refs/heads/release", "path": "qa/rpc-tests/op_reversebytes.py", "mode": "33261", "license": "mit", "language": [ { "name": "Assembly", "bytes": "28173" }, { "name": "Batchfile", "bytes": "30639" }, { "name": "C", "bytes": "1026500" }, { "name": "C++", "bytes": "7400417" }, { "name": "CMake", "bytes": "4435" }, { "name": "Dockerfile", "bytes": "2888" }, { "name": "GDB", "bytes": "455" }, { "name": "HTML", "bytes": "20970" }, { "name": "Java", "bytes": "41235" }, { "name": "M4", "bytes": "261908" }, { "name": "Makefile", "bytes": "121033" }, { "name": "Objective-C++", "bytes": "6778" }, { "name": "Python", "bytes": "1623121" }, { "name": "QMake", "bytes": "2067" }, { "name": "Roff", "bytes": "3821" }, { "name": "Sage", "bytes": "30188" }, { "name": "Shell", "bytes": "86081" } ], "symlink_target": "" }
from AccessControl import ClassSecurityInfo
from bika.lims.browser.fields import DurationField
from bika.lims.config import PROJECTNAME
from bika.lims.content.bikaschema import BikaSchema
from bika.lims.interfaces import ISamplePartition, ISamplePrepWorkflow
from bika.lims.workflow import doActionFor
from bika.lims.workflow import skip
from DateTime import DateTime
from datetime import timedelta
from Products.Archetypes.public import *
from Products.ATContentTypes.lib.historyaware import HistoryAwareMixin
from Products.ATContentTypes.utils import DT2dt, dt2DT
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import safe_unicode
from zope.interface import implements

schema = BikaSchema.copy() + Schema((
    ReferenceField('Container',
        allowed_types=('Container',),
        relationship='SamplePartitionContainer',
        required=1,
        multiValued=0,
    ),
    ReferenceField('Preservation',
        allowed_types=('Preservation',),
        relationship='SamplePartitionPreservation',
        required=0,
        multiValued=0,
    ),
    BooleanField('Separate',
        default=False
    ),
    ReferenceField('Analyses',
        allowed_types=('Analysis',),
        relationship='SamplePartitionAnalysis',
        required=0,
        multiValued=1,
    ),
    DateTimeField('DatePreserved',
    ),
    StringField('Preserver',
        searchable=True
    ),
    DurationField('RetentionPeriod',
    ),
    ComputedField('DisposalDate',
        expression='context.disposal_date()',
        widget=ComputedWidget(
            visible=False,
        ),
    ),
))

schema['title'].required = False


class SamplePartition(BaseContent, HistoryAwareMixin):
    implements(ISamplePartition, ISamplePrepWorkflow)
    security = ClassSecurityInfo()
    displayContentsTab = False
    schema = schema

    _at_rename_after_creation = True

    def _renameAfterCreation(self, check_auto_id=False):
        from bika.lims.idserver import renameAfterCreation
        renameAfterCreation(self)

    def _getCatalogTool(self):
        from bika.lims.catalog import getCatalog
        return getCatalog(self)

    def Title(self):
        """ Return the Sample ID as title """
        return safe_unicode(self.getId()).encode('utf-8')

    security.declarePublic('getAnalyses')

    def getAnalyses(self):
        """ return list of titles of analyses linked to this sample partition """
        analyses = sorted(self.getBackReferences("AnalysisSamplePartition"))
        return analyses

    security.declarePublic('current_date')

    def current_date(self):
        """ return current date """
        return DateTime()

    security.declarePublic('disposal_date')

    def disposal_date(self):
        """ return disposal date """
        DateSampled = self.getDateSampled()

        # fallback to sampletype retention period
        st_retention = self.aq_parent.getSampleType().getRetentionPeriod()

        # but prefer retention period from preservation
        pres = self.getPreservation()
        pres_retention = pres and pres.getRetentionPeriod() or None

        rp = pres_retention and pres_retention or None
        rp = rp or st_retention

        td = timedelta(
            days='days' in rp and int(rp['days']) or 0,
            hours='hours' in rp and int(rp['hours']) or 0,
            minutes='minutes' in rp and int(rp['minutes']) or 0)

        dis_date = DateSampled and dt2DT(DT2dt(DateSampled) + td) or None
        return dis_date

    def workflow_script_preserve(self):
        workflow = getToolByName(self, 'portal_workflow')
        sample = self.aq_parent
        # Transition our analyses
        analyses = self.getBackReferences('AnalysisSamplePartition')
        if analyses:
            for analysis in analyses:
                doActionFor(analysis, "preserve")
        # if all our siblings are now up to date, promote sample and ARs.
        parts = sample.objectValues("SamplePartition")
        if parts:
            lower_states = ['to_be_sampled', 'to_be_preserved', ]
            escalate = True
            for part in parts:
                if workflow.getInfoFor(part, 'review_state') in lower_states:
                    escalate = False
            if escalate:
                doActionFor(sample, "preserve")
                for ar in sample.getAnalysisRequests():
                    doActionFor(ar, "preserve")

    def workflow_transition_expire(self):
        self.setDateExpired(DateTime())
        self.reindexObject(idxs=["review_state", "getDateExpired", ])

    def workflow_script_sample(self):
        if skip(self, "sample"):
            return
        sample = self.aq_parent
        workflow = getToolByName(self, 'portal_workflow')
        # Transition our analyses
        analyses = self.getBackReferences('AnalysisSamplePartition')
        for analysis in analyses:
            doActionFor(analysis, "sample")
        # if all our siblings are now up to date, promote sample and ARs.
        parts = sample.objectValues("SamplePartition")
        if parts:
            lower_states = ['to_be_sampled', ]
            escalate = True
            for part in parts:
                pstate = workflow.getInfoFor(part, 'review_state')
                if pstate in lower_states:
                    escalate = False
            if escalate:
                doActionFor(sample, "sample")
                for ar in sample.getAnalysisRequests():
                    doActionFor(ar, "sample")

    def workflow_script_to_be_preserved(self):
        if skip(self, "to_be_preserved"):
            return
        sample = self.aq_parent
        workflow = getToolByName(self, 'portal_workflow')
        # Transition our analyses
        analyses = self.getBackReferences('AnalysisSamplePartition')
        for analysis in analyses:
            doActionFor(analysis, "to_be_preserved")
        # if all our siblings are now up to date, promote sample and ARs.
        parts = sample.objectValues("SamplePartition")
        if parts:
            lower_states = ['to_be_sampled', 'to_be_preserved', ]
            escalate = True
            for part in parts:
                if workflow.getInfoFor(part, 'review_state') in lower_states:
                    escalate = False
            if escalate:
                doActionFor(sample, "to_be_preserved")
                for ar in sample.getAnalysisRequests():
                    doActionFor(ar, "to_be_preserved")

    def workflow_script_sample_due(self):
        if skip(self, "sample_due"):
            return
        sample = self.aq_parent
        workflow = getToolByName(self, 'portal_workflow')
        # Transition our analyses
        analyses = self.getBackReferences('AnalysisSamplePartition')
        for analysis in analyses:
            doActionFor(analysis, "sample_due")
        # if all our siblings are now up to date, promote sample and ARs.
        parts = sample.objectValues("SamplePartition")
        if parts:
            lower_states = ['to_be_preserved', ]
            escalate = True
            for part in parts:
                pstate = workflow.getInfoFor(part, 'review_state')
                if pstate in lower_states:
                    escalate = False
            if escalate:
                doActionFor(sample, "sample_due")
                for ar in sample.getAnalysisRequests():
                    doActionFor(ar, "sample_due")

    def workflow_script_receive(self):
        if skip(self, "receive"):
            return
        sample = self.aq_parent
        workflow = getToolByName(self, 'portal_workflow')
        sample_state = workflow.getInfoFor(sample, 'review_state')
        self.setDateReceived(DateTime())
        self.reindexObject(idxs=["getDateReceived", ])
        # Transition our analyses
        analyses = self.getBackReferences('AnalysisSamplePartition')
        for analysis in analyses:
            doActionFor(analysis, "receive")
        # if all sibling partitions are received, promote sample
        if not skip(sample, "receive", peek=True):
            due = [sp for sp in sample.objectValues("SamplePartition")
                   if workflow.getInfoFor(sp, 'review_state') == 'sample_due']
            if sample_state == 'sample_due' and not due:
                doActionFor(sample, 'receive')

    def workflow_script_reinstate(self):
        if skip(self, "reinstate"):
            return
        sample = self.aq_parent
        workflow = getToolByName(self, 'portal_workflow')
        self.reindexObject(idxs=["cancellation_state", ])
        sample_c_state = workflow.getInfoFor(sample, 'cancellation_state')
        # if all sibling partitions are active, activate sample
        if not skip(sample, "reinstate", peek=True):
            cancelled = [sp for sp in sample.objectValues("SamplePartition")
                         if workflow.getInfoFor(sp, 'cancellation_state') == 'cancelled']
            if sample_c_state == 'cancelled' and not cancelled:
                workflow.doActionFor(sample, 'reinstate')

    def workflow_script_cancel(self):
        if skip(self, "cancel"):
            return
        sample = self.aq_parent
        workflow = getToolByName(self, 'portal_workflow')
        self.reindexObject(idxs=["cancellation_state", ])
        sample_c_state = workflow.getInfoFor(sample, 'cancellation_state')
        # if all sibling partitions are cancelled, cancel sample
        if not skip(sample, "cancel", peek=True):
            active = [sp for sp in sample.objectValues("SamplePartition")
                      if workflow.getInfoFor(sp, 'cancellation_state') == 'active']
            if sample_c_state == 'active' and not active:
                workflow.doActionFor(sample, 'cancel')


registerType(SamplePartition, PROJECTNAME)
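# Most of the workflow scripts above repeat one pattern: cascade the action
# to this partition's analyses, then promote the parent sample (and its
# analysis requests) only when no sibling partition is still in a lagging
# state. Schematically, that shared logic is (a sketch, not working Plone
# code):
def escalate_to_sample(sample, workflow, action, lower_states):
    # Promote the parent only once every sibling partition has left the
    # lagging states.
    parts = sample.objectValues("SamplePartition")
    if parts and all(workflow.getInfoFor(p, 'review_state') not in lower_states
                     for p in parts):
        doActionFor(sample, action)
        for ar in sample.getAnalysisRequests():
            doActionFor(ar, action)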
{ "content_hash": "d078a3873d8269f163433b18b5ef9740", "timestamp": "", "source": "github", "line_count": 256, "max_line_length": 89, "avg_line_length": 39.26171875, "alnum_prop": 0.6078997114714954, "repo_name": "hocinebendou/bika.gsoc", "id": "753438def7c0e38259d7b500f28c2bae6b74e8b7", "size": "10051", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "bika/lims/content/samplepartition.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "403" }, { "name": "COBOL", "bytes": "5987" }, { "name": "CSS", "bytes": "29758" }, { "name": "JavaScript", "bytes": "411425" }, { "name": "Python", "bytes": "4330980" }, { "name": "RobotFramework", "bytes": "239735" }, { "name": "Shell", "bytes": "11201" } ], "symlink_target": "" }
from multiprocessing import Pool

from sqlalchemy import desc

from base import Base
from schema import Session, Feed, Post
from datetime import datetime as dt
from datetime import timedelta
from config import max_post_age
from parser import parse_one


class Index(Base):

    def get(self):
        posts = Session.query(Post).filter_by(read=False).order_by(
            desc(Post.published))
        return self.render('index.html', posts=posts)


class FeedDelete(Base):

    # TODO: rewrite to post or better: delete (if that is possible?)
    def get(self, id):
        # sqlite won't cascade-delete the posts
        Session.query(Post).filter_by(feed_id=id).delete()
        Session.query(Feed).filter_by(id=id).delete()
        Session.commit()
        Session.close()
        return self.redirect('/')


class FeedEdit(Base):

    def get(self, id):
        feed = Session.query(Feed).filter_by(id=id).one()
        return self.render('feed_edit.html', feed=feed)

    def post(self, id):
        feed = Session.query(Feed).filter_by(id=id).one()
        feed.title = self.get_argument('name')
        feed.url = self.get_argument('url')
        Session.commit()
        Session.close()
        return self.redirect('/')


class FeedAdd(Base):

    def get(self):
        return self.render('feed_add.html')

    def post(self):
        name = self.get_argument('name')
        url = self.get_argument('url')
        feed = Feed()
        feed.title = name
        feed.url = url
        Session.add(feed)
        Session.commit()
        return self.render('index.html')


class MarkAsRead(Base):

    def get(self):
        for post in Session.query(Post).filter_by(read=False):
            post.read = True
        Session.commit()
        Session.close()
        return self.redirect('/')


class Refresh(Base):

    def get(self):
        # Drop posts older than the configured maximum age.
        Session.query(Post).filter(
            Post.updated <= dt.now() - timedelta(days=max_post_age)).delete()
        feeds = Session.query(Feed.id).all()
        Session.close()
        p = Pool(10)
        p.map_async(parse_one, (f.id for f in feeds))
        p.close()
        p.join()
        self.redirect('/')
        return
{ "content_hash": "66fed6dbeaabb1ea5d57f69882fca75f", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 77, "avg_line_length": 27.518987341772153, "alnum_prop": 0.6039558417663293, "repo_name": "mfussenegger/Huluobo", "id": "46fe11c91ee460199d9ab8267528064e2ed5c650", "size": "2221", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "handler.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "120340" }, { "name": "Python", "bytes": "11540" } ], "symlink_target": "" }
from typing import MutableMapping, MutableSequence

from google.protobuf import timestamp_pb2  # type: ignore
import proto  # type: ignore

__protobuf__ = proto.module(
    package="google.cloud.osconfig.v1",
    manifest={
        "GetOSPolicyAssignmentReportRequest",
        "ListOSPolicyAssignmentReportsRequest",
        "ListOSPolicyAssignmentReportsResponse",
        "OSPolicyAssignmentReport",
    },
)


class GetOSPolicyAssignmentReportRequest(proto.Message):
    r"""Get a report of the OS policy assignment for a VM instance.

    Attributes:
        name (str):
            Required. API resource name for OS policy assignment report.

            Format:
            ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report``

            For ``{project}``, either ``project-number`` or ``project-id``
            can be provided. For ``{instance_id}``, either Compute Engine
            ``instance-id`` or ``instance-name`` can be provided. For
            ``{assignment_id}``, the OSPolicyAssignment id must be
            provided.
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )


class ListOSPolicyAssignmentReportsRequest(proto.Message):
    r"""List the OS policy assignment reports for VM instances.

    Attributes:
        parent (str):
            Required. The parent resource name.

            Format:
            ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports``

            For ``{project}``, either ``project-number`` or ``project-id``
            can be provided. For ``{instance}``, either ``instance-name``,
            ``instance-id``, or ``-`` can be provided. If '-' is provided,
            the response will include OSPolicyAssignmentReports for all
            instances in the project/location. For ``{assignment}``,
            either ``assignment-id`` or ``-`` can be provided. If '-' is
            provided, the response will include
            OSPolicyAssignmentReports for all OSPolicyAssignments in the
            project/location. Either {instance} or {assignment} must be
            ``-``.

            For example:
            ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports``
            returns all reports for the instance
            ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports``
            returns all the reports for the given assignment across all
            instances.
            ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports``
            returns all the reports for all assignments across all
            instances.
        page_size (int):
            The maximum number of results to return.
        filter (str):
            If provided, this field specifies the criteria that must be
            met by the ``OSPolicyAssignmentReport`` API resource that is
            included in the response.
        page_token (str):
            A pagination token returned from a previous call to the
            ``ListOSPolicyAssignmentReports`` method that indicates where
            this listing should continue from.
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size: int = proto.Field(
        proto.INT32,
        number=2,
    )
    filter: str = proto.Field(
        proto.STRING,
        number=3,
    )
    page_token: str = proto.Field(
        proto.STRING,
        number=4,
    )


class ListOSPolicyAssignmentReportsResponse(proto.Message):
    r"""A response message for listing OS Policy assignment reports
    including the page of results and page token.

    Attributes:
        os_policy_assignment_reports (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport]):
            List of OS policy assignment reports.
        next_page_token (str):
            The pagination token to retrieve the next page of OS policy
            assignment report objects.
    """

    @property
    def raw_page(self):
        return self

    os_policy_assignment_reports: MutableSequence[
        "OSPolicyAssignmentReport"
    ] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message="OSPolicyAssignmentReport",
    )
    next_page_token: str = proto.Field(
        proto.STRING,
        number=2,
    )


class OSPolicyAssignmentReport(proto.Message):
    r"""A report of the OS policy assignment status for a given
    instance.

    Attributes:
        name (str):
            The ``OSPolicyAssignmentReport`` API resource name.

            Format:
            ``projects/{project_number}/locations/{location}/instances/{instance_id}/osPolicyAssignments/{os_policy_assignment_id}/report``
        instance (str):
            The Compute Engine VM instance name.
        os_policy_assignment (str):
            Reference to the ``OSPolicyAssignment`` API resource that the
            ``OSPolicy`` belongs to.

            Format:
            ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}``
        os_policy_compliances (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance]):
            Compliance data for each ``OSPolicy`` that is applied to the
            VM.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Timestamp for when the report was last generated.
        last_run_id (str):
            Unique identifier of the last attempted run to apply the OS
            policies associated with this assignment on the VM.

            This ID is logged by the OS Config agent while applying the
            OS policies associated with this assignment on the VM. NOTE:
            If the service is unable to successfully connect to the agent
            for this run, then this id will not be available in the agent
            logs.
    """

    class OSPolicyCompliance(proto.Message):
        r"""Compliance data for an OS policy

        Attributes:
            os_policy_id (str):
                The OS policy id
            compliance_state (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState):
                The compliance state of the OS policy.
            compliance_state_reason (str):
                The reason for the OS policy to be in an unknown compliance
                state. This field is always populated when
                ``compliance_state`` is ``UNKNOWN``.

                If populated, the field can contain one of the following
                values:

                -  ``vm-not-running``: The VM was not running.
                -  ``os-policies-not-supported-by-agent``: The version of
                   the OS Config agent running on the VM does not support
                   running OS policies.
                -  ``no-agent-detected``: The OS Config agent is not
                   detected for the VM.
                -  ``resource-execution-errors``: The OS Config agent
                   encountered errors while executing one or more resources
                   in the policy. See ``os_policy_resource_compliances`` for
                   details.
                -  ``task-timeout``: The task sent to the agent to apply the
                   policy timed out.
                -  ``unexpected-agent-state``: The OS Config agent did not
                   report the final status of the task that attempted to
                   apply the policy. Instead, the agent unexpectedly started
                   working on a different task. This mostly happens when the
                   agent or VM unexpectedly restarts while applying OS
                   policies.
                -  ``internal-service-errors``: Internal service errors were
                   encountered while attempting to apply the policy.
            os_policy_resource_compliances (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance]):
                Compliance data for each resource within the policy that is
                applied to the VM.
        """

        class ComplianceState(proto.Enum):
            r"""Possible compliance states for an os policy."""
            UNKNOWN = 0
            COMPLIANT = 1
            NON_COMPLIANT = 2

        class OSPolicyResourceCompliance(proto.Message):
            r"""Compliance data for an OS policy resource.

            .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

            Attributes:
                os_policy_resource_id (str):
                    The ID of the OS policy resource.
                config_steps (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep]):
                    Ordered list of configuration completed by the agent for
                    the OS policy resource.
                compliance_state (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState):
                    The compliance state of the resource.
                compliance_state_reason (str):
                    A reason for the resource to be in the given compliance
                    state. This field is always populated when
                    ``compliance_state`` is ``UNKNOWN``.

                    The following values are supported when
                    ``compliance_state == UNKNOWN``

                    -  ``execution-errors``: Errors were encountered by the
                       agent while executing the resource and the compliance
                       state couldn't be determined.
                    -  ``execution-skipped-by-agent``: Resource execution was
                       skipped by the agent because errors were encountered
                       while executing prior resources in the OS policy.
                    -  ``os-policy-execution-attempt-failed``: The execution of
                       the OS policy containing this resource failed and the
                       compliance state couldn't be determined.
                exec_resource_output (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput):
                    ExecResource specific output.

                    This field is a member of `oneof`_ ``output``.
            """

            class ComplianceState(proto.Enum):
                r"""Possible compliance states for a resource."""
                UNKNOWN = 0
                COMPLIANT = 1
                NON_COMPLIANT = 2

            class OSPolicyResourceConfigStep(proto.Message):
                r"""Step performed by the OS Config agent for configuring an
                ``OSPolicy`` resource to its desired state.

                Attributes:
                    type_ (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type):
                        Configuration step type.
                    error_message (str):
                        An error message recorded during the execution of this
                        step. Only populated if errors were encountered during
                        this step execution.
                """

                class Type(proto.Enum):
                    r"""Supported configuration step types"""
                    TYPE_UNSPECIFIED = 0
                    VALIDATION = 1
                    DESIRED_STATE_CHECK = 2
                    DESIRED_STATE_ENFORCEMENT = 3
                    DESIRED_STATE_CHECK_POST_ENFORCEMENT = 4

                type_: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type" = proto.Field(
                    proto.ENUM,
                    number=1,
                    enum="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type",
                )
                error_message: str = proto.Field(
                    proto.STRING,
                    number=2,
                )

            class ExecResourceOutput(proto.Message):
                r"""ExecResource specific output.

                Attributes:
                    enforcement_output (bytes):
                        Output from enforcement phase output file (if
                        run). Output size is limited to 100K bytes.
                """

                enforcement_output: bytes = proto.Field(
                    proto.BYTES,
                    number=2,
                )

            os_policy_resource_id: str = proto.Field(
                proto.STRING,
                number=1,
            )
            config_steps: MutableSequence[
                "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep"
            ] = proto.RepeatedField(
                proto.MESSAGE,
                number=2,
                message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep",
            )
            compliance_state: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState" = proto.Field(
                proto.ENUM,
                number=3,
                enum="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState",
            )
            compliance_state_reason: str = proto.Field(
                proto.STRING,
                number=4,
            )
            exec_resource_output: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput" = proto.Field(
                proto.MESSAGE,
                number=5,
                oneof="output",
                message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput",
            )

        os_policy_id: str = proto.Field(
            proto.STRING,
            number=1,
        )
        compliance_state: "OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState" = proto.Field(
            proto.ENUM,
            number=2,
            enum="OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState",
        )
        compliance_state_reason: str = proto.Field(
            proto.STRING,
            number=3,
        )
        os_policy_resource_compliances: MutableSequence[
            "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance"
        ] = proto.RepeatedField(
            proto.MESSAGE,
            number=4,
            message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance",
        )

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    instance: str = proto.Field(
        proto.STRING,
        number=2,
    )
    os_policy_assignment: str = proto.Field(
        proto.STRING,
        number=3,
    )
    os_policy_compliances: MutableSequence[OSPolicyCompliance] = proto.RepeatedField(
        proto.MESSAGE,
        number=4,
        message=OSPolicyCompliance,
    )
    update_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=5,
        message=timestamp_pb2.Timestamp,
    )
    last_run_id: str = proto.Field(
        proto.STRING,
        number=6,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
{ "content_hash": "336c8f882ac8c31c7c2470edce75cce2", "timestamp": "", "source": "github", "line_count": 376, "max_line_length": 177, "avg_line_length": 41.672872340425535, "alnum_prop": 0.5980598634245964, "repo_name": "googleapis/python-os-config", "id": "994582517e701083d933a5dd13fb8ea589cd065e", "size": "16269", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "google/cloud/osconfig_v1/types/os_policy_assignment_reports.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2050" }, { "name": "Python", "bytes": "1810720" }, { "name": "Shell", "bytes": "30669" } ], "symlink_target": "" }
from test_framework.mininode import wait_until
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (assert_equal,
                                 assert_raises_jsonrpc,
                                 connect_nodes_bi,
                                 start_node,
                                 stop_node,
                                 )


class NodeHandlingTest(BitcoinTestFramework):

    def __init__(self):
        super().__init__()
        self.num_nodes = 2
        self.setup_clean_chain = False

    def setup_network(self):
        self.nodes = self.setup_nodes()
        connect_nodes_bi(self.nodes, 0, 1)

    def run_test(self):
        ###########################
        # setban/listbanned tests #
        ###########################
        # node1 should have 2 connections to node0 at this point
        assert_equal(len(self.nodes[1].getpeerinfo()), 2)
        self.nodes[1].setban("127.0.0.1", "add")
        assert wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
        # all nodes must be disconnected at this point
        assert_equal(len(self.nodes[1].getpeerinfo()), 0)
        assert_equal(len(self.nodes[1].listbanned()), 1)
        self.nodes[1].clearbanned()
        assert_equal(len(self.nodes[1].listbanned()), 0)
        self.nodes[1].setban("127.0.0.0/24", "add")
        assert_equal(len(self.nodes[1].listbanned()), 1)
        # This will throw an exception because 127.0.0.1 is within range 127.0.0.0/24
        assert_raises_jsonrpc(-23, "IP/Subnet already banned",
                              self.nodes[1].setban, "127.0.0.1", "add")
        # This will throw an exception because 127.0.0.1/42 is not a real subnet
        assert_raises_jsonrpc(-30, "Error: Invalid IP/Subnet",
                              self.nodes[1].setban, "127.0.0.1/42", "add")
        # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
        assert_equal(len(self.nodes[1].listbanned()), 1)
        # This will throw an exception because 127.0.0.1 was not added above
        assert_raises_jsonrpc(-30, "Error: Unban failed",
                              self.nodes[1].setban, "127.0.0.1", "remove")
        assert_equal(len(self.nodes[1].listbanned()), 1)
        self.nodes[1].setban("127.0.0.0/24", "remove")
        assert_equal(len(self.nodes[1].listbanned()), 0)
        self.nodes[1].clearbanned()
        assert_equal(len(self.nodes[1].listbanned()), 0)

        # test persisted banlist
        self.nodes[1].setban("127.0.0.0/32", "add")
        self.nodes[1].setban("127.0.0.0/24", "add")
        self.nodes[1].setban("192.168.0.1", "add", 1)  # ban for 1 second
        self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19",
                             "add", 1000)  # ban for 1000 seconds
        listBeforeShutdown = self.nodes[1].listbanned()
        assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
        assert wait_until(lambda: len(self.nodes[1].listbanned()) == 3, timeout=10)

        stop_node(self.nodes[1], 1)

        self.nodes[1] = start_node(1, self.options.tmpdir)
        listAfterShutdown = self.nodes[1].listbanned()
        assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
        assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
        assert_equal("/19" in listAfterShutdown[2]['address'], True)

        # Clear ban lists
        self.nodes[1].clearbanned()
        connect_nodes_bi(self.nodes, 0, 1)

        ###########################
        # RPC disconnectnode test #
        ###########################
        address1 = self.nodes[0].getpeerinfo()[0]['addr']
        self.nodes[0].disconnectnode(address=address1)
        assert wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
        assert not [node for node in self.nodes[0].getpeerinfo()
                    if node['addr'] == address1]

        connect_nodes_bi(self.nodes, 0, 1)  # reconnect the node
        assert [node for node in self.nodes[0].getpeerinfo()
                if node['addr'] == address1]


if __name__ == '__main__':
    NodeHandlingTest().main()
{ "content_hash": "b70d6daba11aaf4aee00ea427b5df98d", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 138, "avg_line_length": 49.925, "alnum_prop": 0.585378067100651, "repo_name": "psionin/smartcoin", "id": "61be27ae2b67a7593d8f8c9ca914e8e46fba24ee", "size": "4235", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "qa/rpc-tests/nodehandling.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "1102755" }, { "name": "C++", "bytes": "3857098" }, { "name": "CSS", "bytes": "1127" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2100" }, { "name": "M4", "bytes": "135902" }, { "name": "Makefile", "bytes": "85486" }, { "name": "Objective-C", "bytes": "3275" }, { "name": "Objective-C++", "bytes": "7240" }, { "name": "Protocol Buffer", "bytes": "2308" }, { "name": "Python", "bytes": "415970" }, { "name": "QMake", "bytes": "2019" }, { "name": "Roff", "bytes": "19837" }, { "name": "Shell", "bytes": "38142" } ], "symlink_target": "" }
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json


class GeocodeByAddress(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the GeocodeByAddress Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(GeocodeByAddress, self).__init__(temboo_session, '/Library/Google/Geocoding/GeocodeByAddress')

    def new_input_set(self):
        return GeocodeByAddressInputSet()

    def _make_result_set(self, result, path):
        return GeocodeByAddressResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return GeocodeByAddressChoreographyExecution(session, exec_id, path)


class GeocodeByAddressInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the GeocodeByAddress
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """

    def set_Address(self, value):
        """
        Set the value of the Address input for this Choreo. ((required, string) The address that you want to geocode.)
        """
        super(GeocodeByAddressInputSet, self)._set_input('Address', value)

    def set_Bounds(self, value):
        """
        Set the value of the Bounds input for this Choreo. ((optional, string) The bounding box of the viewport within which to bias geocode results more prominently.)
        """
        super(GeocodeByAddressInputSet, self)._set_input('Bounds', value)

    def set_Language(self, value):
        """
        Set the value of the Language input for this Choreo. ((optional, string) The language in which to return results. Defaults to 'en' (English).)
        """
        super(GeocodeByAddressInputSet, self)._set_input('Language', value)

    def set_Region(self, value):
        """
        Set the value of the Region input for this Choreo. ((optional, string) The region code, specified as a ccTLD ("top-level domain") two-character value. Defaults to 'us' (United States).)
        """
        super(GeocodeByAddressInputSet, self)._set_input('Region', value)

    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".)
        """
        super(GeocodeByAddressInputSet, self)._set_input('ResponseFormat', value)

    def set_Sensor(self, value):
        """
        Set the value of the Sensor input for this Choreo. ((optional, boolean) Indicates whether or not the geocoding request is from a device with a location sensor. Value must be either 1 or 0. Defaults to 0 (false).)
        """
        super(GeocodeByAddressInputSet, self)._set_input('Sensor', value)


class GeocodeByAddressResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the GeocodeByAddress Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        return json.loads(str)

    def get_Latitude(self):
        """
        Retrieve the value for the "Latitude" output from this Choreo execution. ((decimal) The latitude coordinate associated with the address provided.)
        """
        return self._output.get('Latitude', None)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Google.)
        """
        return self._output.get('Response', None)

    def get_Longitude(self):
        """
        Retrieve the value for the "Longitude" output from this Choreo execution. ((decimal) The longitude coordinate associated with the address provided.)
        """
        return self._output.get('Longitude', None)


class GeocodeByAddressChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return GeocodeByAddressResultSet(response, path)
{ "content_hash": "b40f376e8d6ab286a2324ffe2b838af9", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 220, "avg_line_length": 46.043956043956044, "alnum_prop": 0.6890214797136038, "repo_name": "jordanemedlock/psychtruths", "id": "342ee00208f7b78d7345c38379a74f641fc6a097", "size": "5074", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "temboo/core/Library/Google/Geocoding/GeocodeByAddress.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "18544" }, { "name": "HTML", "bytes": "34650" }, { "name": "JavaScript", "bytes": "423" }, { "name": "PHP", "bytes": "1097" }, { "name": "Python", "bytes": "23444578" } ], "symlink_target": "" }
import ast

from .common import *

DEBUG = False

ALLOWED_HOSTS = ['www.ava-project.com', 'ava-project.com', '163.5.84.224']

# email configuration
EMAIL_HOST = env.get('EMAIL_HOST', '')
EMAIL_PORT = int(env.get('EMAIL_PORT', 0))
EMAIL_HOST_USER = env.get('EMAIL_HOST_USER', '')
EMAIL_HOST_PASSWORD = env.get('EMAIL_HOST_PASSWORD', '')
# ast.literal_eval() expects a string, so the default must be the string
# 'True' rather than the boolean True (which raises a ValueError).
EMAIL_USE_TLS = ast.literal_eval(env.get('EMAIL_USE_TLS', 'True'))
DEFAULT_FROM_EMAIL = 'contact@ava-project.com'

# configure sentry logging
SENTRY_KEY = env.get('SENTRY_KEY', None)
if SENTRY_KEY:
    INSTALLED_APPS.append('raven.contrib.django.raven_compat')
    RAVEN_CONFIG = {'dsn': SENTRY_KEY}

# compress files
COMPRESS_ENABLED = True
from lib.mmonit import MmonitBaseAction


class MmonitListStatusHost(MmonitBaseAction):
    def run(self, host_id=None, hostgroupid=None, status=None, platform=None, machine=None, led=None):
        self.login()

        data = {}
        # Deliberately explicit: one check per filter keeps it easy to follow.
        if host_id is not None:
            data['host_id'] = host_id
        if hostgroupid is not None:
            data['hostgroupid'] = hostgroupid
        if status is not None:
            data['status'] = status
        if platform is not None:
            data['platform'] = platform
        if machine is not None:
            data['machine'] = machine
        if led is not None:
            data['led'] = led

        req = self.session.get("{}/admin/hosts/update".format(self.url), params=data)

        try:
            return req.json()
        finally:
            self.logout()
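# The chain of if-statements above can equivalently be written as a dict
# comprehension; a sketch (the explicit version wins on greppability, the
# comprehension wins once the parameter list grows):
def build_host_filters(host_id=None, hostgroupid=None, status=None,
                       platform=None, machine=None, led=None):
    params = {'host_id': host_id, 'hostgroupid': hostgroupid, 'status': status,
              'platform': platform, 'machine': machine, 'led': led}
    # Keep only the filters the caller actually supplied.
    return {key: value for key, value in params.items() if value is not None}


assert build_host_filters(status='online') == {'status': 'online'}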
import os
import os.path
import sys


def main():
    # Collect (mtime, name) pairs for all IMG_*.JPG files in the current
    # directory, then sort them chronologically.
    time_name_pairs = [(os.stat(name).st_mtime, name)
                       for name in os.listdir('.')
                       if name[:4] == 'IMG_' and name[-4:] == '.JPG']
    time_name_pairs.sort()

    names = []
    for i, (mtime, oldname) in enumerate(time_name_pairs, start=1):
        newname = 'IMG_{:04d}.JPG'.format(i)
        tmpname = 'tmp_' + newname
        if os.path.exists(tmpname):
            sys.exit('"{}" already exists!'.format(tmpname))
        names.append((oldname, tmpname, newname))

    # First pass: move every file to a temporary name so that the final
    # renames cannot clobber a source file that has not been processed yet.
    for oldname, tmpname, newname in names:
        print('Renaming', oldname, 'to', tmpname)
        os.rename(oldname, tmpname)

    # Second pass: move the temporary files to their final, chronological names.
    for oldname, tmpname, newname in names:
        if os.path.exists(newname):
            sys.exit('"{}" already exists!'.format(newname))
        print('Renaming', tmpname, 'to', newname)
        os.rename(tmpname, newname)


if __name__ == '__main__':
    main()
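# Why the temporary pass matters: renaming straight to the final names can
# clobber a file that has not been processed yet whenever time order differs
# from name order. A sketch of the failure mode (hypothetical file names):
#
#   IMG_0002.JPG is older than IMG_0001.JPG, so it sorts first:
#     IMG_0002.JPG -> IMG_0001.JPG    # collides with the unprocessed IMG_0001.JPG
#
#   The tmp_ pass vacates the IMG_ namespace before any final rename:
#     IMG_0002.JPG -> tmp_IMG_0001.JPG
#     IMG_0001.JPG -> tmp_IMG_0002.JPG
#     tmp_IMG_0001.JPG -> IMG_0001.JPG
#     tmp_IMG_0002.JPG -> IMG_0002.JPG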
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('domain', '0010_many_to_many_for_conditions'), ] operations = [ migrations.AlterField( model_name='attributeentity', name='conditions', field=models.ManyToManyField(blank=True, to='conditions.Condition'), ), ]
try: import unittest2 as unittest except ImportError: import unittest # noqa from uuid import uuid4, UUID import random from datetime import datetime, date, time from decimal import Decimal from operator import itemgetter from cassandra.cqlengine import columns from cassandra.cqlengine import CQLEngineException from cassandra.cqlengine.management import sync_table from cassandra.cqlengine.management import drop_table from cassandra.cqlengine.models import Model from cassandra.query import SimpleStatement from cassandra.util import Date, Time from cassandra.cqltypes import Int32Type from cassandra.cqlengine.statements import SelectStatement, DeleteStatement, WhereClause from cassandra.cqlengine.operators import EqualsOperator from tests.integration import PROTOCOL_VERSION from tests.integration.cqlengine.base import BaseCassEngTestCase from tests.integration.cqlengine import DEFAULT_KEYSPACE class TestModel(Model): id = columns.UUID(primary_key=True, default=lambda: uuid4()) count = columns.Integer() text = columns.Text(required=False) a_bool = columns.Boolean(default=False) class TestModelSave(Model): partition = columns.UUID(primary_key=True, default=uuid4) cluster = columns.Integer(primary_key=True) count = columns.Integer(required=False) text = columns.Text(required=False, index=True) text_set = columns.Set(columns.Text, required=False) text_list = columns.List(columns.Text, required=False) text_map = columns.Map(columns.Text, columns.Text, required=False) class TestModelIO(BaseCassEngTestCase): @classmethod def setUpClass(cls): super(TestModelIO, cls).setUpClass() sync_table(TestModel) @classmethod def tearDownClass(cls): super(TestModelIO, cls).tearDownClass() drop_table(TestModel) def test_model_save_and_load(self): """ Tests that models can be saved and retrieved """ tm = TestModel.create(count=8, text='123456789') self.assertIsInstance(tm, TestModel) tm2 = TestModel.objects(id=tm.pk).first() self.assertIsInstance(tm2, TestModel) for cname in tm._columns.keys(): self.assertEqual(getattr(tm, cname), getattr(tm2, cname)) def test_model_read_as_dict(self): """ Tests that columns of an instance can be read as a dict. 
""" tm = TestModel.create(count=8, text='123456789', a_bool=True) column_dict = { 'id': tm.id, 'count': tm.count, 'text': tm.text, 'a_bool': tm.a_bool, } self.assertEqual(sorted(tm.keys()), sorted(column_dict.keys())) self.assertSetEqual(set(tm.values()), set(column_dict.values())) self.assertEqual( sorted(tm.items(), key=itemgetter(0)), sorted(column_dict.items(), key=itemgetter(0))) self.assertEqual(len(tm), len(column_dict)) for column_id in column_dict.keys(): self.assertEqual(tm[column_id], column_dict[column_id]) tm['count'] = 6 self.assertEqual(tm.count, 6) def test_model_updating_works_properly(self): """ Tests that subsequent saves after initial model creation work """ tm = TestModel.objects.create(count=8, text='123456789') tm.count = 100 tm.a_bool = True tm.save() tm2 = TestModel.objects(id=tm.pk).first() self.assertEqual(tm.count, tm2.count) self.assertEqual(tm.a_bool, tm2.a_bool) def test_model_deleting_works_properly(self): """ Tests that an instance's delete method deletes the instance """ tm = TestModel.create(count=8, text='123456789') tm.delete() tm2 = TestModel.objects(id=tm.pk).first() self.assertIsNone(tm2) def test_column_deleting_works_properly(self): """ """ tm = TestModel.create(count=8, text='123456789') tm.text = None tm.save() tm2 = TestModel.objects(id=tm.pk).first() self.assertIsInstance(tm2, TestModel) self.assertTrue(tm2.text is None) self.assertTrue(tm2._values['text'].previous_value is None) def test_a_sensical_error_is_raised_if_you_try_to_create_a_table_twice(self): """ """ sync_table(TestModel) sync_table(TestModel) def test_can_insert_model_with_all_column_types(self): """ Test for inserting all column types into a Model test_can_insert_model_with_all_column_types tests that each cqlengine column type can be inserted into a Model. It first creates a Model that has each cqlengine column type. It then creates a Model instance where all the fields have corresponding data, which performs the insert into the Cassandra table. Finally, it verifies that each column read from the Model from Cassandra is the same as the input parameters. @since 2.6.0 @jira_ticket PYTHON-246 @expected_result The Model is inserted with each column type, and the resulting read yields proper data for each column. 
@test_category data_types:primitive """ class AllDatatypesModel(Model): id = columns.Integer(primary_key=True) a = columns.Ascii() b = columns.BigInt() c = columns.Blob() d = columns.Boolean() e = columns.DateTime() f = columns.Decimal() g = columns.Double() h = columns.Float() i = columns.Inet() j = columns.Integer() k = columns.Text() l = columns.TimeUUID() m = columns.UUID() n = columns.VarInt() sync_table(AllDatatypesModel) input = ['ascii', 2 ** 63 - 1, bytearray(b'hello world'), True, datetime.utcfromtimestamp(872835240), Decimal('12.3E+7'), 2.39, 3.4028234663852886e+38, '123.123.123.123', 2147483647, 'text', UUID('FE2B4360-28C6-11E2-81C1-0800200C9A66'), UUID('067e6162-3b6f-4ae2-a171-2470b63dff00'), int(str(2147483647) + '000')] AllDatatypesModel.create(id=0, a='ascii', b=2 ** 63 - 1, c=bytearray(b'hello world'), d=True, e=datetime.utcfromtimestamp(872835240), f=Decimal('12.3E+7'), g=2.39, h=3.4028234663852886e+38, i='123.123.123.123', j=2147483647, k='text', l=UUID('FE2B4360-28C6-11E2-81C1-0800200C9A66'), m=UUID('067e6162-3b6f-4ae2-a171-2470b63dff00'), n=int(str(2147483647) + '000')) self.assertEqual(1, AllDatatypesModel.objects.count()) output = AllDatatypesModel.objects().first() for i, i_char in enumerate(range(ord('a'), ord('a') + 14)): self.assertEqual(input[i], output[chr(i_char)]) def test_can_specify_none_instead_of_default(self): self.assertIsNotNone(TestModel.a_bool.column.default) # override default inst = TestModel.create(a_bool=None) self.assertIsNone(inst.a_bool) queried = TestModel.objects(id=inst.id).first() self.assertIsNone(queried.a_bool) # letting default be set inst = TestModel.create() self.assertEqual(inst.a_bool, TestModel.a_bool.column.default) queried = TestModel.objects(id=inst.id).first() self.assertEqual(queried.a_bool, TestModel.a_bool.column.default) def test_can_insert_model_with_all_protocol_v4_column_types(self): """ Test for inserting all protocol v4 column types into a Model test_can_insert_model_with_all_protocol_v4_column_types tests that each cqlengine protocol v4 column type can be inserted into a Model. It first creates a Model that has each cqlengine protocol v4 column type. It then creates a Model instance where all the fields have corresponding data, which performs the insert into the Cassandra table. Finally, it verifies that each column read from the Model from Cassandra is the same as the input parameters. @since 2.6.0 @jira_ticket PYTHON-245 @expected_result The Model is inserted with each protocol v4 column type, and the resulting read yields proper data for each column. 
        @test_category data_types:primitive
        """
        if PROTOCOL_VERSION < 4:
            raise unittest.SkipTest("Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))

        class v4DatatypesModel(Model):
            id = columns.Integer(primary_key=True)
            a = columns.Date()
            b = columns.SmallInt()
            c = columns.Time()
            d = columns.TinyInt()

        sync_table(v4DatatypesModel)

        input = [Date(date(1970, 1, 1)), 32523, Time(time(16, 47, 25, 7)), 123]

        v4DatatypesModel.create(id=0, a=date(1970, 1, 1), b=32523, c=time(16, 47, 25, 7), d=123)

        self.assertEqual(1, v4DatatypesModel.objects.count())
        output = v4DatatypesModel.objects().first()

        # check all four columns, a through d
        for i, i_char in enumerate(range(ord('a'), ord('a') + 4)):
            self.assertEqual(input[i], output[chr(i_char)])

    def test_can_insert_double_and_float(self):
        """
        Test for inserting single-precision and double-precision values into Float and Double columns

        @since 2.6.0
        @changed 3.0.0 removed deprecated Float(double_precision) parameter
        @jira_ticket PYTHON-246
        @expected_result Each floating-point column type holds values at its respective precision.

        @test_category data_types:primitive
        """
        class FloatingPointModel(Model):
            id = columns.Integer(primary_key=True)
            f = columns.Float()
            d = columns.Double()

        sync_table(FloatingPointModel)

        FloatingPointModel.create(id=0, f=2.39)
        output = FloatingPointModel.objects().first()
        self.assertEqual(2.390000104904175, output.f)  # float loses precision

        FloatingPointModel.create(id=0, f=3.4028234663852886e+38, d=2.39)
        output = FloatingPointModel.objects().first()
        self.assertEqual(3.4028234663852886e+38, output.f)
        self.assertEqual(2.39, output.d)  # double retains precision

        FloatingPointModel.create(id=0, d=3.4028234663852886e+38)
        output = FloatingPointModel.objects().first()
        self.assertEqual(3.4028234663852886e+38, output.d)


class TestMultiKeyModel(Model):
    partition = columns.Integer(primary_key=True)
    cluster = columns.Integer(primary_key=True)
    count = columns.Integer(required=False)
    text = columns.Text(required=False)


class TestDeleting(BaseCassEngTestCase):
    @classmethod
    def setUpClass(cls):
        super(TestDeleting, cls).setUpClass()
        drop_table(TestMultiKeyModel)
        sync_table(TestMultiKeyModel)

    @classmethod
    def tearDownClass(cls):
        super(TestDeleting, cls).tearDownClass()
        drop_table(TestMultiKeyModel)

    def test_deleting_only_deletes_one_object(self):
        partition = random.randint(0, 1000)
        for i in range(5):
            TestMultiKeyModel.create(partition=partition, cluster=i, count=i, text=str(i))

        self.assertTrue(TestMultiKeyModel.filter(partition=partition).count() == 5)

        TestMultiKeyModel.get(partition=partition, cluster=0).delete()

        self.assertTrue(TestMultiKeyModel.filter(partition=partition).count() == 4)

        TestMultiKeyModel.filter(partition=partition).delete()


class TestUpdating(BaseCassEngTestCase):
    @classmethod
    def setUpClass(cls):
        super(TestUpdating, cls).setUpClass()
        drop_table(TestModelSave)
        drop_table(TestMultiKeyModel)
        sync_table(TestModelSave)
        sync_table(TestMultiKeyModel)

    @classmethod
    def tearDownClass(cls):
        super(TestUpdating, cls).tearDownClass()
        drop_table(TestMultiKeyModel)
        drop_table(TestModelSave)

    def setUp(self):
        super(TestUpdating, self).setUp()
        self.instance = TestMultiKeyModel.create(
            partition=random.randint(0, 1000),
            cluster=random.randint(0, 1000),
            count=0,
            text='happy')

    def test_vanilla_update(self):
        self.instance.count = 5
        self.instance.save()

        check = TestMultiKeyModel.get(partition=self.instance.partition, cluster=self.instance.cluster)
        self.assertTrue(check.count == 5)
        self.assertTrue(check.text == 'happy')

    def test_deleting_only(self):
self.instance.count = None self.instance.text = None self.instance.save() check = TestMultiKeyModel.get(partition=self.instance.partition, cluster=self.instance.cluster) self.assertTrue(check.count is None) self.assertTrue(check.text is None) def test_get_changed_columns(self): self.assertTrue(self.instance.get_changed_columns() == []) self.instance.count = 1 changes = self.instance.get_changed_columns() self.assertTrue(len(changes) == 1) self.assertTrue(changes == ['count']) self.instance.save() self.assertTrue(self.instance.get_changed_columns() == []) def test_previous_value_tracking_of_persisted_instance(self): # Check initial internal states. self.assertTrue(self.instance.get_changed_columns() == []) self.assertTrue(self.instance._values['count'].previous_value == 0) # Change value and check internal states. self.instance.count = 1 self.assertTrue(self.instance.get_changed_columns() == ['count']) self.assertTrue(self.instance._values['count'].previous_value == 0) # Internal states should be updated on save. self.instance.save() self.assertTrue(self.instance.get_changed_columns() == []) self.assertTrue(self.instance._values['count'].previous_value == 1) # Change value twice. self.instance.count = 2 self.assertTrue(self.instance.get_changed_columns() == ['count']) self.assertTrue(self.instance._values['count'].previous_value == 1) self.instance.count = 3 self.assertTrue(self.instance.get_changed_columns() == ['count']) self.assertTrue(self.instance._values['count'].previous_value == 1) # Internal states updated on save. self.instance.save() self.assertTrue(self.instance.get_changed_columns() == []) self.assertTrue(self.instance._values['count'].previous_value == 3) # Change value and reset it. self.instance.count = 2 self.assertTrue(self.instance.get_changed_columns() == ['count']) self.assertTrue(self.instance._values['count'].previous_value == 3) self.instance.count = 3 self.assertTrue(self.instance.get_changed_columns() == []) self.assertTrue(self.instance._values['count'].previous_value == 3) # Nothing to save: values in initial conditions. self.instance.save() self.assertTrue(self.instance.get_changed_columns() == []) self.assertTrue(self.instance._values['count'].previous_value == 3) # Change Multiple values self.instance.count = 4 self.instance.text = "changed" self.assertTrue(len(self.instance.get_changed_columns()) == 2) self.assertTrue('text' in self.instance.get_changed_columns()) self.assertTrue('count' in self.instance.get_changed_columns()) self.instance.save() self.assertTrue(self.instance.get_changed_columns() == []) # Reset Multiple Values self.instance.count = 5 self.instance.text = "changed" self.assertTrue(self.instance.get_changed_columns() == ['count']) self.instance.text = "changed2" self.assertTrue(len(self.instance.get_changed_columns()) == 2) self.assertTrue('text' in self.instance.get_changed_columns()) self.assertTrue('count' in self.instance.get_changed_columns()) self.instance.count = 4 self.instance.text = "changed" self.assertTrue(self.instance.get_changed_columns() == []) def test_previous_value_tracking_on_instantiation(self): self.instance = TestMultiKeyModel( partition=random.randint(0, 1000), cluster=random.randint(0, 1000), count=0, text='happy') # Columns of instances not persisted yet should be marked as changed. 
self.assertTrue(set(self.instance.get_changed_columns()) == set([ 'partition', 'cluster', 'count', 'text'])) self.assertTrue(self.instance._values['partition'].previous_value is None) self.assertTrue(self.instance._values['cluster'].previous_value is None) self.assertTrue(self.instance._values['count'].previous_value is None) self.assertTrue(self.instance._values['text'].previous_value is None) # Value changes doesn't affect internal states. self.instance.count = 1 self.assertTrue('count' in self.instance.get_changed_columns()) self.assertTrue(self.instance._values['count'].previous_value is None) self.instance.count = 2 self.assertTrue('count' in self.instance.get_changed_columns()) self.assertTrue(self.instance._values['count'].previous_value is None) # Value reset is properly tracked. self.instance.count = None self.assertTrue('count' not in self.instance.get_changed_columns()) self.assertTrue(self.instance._values['count'].previous_value is None) self.instance.save() self.assertTrue(self.instance.get_changed_columns() == []) self.assertTrue(self.instance._values['count'].previous_value is None) self.assertTrue(self.instance.count is None) def test_save_to_none(self): """ Test update of column value of None with save() function. Under specific scenarios calling save on a None value wouldn't update previous values. This issue only manifests with a new instantiation of the model, if existing model is modified and updated the issue will not occur. @since 3.0.0 @jira_ticket PYTHON-475 @expected_result column value should be updated to None @test_category object_mapper """ partition = uuid4() cluster = 1 text = 'set' text_list = ['set'] text_set = set(("set",)) text_map = {"set": 'set'} initial = TestModelSave(partition=partition, cluster=cluster, text=text, text_list=text_list, text_set=text_set, text_map=text_map) initial.save() current = TestModelSave.objects.get(partition=partition, cluster=cluster) self.assertEqual(current.text, text) self.assertEqual(current.text_list, text_list) self.assertEqual(current.text_set, text_set) self.assertEqual(current.text_map, text_map) next = TestModelSave(partition=partition, cluster=cluster, text=None, text_list=None, text_set=None, text_map=None) next.save() current = TestModelSave.objects.get(partition=partition, cluster=cluster) self.assertEqual(current.text, None) self.assertEqual(current.text_list, []) self.assertEqual(current.text_set, set()) self.assertEqual(current.text_map, {}) def test_none_filter_fails(): class NoneFilterModel(Model): pk = columns.Integer(primary_key=True) v = columns.Integer() sync_table(NoneFilterModel) try: NoneFilterModel.objects(pk=None) raise Exception("fail") except CQLEngineException as e: pass class TestCanUpdate(BaseCassEngTestCase): @classmethod def setUpClass(cls): super(TestCanUpdate, cls).setUpClass() drop_table(TestModel) sync_table(TestModel) @classmethod def tearDownClass(cls): super(TestCanUpdate, cls).tearDownClass() drop_table(TestModel) def test_success_case(self): tm = TestModel(count=8, text='123456789') # object hasn't been saved, # shouldn't be able to update self.assertTrue(not tm._is_persisted) self.assertTrue(not tm._can_update()) tm.save() # object has been saved, # should be able to update self.assertTrue(tm._is_persisted) self.assertTrue(tm._can_update()) tm.count = 200 # primary keys haven't changed, # should still be able to update self.assertTrue(tm._can_update()) tm.save() tm.id = uuid4() # primary keys have changed, # should not be able to update self.assertTrue(not tm._can_update()) 
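# The lifecycle these tests exercise, condensed into one round trip.
# A minimal sketch: the Note model is hypothetical, and a reachable Cassandra
# cluster is assumed (the contact point and keyspace below are placeholders).
class Note(Model):
    id = columns.UUID(primary_key=True, default=uuid4)
    text = columns.Text()


if __name__ == '__main__':
    from cassandra.cqlengine import connection
    connection.setup(['127.0.0.1'], default_keyspace='demo_ks')

    sync_table(Note)                   # create (or update) the table
    note = Note.create(text='hello')   # INSERT
    note.text = 'updated'
    note.save()                        # UPDATE -- only changed columns are written
    assert Note.objects(id=note.id).first().text == 'updated'
    note.delete()                      # DELETE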
class IndexDefinitionModel(Model): key = columns.UUID(primary_key=True) val = columns.Text(index=True) class TestIndexedColumnDefinition(BaseCassEngTestCase): def test_exception_isnt_raised_if_an_index_is_defined_more_than_once(self): sync_table(IndexDefinitionModel) sync_table(IndexDefinitionModel) class ReservedWordModel(Model): token = columns.Text(primary_key=True) insert = columns.Integer(index=True) class TestQueryQuoting(BaseCassEngTestCase): def test_reserved_cql_words_can_be_used_as_column_names(self): """ """ sync_table(ReservedWordModel) model1 = ReservedWordModel.create(token='1', insert=5) model2 = ReservedWordModel.filter(token='1') self.assertTrue(len(model2) == 1) self.assertTrue(model1.token == model2[0].token) self.assertTrue(model1.insert == model2[0].insert) class TestQueryModel(Model): test_id = columns.UUID(primary_key=True, default=uuid4) date = columns.Date(primary_key=True) description = columns.Text() class TestQuerying(BaseCassEngTestCase): @classmethod def setUpClass(cls): if PROTOCOL_VERSION < 4: return super(TestQuerying, cls).setUpClass() drop_table(TestQueryModel) sync_table(TestQueryModel) @classmethod def tearDownClass(cls): if PROTOCOL_VERSION < 4: return super(TestQuerying, cls).tearDownClass() drop_table(TestQueryModel) def setUp(self): if PROTOCOL_VERSION < 4: raise unittest.SkipTest("Date query tests require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION)) def test_query_with_date(self): uid = uuid4() day = date(2013, 11, 26) obj = TestQueryModel.create(test_id=uid, date=day, description=u'foo') self.assertEqual(obj.description, u'foo') inst = TestQueryModel.filter( TestQueryModel.test_id == uid, TestQueryModel.date == day).limit(1).first() self.assertTrue(inst.test_id == uid) self.assertTrue(inst.date == day) class BasicModel(Model): __table_name__ = 'basic_model_routing' k = columns.Integer(primary_key=True) v = columns.Integer() class BasicModelMulti(Model): __table_name__ = 'basic_model_routing_multi' k = columns.Integer(partition_key=True) v = columns.Integer(partition_key=True) class ComplexModelRouting(Model): __table_name__ = 'complex_model_routing' partition = columns.UUID(partition_key=True, default=uuid4) cluster = columns.Integer(partition_key=True) count = columns.Integer() text = columns.Text(partition_key=True) float = columns.Float(partition_key=True) text_2 = columns.Text() class TestModelRoutingKeys(BaseCassEngTestCase): @classmethod def setUpClass(cls): super(TestModelRoutingKeys, cls).setUpClass() sync_table(BasicModel) sync_table(BasicModelMulti) sync_table(ComplexModelRouting) @classmethod def tearDownClass(cls): super(TestModelRoutingKeys, cls).tearDownClass() drop_table(BasicModel) drop_table(BasicModelMulti) drop_table(ComplexModelRouting) def test_routing_key_generation_basic(self): """ Compares the routing key generated by simple partition key using the model with the one generated by the equivalent bound statement @since 3.2 @jira_ticket PYTHON-535 @expected_result they should match @test_category object_mapper """ prepared = self.session.prepare( """ INSERT INTO {0}.basic_model_routing (k, v) VALUES (?, ?) 
""".format(DEFAULT_KEYSPACE)) bound = prepared.bind((1, 2)) mrk = BasicModel._routing_key_from_values([1], self.session.cluster.protocol_version) simple = SimpleStatement("") simple.routing_key = mrk self.assertEqual(bound.routing_key, simple.routing_key) def test_routing_key_generation_multi(self): """ Compares the routing key generated by composite partition key using the model with the one generated by the equivalent bound statement @since 3.2 @jira_ticket PYTHON-535 @expected_result they should match @test_category object_mapper """ prepared = self.session.prepare( """ INSERT INTO {0}.basic_model_routing_multi (k, v) VALUES (?, ?) """.format(DEFAULT_KEYSPACE)) bound = prepared.bind((1, 2)) mrk = BasicModelMulti._routing_key_from_values([1, 2], self.session.cluster.protocol_version) simple = SimpleStatement("") simple.routing_key = mrk self.assertEqual(bound.routing_key, simple.routing_key) def test_routing_key_generation_complex(self): """ Compares the routing key generated by complex composite partition key using the model with the one generated by the equivalent bound statement @since 3.2 @jira_ticket PYTHON-535 @expected_result they should match @test_category object_mapper """ prepared = self.session.prepare( """ INSERT INTO {0}.complex_model_routing (partition, cluster, count, text, float, text_2) VALUES (?, ?, ?, ?, ?, ?) """.format(DEFAULT_KEYSPACE)) partition = uuid4() cluster = 1 count = 2 text = "text" float = 1.2 text_2 = "text_2" bound = prepared.bind((partition, cluster, count, text, float, text_2)) mrk = ComplexModelRouting._routing_key_from_values([partition, cluster, text, float], self.session.cluster.protocol_version) simple = SimpleStatement("") simple.routing_key = mrk self.assertEqual(bound.routing_key, simple.routing_key) def test_partition_key_index(self): """ Test to ensure that statement partition key generation is in the correct order @since 3.2 @jira_ticket PYTHON-535 @expected_result . @test_category object_mapper """ self._check_partition_value_generation(BasicModel, SelectStatement(BasicModel.__table_name__)) self._check_partition_value_generation(BasicModel, DeleteStatement(BasicModel.__table_name__)) self._check_partition_value_generation(BasicModelMulti, SelectStatement(BasicModelMulti.__table_name__)) self._check_partition_value_generation(BasicModelMulti, DeleteStatement(BasicModelMulti.__table_name__)) self._check_partition_value_generation(ComplexModelRouting, SelectStatement(ComplexModelRouting.__table_name__)) self._check_partition_value_generation(ComplexModelRouting, DeleteStatement(ComplexModelRouting.__table_name__)) self._check_partition_value_generation(BasicModel, SelectStatement(BasicModel.__table_name__), reverse=True) self._check_partition_value_generation(BasicModel, DeleteStatement(BasicModel.__table_name__), reverse=True) self._check_partition_value_generation(BasicModelMulti, SelectStatement(BasicModelMulti.__table_name__), reverse=True) self._check_partition_value_generation(BasicModelMulti, DeleteStatement(BasicModelMulti.__table_name__), reverse=True) self._check_partition_value_generation(ComplexModelRouting, SelectStatement(ComplexModelRouting.__table_name__), reverse=True) self._check_partition_value_generation(ComplexModelRouting, DeleteStatement(ComplexModelRouting.__table_name__), reverse=True) def _check_partition_value_generation(self, model, state, reverse=False): """ This generates a some statements based on the partition_key_index of the model. 
        It then validates that the order of the partition key values in the statement
        matches the index specified in the model's partition_key_index

        """
        # Setup some unique values for statement generation
        uuid = uuid4()
        values = {'k': 5, 'v': 3, 'partition': uuid, 'cluster': 6, 'count': 42, 'text': 'text', 'float': 3.1415,
                  'text_2': 'text_2'}
        res = dict((v, k) for k, v in values.items())

        items = list(model._partition_key_index.items())
        if reverse:
            items.reverse()

        # Add where clauses for each partition key
        for partition_key, position in items:
            wc = WhereClause(partition_key, EqualsOperator(), values.get(partition_key))
            state._add_where_clause(wc)

        # Iterate over the partition key values and check that each one's index
        # matches the one specified in the model's partition_key_index
        for indx, value in enumerate(state.partition_key_values(model._partition_key_index)):
            name = res.get(value)
            self.assertEqual(indx, model._partition_key_index.get(name))
""" Class for reading data from from Tucker Davis TTank format. Terminology: TDT hold data with tanks (actually a directory). And tanks hold sub block (sub directories). Tanks correspond to neo.Block and tdt block correspond to neo.Segment. Note the name Block is ambiguous because it does not refer to same thing in TDT terminology and neo. In a directory there are several files: * TSQ timestamp index of data * TBK some kind of channel info and maybe more * TEV contains data : spike + event + signal (for old version) * SEV contains signals (for new version) * ./sort/ can contain offline spikesorting label for spike and can be use place of TEV. Units in this IO are not guaranteed. Author: Samuel Garcia, SummitKwan, Chadwick Boulay """ from __future__ import print_function, division, absolute_import # from __future__ import unicode_literals is not compatible with numpy.dtype both py2 py3 from .baserawio import BaseRawIO, _signal_channel_dtype, _unit_channel_dtype, _event_channel_dtype import numpy as np import os import re from collections import OrderedDict class TdtRawIO(BaseRawIO): rawmode = 'one-dir' def __init__(self, dirname='', sortname=''): """ 'sortname' is used to specify the external sortcode generated by offline spike sorting. if sortname=='PLX', there should be a ./sort/PLX/*.SortResult file in the tdt block, which stores the sortcode for every spike; defaults to '', which uses the original online sort. """ BaseRawIO.__init__(self) if dirname.endswith('/'): dirname = dirname[:-1] self.dirname = dirname self.sortname = sortname def _source_name(self): return self.dirname def _parse_header(self): tankname = os.path.basename(self.dirname) segment_names = [] for segment_name in os.listdir(self.dirname): path = os.path.join(self.dirname, segment_name) if is_tdtblock(path): segment_names.append(segment_name) nb_segment = len(segment_names) # TBK (channel info) info_channel_groups = None for seg_index, segment_name in enumerate(segment_names): path = os.path.join(self.dirname, segment_name) # TBK contain channels tbk_filename = os.path.join(path, tankname + '_' + segment_name + '.Tbk') _info_channel_groups = read_tbk(tbk_filename) if info_channel_groups is None: info_channel_groups = _info_channel_groups else: assert np.array_equal(info_channel_groups, _info_channel_groups), 'Channels differ across segments' # TEV (mixed data) self._tev_datas = [] for seg_index, segment_name in enumerate(segment_names): path = os.path.join(self.dirname, segment_name) tev_filename = os.path.join(path, tankname + '_' + segment_name + '.tev') if os.path.exists(tev_filename): tev_data = np.memmap(tev_filename, mode='r', offset=0, dtype='uint8') else: tev_data = None self._tev_datas.append(tev_data) # TSQ index with timestamp self._tsq = [] self._seg_t_starts = [] self._seg_t_stops = [] for seg_index, segment_name in enumerate(segment_names): path = os.path.join(self.dirname, segment_name) tsq_filename = os.path.join(path, tankname + '_' + segment_name + '.tsq') tsq = np.fromfile(tsq_filename, dtype=tsq_dtype) self._tsq.append(tsq) # Start and stop times are only found in the second and last header row, respectively. 
            if tsq[1]['evname'] == chr(EVMARK_STARTBLOCK).encode():
                self._seg_t_starts.append(tsq[1]['timestamp'])
            else:
                self._seg_t_starts.append(np.nan)
                print('segment start time not found')
            if tsq[-1]['evname'] == chr(EVMARK_STOPBLOCK).encode():
                self._seg_t_stops.append(tsq[-1]['timestamp'])
            else:
                self._seg_t_stops.append(np.nan)
                print('segment stop time not found')

            # If there exists an external sortcode in ./sort/[sortname]/*.SortResult
            # (generated after offline sorting)
            if self.sortname != '':
                try:
                    for file in os.listdir(os.path.join(path, 'sort', self.sortname)):
                        if file.endswith(".SortResult"):
                            sortresult_filename = os.path.join(path, 'sort', self.sortname, file)

                            # get new sortcode
                            newsortcode = np.fromfile(sortresult_filename, 'int8')[
                                1024:]  # first 1024 bytes are header

                            # update the sort code with the info from this file
                            tsq['sortcode'][1:-1] = newsortcode

                            break
                except OSError:
                    pass
                except IOError:
                    pass

        # Re-order segments according to their start times
        sort_inds = np.argsort(self._seg_t_starts)
        if not np.array_equal(sort_inds, list(range(nb_segment))):
            segment_names = [segment_names[x] for x in sort_inds]
            self._tev_datas = [self._tev_datas[x] for x in sort_inds]
            self._seg_t_starts = [self._seg_t_starts[x] for x in sort_inds]
            self._seg_t_stops = [self._seg_t_stops[x] for x in sort_inds]
            self._tsq = [self._tsq[x] for x in sort_inds]
        self._global_t_start = self._seg_t_starts[0]

        # signal channels EVTYPE_STREAM
        signal_channels = []
        self._sigs_data_buf = {seg_index: {} for seg_index in range(nb_segment)}
        self._sigs_index = {seg_index: {} for seg_index in range(nb_segment)}
        self._sig_dtype_by_group = {}  # key = group_id
        self._sig_sample_per_chunk = {}  # key = group_id
        self._sigs_lengths = {seg_index: {}
                              for seg_index in range(nb_segment)}  # key = seg_index then group_id
        self._sigs_t_start = {seg_index: {}
                              for seg_index in range(nb_segment)}  # key = seg_index then group_id
        keep = info_channel_groups['TankEvType'] == EVTYPE_STREAM
        for group_id, info in enumerate(info_channel_groups[keep]):
            self._sig_sample_per_chunk[group_id] = info['NumPoints']
            for c in range(info['NumChan']):
                chan_index = len(signal_channels)
                chan_id = c + 1
                # If several StoreName then chan_id is not unique in TDT!!!!!

                # loop over segment to get sampling_rate/data_index/data_buffer
                sampling_rate = None
                dtype = None
                for seg_index, segment_name in enumerate(segment_names):
                    # get data index
                    tsq = self._tsq[seg_index]
                    mask = (tsq['evtype'] == EVTYPE_STREAM) & \
                           (tsq['evname'] == info['StoreName']) & \
                           (tsq['channel'] == chan_id)
                    data_index = tsq[mask].copy()
                    self._sigs_index[seg_index][chan_index] = data_index

                    size = info['NumPoints'] * data_index.size
                    if group_id not in self._sigs_lengths[seg_index]:
                        self._sigs_lengths[seg_index][group_id] = size
                    else:
                        assert self._sigs_lengths[seg_index][group_id] == size

                    # signal start time, relative to start of segment
                    t_start = data_index['timestamp'][0]
                    if group_id not in self._sigs_t_start[seg_index]:
                        self._sigs_t_start[seg_index][group_id] = t_start
                    else:
                        assert self._sigs_t_start[seg_index][group_id] == t_start

                    # sampling_rate and dtype
                    _sampling_rate = float(data_index['frequency'][0])
                    _dtype = data_formats[data_index['dataformat'][0]]
                    if sampling_rate is None:
                        sampling_rate = _sampling_rate
                        dtype = _dtype
                        if group_id not in self._sig_dtype_by_group:
                            self._sig_dtype_by_group[group_id] = np.dtype(dtype)
                        else:
                            assert self._sig_dtype_by_group[group_id] == dtype
                    else:
                        assert sampling_rate == _sampling_rate, 'sampling is changing!!!'
                        assert dtype == _dtype, 'dtype is changing!!!'

                    # data buffer: use the SEV file if it exists, otherwise the TEV
                    path = os.path.join(self.dirname, segment_name)
                    sev_filename = os.path.join(path, tankname + '_' + segment_name + '_'
                                                + info['StoreName'].decode('ascii')
                                                + '_ch' + str(chan_id) + '.sev')
                    if os.path.exists(sev_filename):
                        data = np.memmap(sev_filename, mode='r', offset=0, dtype='uint8')
                    else:
                        data = self._tev_datas[seg_index]
                    assert data is not None, 'no TEV nor SEV'
                    self._sigs_data_buf[seg_index][chan_index] = data

                chan_name = '{} {}'.format(info['StoreName'], c + 1)
                units = 'V'  # WARNING: this is not sure at all
                gain = 1.
                offset = 0.
                signal_channels.append((chan_name, chan_id, sampling_rate, dtype,
                                        units, gain, offset, group_id))

        signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)

        # unit channels EVTYPE_SNIP
        self.internal_unit_ids = {}
        self._waveforms_size = []
        self._waveforms_dtype = []
        unit_channels = []
        keep = info_channel_groups['TankEvType'] == EVTYPE_SNIP
        tsq = np.hstack(self._tsq)
        # If there is no chance the different TSQ files will have different units,
        # then we can do tsq = self._tsq[0]
        for info in info_channel_groups[keep]:
            for c in range(info['NumChan']):
                chan_id = c + 1
                mask = (tsq['evtype'] == EVTYPE_SNIP) & \
                       (tsq['evname'] == info['StoreName']) & \
                       (tsq['channel'] == chan_id)
                unit_ids = np.unique(tsq[mask]['sortcode'])
                for unit_id in unit_ids:
                    unit_index = len(unit_channels)
                    self.internal_unit_ids[unit_index] = (info['StoreName'], chan_id, unit_id)
                    unit_name = "ch{}#{}".format(chan_id, unit_id)
                    wf_units = 'V'
                    wf_gain = 1.
                    wf_offset = 0.
                    wf_left_sweep = info['NumPoints'] // 2
                    wf_sampling_rate = info['SampleFreq']
                    unit_channels.append((unit_name, '{}'.format(unit_id),
                                          wf_units, wf_gain, wf_offset,
                                          wf_left_sweep, wf_sampling_rate))
                    self._waveforms_size.append(info['NumPoints'])
                    self._waveforms_dtype.append(np.dtype(data_formats[info['DataFormat']]))
        unit_channels = np.array(unit_channels, dtype=_unit_channel_dtype)

        # event channels EVTYPE_STRON
        event_channels = []
        keep = info_channel_groups['TankEvType'] == EVTYPE_STRON
        for info in info_channel_groups[keep]:
            chan_name = info['StoreName']
            chan_id = 1
            event_channels.append((chan_name, chan_id, 'event'))
        event_channels = np.array(event_channels, dtype=_event_channel_dtype)

        # fill into header dict
        self.header = {}
        self.header['nb_block'] = 1
        self.header['nb_segment'] = [nb_segment]
        self.header['signal_channels'] = signal_channels
        self.header['unit_channels'] = unit_channels
        self.header['event_channels'] = event_channels

        # Annotations only standard ones:
        self._generate_minimal_annotations()

    def _block_count(self):
        return 1

    def _segment_count(self, block_index):
        return self.header['nb_segment'][block_index]

    def _segment_t_start(self, block_index, seg_index):
        return self._seg_t_starts[seg_index] - self._global_t_start

    def _segment_t_stop(self, block_index, seg_index):
        return self._seg_t_stops[seg_index] - self._global_t_start

    def _get_signal_size(self, block_index, seg_index, channel_indexes):
        group_id = self.header['signal_channels'][channel_indexes[0]]['group_id']
        size = self._sigs_lengths[seg_index][group_id]
        return size

    def _get_signal_t_start(self, block_index, seg_index, channel_indexes):
        group_id = self.header['signal_channels'][channel_indexes[0]]['group_id']
        return self._sigs_t_start[seg_index][group_id] - self._global_t_start

    def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, channel_indexes):
        # the check that all channel_indexes share the same group_id is done outside (BaseRawIO),
        # so the first index is representative of the others
        group_id = self.header['signal_channels'][channel_indexes[0]]['group_id']

        if i_start is None:
            i_start = 0
        if i_stop is None:
            i_stop = self._sigs_lengths[seg_index][group_id]

        dt = self._sig_dtype_by_group[group_id]
        raw_signals = np.zeros((i_stop - i_start, len(channel_indexes)), dtype=dt)

        sample_per_chunk = self._sig_sample_per_chunk[group_id]
        bl0 = i_start // sample_per_chunk
        bl1 = int(np.ceil(i_stop / sample_per_chunk))
        chunk_nb_bytes = sample_per_chunk * dt.itemsize

        for c, channel_index in enumerate(channel_indexes):
            data_index = self._sigs_index[seg_index][channel_index]
            data_buf = self._sigs_data_buf[seg_index][channel_index]

            # loop over data blocks and get chunks
            ind = 0
            for bl in range(bl0, bl1):
                ind0 = data_index[bl]['offset']
                ind1 = ind0 + chunk_nb_bytes
                data = data_buf[ind0:ind1].view(dt)

                if bl == bl1 - 1:
                    # right border
                    # be careful that bl could be both bl0 and bl1!!
                    border = data.size - (i_stop % sample_per_chunk)
                    # when i_stop falls exactly on a chunk boundary the whole
                    # chunk is kept (data[:-border] would otherwise empty it)
                    if border < data.size:
                        data = data[:-border]
                if bl == bl0:
                    # left border
                    border = i_start % sample_per_chunk
                    data = data[border:]

                raw_signals[ind:data.size + ind, c] = data
                ind += data.size

        return raw_signals

    def _get_mask(self, tsq, seg_index, evtype, evname, chan_id, unit_id, t_start, t_stop):
        """Used inside spike and event methods"""
        mask = (tsq['evtype'] == evtype) & \
               (tsq['evname'] == evname) & \
               (tsq['channel'] == chan_id)

        if unit_id is not None:
            mask &= (tsq['sortcode'] == unit_id)

        if t_start is not None:
            mask &= tsq['timestamp'] >= (t_start + self._global_t_start)

        if t_stop is not None:
            mask &= tsq['timestamp'] <= (t_stop + self._global_t_start)

        return mask

    def _spike_count(self, block_index, seg_index, unit_index):
        store_name, chan_id, unit_id = self.internal_unit_ids[unit_index]
        tsq = self._tsq[seg_index]
        mask = self._get_mask(tsq, seg_index, EVTYPE_SNIP, store_name, chan_id, unit_id, None, None)
        nb_spike = np.sum(mask)
        return nb_spike

    def _get_spike_timestamps(self, block_index, seg_index, unit_index, t_start, t_stop):
        store_name, chan_id, unit_id = self.internal_unit_ids[unit_index]
        tsq = self._tsq[seg_index]
        mask = self._get_mask(tsq, seg_index, EVTYPE_SNIP, store_name,
                              chan_id, unit_id, t_start, t_stop)
        timestamps = tsq[mask]['timestamp']
        timestamps -= self._global_t_start
        return timestamps

    def _rescale_spike_timestamp(self, spike_timestamps, dtype):
        # already in s
        spike_times = spike_timestamps.astype(dtype)
        return spike_times

    def _get_spike_raw_waveforms(self, block_index, seg_index, unit_index, t_start, t_stop):
        store_name, chan_id, unit_id = self.internal_unit_ids[unit_index]
        tsq = self._tsq[seg_index]
        mask = self._get_mask(tsq, seg_index, EVTYPE_SNIP, store_name,
                              chan_id, unit_id, t_start, t_stop)
        nb_spike = np.sum(mask)

        data = self._tev_datas[seg_index]
        dt = self._waveforms_dtype[unit_index]
        nb_sample = self._waveforms_size[unit_index]
        waveforms = np.zeros((nb_spike, 1, nb_sample), dtype=dt)
        for i, e in enumerate(tsq[mask]):
            ind0 = e['offset']
            ind1 = ind0 + nb_sample * dt.itemsize
            waveforms[i, 0, :] = data[ind0:ind1].view(dt)

        return waveforms

    def _event_count(self, block_index, seg_index, event_channel_index):
        h = self.header['event_channels'][event_channel_index]
        store_name = h['name'].encode('ascii')
        tsq = self._tsq[seg_index]
        chan_id = 0
        mask = self._get_mask(tsq, seg_index, EVTYPE_STRON, store_name, chan_id, None, None, None)
        nb_event = np.sum(mask)
        return nb_event

    def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
        h = self.header['event_channels'][event_channel_index]
        store_name = h['name'].encode('ascii')
        tsq = self._tsq[seg_index]
        chan_id = 0
        # honour the requested time slice, matching the method signature
        mask = self._get_mask(tsq, seg_index, EVTYPE_STRON, store_name,
                              chan_id, None, t_start, t_stop)
        timestamps = tsq[mask]['timestamp']
        timestamps -= self._global_t_start
        labels = tsq[mask]['offset'].astype('U')
        durations = None
        # TODO: to expose events as epochs, pair each STRON with its matching
        # EVTYPE_STROFF (258) and fill durations; the previous IO did not
        # implement this either.
        return timestamps, durations, labels

    def _rescale_event_timestamp(self, event_timestamps, dtype):
        # already in s
        ev_times = event_timestamps.astype(dtype)
        return ev_times


tbk_field_types = [
    ('StoreName', 'S4'),
    ('HeadName', 'S16'),
    ('Enabled', 'bool'),
    ('CircType', 'int'),
    ('NumChan', 'int'),
    ('StrobeMode', 'int'),
    ('TankEvType', 'int32'),
    ('NumPoints', 'int'),
    ('DataFormat', 'int'),
    ('SampleFreq', 'float64'),
]


def read_tbk(tbk_filename):
    """
    The Tbk file contains a readable text header that describes the channel groups.
    """
    with open(tbk_filename, mode='rb') as f:
        txt_header = f.read()

    infos = []
    for chan_grp_header in txt_header.split(b'[STOREHDRITEM]'):
        if chan_grp_header.startswith(b'[USERNOTEDELIMITER]'):
            break
        # parse into a dict
        info = OrderedDict()
        pattern = br'NAME=(\S+);TYPE=(\S+);VALUE=(\S+);'
        r = re.findall(pattern, chan_grp_header)
        for name, _type, value in r:
            info[name.decode('ascii')] = value
        infos.append(info)

    # and put into numpy
    info_channel_groups = np.zeros(len(infos), dtype=tbk_field_types)
    for i, info in enumerate(infos):
        for k, dt in tbk_field_types:
            v = np.dtype(dt).type(info[k])
            info_channel_groups[i][k] = v

    return info_channel_groups


tsq_dtype = [
    ('size', 'int32'),  # bytes 0-3
    ('evtype', 'int32'),  # bytes 4-7
    ('evname', 'S4'),  # bytes 8-11
    ('channel', 'uint16'),  # bytes 12-13
    ('sortcode', 'uint16'),  # bytes 14-15
    ('timestamp', 'float64'),  # bytes 16-23
    ('offset', 'int64'),  # bytes 24-31
    ('dataformat', 'int32'),  # bytes 32-35
    ('frequency', 'float32'),  # bytes 36-39
]

EVTYPE_UNKNOWN = int('00000000', 16)  # 0
EVTYPE_STRON = int('00000101', 16)  # 257
EVTYPE_STROFF = int('00000102', 16)  # 258
EVTYPE_SCALAR = int('00000201', 16)  # 513
EVTYPE_STREAM = int('00008101', 16)  # 33025
EVTYPE_SNIP = int('00008201', 16)  # 33281
EVTYPE_MARK = int('00008801', 16)  # 34817

EVTYPE_HASDATA = int('00008000', 16)  # 32768
EVTYPE_UCF = int('00000010', 16)  # 16
EVTYPE_PHANTOM = int('00000020', 16)  # 32
EVTYPE_MASK = int('0000FF0F', 16)  # 65295
EVTYPE_INVALID_MASK = int('FFFF0000', 16)  # 4294901760

EVMARK_STARTBLOCK = int('0001', 16)  # 1
EVMARK_STOPBLOCK = int('0002', 16)  # 2

data_formats = {
    0: 'float32',
    1: 'int32',
    2: 'int16',
    3: 'int8',
    4: 'float64',
}


def is_tdtblock(blockpath):
    """Is the path a TDT block (= neo.Segment)?"""
    file_ext = list()
    if os.path.isdir(blockpath):
        # for every file, get extension, convert to lowercase and append
        for file in os.listdir(blockpath):
            file_ext.append(os.path.splitext(file)[1].lower())

    file_ext = set(file_ext)
    tdt_ext = {'.tbk', '.tdx', '.tev', '.tsq'}
    if file_ext >= tdt_ext:  # if containing all the necessary files
        return True
    else:
        return False
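# Typical use of TdtRawIO through the generic rawio interface; a minimal
# sketch assuming 'my_tank' is a TDT tank directory on disk (placeholder path).
if __name__ == '__main__':
    reader = TdtRawIO(dirname='my_tank')
    reader.parse_header()

    # One neo.Segment per TDT block found inside the tank.
    print(reader.segment_count(block_index=0))

    # First 1000 raw samples of the first signal channel, first segment.
    raw = reader.get_analogsignal_chunk(block_index=0, seg_index=0,
                                        i_start=0, i_stop=1000,
                                        channel_indexes=[0])
    print(raw.shape)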
import io import json import unittest import mock def _make_credentials(): import google.auth.credentials return mock.Mock(spec=google.auth.credentials.Credentials) class Test_ClientFactoryMixin(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.client import _ClientFactoryMixin return _ClientFactoryMixin def test_virtual(self): klass = self._get_target_class() self.assertFalse('__init__' in klass.__dict__) class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.client import Client return Client def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_unpickleable(self): import pickle CREDENTIALS = _make_credentials() HTTP = object() client_obj = self._make_one(credentials=CREDENTIALS, _http=HTTP) with self.assertRaises(pickle.PicklingError): pickle.dumps(client_obj) def test_constructor_defaults(self): credentials = _make_credentials() patch = mock.patch( 'google.auth.default', return_value=(credentials, None)) with patch as default: client_obj = self._make_one() self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) default.assert_called_once_with() def test_constructor_explicit(self): credentials = _make_credentials() http = mock.sentinel.http client_obj = self._make_one(credentials=credentials, _http=http) self.assertIs(client_obj._credentials, credentials) self.assertIs(client_obj._http_internal, http) def test_constructor_bad_credentials(self): credentials = mock.sentinel.credentials with self.assertRaises(ValueError): self._make_one(credentials=credentials) def test_from_service_account_json(self): from google.cloud import _helpers klass = self._get_target_class() # Mock both the file opening and the credentials constructor. info = {'dummy': 'value', 'valid': 'json'} json_fi = io.StringIO(_helpers._bytes_to_unicode(json.dumps(info))) file_open_patch = mock.patch( 'io.open', return_value=json_fi) constructor_patch = mock.patch( 'google.oauth2.service_account.Credentials.' 'from_service_account_info', return_value=_make_credentials()) with file_open_patch as file_open: with constructor_patch as constructor: client_obj = klass.from_service_account_json( mock.sentinel.filename) self.assertIs( client_obj._credentials, constructor.return_value) self.assertIsNone(client_obj._http_internal) # Check that mocks were called as expected. file_open.assert_called_once_with( mock.sentinel.filename, 'r', encoding='utf-8') constructor.assert_called_once_with(info) def test_from_service_account_json_bad_args(self): KLASS = self._get_target_class() with self.assertRaises(TypeError): KLASS.from_service_account_json( mock.sentinel.filename, credentials=mock.sentinel.credentials) def test__http_property_existing(self): credentials = _make_credentials() http = object() client = self._make_one(credentials=credentials, _http=http) self.assertIs(client._http_internal, http) self.assertIs(client._http, http) def test__http_property_new(self): credentials = _make_credentials() client = self._make_one(credentials=credentials) self.assertIsNone(client._http_internal) authorized_session_patch = mock.patch( 'google.auth.transport.requests.AuthorizedSession', return_value=mock.sentinel.http) with authorized_session_patch as AuthorizedSession: self.assertIs(client._http, mock.sentinel.http) # Check the mock. AuthorizedSession.assert_called_once_with(credentials) # Make sure the cached value is used on subsequent access. 
self.assertIs(client._http_internal, mock.sentinel.http) self.assertIs(client._http, mock.sentinel.http) self.assertEqual(AuthorizedSession.call_count, 1) class TestClientWithProject(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.client import ClientWithProject return ClientWithProject def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor_defaults(self): credentials = _make_credentials() patch1 = mock.patch( 'google.auth.default', return_value=(credentials, None)) project = 'prahj-ekt' patch2 = mock.patch( 'google.cloud.client._determine_default_project', return_value=project) with patch1 as default: with patch2 as _determine_default_project: client_obj = self._make_one() self.assertEqual(client_obj.project, project) self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) default.assert_called_once_with() _determine_default_project.assert_called_once_with(None) def test_constructor_missing_project(self): from google.cloud._testing import _Monkey from google.cloud import client FUNC_CALLS = [] def mock_determine_proj(project): FUNC_CALLS.append((project, '_determine_default_project')) return None with _Monkey(client, _determine_default_project=mock_determine_proj): self.assertRaises(EnvironmentError, self._make_one) self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) def test_constructor_w_invalid_project(self): CREDENTIALS = _make_credentials() HTTP = object() with self.assertRaises(ValueError): self._make_one(project=object(), credentials=CREDENTIALS, _http=HTTP) def _explicit_ctor_helper(self, project): import six CREDENTIALS = _make_credentials() HTTP = object() client_obj = self._make_one(project=project, credentials=CREDENTIALS, _http=HTTP) if isinstance(project, six.binary_type): self.assertEqual(client_obj.project, project.decode('utf-8')) else: self.assertEqual(client_obj.project, project) self.assertIs(client_obj._credentials, CREDENTIALS) self.assertIs(client_obj._http_internal, HTTP) def test_constructor_explicit_bytes(self): PROJECT = b'PROJECT' self._explicit_ctor_helper(PROJECT) def test_constructor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) def _from_service_account_json_helper(self, project=None): from google.cloud import _helpers klass = self._get_target_class() info = {'dummy': 'value', 'valid': 'json'} if project is None: expected_project = 'eye-d-of-project' else: expected_project = project info['project_id'] = expected_project # Mock both the file opening and the credentials constructor. json_fi = io.StringIO(_helpers._bytes_to_unicode(json.dumps(info))) file_open_patch = mock.patch( 'io.open', return_value=json_fi) constructor_patch = mock.patch( 'google.oauth2.service_account.Credentials.' 'from_service_account_info', return_value=_make_credentials()) with file_open_patch as file_open: with constructor_patch as constructor: kwargs = {} if project is not None: kwargs['project'] = project client_obj = klass.from_service_account_json( mock.sentinel.filename, **kwargs) self.assertIs( client_obj._credentials, constructor.return_value) self.assertIsNone(client_obj._http_internal) self.assertEqual(client_obj.project, expected_project) # Check that mocks were called as expected. 
file_open.assert_called_once_with( mock.sentinel.filename, 'r', encoding='utf-8') constructor.assert_called_once_with(info) def test_from_service_account_json(self): self._from_service_account_json_helper() def test_from_service_account_json_project_set(self): self._from_service_account_json_helper(project='prah-jekt')
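# From the caller's side, the behaviour these tests pin down; a sketch that
# assumes a service-account key file at a placeholder path.
if __name__ == '__main__':
    from google.cloud.client import ClientWithProject

    # Project is read from the key file's project_id field...
    client = ClientWithProject.from_service_account_json('/path/to/key.json')
    print(client.project)

    # ...unless overridden explicitly.
    client = ClientWithProject.from_service_account_json(
        '/path/to/key.json', project='explicit-project')
    print(client.project)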
import unittest

from test_helpers import TestHelpers

helpers = TestHelpers()
helpers.add_relative_path()

from lib.protomemes import get_protomemes, build_corpus, get_protomemes_values_by_type, get_protomemes_values_by_type_as_array

coll = "week1"


class TestProtomemes(unittest.TestCase):

    def setUp(self):
        print "Testing protomemes"
        self.count = 1

    def test_get_protomemes(self):
        proto = get_protomemes("hashtags", self.count)
        self.assertEqual(len(proto), self.count)

    def test_build_corpus_and_store_to_db(self):
        proto = build_corpus("urls", "week1", 1, "test_urls")
        self.assertIsNone(proto)

    def test_build_corpus_inline(self):
        proto = build_corpus("urls", "week1", 1, None)
        self.assertIsNotNone(proto)

    def test_corpus_should_have_tweets(self):
        proto = build_corpus("urls", "week1", 100, None)
        self.assertTrue(proto[0]["value"]["tweets"])
        print proto

    def test_get_protomemes_values_by_type(self):
        data = get_protomemes_values_by_type("users", 1)
        self.assertTrue(data[0]["value"]["users"])
        # there should be no text, just users
        with self.assertRaises(KeyError):
            data[0]["value"]["txt"]

    def test_get_protomemes_values_by_type_as_array(self):
        data = get_protomemes_values_by_type_as_array("txt", 1)
        self.assertTrue(len(data) == 1)

    def test_protomemes_should_have_type(self):
        # Without the test_ prefix this method would never be collected by unittest.
        proto = get_protomemes("hashtags", 1)
        self.assertTrue(proto[0]["value"]["type"] == "hashtag")


if __name__ == '__main__':
    unittest.main()


# print "%d protomemes obtained." % len(protomemes)
# print

# db=MongoDB('weibodata').db

# from lib.mongo import MongoDB
# proto = build_corpus("hashtags",coll,2000, None)

# print len(proto)

# for p in proto:
#     print "*"*12
#     try :
#         print p["_id"]
#         print p["value"]["type"]
#     except KeyError:
#         print " error"
#     # print p["_id"]

# # test creation
# protomemes.build_corpus("hashtags", week1, 1000 ,"test_hashtags")
# protomemes.build_corpus("mentions", week1, 1000 ,"test_mentions")
# protomemes.build_corpus("urls", week1, 1000 ,"test_urls")

# # test acquisition
# pms=protomemes.get_protomemes(100)
# print len(pms)
# for p in pms:
#     print p

# h_db=db["test_hashtags"]
# m_db=db["test_mentions"]
# u_db=db["test_urls"]
#
# proto_count=h_db.count()

# print "Total in the db :"
# print " %d hashtags"%h_db.count()
# print " %d mentions"%m_db.count()
# print " %d urls"%u_db.count()
# print 10*"-"

# pm_count=1000
# c=int(round(pm_count/3))

# h=list(h_db.find().limit(c))
# m=list(m_db.find().limit(c))
# u=list(u_db.find().limit(c))

# print "%d protomemes in this set"% (len(h)+len(m)+len(u)),
# print "(%d required) " % pm_count
# print " %d hashtags" % len(h)
# print " %d mentions" % len(m)
# print " %d urls" % len(u)
# print
# print 10*"-"
{ "content_hash": "daaa8266fc363236bf1538d92c9e6bfc", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 126, "avg_line_length": 27.15740740740741, "alnum_prop": 0.6225707466757586, "repo_name": "clemsos/mitras", "id": "e9349a43411319a7ad589eac0324d88e2dc9917f", "size": "2980", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_protomemes.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5267" }, { "name": "JavaScript", "bytes": "193629" }, { "name": "Python", "bytes": "361977" }, { "name": "Shell", "bytes": "25580" } ], "symlink_target": "" }
import sys


class CMEModule:

    name = 'bh_owned'
    description = "Set pwned computer as owned in Bloodhound"
    supported_protocols = ['smb']
    opsec_safe = True
    multiple_hosts = True

    def options(self, context, module_options):
        """
        URI     URI for Neo4j database (default: 127.0.0.1)
        PORT    Listening port for Neo4j database (default: 7687)
        USER    Username for Neo4j database (default: 'neo4j')
        PASS    Password for Neo4j database (default: 'neo4j')
        """
        self.neo4j_URI = "127.0.0.1"
        self.neo4j_Port = "7687"
        self.neo4j_user = "neo4j"
        self.neo4j_pass = "neo4j"

        if module_options and 'URI' in module_options:
            self.neo4j_URI = module_options['URI']
        if module_options and 'PORT' in module_options:
            self.neo4j_Port = module_options['PORT']
        if module_options and 'USER' in module_options:
            self.neo4j_user = module_options['USER']
        if module_options and 'PASS' in module_options:
            self.neo4j_pass = module_options['PASS']

    def on_admin_login(self, context, connection):
        # the driver import path moved between neo4j-driver releases
        try:
            from neo4j.v1 import GraphDatabase
        except ImportError:
            from neo4j import GraphDatabase
        from neo4j.exceptions import AuthError, ServiceUnavailable

        if context.local_auth:
            domain = connection.conn.getServerDNSDomainName()
        else:
            domain = connection.domain
        host_fqdn = (connection.hostname + "." + domain).upper()

        uri = "bolt://{}:{}".format(self.neo4j_URI, self.neo4j_Port)

        try:
            driver = GraphDatabase.driver(uri, auth=(self.neo4j_user, self.neo4j_pass), encrypted=False)
        except AuthError:
            context.log.error(
                "Provided Neo4J credentials ({}:{}) are not valid. See --options".format(self.neo4j_user, self.neo4j_pass))
            sys.exit()
        except ServiceUnavailable:
            context.log.error("Neo4J does not seem to be available on {}. See --options".format(uri))
            sys.exit()
        except Exception as e:
            context.log.error("Unexpected error with Neo4J")
            context.log.debug("Error: {}".format(str(e)))
            sys.exit()

        # flag the compromised computer node as owned in the BloodHound graph
        with driver.session() as session:
            with session.begin_transaction() as tx:
                result = tx.run(
                    "MATCH (c:Computer {{name:\"{}\"}}) SET c.owned=True RETURN c.name AS name".format(host_fqdn))
                if len(result.value()) > 0:
                    context.log.success("Node {} successfully set as owned in BloodHound".format(host_fqdn))
                else:
                    context.log.error(
                        "Node {} does not appear to be in Neo4J database. Have you imported correct data?".format(host_fqdn))
        driver.close()
{ "content_hash": "f8101549d2228f5d6287694c8c7f2a24", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 123, "avg_line_length": 39.486486486486484, "alnum_prop": 0.579397672826831, "repo_name": "byt3bl33d3r/CrackMapExec", "id": "1a78bd8cb2938966e74715fc2ec7231e0c4e48fa", "size": "3045", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cme/modules/bh_owned.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Dockerfile", "bytes": "308" }, { "name": "Makefile", "bytes": "560" }, { "name": "Nix", "bytes": "1150" }, { "name": "Python", "bytes": "1676963" } ], "symlink_target": "" }