Dataset schema (column, dtype, observed range):

  content             string    lengths 1 – 1.05M
  input_ids           list      lengths 1 – 883k
  ratio_char_token    float64   1 – 22.9
  token_count         int64     1 – 883k

Rows below repeat this column order: content, input_ids, ratio_char_token, token_count.
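The columns are related: ratio_char_token is evidently len(content) / token_count, and the IDs in input_ids are consistent with the GPT-2 BPE vocabulary (for example, 198 is "\n" and 220 is a single space). Below is a minimal sketch of how such a row could be derived, assuming the Hugging Face transformers GPT-2 tokenizer; make_row is a hypothetical helper, not part of this dataset.

from transformers import GPT2TokenizerFast

# Assumption: the dataset was tokenized with the GPT-2 BPE tokenizer.
tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")

def make_row(content: str) -> dict:
    """Build one row with the column names from the schema above (hypothetical helper)."""
    input_ids = tokenizer(content)["input_ids"]
    token_count = len(input_ids)
    return {
        "content": content,
        "input_ids": input_ids,
        "ratio_char_token": len(content) / token_count,  # characters per token
        "token_count": token_count,
    }

row = make_row("import torch\nimport numpy as np\n")
print(row["token_count"], round(row["ratio_char_token"], 6))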
import torch
import torch.nn as nn
import torchvision.models as models
import numpy as np
[ 11748, 28034, 198, 11748, 28034, 13, 20471, 355, 299, 77, 198, 11748, 28034, 10178, 13, 27530, 355, 4981, 198, 11748, 299, 32152, 355, 45941, 198, 220, 220, 220, 220 ]
3.241379
29
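As a consistency check on this first row: its 29 input_ids should decode to a 94-character string (note the trailing run of space tokens, ID 220), and 94 / 29 ≈ 3.241379 matches the ratio_char_token value shown. A quick round-trip sketch, again assuming the GPT-2 tokenizer:

from transformers import GPT2TokenizerFast

tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")

# The input_ids from the row above.
ids = [11748, 28034, 198, 11748, 28034, 13, 20471, 355, 299, 77, 198, 11748,
       28034, 10178, 13, 27530, 355, 4981, 198, 11748, 299, 32152, 355, 45941,
       198, 220, 220, 220, 220]
text = tokenizer.decode(ids)
# Expected (if these are indeed GPT-2 BPE IDs): 94 characters, 29 tokens, ratio ~3.241379
print(len(text), len(ids), len(text) / len(ids))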
"""Items model. """ # Django from django.db import models # Utilities from App.utils.models import BlackMarketModel # Models from .category import Category from .unit import Unit from .owner import Owner
[ 37811, 23022, 2746, 13, 37227, 198, 198, 2, 37770, 198, 6738, 42625, 14208, 13, 9945, 1330, 4981, 198, 198, 2, 41086, 198, 6738, 2034, 13, 26791, 13, 27530, 1330, 2619, 27470, 17633, 198, 198, 2, 32329, 198, 6738, 764, 22872, 1330, 21...
3.781818
55
import json
import time
from multiprocessing import Process

from utils.paths import PATHS
from years.AoC2021.tasks import TASKS2021

# Constants
PARALLEL_COMPUTATION = True
TASKS = {2021: TASKS2021}


def asses_task(task: type, answers: dict, year: int) -> None:
    """
    Run a task 4 times (part 1 test, part 1 task, part 2 test, part 2 task).
    Test if the answers of each run correspond to the correct answers.

    :param task: Task object able to run a task
    :param answers: The correct answers of the given task
    :param year: The year where this task was asked
    """
    t = task()
    pred = t.run_all()
    true = answers[task.__name__]
    assert pred[0][0] == true[0] or true[0] == 0, \
        f"({year}, {task.__name__}) Part 1 has failed on the test data. Expected: {true[0]}, got: {pred[0][0]}"
    assert pred[0][1] == true[1] or true[1] == 0, \
        f"({year}, {task.__name__}) Part 1 has failed on the real data. Expected: {true[1]}, got: {pred[0][1]}"
    assert pred[1][0] == true[2] or true[2] == 0, \
        f"({year}, {task.__name__}) Part 2 has failed on the test data. Expected: {true[2]}, got: {pred[1][0]}"
    assert pred[1][1] == true[3] or true[3] == 0, \
        f"({year}, {task.__name__}) Part 2 has failed on the real data. Expected: {true[3]}, got: {pred[1][1]}"


if __name__ == "__main__":
    start = time.perf_counter()
    num_tests = 0
    processes = []
    for year_num in TASKS.keys():
        # Find the answers of the current year
        with open(f"{PATHS[year_num]}\\answers.json") as f:
            year_answers = json.load(f)
        # Compute task results (unknown answers have a value of -1)
        for i, current_task in enumerate(TASKS[year_num]):
            num_tests += 1
            if PARALLEL_COMPUTATION:
                p = Process(target=asses_task, args=[current_task, year_answers, year_num])
                p.start()
                processes.append(p)
            else:
                asses_task(current_task, year_answers, year_num)
    # Wait for processes to stop and report success
    for process in processes:
        process.join()
    print(f"\n*** All {num_tests} tests completed successfully in {time.perf_counter() - start:.2f} sec***")
[ 11748, 33918, 198, 11748, 640, 198, 6738, 18540, 305, 919, 278, 1330, 10854, 198, 6738, 3384, 4487, 13, 6978, 82, 1330, 28748, 7998, 198, 6738, 812, 13, 32, 78, 34, 1238, 2481, 13, 83, 6791, 1330, 309, 1921, 27015, 1238, 2481, 198, ...
2.373941
944
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import paddle_edl.utils.master_pb2 as master_pb2
import unittest

from edl.utils.master_client import Client
from edl.utils.utils import get_file_list, get_logger

os.environ["https_proxy"] = ""
os.environ["http_proxy"] = ""

if __name__ == "__main__":
    logger = get_logger(10)
    unittest.main()
[ 2, 15069, 357, 66, 8, 12131, 350, 37382, 47, 37382, 46665, 13, 1439, 6923, 33876, 13, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845...
3.320144
278
import typing

from commercetools import schemas, types
from commercetools.services import abstract
from commercetools.typing import OptionalListStr

__all__ = ["TypeService"]
[ 11748, 19720, 198, 198, 6738, 4412, 66, 316, 10141, 1330, 3897, 5356, 11, 3858, 198, 6738, 4412, 66, 316, 10141, 13, 30416, 1330, 12531, 198, 6738, 4412, 66, 316, 10141, 13, 774, 13886, 1330, 32233, 8053, 13290, 198, 198, 834, 439, 83...
3.653061
49
import random

import cv2
import numpy as np

from augraphy.base.augmentation import Augmentation
[ 11748, 4738, 198, 198, 11748, 269, 85, 17, 198, 11748, 299, 32152, 355, 45941, 198, 198, 6738, 16339, 1470, 88, 13, 8692, 13, 559, 5154, 341, 1330, 2447, 14374, 628 ]
3.3
30
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2021, kevin-dot-g-dot-stewart-at-gmail-dot-com
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Version: 1.0.1

#### Updates:
#### 1.0.1 - added 9.0 support
#          - changed max version
#          - added clientssl "alpn" proxy support
#          - added clientssl logPublisher support
#          - added serverssl logPublisher support
#          - updated version and previousVersion keys to match target SSLO version

from __future__ import absolute_import, division, print_function
__metaclass__ = type

DOCUMENTATION = r'''
---
module: bigip_sslo_config_ssl
short_description: Manage an SSL Orchestrator SSL configuration
description:
  - Manage an SSL Orchestrator SSL configuration
version_added: "1.0.0"
options:
  name:
    description:
      - Specifies the name of the SSL configuration. Configuration auto-prepends "ssloT_" to service.
        Service name should be less than 14 characters and not contain dashes "-".
    type: str
    required: True
  clientSettings:
    description:
      - Specifies the client-side SSL settings
    suboptions:
      cipherType:
        description:
          - Defines the type of cipher used, either "string" (for cipher strings), or "group" (an existing cipher group).
        type: str
        choices:
          - string
          - group
        default: string
      cipher:
        description:
          - Defines the actual cipher string (ex. "DEFAULT"), or existing cipher group (ex. /Common/f5-default) to use.
        type: str
        default: DEFAULT
      enableTLS1_3:
        description:
          - Defines whether or not to enable client-side TLSv1.3 support. When enabled, the cipherType must be
            "group" and cipher must indicate an existing cipher group.
        type: bool
        default: False
      cert:
        description:
          - Defines the certificate applied in the client side settings. For a forward proxy this is the template
            certificate (ex. /Common/default.crt). For a reverse proxy, this is the client-facing server certificate.
        type: str
        default: /Common/default.crt
      key:
        description:
          - Defines the private key applied in the client side settings. For a forward proxy this is the template
            key (ex. /Common/default.key). For a reverse proxy, this is the client-facing server private key.
        type: str
        default: /Common/default.key
      chain:
        description:
          - Defines the certificate keychain in the client side settings.
        type: str
        default: None
      caCert:
        description:
          - Defines the CA certificate applied in the client side settings. This is the signing/forging CA
            certificate used for forward proxy TLS handling. This setting is not applicable in reverse proxy SSL.
        type: str
        default: None
      caKey:
        description:
          - Defines the CA private key applied in the client side settings. This is the signing/forging CA private
            key used for forward proxy TLS handling. This setting is not applicable in reverse proxy SSL.
        type: str
        default: None
      caChain:
        description:
          - Defines the CA certificate keychain in the client side settings. This would contain any CA subordinates
            in the trust chain between the signing CA and the explicitly-trusted root certificate. If required, it
            should contain any intermediate CA certificates, up to but not including the self-signed root CA.
        type: str
        default: None
      alpn:
        description:
          - Requires 9.0+. Enables or disables ALPN HTTP/2 full proxy in an outbound (forward proxy) topology.
        type: bool
        default: False
      logPublisher:
        description:
          - Requires 9.0+. Defines a specific log publisher to use for client-side SSL-related events.
        type: str
        default: /Common/sys-ssl-publisher
  serverSettings:
    description:
      - Specifies the server-side SSL settings
    suboptions:
      cipherType:
        description:
          - Defines the type of cipher used, either "string" (for cipher strings), or "group" (an existing cipher group).
        type: str
        choices:
          - string
          - group
        default: string
      cipher:
        description:
          - Defines the actual cipher string (ex. "DEFAULT"), or existing cipher group (ex. /Common/f5-default) to use.
        type: str
        default: DEFAULT
      enableTLS1_3:
        description:
          - Defines whether or not to enable server-side TLSv1.3 support. When enabled, the cipherType must be
            "group" and cipher must indicate an existing cipher group.
        type: bool
        default: False
      caBundle:
        description:
          - Defines the certificate authority bundle used to validate remote server certificates. This setting is
            most applicable in the forward proxy use case to validate remote (Internet) server certificates.
        type: str
        default: /Common/ca-bundle.crt
      blockExpired:
        description:
          - Defines the action to take if an expired remote server certificate is encountered. For forward proxy the
            default is to ignore expired certificates (False). For reverse proxy the default is to drop expired
            certificates (True).
        type: bool
        default: False
      blockUntrusted:
        description:
          - Defines the action to take if an untrusted remote server certificate is encountered, based on the
            defined caBundle. For forward proxy the default is to ignore untrusted certificates (False). For reverse
            proxy the default is to drop untrusted certificates (True).
        type: bool
        default: False
      ocsp:
        description:
          - Defines an OCSP configuration to use to perform certificate revocation checking against remote server
            certificates.
        type: str
        default: None
      crl:
        description:
          - Defines a CRL configuration to use to perform certificate revocation checking against remote server
            certificates.
        type: str
        default: None
      logPublisher:
        description:
          - Requires 9.0+. Defines a specific log publisher to use for server-side SSL-related events.
        type: str
        default: /Common/sys-ssl-publisher
      bypassHandshakeFailure:
        description:
          - Defines the action to take if a server side TLS handshake failure is detected. A value of False will
            cause the connection to fail. A value of True will shutdown TLS decryption and allow the connection to
            proceed un-decrypted.
        type: bool
        default: False
      bypassClientCertFailure:
        description:
          - Defines the action to take if a server side TLS handshake client certificate request is detected. A
            value of False will cause the connection to fail. A value of True will shutdown TLS decryption and allow
            the connection to proceed un-decrypted.
        type: bool
        default: False
  mode:
    description:
      - Defines how this task is handled. With the default setting of 'update', the module performs the tasks
        required to update the target resource. With the 'output' setting, the resulting JSON object blocks are
        returned without updating the target resource. This option is useful for debugging, and when subordinate
        objects (ex. SSL, services, service chains, policy, resolver) are created in the same playbook, and their
        respective output JSON referenced in a single Topology create task.
    type: str
    choices:
      - update
      - output
    default: update
  state:
    description:
      - Specifies the present/absent state required.
    type: str
    choices:
      - absent
      - present
    default: present
extends_documentation_fragment: f5networks.f5_modules.f5
author:
  - Kevin Stewart (kevin-dot-g-dot-stewart-at-gmail-dot-com)
'''

EXAMPLES = r'''
- name: Create SSLO SSL Forward Proxy Settings (simple)
  hosts: localhost
  gather_facts: False
  connection: local
  collections:
    - kevingstewart.f5_sslo_ansible
  vars:
    provider:
      server: 172.16.1.77
      user: admin
      password: admin
      validate_certs: no
      server_port: 443
  tasks:
    - name: SSLO SSL forward proxy settings
      bigip_sslo_config_ssl:
        provider: "{{ provider }}"
        name: "demo_ssl"
        clientSettings:
          caCert: "/Common/subrsa.f5labs.com"
          caKey: "/Common/subrsa.f5labs.com"
      delegate_to: localhost

- name: Create SSLO SSL Forward Proxy Settings
  hosts: localhost
  gather_facts: False
  connection: local
  collections:
    - kevingstewart.f5_sslo_ansible
  vars:
    provider:
      server: 172.16.1.77
      user: admin
      password: admin
      validate_certs: no
      server_port: 443
  tasks:
    - name: SSLO SSL settings
      bigip_sslo_config_ssl:
        provider: "{{ provider }}"
        name: "demo_ssl"
        clientSettings:
          cipherType: "group"
          cipher: "/Common/f5-default"
          enableTLS1_3: True
          cert: "/Common/default.crt"
          key: "/Common/default.key"
          caCert: "/Common/subrsa.f5labs.com"
          caKey: "/Common/subrsa.f5labs.com"
          caChain: "/Common/my-ca-chain"
          alpn: True
          logPublisher: "/Common/my-ssl-publisher"
        serverSettings:
          cipherType: "group"
          cipher: "/Common/f5-default"
          enableTLS1_3: True
          caBundle: "/Common/local-ca-bundle.crt"
          blockExpired: False
          blockUntrusted: False
          ocsp: "/Common/my-ocsp"
          crl: "/Common/my-crl"
          logPublisher: "/Common/my-ssl-publisher"
          bypassHandshakeFailure: True
          bypassClientCertFailure: True
      delegate_to: localhost

- name: Create SSLO SSL Reverse Proxy Settings (simple)
  hosts: localhost
  gather_facts: False
  connection: local
  collections:
    - kevingstewart.f5_sslo_ansible
  vars:
    provider:
      server: 172.16.1.77
      user: admin
      password: admin
      validate_certs: no
      server_port: 443
  tasks:
    - name: SSLO SSL settings
      bigip_sslo_config_ssl:
        provider: "{{ provider }}"
        name: "demo_ssl"
        clientSettings:
          cert: "/Common/myserver.f5labs.com"
          key: "/Common/myserver.f5labs.com"
      delegate_to: localhost

- name: Create SSLO SSL Reverse Proxy Settings
  hosts: localhost
  gather_facts: False
  connection: local
  collections:
    - kevingstewart.f5_sslo_ansible
  vars:
    provider:
      server: 172.16.1.77
      user: admin
      password: admin
      validate_certs: no
      server_port: 443
  tasks:
    - name: SSLO SSL settings
      bigip_sslo_config_ssl:
        provider: "{{ provider }}"
        name: "demo5"
        clientSettings:
          cipherType: "group"
          cipher: "/Common/f5-default"
          enableTLS1_3: True
          cert: "/Common/myserver.f5labs.com"
          key: "/Common/myserver.f5labs.com"
          chain: "/Common/my-ca-chain"
        serverSettings:
          cipherType: "group"
          cipher: "/Common/f5-default"
          enableTLS1_3: True
          caBundle: "/Common/local-ca-bundle.crt"
          blockExpired: False
          blockUntrusted: False
      delegate_to: localhost
'''

RETURN = r'''
name:
  description:
    - Changed name of SSL configuration.
  type: str
  sample: demo_ssl
clientSettings:
  description: client-side SSL settings
  type: complex
  contains:
    cipherType:
      description: defines "string" for cipher string, or "group" for cipher group
      type: str
      sample: string
    cipher:
      description: defines the cipher string or an existing cipher group
      type: str
      sample: DEFAULT or /Common/f5-default
    enableTLS1_3:
      description: enables or disables client-side TLSv1.3
      type: bool
      sample: True
    cert:
      description: defines the client-facing certificate. For forward proxy this is the template certificate. For
        reverse proxy this is the server certificate.
      type: str
      sample: /Common/default.crt
    key:
      description: defines the client-facing private key. For forward proxy this is the template key. For reverse
        proxy this is the server private key.
      type: str
      sample: /Common/default.key
    chain:
      description: defines the client-facing CA certificate chain. For reverse proxy this is the server
        certificate's CA chain.
      type: str
      sample: /Common/local-ca-chain.crt
    caCert:
      description: defines the issuing CA certificate for a forward proxy.
      type: str
      sample: /Common/default.crt
    caKey:
      description: defines the issuing CA private key for a forward proxy.
      type: str
      sample: /Common/default.key
    caChain:
      description: defines the CA certificate chain for the issuing CA in a forward proxy.
      type: str
      sample: /Common/local-ca-chain.crt
    alpn:
      description: requires 9.0+. Enables or disables ALPN HTTP/2 full proxy through a forward proxy topology.
      type: bool
      sample: True
    logPublisher:
      description: requires 9.0+. Defines a specific log publisher for client-side SSL-related events.
      type: str
      sample: /Common/sys-ssl-publisher
serverSettings:
  description: server-side SSL settings
  type: complex
  contains:
    cipherType:
      description: defines "string" for cipher string, or "group" for cipher group
      type: str
      sample: string
    cipher:
      description: defines the cipher string or an existing cipher group
      type: str
      sample: DEFAULT or /Common/f5-default
    enableTLS1_3:
      description: enables or disables server-side TLSv1.3
      type: bool
      sample: True
    caBundle:
      description: defines a CA bundle used to validate remote server certificates.
      type: str
      sample: /Common/ca-bundle.crt
    blockExpired:
      description: defines the action to take on receiving an expired remote server certificate, True = block,
        False = ignore.
      type: bool
      sample: True
    blockUntrusted:
      description: defines the action to take on receiving an untrusted remote server certificate, True = block,
        False = ignore.
      type: bool
      sample: True
    ocsp:
      description: defines an existing OCSP configuration to validate revocation of remote server certificates.
      type: str
      sample: /Common/my-ocsp
    crl:
      description: defines an existing CRL configuration to validate revocation of remote server certificates.
      type: str
      sample: /Common/my-crl
    logPublisher:
      description: requires 9.0+. Defines a specific log publisher for server-side SSL-related events.
      type: str
      sample: /Common/sys-ssl-publisher
    bypassHandshakeFailure:
      description:
        - Defines the action to take on receiving a TLS handshake alert from a server. True = bypass decryption and
          allow through, False = block
      type: bool
      sample: True
    bypassClientCertFailure:
      description:
        - Defines the action to take on receiving a TLS handshake client certificate request from a server. True =
          bypass decryption and allow through, False = block
      type: bool
      sample: True
mode:
  description: describes the action to take on the task.
  type: str
  sample: update
state:
  description:
    - Changed state.
  type: str
  sample: present
'''

from datetime import datetime
from ansible.module_utils.basic import (
    AnsibleModule, env_fallback
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.bigip import (
    F5RestClient
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.common import (
    F5ModuleError, AnsibleF5Parameters, transform_name, f5_argument_spec
)
from ansible_collections.f5networks.f5_modules.plugins.module_utils.icontrol import (
    tmos_version
)
from ipaddress import (
    ip_network, ip_interface
)
import json, time, re

global print_output
global json_template
global obj_attempts
global min_version
global max_version

print_output = []

## define object creation attempts count (with 1 second pause between each attempt)
obj_attempts = 20

## define minimum supported tmos version - min(SSLO 5.x)
min_version = 5.0

## define maximum supported tmos version - max(SSLO 9.x)
max_version = 9.0

json_template = {
    "name": "f5-ssl-orchestrator-gc",
    "inputProperties": [
        {
            "id": "f5-ssl-orchestrator-operation-context",
            "type": "JSON",
            "value": {
                "operationType": "CREATE",
                "deploymentType": "SSL_SETTINGS",
                "deploymentName": "TEMPLATE_NAME",
                "deploymentReference": "",
                "partition": "Common",
                "strictness": False
            }
        },
        {
            "id": "f5-ssl-orchestrator-tls",
            "type": "JSON",
            "value": {
                "sslSettingsReference": "",
                "sslSettingsName": "",
                "description": "",
                "previousVersion": "7.2",
                "version": "7.2",
                "generalSettings": {
                    "isForwardProxy": True,
                    "bypassHandshakeAlert": False,
                    "bypassClientCertFailure": False
                },
                "clientSettings": {
                    "ciphers": {
                        "isCipherString": True,
                        "cipherString": "DEFAULT",
                        "cipherGroup": "/Common/f5-default"
                    },
                    "certKeyChain": [
                        {
                            "cert": "/Common/default.crt",
                            "key": "/Common/default.key",
                            "chain": "",
                            "passphrase": "",
                            "name": "CERT_KEY_CHAIN_0"
                        }
                    ],
                    "caCertKeyChain": [],
                    "forwardByPass": True,
                    "enabledSSLProcessingOptions": []
                },
                "serverSettings": {
                    "ciphers": {
                        "isCipherString": True,
                        "cipherString": "DEFAULT",
                        "cipherGroup": "/Common/f5-default"
                    },
                    "caBundle": "/Common/ca-bundle.crt",
                    "expiredCertificates": False,
                    "untrustedCertificates": False,
                    "ocsp": "",
                    "crl": "",
                    "enabledSSLProcessingOptions": []
                },
                "name": "TEMPLATE_NAME",
                "advancedMode": "off",
                "strictness": False,
                "partition": "Common"
            }
        },
        {
            "id": "f5-ssl-orchestrator-topology",
            "type": "JSON"
        }
    ],
    "configurationProcessorReference": {
        "link": "https://localhost/mgmt/shared/iapp/processors/f5-iappslx-ssl-orchestrator-gc"
    },
    "configProcessorTimeoutSeconds": 120,
    "statsProcessorTimeoutSeconds": 60,
    "configProcessorAffinity": {
        "processorPolicy": "LOCAL",
        "affinityProcessorReference": {
            "link": "https://localhost/mgmt/shared/iapp/affinity/local"
        }
    },
    "state": "BINDING",
    "presentationHtmlReference": {
        "link": "https://localhost/iapps/f5-iappslx-ssl-orchestrator/sgc/sgcIndex.html"
    },
    "operation": "CREATE"
}

json_ca_cert_template = {
    "cert": "/Common/default.crt",
    "key": "/Common/default.key",
    "chain": "",
    "isCa": True,
    "usage": "CA",
    "port": "0",
    "passphrase": "",
    "certKeyChainMismatch": False,
    "isDuplicateVal": False,
    "name": "CA_CERT_KEY_CHAIN_0"
}

json_enable_tls13 = {
    "name": "TLSv1.3",
    "value": "TLSv1.3"
}


def main():
    ## start here

    ## define global print_output
    global print_output
    print_output = []

    ## define argumentspec
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
    )

    ## send to exec_module, result contains output of tasks
    try:
        mm = ModuleManager(module=module)
        results = mm.exec_module()
        result = dict(
            print_output=print_output,
            **results
        )
        module.exit_json(**result)
    except F5ModuleError as ex:
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
[ 2, 48443, 14629, 14, 8800, 14, 29412, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 220, 198, 2, 15069, 25, 357, 66, 8, 33448, 11, 885, 7114, 12, 26518, 12, 70, 12, 26518, 12, 301, 413, 433, 12, 265, ...
2.470163
8,295
""" This module defines pipelines - sets of tasks in nemo that we sometimes want to do on different inputs (e.g., real data or simulated data). """ import os import sys import glob import shutil import time import astropy.io.fits as pyfits import astropy.table as atpy from astLib import astWCS import numpy as np from scipy import ndimage, interpolate import copy from pixell import enmap import nemo from . import startUp from . import filters from . import photometry from . import catalogs from . import maps from . import signals from . import completeness from . import MockSurvey import nemoCython #------------------------------------------------------------------------------------------------------------ def filterMapsAndMakeCatalogs(config, rootOutDir = None, copyFilters = False, measureFluxes = True, invertMap = False, verbose = True, useCachedMaps = True): """Runs the map filtering and catalog construction steps according to the given configuration. Args: config (:obj: 'startup.NemoConfig'): Nemo configuration object. rootOutDir (str): If None, use the default given by config. Otherwise, use this to override where the output filtered maps and catalogs are written. copyFilters (bool, optional): If True, and rootOutDir is given (not None), then filters will be copied from the default output location (from a pre-existing nemo run) to the appropriate directory under rootOutDir. This is used by, e.g., contamination tests based on sky sims, where the same kernels as used on the real data are applied to simulated maps. If rootOutDir = None, setting copyKernels = True has no effect. measureFluxes (bool, optional): If True, measure fluxes. If False, just extract S/N values for detected objects. invertMap (bool, optional): If True, multiply all maps by -1; needed by :meth:maps.estimateContaminationFromInvertedMaps). Returns: Optimal catalog (keeps the highest S/N detection when filtering at multiple scales). Note: See bin/nemo for how this pipeline is applied to real data, and maps.sourceInjectionTest for how this is applied to source-free sims that are generated on the fly. """ if config.parDict['twoPass'] == False: catalog=_filterMapsAndMakeCatalogs(config, rootOutDir = rootOutDir, copyFilters = copyFilters, measureFluxes = measureFluxes, invertMap = invertMap, verbose = verbose, useCachedMaps = useCachedMaps) else: # Two pass pipeline # On 1st pass, find sources (and maybe clusters) with canned settings, masking nothing. # On 2nd pass, the 1st pass catalog will be used to mask or subtract sources from maps used for # noise estimation only. 
# No point doing this if we're not using the map itself for the noise term in the filter for f in config.parDict['mapFilters']: for key in f.keys(): if key == 'noiseParams' and f['noiseParams']['method'] != 'dataMap': raise Exception("There is no point running if filter noise method != 'dataMap'.") # Pass 1 - find point sources, save nothing # NOTE: We need to do this for each map in the list, if we have a multi-frequency filter pass1PtSrcSettings={'label': "Beam", 'class': "BeamMatchedFilter", 'params': {'noiseParams': {'method': "model", 'noiseGridArcmin': 40.0, 'numNoiseBins': 2}, 'saveFilteredMaps': False, 'outputUnits': 'uK', 'edgeTrimArcmin': 0.0}} config.parDict['mapFilters']=[pass1PtSrcSettings] config.parDict['photFilter']=None config.parDict['maskPointSourcesFromCatalog']=[] # This is only applied on the 2nd pass config.parDict['measureShapes']=True # Double-lobed extended source at f090 causes havoc in one tile orig_unfilteredMapsDictList=list(config.unfilteredMapsDictList) config.parDict['forcedPhotometryCatalog']=None # If in this mode, only wanted on 2nd pass pass1CatalogsList=[] surveyMasksList=[] # ok, these should all be the same, otherwise we have problems... for mapDict in orig_unfilteredMapsDictList: # We use whole tile area (i.e., don't trim overlaps) so that we get everything if under MPI # Otherwise, powerful sources in overlap regions mess things up under MPI # Serial mode doesn't have this issue as it can see the whole catalog over all tiles # But since we now use full area, we may double subtract ovelap sources when in serial mode # So the removeDuplicates call fixes that, and doesn't impact anything else here surveyMasksList.append(mapDict['surveyMask']) mapDict['surveyMask']=None config.unfilteredMapsDictList=[mapDict] catalog=_filterMapsAndMakeCatalogs(config, verbose = False, writeAreaMasks = False) if len(catalog) > 0 : catalog, numDuplicatesFound, names=catalogs.removeDuplicates(catalog) pass1CatalogsList.append(catalog) # Pass 2 - subtract point sources in the maps used for noise term in filter only # To avoid ringing in the pass 2, we siphon off the super bright things found in pass 1 # We subtract those from the maps used in pass 2 - we then need to add them back at the end config.restoreConfig() config.parDict['measureShapes']=True # We'll keep this for pass 2 as well siphonSNR=50 for mapDict, catalog, surveyMask in zip(orig_unfilteredMapsDictList, pass1CatalogsList, surveyMasksList): #catalogs.catalog2DS9(catalog[catalog['SNR'] > siphonSNR], config.diagnosticsDir+os.path.sep+"pass1_highSNR_siphoned.reg") mapDict['noiseMaskCatalog']=catalog[catalog['SNR'] < siphonSNR] mapDict['subtractPointSourcesFromCatalog']=[catalog[catalog['SNR'] > siphonSNR]] mapDict['maskSubtractedPointSources']=True mapDict['surveyMask']=surveyMask config.unfilteredMapsDictList=orig_unfilteredMapsDictList catalog=_filterMapsAndMakeCatalogs(config, verbose = False) # Merge back in the bright sources that were subtracted in pass 1 # (but we don't do that in forced photometry mode) mergeList=[catalog] if config.parDict['forcedPhotometryCatalog'] is None: for pass1Catalog in pass1CatalogsList: mergeList.append(pass1Catalog[pass1Catalog['SNR'] > siphonSNR]) catalog=atpy.vstack(mergeList) return catalog #------------------------------------------------------------------------------------------------------------ def _filterMapsAndMakeCatalogs(config, rootOutDir = None, copyFilters = False, measureFluxes = True, invertMap = False, verbose = True, useCachedMaps = True, writeAreaMasks = 
True): """Runs the map filtering and catalog construction steps according to the given configuration. Args: config (:obj: 'startup.NemoConfig'): Nemo configuration object. rootOutDir (str): If None, use the default given by config. Otherwise, use this to override where the output filtered maps and catalogs are written. copyFilters (bool, optional): If True, and rootOutDir is given (not None), then filters will be copied from the default output location (from a pre-existing nemo run) to the appropriate directory under rootOutDir. This is used by, e.g., contamination tests based on sky sims, where the same kernels as used on the real data are applied to simulated maps. If rootOutDir = None, setting copyKernels = True has no effect. measureFluxes (bool, optional): If True, measure fluxes. If False, just extract S/N values for detected objects. invertMap (bool, optional): If True, multiply all maps by -1; needed by :meth:maps.estimateContaminationFromInvertedMaps). Returns: Optimal catalog (keeps the highest S/N detection when filtering at multiple scales). Note: See bin/nemo for how this pipeline is applied to real data, and maps.sourceInjectionTest for how this is applied to source-free sims that are generated on the fly. """ # If running on sims (source-free or with injected sources), this ensures we use the same kernels for # filtering the sim maps as was used on the real data, by copying kernels to the sims dir. The kernels # will then be loaded automatically when filterMaps is called. Yes, this is a bit clunky... if rootOutDir is not None: filteredMapsDir=rootOutDir+os.path.sep+"filteredMaps" diagnosticsDir=rootOutDir+os.path.sep+"diagnostics" dirList=[rootOutDir, filteredMapsDir, diagnosticsDir] for d in dirList: os.makedirs(d, exist_ok = True) if copyFilters == True: for tileName in config.tileNames: fileNames=glob.glob(config.diagnosticsDir+os.path.sep+tileName+os.path.sep+"filter*#%s*.fits" % (tileName)) if len(fileNames) == 0: raise Exception("Could not find pre-computed filters to copy - you need to add 'saveFilter: True' to the filter params in the config file (this is essential for doing source injection sims quickly).") kernelCopyDestDir=diagnosticsDir+os.path.sep+tileName os.makedirs(kernelCopyDestDir, exist_ok = True) for f in fileNames: dest=kernelCopyDestDir+os.path.sep+os.path.split(f)[-1] if os.path.exists(dest) == False: shutil.copyfile(f, dest) print("... copied filter %s to %s ..." % (f, dest)) else: rootOutDir=config.rootOutDir filteredMapsDir=config.filteredMapsDir diagnosticsDir=config.diagnosticsDir # We re-sort the filters list here - in case we have photFilter defined photFilter=config.parDict['photFilter'] filtersList=[] if photFilter is not None: for f in config.parDict['mapFilters']: if f['label'] == photFilter: filtersList.append(f) for f in config.parDict['mapFilters']: if photFilter is not None: if f['label'] == photFilter: continue filtersList.append(f) if photFilter is not None: assert(filtersList[0]['label'] == photFilter) photFilteredMapDict=None # Make filtered maps for each filter and tile catalogDict={} for tileName in config.tileNames: # Now have per-tile directories (friendlier for Lustre) tileFilteredMapsDir=filteredMapsDir+os.path.sep+tileName tileDiagnosticsDir=diagnosticsDir+os.path.sep+tileName for d in [tileFilteredMapsDir, tileDiagnosticsDir]: os.makedirs(d, exist_ok = True) if verbose == True: print(">>> Making filtered maps - tileName = %s ..." % (tileName)) # We could load the unfiltered map only once here? 
# We could also cache 'dataMap' noise as it will always be the same for f in filtersList: label=f['label']+"#"+tileName catalogDict[label]={} if 'saveDS9Regions' in f['params'] and f['params']['saveDS9Regions'] == True: DS9RegionsPath=config.filteredMapsDir+os.path.sep+tileName+os.path.sep+"%s_filteredMap.reg" % (label) else: DS9RegionsPath=None filteredMapDict=filters.filterMaps(config.unfilteredMapsDictList, f, tileName, filteredMapsDir = tileFilteredMapsDir, diagnosticsDir = tileDiagnosticsDir, selFnDir = config.selFnDir, verbose = True, undoPixelWindow = True, useCachedMaps = useCachedMaps) if f['label'] == photFilter: photFilteredMapDict={} photFilteredMapDict['SNMap']=filteredMapDict['SNMap'] photFilteredMapDict['data']=filteredMapDict['data'] # Forced photometry on user-supplied list of objects, or detect sources if 'forcedPhotometryCatalog' in config.parDict.keys() and config.parDict['forcedPhotometryCatalog'] is not None: catalog=photometry.makeForcedPhotometryCatalog(filteredMapDict, config.parDict['forcedPhotometryCatalog'], useInterpolator = config.parDict['useInterpolator'], DS9RegionsPath = DS9RegionsPath) else: # Normal mode catalog=photometry.findObjects(filteredMapDict, threshold = config.parDict['thresholdSigma'], minObjPix = config.parDict['minObjPix'], findCenterOfMass = config.parDict['findCenterOfMass'], removeRings = config.parDict['removeRings'], ringThresholdSigma = config.parDict['ringThresholdSigma'], rejectBorder = config.parDict['rejectBorder'], objIdent = config.parDict['objIdent'], longNames = config.parDict['longNames'], useInterpolator = config.parDict['useInterpolator'], measureShapes = config.parDict['measureShapes'], invertMap = invertMap, DS9RegionsPath = DS9RegionsPath) # We write area mask here, because it gets modified by findObjects if removing rings # NOTE: condition added to stop writing tile maps again when running nemoMass in forced photometry mode maskFileName=config.selFnDir+os.path.sep+"areaMask#%s.fits" % (tileName) surveyMask=np.array(filteredMapDict['surveyMask'], dtype = int) if writeAreaMasks == True: if os.path.exists(maskFileName) == False and os.path.exists(config.selFnDir+os.path.sep+"areaMask.fits") == False: maps.saveFITS(maskFileName, surveyMask, filteredMapDict['wcs'], compressed = True, compressionType = 'PLIO_1') if measureFluxes == True: photometry.measureFluxes(catalog, filteredMapDict, config.diagnosticsDir, photFilteredMapDict = photFilteredMapDict, useInterpolator = config.parDict['useInterpolator']) else: # Get S/N only - if the reference (fixed) filter scale has been given # This is (probably) only used by maps.estimateContaminationFromInvertedMaps if photFilter is not None: photometry.getSNRValues(catalog, photFilteredMapDict['SNMap'], filteredMapDict['wcs'], prefix = 'fixed_', useInterpolator = config.parDict['useInterpolator'], invertMap = invertMap) catalogDict[label]['catalog']=catalog # Merged/optimal catalogs optimalCatalog=catalogs.makeOptimalCatalog(catalogDict, constraintsList = config.parDict['catalogCuts']) return optimalCatalog #------------------------------------------------------------------------------------------------------------ def makeSelFnCollection(config, mockSurvey): """Makes a collection of selection function dictionaries (one per footprint specified in selFnFootprints in the config file, plus the full survey mask), that contain information on noise levels, area covered, and completeness. Returns a dictionary (keys: 'full' - corresponding to whole survey, plus other keys named by footprint). 
""" # Q varies across tiles Q=signals.QFit(config) # We only care about the filter used for fixed_ columns photFilterLabel=config.parDict['photFilter'] for filterDict in config.parDict['mapFilters']: if filterDict['label'] == photFilterLabel: break # We'll only calculate completeness for this given selection SNRCut=config.parDict['selFnOptions']['fixedSNRCut'] # Handle any missing options for calcCompleteness (these aren't used by the default fast method anyway) if 'numDraws' not in config.parDict['selFnOptions'].keys(): config.parDict['selFnOptions']['numDraws']=2000000 if 'numIterations' not in config.parDict['selFnOptions'].keys(): config.parDict['selFnOptions']['numIterations']=100 # We can calculate stats in different extra areas (e.g., inside optical survey footprints) footprintsList=[] if 'selFnFootprints' in config.parDict.keys(): footprintsList=footprintsList+config.parDict['selFnFootprints'] # Run the selection function calculation on each tile in turn selFnCollection={'full': []} for footprintDict in footprintsList: if footprintDict['label'] not in selFnCollection.keys(): selFnCollection[footprintDict['label']]=[] for tileName in config.tileNames: RMSTab=completeness.getRMSTab(tileName, photFilterLabel, config.selFnDir) compMz=completeness.calcCompleteness(RMSTab, SNRCut, tileName, mockSurvey, config.parDict['massOptions'], Q, numDraws = config.parDict['selFnOptions']['numDraws'], numIterations = config.parDict['selFnOptions']['numIterations'], method = config.parDict['selFnOptions']['method']) selFnDict={'tileName': tileName, 'RMSTab': RMSTab, 'tileAreaDeg2': RMSTab['areaDeg2'].sum(), 'compMz': compMz} selFnCollection['full'].append(selFnDict) # Generate footprint intersection masks (e.g., with HSC) and RMS tables, which are cached # May as well do this bit here (in parallel) and assemble output later for footprintDict in footprintsList: completeness.makeIntersectionMask(tileName, config.selFnDir, footprintDict['label'], masksList = footprintDict['maskList']) tileAreaDeg2=completeness.getTileTotalAreaDeg2(tileName, config.selFnDir, footprintLabel = footprintDict['label']) if tileAreaDeg2 > 0: RMSTab=completeness.getRMSTab(tileName, photFilterLabel, config.selFnDir, footprintLabel = footprintDict['label']) compMz=completeness.calcCompleteness(RMSTab, SNRCut, tileName, mockSurvey, config.parDict['massOptions'], Q, numDraws = config.parDict['selFnOptions']['numDraws'], numIterations = config.parDict['selFnOptions']['numIterations'], method = config.parDict['selFnOptions']['method']) selFnDict={'tileName': tileName, 'RMSTab': RMSTab, 'tileAreaDeg2': RMSTab['areaDeg2'].sum(), 'compMz': compMz} selFnCollection[footprintDict['label']].append(selFnDict) # Optional mass-limit maps if 'massLimitMaps' in list(config.parDict['selFnOptions'].keys()): for massLimitDict in config.parDict['selFnOptions']['massLimitMaps']: completeness.makeMassLimitMap(SNRCut, massLimitDict['z'], tileName, photFilterLabel, mockSurvey, config.parDict['massOptions'], Q, config.diagnosticsDir, config.selFnDir) return selFnCollection #------------------------------------------------------------------------------------------------------------ def makeMockClusterCatalog(config, numMocksToMake = 1, combineMocks = False, writeCatalogs = True, writeInfo = True, verbose = True): """Generate a mock cluster catalog using the given nemo config. 
Returns: List of catalogs (each is an astropy Table object) """ # Having changed nemoMock interface, we may need to make output dir if os.path.exists(config.mocksDir) == False: os.makedirs(config.mocksDir, exist_ok = True) # Noise sources in mocks if 'applyPoissonScatter' in config.parDict.keys(): applyPoissonScatter=config.parDict['applyPoissonScatter'] else: applyPoissonScatter=True if 'applyIntrinsicScatter' in config.parDict.keys(): applyIntrinsicScatter=config.parDict['applyIntrinsicScatter'] else: applyIntrinsicScatter=True if 'applyNoiseScatter' in config.parDict.keys(): applyNoiseScatter=config.parDict['applyNoiseScatter'] else: applyNoiseScatter=True if verbose: print(">>> Mock noise sources (Poisson, intrinsic, measurement noise) = (%s, %s, %s) ..." % (applyPoissonScatter, applyIntrinsicScatter, applyNoiseScatter)) # Q varies across tiles Q=signals.QFit(config) # We only care about the filter used for fixed_ columns photFilterLabel=config.parDict['photFilter'] for filterDict in config.parDict['mapFilters']: if filterDict['label'] == photFilterLabel: break # The same as was used for detecting objects thresholdSigma=config.parDict['thresholdSigma'] # We need an assumed scaling relation for mock observations scalingRelationDict=config.parDict['massOptions'] if verbose: print(">>> Setting up mock survey ...") # NOTE: Sanity check is possible here: area in RMSTab should equal area from areaMask.fits # If it isn't, there is a problem... # Also, we're skipping the individual tile-loading routines here for speed checkAreaConsistency=False wcsDict={} RMSMap=pyfits.open(config.selFnDir+os.path.sep+"RMSMap_%s.fits" % (photFilterLabel)) RMSTab=atpy.Table().read(config.selFnDir+os.path.sep+"RMSTab.fits") count=0 totalAreaDeg2=0 RMSMapDict={} areaDeg2Dict={} if checkAreaConsistency == True: areaMap=pyfits.open(config.selFnDir+os.path.sep+"areaMask.fits") t0=time.time() for tileName in config.tileNames: count=count+1 if tileName == 'PRIMARY': if tileName in RMSMap: extName=tileName data=RMSMap[extName].data else: data=None if data is None: for extName in RMSMap: data=RMSMap[extName].data if data is not None: break RMSMapDict[tileName]=RMSMap[extName].data wcsDict[tileName]=astWCS.WCS(RMSMap[extName].header, mode = 'pyfits') else: RMSMapDict[tileName]=RMSMap[tileName].data wcsDict[tileName]=astWCS.WCS(RMSMap[tileName].header, mode = 'pyfits') # Area from RMS table areaDeg2=RMSTab[RMSTab['tileName'] == tileName]['areaDeg2'].sum() areaDeg2Dict[tileName]=areaDeg2 totalAreaDeg2=totalAreaDeg2+areaDeg2 # Area from map (slower) if checkAreaConsistency == True: areaMask, wcsDict[tileName]=completeness.loadAreaMask(tileName, config.selFnDir) areaMask=areaMap[tileName].data map_areaDeg2=(areaMask*maps.getPixelAreaArcmin2Map(areaMask.shape, wcsDict[tileName])).sum()/(60**2) if abs(map_areaDeg2-areaDeg2) > 1e-4: raise Exception("Area from areaMask.fits doesn't agree with area from RMSTab.fits") RMSMap.close() if checkAreaConsistency == True: areaMap.close() t1=time.time() if verbose: print("... took %.3f sec ..." 
% (t1-t0)) # Useful for testing: if 'seed' in config.parDict.keys(): seed=config.parDict['seed'] else: seed=None if seed is not None: np.random.seed(seed) # We're now using one MockSurvey object for the whole survey massOptions=config.parDict['massOptions'] minMass=5e13 zMin=0.0 zMax=2.0 defCosmo={'H0': 70.0, 'Om0': 0.30, 'Ob0': 0.05, 'sigma8': 0.80, 'ns': 0.95, 'delta': 500, 'rhoType': 'critical'} for key in defCosmo: if key not in massOptions.keys(): massOptions[key]=defCosmo[key] H0=massOptions['H0'] Om0=massOptions['Om0'] Ob0=massOptions['Ob0'] sigma8=massOptions['sigma8'] ns=massOptions['ns'] delta=massOptions['delta'] rhoType=massOptions['rhoType'] mockSurvey=MockSurvey.MockSurvey(minMass, totalAreaDeg2, zMin, zMax, H0, Om0, Ob0, sigma8, ns, delta = delta, rhoType = rhoType, enableDrawSample = True) print("... mock survey parameters:") for key in defCosmo.keys(): print(" %s = %s" % (key, str(massOptions[key]))) for key in ['tenToA0', 'B0', 'Mpivot', 'sigma_int']: print(" %s = %s" % (key, str(scalingRelationDict[key]))) print(" total area = %.1f square degrees" % (totalAreaDeg2)) print(" random seed = %s" % (str(seed))) if verbose: print(">>> Making mock catalogs ...") catList=[] for i in range(numMocksToMake): mockTabsList=[] t0=time.time() for tileName in config.tileNames: # It's possible (depending on tiling) that blank tiles were included - so skip # We may also have some tiles that are almost but not quite blank if RMSMapDict[tileName].sum() == 0 or areaDeg2Dict[tileName] < 0.5: continue mockTab=mockSurvey.drawSample(RMSMapDict[tileName], scalingRelationDict, Q, wcs = wcsDict[tileName], photFilterLabel = photFilterLabel, tileName = tileName, makeNames = True, SNRLimit = thresholdSigma, applySNRCut = True, areaDeg2 = areaDeg2Dict[tileName], applyPoissonScatter = applyPoissonScatter, applyIntrinsicScatter = applyIntrinsicScatter, applyNoiseScatter = applyNoiseScatter) if mockTab is not None: mockTabsList.append(mockTab) tab=atpy.vstack(mockTabsList) catList.append(tab) t1=time.time() if verbose: print("... making mock catalog %d took %.3f sec ..." 
% (i+1, t1-t0)) # Write catalog and .reg file if writeCatalogs == True: #colNames=['name', 'RADeg', 'decDeg', 'template', 'redshift', 'redshiftErr', 'true_M500', 'true_fixed_y_c', 'fixed_SNR', 'fixed_y_c', 'fixed_err_y_c'] #colFmts =['%s', '%.6f', '%.6f', '%s', '%.3f', '%.3f', '%.3f', '%.3f', '%.1f', '%.3f', '%.3f'] mockCatalogFileName=config.mocksDir+os.path.sep+"mockCatalog_%d.csv" % (i+1) catalogs.writeCatalog(tab, mockCatalogFileName) catalogs.writeCatalog(tab, mockCatalogFileName.replace(".csv", ".fits")) addInfo=[{'key': 'fixed_SNR', 'fmt': '%.1f'}] catalogs.catalog2DS9(tab, mockCatalogFileName.replace(".csv", ".reg"), constraintsList = [], addInfo = addInfo, color = "cyan") if combineMocks == True: tab=None for i in range(numMocksToMake): mockCatalogFileName=config.mocksDir+os.path.sep+"mockCatalog_%d.fits" % (i+1) stackTab=atpy.Table().read(mockCatalogFileName) if tab == None: tab=stackTab else: tab=atpy.vstack([tab, stackTab]) outFileName=config.mocksDir+os.path.sep+"mockCatalog_combined.fits" tab.meta['NEMOVER']=nemo.__version__ tab.write(outFileName, overwrite = True) # Write a small text file with the parameters used to generate the mocks into the mocks dir (easier than using headers) if writeInfo == True: mockKeys=['massOptions', 'makeMockCatalogs', 'applyPoissonScatter', 'applyIntrinsicScatter', 'applyNoiseScatter'] with open(config.mocksDir+os.path.sep+"mockParameters.txt", "w") as outFile: for m in mockKeys: if m in config.parDict.keys(): outFile.write("%s: %s\n" % (m, config.parDict[m])) return catList #------------------------------------------------------------------------------------------------------------ def extractSpec(config, tab, method = 'CAP', diskRadiusArcmin = 4.0, highPassFilter = False, estimateErrors = True, saveFilteredMaps = False): """Returns a table containing the spectral energy distribution, extracted using either compensated aperture photometry (CAP) at each object location in the input catalog, or using a matched filter. Maps at different frequencies will first be matched to the lowest resolution beam, using a Gaussian kernel. For the CAP method, at each object location, the temperature fluctuation is measured within a disk of radius diskRadiusArcmin, after subtracting the background measured in an annulus between diskRadiusArcmin < r < sqrt(2) * diskRadiusArcmin (i.e., this should be similar to the method described in Schaan et al. 2020). For the matched filter method, the catalog must contain a `template` column, as produced by the main `nemo` script, with template names in the format Arnaud_M2e14_z0p4 (for example). This will be used to set the signal scale used for each object. All definitions of filters in the config will be ignored, in favour of a filter using a simple CMB + white noise model. Identical filters will be used for all maps (i.e., the method of Saro et al. 2014). Args: config (:obj:`startup.NemoConfig`): Nemo configuration object. tab (:obj:`astropy.table.Table`): Catalog containing input object positions. Must contain columns 'name', 'RADeg', 'decDeg'. method (str, optional): diskRadiusArcmin (float, optional): If using CAP method: disk aperture radius in arcmin, within which the signal is measured. The background will be estimated in an annulus between diskRadiusArcmin < r < sqrt(2) * diskRadiusArcmin. highPassFilter (bool, optional): If using CAP method: if set, subtract the large scale background using maps.subtractBackground, with the smoothing scale set to 2 * sqrt(2) * diskRadiusArcmin. 
estimateErrors (bool, optional): If used CAP method: if set, estimate uncertainties by placing random apertures throughout the map. For now, this is done on a tile-by-tile basis, and doesn't take into account inhomogeneous noise within a tile. saveFilteredMaps (bool, optional): If using matchedFilter method: save the filtered maps under the `nemoSpecCache` directory (which is created in the current working directory, if it doesn't already exist). Returns: Catalog containing spectral energy distribution measurements for each object. For the CAP method, units of extracted signals are uK arcmin^2. For the matchedFilter method, extracted signals are deltaT CMB amplitude in uK. """ diagnosticsDir=config.diagnosticsDir # Choose lowest resolution as the reference beam - we match to that refBeam=None refFWHMArcmin=0 refIndex=0 beams=[] for i in range(len(config.unfilteredMapsDictList)): mapDict=config.unfilteredMapsDictList[i] beam=signals.BeamProfile(mapDict['beamFileName']) if beam.FWHMArcmin > refFWHMArcmin: refBeam=beam refFWHMArcmin=beam.FWHMArcmin refIndex=i beams.append(beam) # Sort the list of beams and maps so that the one with the reference beam is in index 0 config.unfilteredMapsDictList.insert(0, config.unfilteredMapsDictList.pop(refIndex)) beams.insert(0, beams.pop(refIndex)) # Figure out how much we need to Gaussian blur to match the reference beam # NOTE: This was an alternative to proper PSF-matching that wasn't good enough for ACT beams #for i in range(1, len(config.unfilteredMapsDictList)): #mapDict=config.unfilteredMapsDictList[i] #beam=beams[i] #degPerPix=np.mean(np.diff(beam.rDeg)) #assert(abs(np.diff(beam.rDeg).max()-degPerPix) < 0.001) #resMin=1e6 #smoothPix=0 #attFactor=1.0 #for j in range(1, 100): #smoothProf=ndimage.gaussian_filter1d(beam.profile1d, j) #smoothProf=smoothProf/smoothProf.max() #res=np.sum(np.power(refBeam.profile1d-smoothProf, 2)) #if res < resMin: #resMin=res #smoothPix=j #attFactor=1/smoothProf.max() #smoothScaleDeg=smoothPix*degPerPix #mapDict['smoothScaleDeg']=smoothScaleDeg #mapDict['smoothAttenuationFactor']=1/ndimage.gaussian_filter1d(beam.profile1d, smoothPix).max() # For testing on CMB maps here refMapDict=config.unfilteredMapsDictList[0] # PSF matching via a convolution kernel kernelDict={} # keys: tile, obsFreqGHz for tileName in config.tileNames: if tileName not in kernelDict.keys(): kernelDict[tileName]={} for i in range(1, len(config.unfilteredMapsDictList)): mapDict=config.unfilteredMapsDictList[i] beam=beams[i] degPerPix=np.mean(np.diff(beam.rDeg)) assert(abs(np.diff(beam.rDeg).max()-degPerPix) < 0.001) # Calculate convolution kernel sizePix=beam.profile1d.shape[0]*2 if sizePix % 2 == 0: sizePix=sizePix+1 symRDeg=np.linspace(-0.5, 0.5, sizePix) assert((symRDeg == 0).sum()) symProf=interpolate.splev(abs(symRDeg), beam.tck) symRefProf=interpolate.splev(abs(symRDeg), refBeam.tck) fSymRef=np.fft.fft(np.fft.fftshift(symRefProf)) fSymBeam=np.fft.fft(np.fft.fftshift(symProf)) fSymConv=fSymRef/fSymBeam fSymConv[fSymBeam < 1e-1]=0 # Was 1e-2; this value avoids ringing, smaller values do not symMatched=np.fft.ifft(fSymBeam*fSymConv).real symConv=np.fft.ifft(fSymConv).real # This allows normalization in same way as Gaussian smooth method symConv=symConv/symConv.sum() convedProf=ndimage.convolve(symProf, np.fft.fftshift(symConv)) attenuationFactor=1/convedProf.max() # norm # Make profile object peakIndex=np.argmax(np.fft.fftshift(symConv)) convKernel=signals.BeamProfile(profile1d = np.fft.fftshift(symConv)[peakIndex:], rDeg = symRDeg[peakIndex:]) ## 
Check plots #import pylab as plt #plt.figure(figsize=(10,8)) #plt.plot(abs(symRDeg*60), symRefProf, label = 'ref', lw = 3) #plt.plot(abs(symRDeg*60), convedProf*attenuationFactor, label = 'kernel convolved') #integralRatio=np.trapz(symRefProf)/np.trapz(convedProf*attenuationFactor) #plt.title("%.3f" % (integralRatio)) #plt.semilogy() #plt.legend() #ratio=(convedProf*attenuationFactor)/symRefProf #plt.figure(figsize=(10,8)) #plt.plot(abs(symRDeg*60), ratio, label = 'ratio') #plt.plot(abs(symRDeg*60), [1.0]*len(symRDeg), 'r-') #plt.legend() # Fudging 2d kernel to match (fix properly later) # NOTE: Now done at higher res but doesn't make much difference # (but DOES blow up in some tiles if you use e.g. have the resolution) wcs=astWCS.WCS(config.tileCoordsDict[tileName]['header'], mode = 'pyfits').copy() wcs.header['CDELT1']=np.diff(refBeam.rDeg)[0] wcs.header['CDELT2']=np.diff(refBeam.rDeg)[0] wcs.header['NAXIS1']=int(np.ceil(2*refBeam.rDeg.max()/wcs.header['CDELT1'])) wcs.header['NAXIS2']=int(np.ceil(2*refBeam.rDeg.max()/wcs.header['CDELT2'])) wcs.updateFromHeader() shape=(wcs.header['NAXIS2'], wcs.header['NAXIS1']) degreesMap=np.ones([shape[0], shape[1]], dtype = float)*1e6 RADeg, decDeg=wcs.pix2wcs(int(degreesMap.shape[1]/2), int(degreesMap.shape[0]/2)) degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs, RADeg, decDeg, 1.0) beamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, beam, amplitude = None) refBeamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, refBeam, amplitude = None) matchedBeamMap=maps.convolveMapWithBeam(beamMap*attenuationFactor, wcs, convKernel, maxDistDegrees = 1.0) # Find and apply radial fudge factor yRow=np.where(refBeamMap == refBeamMap.max())[0][0] rowValid=np.logical_and(degreesMap[yRow] < refBeam.rDeg.max(), matchedBeamMap[yRow] != 0) ratio=refBeamMap[yRow][rowValid]/matchedBeamMap[yRow][rowValid] zeroIndex=np.argmin(degreesMap[yRow][rowValid]) assert(degreesMap[yRow][rowValid][zeroIndex] == 0) tck=interpolate.splrep(degreesMap[yRow][rowValid][zeroIndex:], ratio[zeroIndex:]) fudge=interpolate.splev(convKernel.rDeg, tck) #fudge[fudge < 0.5]=1.0 #fudge[fudge > 1.5]=1.0 fudgeKernel=signals.BeamProfile(profile1d = convKernel.profile1d*fudge, rDeg = convKernel.rDeg) ## Check plot #import pylab as plt #plt.figure(figsize=(10,8)) #plt.plot(convKernel.rDeg, fudge, lw = 3, label = 'fudge') #plt.plot(convKernel.rDeg, [1.0]*len(fudge), 'r-') #plt.title("fudge") ##plt.ylim(0, 2) #plt.legend() #plt.show() # 2nd fudge factor - match integrals of 2d kernels fudgeMatchedBeamMap=maps.convolveMapWithBeam(beamMap*attenuationFactor, wcs, fudgeKernel, maxDistDegrees = 1.0) attenuationFactor=refBeamMap.sum()/fudgeMatchedBeamMap.sum() # Check at map pixelization that is actually used #shape=(config.tileCoordsDict[tileName]['header']['NAXIS2'], #config.tileCoordsDict[tileName]['header']['NAXIS1']) #wcs=astWCS.WCS(config.tileCoordsDict[tileName]['header'], mode = 'pyfits').copy() #degreesMap=np.ones([shape[0], shape[1]], dtype = float)*1e6 #RADeg, decDeg=wcs.pix2wcs(int(degreesMap.shape[1]/2), int(degreesMap.shape[0]/2)) #degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs, RADeg, decDeg, 1.0) #beamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, beam, amplitude = None) #refBeamMap=signals.makeBeamModelSignalMap(degreesMap, wcs, refBeam, amplitude = None) #fudgeMatchedBeamMap=maps.convolveMapWithBeam(beamMap*attenuationFactor, wcs, fudgeKernel, maxDistDegrees = 1.0) ## Check plot #import pylab as plt #yRow=np.where(refBeamMap 
== refBeamMap.max())[0][0] #rowValid=np.logical_and(degreesMap[yRow] < refBeam.rDeg.max(), fudgeMatchedBeamMap[yRow] != 0) #plt.figure(figsize=(10,8)) #plt.plot(degreesMap[yRow][rowValid]*60, refBeamMap[yRow][rowValid], lw = 3, label = 'ref') #plt.plot(degreesMap[yRow][rowValid]*60, fudgeMatchedBeamMap[yRow][rowValid], label = 'fudged') #integralRatio=np.trapz(fudgeMatchedBeamMap[yRow][rowValid])/np.trapz(refBeamMap[yRow][rowValid]) #plt.title("native map res - %.3f" % (integralRatio)) #plt.semilogy() #plt.ylim(1e-5) #plt.legend() #plt.show() #from astLib import astImages #astImages.saveFITS("ref.fits", refBeamMap, wcs) #astImages.saveFITS("fudgematched.fits", fudgeMatchedBeamMap, wcs) #astImages.saveFITS("diff.fits", refBeamMap-fudgeMatchedBeamMap, wcs) #import IPython #IPython.embed() #sys.exit() # NOTE: If we're NOT passing in 2d kernels, don't need to organise by tile kernelDict[tileName][mapDict['obsFreqGHz']]={'smoothKernel': fudgeKernel, 'smoothAttenuationFactor': attenuationFactor} if method == 'CAP': catalog=_extractSpecCAP(config, tab, kernelDict, diskRadiusArcmin = 4.0, highPassFilter = False, estimateErrors = True) elif method == 'matchedFilter': catalog=_extractSpecMatchedFilter(config, tab, kernelDict, saveFilteredMaps = saveFilteredMaps) else: raise Exception("'method' should be 'CAP' or 'matchedFilter'") return catalog #------------------------------------------------------------------------------------------------------------ def _extractSpecMatchedFilter(config, tab, kernelDict, saveFilteredMaps = False, noiseMethod = 'dataMap'): """See extractSpec. """ cacheDir="nemoSpecCache"+os.path.sep+os.path.basename(config.rootOutDir) os.makedirs(cacheDir, exist_ok = True) # Build filter configs allFilters={'class': 'ArnaudModelMatchedFilter', 'params': {'noiseParams': {'method': noiseMethod, 'noiseGridArcmin': 40.0}, 'saveFilteredMaps': False, 'saveRMSMap': False, 'savePlots': False, 'saveDS9Regions': False, 'saveFilter': False, 'outputUnits': 'yc', 'edgeTrimArcmin': 0.0, 'GNFWParams': 'default'}} filtersList=[] templatesUsed=np.unique(tab['template']).tolist() for t in templatesUsed: newDict=copy.deepcopy(allFilters) M500MSun=float(t.split("_M")[-1].split("_")[0]) z=float(t.split("_z")[-1].replace("p", ".")) newDict['params']['M500MSun']=M500MSun newDict['params']['z']=z newDict['label']=t filtersList.append(newDict) # Filter and extract # NOTE: We assume index 0 of the unfiltered maps list is the reference for which the filter is made catalogList=[] for tileName in config.tileNames: print("... rank %d: tileName = %s ..." 
% (config.rank, tileName)) diagnosticsDir=cacheDir+os.path.sep+tileName os.makedirs(diagnosticsDir, exist_ok = True) for f in filtersList: tempTileTab=None # catalogs are organised by tile and template filterObj=None for mapDict in config.unfilteredMapsDictList: if tempTileTab is None: shape=(config.tileCoordsDict[tileName]['header']['NAXIS2'], config.tileCoordsDict[tileName]['header']['NAXIS1']) wcs=astWCS.WCS(config.tileCoordsDict[tileName]['header'], mode = 'pyfits') tempTileTab=catalogs.getCatalogWithinImage(tab, shape, wcs) tempTileTab=tempTileTab[tempTileTab['template'] == f['label']] if tempTileTab is None or len(tempTileTab) == 0: continue if mapDict['obsFreqGHz'] == config.unfilteredMapsDictList[0]['obsFreqGHz']: filteredMapDict, filterObj=filters.filterMaps([mapDict], f, tileName, filteredMapsDir = cacheDir, diagnosticsDir = diagnosticsDir, selFnDir = cacheDir, verbose = True, undoPixelWindow = True, returnFilter = True) else: mapDict['smoothKernel']=kernelDict[tileName][mapDict['obsFreqGHz']]['smoothKernel'] mapDict['smoothAttenuationFactor']=kernelDict[tileName][mapDict['obsFreqGHz']]['smoothAttenuationFactor'] mapDictToFilter=maps.preprocessMapDict(mapDict.copy(), tileName = tileName) filteredMapDict['data']=filterObj.applyFilter(mapDictToFilter['data']) RMSMap=filterObj.makeNoiseMap(filteredMapDict['data']) filteredMapDict['SNMap']=np.zeros(filterObj.shape) mask=np.greater(filteredMapDict['surveyMask'], 0) filteredMapDict['SNMap'][mask]=filteredMapDict['data'][mask]/RMSMap[mask] filteredMapDict['data']=enmap.apply_window(filteredMapDict['data'], pow=-1.0) if saveFilteredMaps == True: outFileName=cacheDir+os.path.sep+'%d_' % (mapDict['obsFreqGHz'])+f['label']+'#'+tileName+'.fits' # Add conversion to delta T in here? maps.saveFITS(outFileName, filteredMapDict['data'], filteredMapDict['wcs']) freqTileTab=photometry.makeForcedPhotometryCatalog(filteredMapDict, tempTileTab, useInterpolator = config.parDict['useInterpolator']) photometry.measureFluxes(freqTileTab, filteredMapDict, cacheDir, useInterpolator = config.parDict['useInterpolator'], ycObsFreqGHz = mapDict['obsFreqGHz']) # We don't take tileName from the catalog, some objects in overlap areas may only get cut here if len(freqTileTab) == 0: tempTileTab=None continue tempTileTab, freqTileTab, rDeg=catalogs.crossMatch(tempTileTab, freqTileTab, radiusArcmin = 2.5) colNames=['deltaT_c', 'y_c', 'SNR'] suff='_%d' % (mapDict['obsFreqGHz']) for colName in colNames: tempTileTab[colName+suff]=freqTileTab[colName] if 'err_'+colName in freqTileTab.keys(): tempTileTab['err_'+colName+suff]=freqTileTab['err_'+colName] if tempTileTab is not None and len(tempTileTab) > 0: catalogList.append(tempTileTab) if len(catalogList) > 0: catalog=atpy.vstack(catalogList) else: catalog=[] return catalog #------------------------------------------------------------------------------------------------------------ def _extractSpecCAP(config, tab, kernelDict, method = 'CAP', diskRadiusArcmin = 4.0, highPassFilter = False, estimateErrors = True): """See extractSpec. """ # Define apertures like Schaan et al. style compensated aperture photometry filter innerRadiusArcmin=diskRadiusArcmin outerRadiusArcmin=diskRadiusArcmin*np.sqrt(2) catalogList=[] for tileName in config.tileNames: # This loads the maps, applies any masks, and smooths to approx. 
same scale mapDictList=[] freqLabels=[] for mapDict in config.unfilteredMapsDictList: mapDict=maps.preprocessMapDict(mapDict.copy(), tileName = tileName) if highPassFilter == True: mapDict['data']=maps.subtractBackground(mapDict['data'], mapDict['wcs'], smoothScaleDeg = (2*outerRadiusArcmin)/60) freqLabels.append(int(round(mapDict['obsFreqGHz']))) mapDictList.append(mapDict) wcs=mapDict['wcs'] shape=mapDict['data'].shape # Extract spectra pixAreaMap=maps.getPixelAreaArcmin2Map(shape, wcs) maxSizeDeg=(outerRadiusArcmin*1.2)/60 tileTab=catalogs.getCatalogWithinImage(tab, shape, wcs) for label in freqLabels: tileTab['diskT_uKArcmin2_%s' % (label)]=np.zeros(len(tileTab)) tileTab['err_diskT_uKArcmin2_%s' % (label)]=np.zeros(len(tileTab)) tileTab['diskSNR_%s' % (label)]=np.zeros(len(tileTab)) for row in tileTab: degreesMap=np.ones(shape, dtype = float)*1e6 # NOTE: never move this degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs, row['RADeg'], row['decDeg'], maxSizeDeg) innerMask=degreesMap < innerRadiusArcmin/60 outerMask=np.logical_and(degreesMap >= innerRadiusArcmin/60, degreesMap < outerRadiusArcmin/60) for mapDict, label in zip(mapDictList, freqLabels): d=mapDict['data'] diskFlux=(d[innerMask]*pixAreaMap[innerMask]).sum()-(d[outerMask]*pixAreaMap[outerMask]).sum() row['diskT_uKArcmin2_%s' % (label)]=diskFlux # Estimate noise in every measurement (on average) from spatting down on random positions # This will break if noise is inhomogeneous though. But at least it's done separately for each tile. # We can later add something that scales / fits using the weight map? if estimateErrors == True: randTab=catalogs.generateRandomSourcesCatalog(mapDict['surveyMask'], wcs, 1000) for label in freqLabels: randTab['diskT_uKArcmin2_%s' % (label)]=np.zeros(len(randTab)) for row in randTab: degreesMap=np.ones(shape, dtype = float)*1e6 # NOTE: never move this degreesMap, xBounds, yBounds=nemoCython.makeDegreesDistanceMap(degreesMap, wcs, row['RADeg'], row['decDeg'], maxSizeDeg) innerMask=degreesMap < innerRadiusArcmin/60 outerMask=np.logical_and(degreesMap >= innerRadiusArcmin/60, degreesMap < outerRadiusArcmin/60) for mapDict, label in zip(mapDictList, freqLabels): d=mapDict['data'] diskFlux=(d[innerMask]*pixAreaMap[innerMask]).sum()-(d[outerMask]*pixAreaMap[outerMask]).sum() row['diskT_uKArcmin2_%s' % (label)]=diskFlux noiseLevels={} for label in freqLabels: if signals.fSZ(float(label)) < 0: SNRSign=-1 else: SNRSign=1 noiseLevels[label]=np.percentile(abs(randTab['diskT_uKArcmin2_%s' % (label)]), 68.3) tileTab['err_diskT_uKArcmin2_%s' % (label)]=noiseLevels[label] tileTab['diskSNR_%s' % (label)]=SNRSign*(tileTab['diskT_uKArcmin2_%s' % (label)]/noiseLevels[label]) catalogList.append(tileTab) catalog=atpy.vstack(catalogList) return catalog
[ 37811, 198, 198, 1212, 8265, 15738, 31108, 532, 5621, 286, 8861, 287, 36945, 78, 326, 356, 3360, 765, 284, 466, 319, 1180, 17311, 198, 7, 68, 13, 70, 1539, 1103, 1366, 393, 28590, 1366, 737, 198, 198, 37811, 198, 198, 11748, 28686, ...
2.108341
25,069
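A note on the row metadata above (and every row in this dump): token_count is evidently len(input_ids) and ratio_char_token is evidently len(content) / token_count. Below is a minimal sketch of how both fields could be recomputed; it assumes the ids are GPT-2 BPE (198 decodes to a newline and 11748 to "import" in that vocabulary), since the dump itself never names the tokenizer.

# Hedged sketch: recompute a row's ratio_char_token and token_count.
# ASSUMPTION: GPT-2 BPE ids; swap in the real tokenizer if it differs.
from transformers import GPT2TokenizerFast

tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")

def row_stats(content: str) -> dict:
    input_ids = tokenizer(content)["input_ids"]
    return {
        "token_count": len(input_ids),
        "ratio_char_token": len(content) / len(input_ids),
    }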
#!/usr/bin/env python ''' This program is free software; you can redistribute it and/or modify it under the terms of the Revised BSD License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the Revised BSD License for more details. Copyright 2011-2016 Game Maker 2k - https://github.com/GameMaker2k Copyright 2011-2016 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski $FileInfo: pypkg-gen.py - Last Update: 6/1/2016 Ver. 0.2.0 RC 1 - Author: cooldude2k $ ''' from __future__ import absolute_import, division, print_function, unicode_literals; import re, os, sys, time, platform, datetime, argparse, subprocess; __version_info__ = (0, 2, 0, "rc1"); if(__version_info__[3]!=None): __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])+"+"+str(__version_info__[3]); if(__version_info__[3]==None): __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]); proname = "pypkg-gen"; prover = __version__; profullname = proname+" "+prover; linuxdist = [None]; try: linuxdist = platform.linux_distribution(); except AttributeError: linuxdist = [None]; getlinuxdist = linuxdist; setdistroname = "debian"; setdistrocname = "jessie"; if(getlinuxdist[0] is not None and (getlinuxdist[0].lower()=="debian" or getlinuxdist[0].lower()=="ubuntu" or getlinuxdist[0].lower()=="linuxmint")): setdistroname = getlinuxdist[0].lower(); setdistrocname = getlinuxdist[2].lower(); if(setdistrocname==""): lsblocatout = which_exec("lsb_release"); pylsblistp = subprocess.Popen([lsblocatout, "-c"], stdout=subprocess.PIPE, stderr=subprocess.PIPE); pylsbout, pylsberr = pylsblistp.communicate(); if(sys.version[0]=="3"): pylsbout = pylsbout.decode("utf-8"); pylsb_esc = re.escape("Codename:")+'([a-zA-Z\t+\s+]+)'; pylsbname = re.findall(pylsb_esc, pylsbout)[0].lower(); setdistrocname = pylsbname.strip(); if(getlinuxdist[0] is not None and getlinuxdist[0].lower()=="archlinux"): setdistroname = getlinuxdist[0].lower(); setdistrocname = None; parser = argparse.ArgumentParser(conflict_handler = "resolve", add_help = True); parser.add_argument("-v", "--version", action = "version", version = profullname); parser.add_argument("-s", "--source", default = os.path.realpath(os.getcwd()), help = "source dir"); parser.add_argument("-d", "--distro", default = setdistroname, help = "enter linux distribution name"); parser.add_argument("-c", "--codename", default = setdistrocname, help = "enter release code name"); parser.add_argument("-p", "--pyver", default = sys.version[0], help = "enter version of python to use"); getargs = parser.parse_args(); bashlocatout = which_exec("bash"); getargs.source = os.path.realpath(getargs.source); getargs.codename = getargs.codename.lower(); getargs.distro = getargs.distro.lower(); if(getargs.pyver=="2"): getpyver = "python2"; if(getargs.pyver=="3"): getpyver = "python3"; if(getargs.pyver!="2" and getargs.pyver!="3"): if(sys.version[0]=="2"): getpyver = "python2"; if(sys.version[0]=="3"): getpyver = "python3"; get_pkgbuild_dir = os.path.realpath(getargs.source+os.path.sep+"pkgbuild"); get_pkgbuild_dist_pre_list = [d for d in os.listdir(get_pkgbuild_dir) if os.path.isdir(os.path.join(get_pkgbuild_dir, d))]; get_pkgbuild_dist_list = []; for dists in get_pkgbuild_dist_pre_list: tmp_pkgbuild_python = os.path.realpath(get_pkgbuild_dir+os.path.sep+dists+os.path.sep+getpyver); 
if(os.path.exists(tmp_pkgbuild_python) and os.path.isdir(tmp_pkgbuild_python)): get_pkgbuild_dist_list.append(dists); if(not getargs.distro in get_pkgbuild_dist_list): print("Could not build for "+getargs.distro+" distro."); sys.exit(); if(getargs.distro=="debian" or getargs.distro=="ubuntu" or getargs.distro=="linuxmint"): pypkgpath = os.path.realpath(getargs.source+os.path.sep+"pkgbuild"+os.path.sep+getargs.distro+os.path.sep+getpyver+os.path.sep+"pydeb-gen.sh"); pypkgenlistp = subprocess.Popen([bashlocatout, pypkgpath, getargs.source, getargs.codename], stdout=subprocess.PIPE, stderr=subprocess.PIPE); pypkgenout, pypkgenerr = pypkgenlistp.communicate(); if(sys.version[0]=="3"): pypkgenout = pypkgenout.decode("utf-8"); print(pypkgenout); pypkgenlistp.wait(); if(getargs.distro=="archlinux"): pypkgpath = os.path.realpath(getargs.source+os.path.sep+"pkgbuild"+os.path.sep+getargs.distro+os.path.sep+getpyver+os.path.sep+"pypac-gen.sh"); pypkgenlistp = subprocess.Popen([bashlocatout, pypkgpath, getargs.source, getargs.codename], stdout=subprocess.PIPE, stderr=subprocess.PIPE); pypkgenout, pypkgenerr = pypkgenlistp.communicate(); if(sys.version[0]=="3"): pypkgenout = pypkgenout.decode("utf-8"); print(pypkgenout); pypkgenlistp.wait();
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 198, 7061, 6, 198, 220, 220, 220, 770, 1430, 318, 1479, 3788, 26, 345, 460, 17678, 4163, 340, 290, 14, 273, 13096, 198, 220, 220, 220, 340, 739, 262, 2846, 286, 262, 31492, 347, 10...
2.559286
1,906
import time
print (time.strftime("%B %e, %Y"))

# Guides:
# how to formate date:
# http://strftime.net/

# how to use time:
# http://www.cyberciti.biz/faq/howto-get-current-date-time-in-python/
[ 11748, 640, 198, 4798, 357, 2435, 13, 2536, 31387, 7203, 4, 33, 4064, 68, 11, 4064, 56, 48774, 198, 198, 2, 48770, 25, 198, 2, 703, 284, 1296, 378, 3128, 25, 198, 2, 2638, 1378, 2536, 31387, 13, 3262, 14, 198, 198, 2, 703, 284, ...
2.425
80
import re

with open('input.txt', 'r') as f:

    pw_ok=0
    for line in f:
        (rule,s,space_and_pw) = line.partition(':')
        (lowhigh,s,c) = rule.partition(' ')
        (low,s,high) = lowhigh.partition('-')
        pw=space_and_pw[1:-1]
        c1=pw[int(low)-1]
        c2=pw[int(high)-1]
        if (c1==c and c2!=c) or (c1!=c and c2==c):
            print(low, high, c, pw, c1, c2, 'ok')
            pw_ok+=1
        else:
            print(low, high, c, pw, c1, c2, 'falsch')

print (pw_ok)
#737
[ 11748, 302, 198, 198, 4480, 1280, 10786, 15414, 13, 14116, 3256, 705, 81, 11537, 355, 277, 25, 628, 220, 220, 220, 279, 86, 62, 482, 28, 15, 198, 220, 220, 220, 329, 1627, 287, 277, 25, 198, 220, 220, 220, 220, 220, 220, 220, 35...
1.621451
317
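A hedged alternative to the chained str.partition parse in the row above, assuming the same "1-3 a: abcde" line format; LINE_RE and is_valid are names introduced here, not taken from the sample.

import re

LINE_RE = re.compile(r"(\d+)-(\d+) (\w): (\w+)")

def is_valid(line: str) -> bool:
    low, high, c, pw = LINE_RE.match(line).groups()
    # exactly one of the two 1-indexed positions may hold the letter
    return (pw[int(low) - 1] == c) != (pw[int(high) - 1] == c)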
# Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import openslide
from openslide import OpenSlide
from openslide.deepzoom import DeepZoomGenerator
from io import BytesIO
from PIL import Image

from .rendering_engine_interface import RenderingEngineInterface
from .. import settings

from ome_seadragon_cache import CacheDriverFactory
[ 2, 220, 15069, 357, 66, 8, 13130, 11, 327, 6998, 19, 198, 2, 198, 2, 220, 2448, 3411, 318, 29376, 7520, 11, 1479, 286, 3877, 11, 284, 597, 1048, 16727, 257, 4866, 286, 198, 2, 220, 428, 3788, 290, 3917, 10314, 3696, 357, 1169, 3...
3.707775
373
"""Global config management This module provides functions for initializing, accessing and destroying a global config object. You can initialize a global config from any object. However, in the context of pyppy, only the instance attributes of the object are used and work with the decorators ``fill_args`` and ``condition``. But you can use any object you like. The config management methods are just a convenience reference to the original object. Initialization -------------- In this example, we initialize a global config from a ``NameSpace`` parsed with a custom ``ArgumentParser``. For demonstration purposes, the parser will not parse args from the commandline but from a list:: from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument("--message") # parse_args returns an argparse.Namespace args = parser.parse_args(["--message", "hello!"]) To initialize a global config object, import the function ``initialize_config`` and pass the args variable:: from pyppy.config import initialize_config initialize_config(args) You can also create an empty global object (which just holds a reference to an empty ``object``) and change it afterwards via accessing the global config object (see Config access section):: from pyppy.config import initialize_config initialize_config(args) Access ------ Now that you have initialized the global config, you can use it throughout your code:: from pyppy.config import config print(config().message) # "hello!" Note ---- The original object that you used to initialize the global config is returned any time you call ``config()``, so you can do everything with the object that you could also do before. Modification ------------ It is possible to change the global config object during time, e.g. to pass information between objects in your code. We know that the term 'config' is not ideal for these use cases and we're working on functionality to handle these use cases in a better way. Here's an example of config modification:: config().message = "bye!" print(config().message) Reset ----- There can be only one global config object. So whenever you have initialized a config you cannot initialize a new one. If you try to an exception is raised. In the rare cases you might want to have a new global config you can explicitly destroy the current one and initialize a new one:: from pyppy.config import destroy_config destroy_config() initialize_config(args2) """ from types import SimpleNamespace from pyppy.exc import ConfigAlreadyInitializedException _CONFIG = "pyppy-config" def initialize_config(obj: object = SimpleNamespace()) -> None: """ Initialize a global config with the specified object or with an empty ``object`` if no object is given. Parameters ---------- obj : object Object to initialize the global config with. Whenever you will call ``pyppy.config.config()`` you will get a r reference to this object. Returns ------- None Examples -------- >>> destroy_config() >>> c = SimpleNamespace() >>> c.option = "say_hello" >>> initialize_config(c) >>> config().option 'say_hello' >>> destroy_config() """ if hasattr(config, _CONFIG): raise ConfigAlreadyInitializedException( ( "Config has already been initialized. " "If you want to initialize a new config call " f"{destroy_config.__name__}()." ) ) config(obj) def config(_obj: object = None) -> object: """ Accesses a previously initialized global config. Returns ------- object: The object that was used to initialize the global config. 
Examples -------- >>> destroy_config() >>> c = SimpleNamespace() >>> c.option = "say_hello" >>> initialize_config(c) >>> config().option 'say_hello' >>> destroy_config() """ if not hasattr(config, _CONFIG) and _obj: setattr(config, _CONFIG, _obj) if not hasattr(config, _CONFIG): raise Exception("Please initialize config first!") return getattr(config, _CONFIG) def destroy_config() -> None: """ Deletes the global reference to the object that the config was initialized with. Examples -------- >>> destroy_config() >>> c = SimpleNamespace() >>> c.option = "say_hello" >>> initialize_config(c) >>> config().option 'say_hello' >>> destroy_config() >>> config().option Traceback (most recent call last): ... Exception: Please initialize config first! """ if hasattr(config, _CONFIG): delattr(config, _CONFIG)
[ 37811, 22289, 4566, 4542, 198, 198, 1212, 8265, 3769, 5499, 329, 4238, 2890, 11, 22534, 290, 13897, 198, 64, 3298, 4566, 2134, 13, 921, 460, 41216, 257, 3298, 4566, 422, 597, 2134, 13, 198, 4864, 11, 287, 262, 4732, 286, 12972, 14097,...
3.217802
1,483
def three_sum(nums):
    """
    Given an array nums of n integers, are there elements a, b, c in nums
    such that a + b + c = 0?
    Find all unique triplets in the array which gives the sum of zero.
    :param nums: list[int]
    :return: list[list[int]]
    """
    if len(nums) < 3:
        return []
    nums.sort()
    res = []
    for i in range(len(nums) - 2):
        if i > 0 and nums[i - 1] == nums[i]:
            continue
        l, r = i + 1, len(nums) - 1
        while l < r:
            s = nums[i] + nums[l] + nums[r]
            if s == 0:
                res.append([nums[i], nums[l], nums[r]])
                l += 1; r -= 1
                while l < r and nums[l] == nums[l - 1]:
                    l += 1
                while l < r and nums[r] == nums[r + 1]:
                    r -= 1
            elif s < 0:
                l += 1
            else:
                r -= 1
    return res
[ 4299, 1115, 62, 16345, 7, 77, 5700, 2599, 198, 220, 220, 220, 37227, 198, 220, 220, 220, 11259, 281, 7177, 997, 82, 286, 299, 37014, 11, 389, 612, 4847, 257, 11, 275, 11, 269, 287, 997, 82, 884, 326, 257, 1343, 275, 1343, 269, 7...
1.795501
489
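A quick usage example for the three_sum row above; the expected output was traced by hand through the two-pointer logic (triplets follow the sorted input order).

print(three_sum([-1, 0, 1, 2, -1, -4]))
# [[-1, -1, 2], [-1, 0, 1]]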
import unittest
import os
from dao.guild_roles_dao import GuildRolesDAO
from dao.guild_role_categories_dao import GuildRoleCategoriesDAO
[ 11748, 555, 715, 395, 198, 11748, 28686, 198, 6738, 288, 5488, 13, 70, 3547, 62, 305, 829, 62, 67, 5488, 1330, 16446, 49, 4316, 5631, 46, 198, 6738, 288, 5488, 13, 70, 3547, 62, 18090, 62, 66, 26129, 62, 67, 5488, 1330, 16446, 474...
2.795918
49
import picamera
import datetime
import os

delcount = 2

with picamera.PiCamera() as camera:
    try:
        check_fs()
        tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
        print "recording", tstamp
        camera.start_recording(tstamp + '.h264')
        camera.wait_recording(60)
        while True:
            check_fs()
            tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
            print "recording", tstamp
            camera.split_recording(tstamp + '.h264')
            camera.wait_recording(60)
    except KeyboardInterrupt:
        print "quitting"
        camera.stop_recording()
[ 11748, 8301, 18144, 198, 11748, 4818, 8079, 198, 11748, 28686, 198, 198, 12381, 9127, 796, 362, 198, 198, 4480, 8301, 18144, 13, 38729, 35632, 3419, 355, 4676, 25, 198, 220, 220, 220, 1949, 25, 198, 220, 220, 220, 220, 220, 220, 220, ...
2.034921
315
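The picamera row above calls check_fs(), which the sample never defines. A hypothetical implementation consistent with its apparent purpose (keep the card from filling by deleting the oldest .h264 clips); min_free_bytes and the statvfs approach are assumptions, not taken from the sample.

import os

def check_fs(min_free_bytes=100 * 1024 * 1024, delcount=2):
    # HYPOTHETICAL: free space by removing the oldest recordings
    st = os.statvfs(".")
    if st.f_bavail * st.f_frsize < min_free_bytes:
        clips = sorted(f for f in os.listdir(".") if f.endswith(".h264"))
        for old in clips[:delcount]:
            os.remove(old)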
from .handle import ForgeHandle
[ 6738, 764, 28144, 1330, 24855, 37508, 198 ]
4.571429
7
import FWCore.ParameterSet.Config as cms from CondCore.DBCommon.CondDBCommon_cfi import * PoolDBESSourceMistag110118 = cms.ESSource("PoolDBESSource", CondDBCommon, toGet = cms.VPSet( # # working points # cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGJBPLtable_v5_offline'), label = cms.untracked.string('BTagMISTAGJBPLtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGJBPLwp_v5_offline'), label = cms.untracked.string('BTagMISTAGJBPLwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGJBPMtable_v5_offline'), label = cms.untracked.string('BTagMISTAGJBPMtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGJBPMwp_v5_offline'), label = cms.untracked.string('BTagMISTAGJBPMwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGJBPTtable_v5_offline'), label = cms.untracked.string('BTagMISTAGJBPTtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGJBPTwp_v5_offline'), label = cms.untracked.string('BTagMISTAGJBPTwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGJPLtable_v5_offline'), label = cms.untracked.string('BTagMISTAGJPLtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGJPLwp_v5_offline'), label = cms.untracked.string('BTagMISTAGJPLwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGJPMtable_v5_offline'), label = cms.untracked.string('BTagMISTAGJPMtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGJPMwp_v5_offline'), label = cms.untracked.string('BTagMISTAGJPMwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGJPTtable_v5_offline'), label = cms.untracked.string('BTagMISTAGJPTtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGJPTwp_v5_offline'), label = cms.untracked.string('BTagMISTAGJPTwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGSSVHEMtable_v5_offline'), label = cms.untracked.string('BTagMISTAGSSVHEMtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGSSVHEMwp_v5_offline'), label = cms.untracked.string('BTagMISTAGSSVHEMwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGSSVHPTtable_v5_offline'), label = cms.untracked.string('BTagMISTAGSSVHPTtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGSSVHPTwp_v5_offline'), label = cms.untracked.string('BTagMISTAGSSVHPTwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGTCHELtable_v5_offline'), label = cms.untracked.string('BTagMISTAGTCHELtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGTCHELwp_v5_offline'), label = cms.untracked.string('BTagMISTAGTCHELwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGTCHEMtable_v5_offline'), label = cms.untracked.string('BTagMISTAGTCHEMtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = 
cms.string('BTagMISTAGTCHEMwp_v5_offline'), label = cms.untracked.string('BTagMISTAGTCHEMwp_v5_offline') ), cms.PSet( record = cms.string('PerformancePayloadRecord'), tag = cms.string('BTagMISTAGTCHPTtable_v5_offline'), label = cms.untracked.string('BTagMISTAGTCHPTtable_v5_offline') ), cms.PSet( record = cms.string('PerformanceWPRecord'), tag = cms.string('BTagMISTAGTCHPTwp_v5_offline'), label = cms.untracked.string('BTagMISTAGTCHPTwp_v5_offline') ), )) PoolDBESSourceMistag110118.connect = 'frontier://FrontierProd/CMS_COND_31X_PHYSICSTOOLS'
[ 11748, 48849, 14055, 13, 36301, 7248, 13, 16934, 355, 269, 907, 198, 198, 6738, 9724, 14055, 13, 35, 2749, 2002, 261, 13, 25559, 35, 2749, 2002, 261, 62, 66, 12463, 1330, 1635, 198, 198, 27201, 11012, 1546, 7416, 49370, 363, 1157, 486...
2.143057
2,146
'''
Created on Nov 22, 2018

@author: kjnether

methods that evaluate the given schedule
'''

import logging
import FMEUtil.FMEServerApiData
import re
[ 7061, 6, 201, 198, 41972, 319, 5267, 2534, 11, 2864, 201, 198, 201, 198, 31, 9800, 25, 479, 73, 3262, 372, 201, 198, 201, 198, 24396, 82, 326, 13446, 262, 1813, 7269, 201, 198, 7061, 6, 201, 198, 201, 198, 11748, 18931, 201, 198, ...
2.5625
64
from PyQt5 import QtWidgets, QtCore

from podcastista.ShowEpisodeWidget import ShowEpisodeWidget
from podcastista.FlowLayout import FlowLayout
[ 6738, 9485, 48, 83, 20, 1330, 33734, 54, 312, 11407, 11, 33734, 14055, 198, 6738, 9905, 12523, 13, 15307, 23758, 38300, 1330, 5438, 23758, 38300, 198, 6738, 9905, 12523, 13, 37535, 32517, 1330, 27782, 32517, 628, 198 ]
3.891892
37
import pyparsing as pp
#relationship will refer to 'track' in all of your examples
relationship = pp.Word(pp.alphas).setResultsName('relationship')

number = pp.Word(pp.nums + '.')
variable = pp.Word(pp.alphas)
# an argument to a relationship can be either a number or a variable
argument = number | variable

# arguments are a delimited list of 'argument' surrounded by parenthesis
arguments= (pp.Suppress('(') + pp.delimitedList(argument) +
            pp.Suppress(')')).setResultsName('arguments')

# a fact is composed of a relationship and it's arguments
# (I'm aware it's actually more complicated than this
# it's just a simplifying assumption)
fact = (relationship + arguments).setResultsName('facts', listAllMatches=True)

# a sentence is a fact plus a period
sentence = fact + pp.Suppress('.')

# self explanatory
prolog_sentences = pp.OneOrMore(sentence)
[ 11748, 279, 4464, 945, 278, 355, 9788, 198, 2, 39468, 1056, 481, 3522, 284, 705, 11659, 6, 287, 477, 286, 534, 6096, 198, 39468, 1056, 796, 9788, 13, 26449, 7, 381, 13, 282, 5902, 737, 2617, 25468, 5376, 10786, 39468, 1056, 11537, 1...
3.292776
263
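A small smoke test for the grammar above; note the exact ParseResults access style can vary slightly between pyparsing versions.

result = prolog_sentences.parseString("track(1.0, X). track(2.5, Y).")
for f in result.facts:
    print(f.relationship, list(f.arguments))
# track ['1.0', 'X']
# track ['2.5', 'Y']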
#!/usr/bin/env python import vtk from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() # this script tests vtkImageReslice with various axes permutations, # in order to cover a nasty set of "if" statements that check # the intersections of the raster lines with the input bounding box. # Image pipeline reader = vtk.vtkImageReader() reader.ReleaseDataFlagOff() reader.SetDataByteOrderToLittleEndian() reader.SetDataExtent(0,63,0,63,1,93) reader.SetDataSpacing(3.2,3.2,1.5) reader.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter") reader.SetDataMask(0x7fff) transform = vtk.vtkTransform() # rotate about the center of the image transform.Translate(+100.8,+100.8,+69.0) transform.RotateWXYZ(10,1,1,0) transform.Translate(-100.8,-100.8,-69.0) reslice1 = vtk.vtkImageReslice() reslice1.SetInputConnection(reader.GetOutputPort()) reslice1.SetResliceAxesDirectionCosines([1,0,0,0,1,0,0,0,1]) reslice1.SetResliceTransform(transform) reslice1.SetOutputSpacing(3.2,3.2,3.2) reslice1.SetOutputExtent(0,74,0,74,0,0) reslice2 = vtk.vtkImageReslice() reslice2.SetInputConnection(reader.GetOutputPort()) reslice2.SetResliceAxesDirectionCosines([0,1,0,0,0,1,1,0,0]) reslice2.SetResliceTransform(transform) reslice2.SetOutputSpacing(3.2,3.2,3.2) reslice2.SetOutputExtent(0,74,0,74,0,0) reslice3 = vtk.vtkImageReslice() reslice3.SetInputConnection(reader.GetOutputPort()) reslice3.SetResliceAxesDirectionCosines([0,0,1,1,0,0,0,1,0]) reslice3.SetResliceTransform(transform) reslice3.SetOutputSpacing(3.2,3.2,3.2) reslice3.SetOutputExtent(0,74,0,74,0,0) reslice4 = vtk.vtkImageReslice() reslice4.SetInputConnection(reader.GetOutputPort()) reslice4.SetResliceAxesDirectionCosines([-1,0,0,0,-1,0,0,0,-1]) reslice4.SetResliceTransform(transform) reslice4.SetOutputSpacing(3.2,3.2,3.2) reslice4.SetOutputExtent(0,74,0,74,0,0) reslice5 = vtk.vtkImageReslice() reslice5.SetInputConnection(reader.GetOutputPort()) reslice5.SetResliceAxesDirectionCosines([0,-1,0,0,0,-1,-1,0,0]) reslice5.SetResliceTransform(transform) reslice5.SetOutputSpacing(3.2,3.2,3.2) reslice5.SetOutputExtent(0,74,0,74,0,0) reslice6 = vtk.vtkImageReslice() reslice6.SetInputConnection(reader.GetOutputPort()) reslice6.SetResliceAxesDirectionCosines([0,0,-1,-1,0,0,0,-1,0]) reslice6.SetResliceTransform(transform) reslice6.SetOutputSpacing(3.2,3.2,3.2) reslice6.SetOutputExtent(0,74,0,74,0,0) mapper1 = vtk.vtkImageMapper() mapper1.SetInputConnection(reslice1.GetOutputPort()) mapper1.SetColorWindow(2000) mapper1.SetColorLevel(1000) mapper1.SetZSlice(0) mapper2 = vtk.vtkImageMapper() mapper2.SetInputConnection(reslice2.GetOutputPort()) mapper2.SetColorWindow(2000) mapper2.SetColorLevel(1000) mapper2.SetZSlice(0) mapper3 = vtk.vtkImageMapper() mapper3.SetInputConnection(reslice3.GetOutputPort()) mapper3.SetColorWindow(2000) mapper3.SetColorLevel(1000) mapper3.SetZSlice(0) mapper4 = vtk.vtkImageMapper() mapper4.SetInputConnection(reslice4.GetOutputPort()) mapper4.SetColorWindow(2000) mapper4.SetColorLevel(1000) mapper4.SetZSlice(0) mapper5 = vtk.vtkImageMapper() mapper5.SetInputConnection(reslice5.GetOutputPort()) mapper5.SetColorWindow(2000) mapper5.SetColorLevel(1000) mapper5.SetZSlice(0) mapper6 = vtk.vtkImageMapper() mapper6.SetInputConnection(reslice6.GetOutputPort()) mapper6.SetColorWindow(2000) mapper6.SetColorLevel(1000) mapper6.SetZSlice(0) actor1 = vtk.vtkActor2D() actor1.SetMapper(mapper1) actor2 = vtk.vtkActor2D() actor2.SetMapper(mapper2) actor3 = vtk.vtkActor2D() actor3.SetMapper(mapper3) actor4 = vtk.vtkActor2D() actor4.SetMapper(mapper4) 
actor5 = vtk.vtkActor2D() actor5.SetMapper(mapper5) actor6 = vtk.vtkActor2D() actor6.SetMapper(mapper6) imager1 = vtk.vtkRenderer() imager1.AddActor2D(actor1) imager1.SetViewport(0.0,0.0,0.3333,0.5) imager2 = vtk.vtkRenderer() imager2.AddActor2D(actor2) imager2.SetViewport(0.0,0.5,0.3333,1.0) imager3 = vtk.vtkRenderer() imager3.AddActor2D(actor3) imager3.SetViewport(0.3333,0.0,0.6667,0.5) imager4 = vtk.vtkRenderer() imager4.AddActor2D(actor4) imager4.SetViewport(0.3333,0.5,0.6667,1.0) imager5 = vtk.vtkRenderer() imager5.AddActor2D(actor5) imager5.SetViewport(0.6667,0.0,1.0,0.5) imager6 = vtk.vtkRenderer() imager6.AddActor2D(actor6) imager6.SetViewport(0.6667,0.5,1.0,1.0) imgWin = vtk.vtkRenderWindow() imgWin.AddRenderer(imager1) imgWin.AddRenderer(imager2) imgWin.AddRenderer(imager3) imgWin.AddRenderer(imager4) imgWin.AddRenderer(imager5) imgWin.AddRenderer(imager6) imgWin.SetSize(225,150) imgWin.Render() # --- end of script --
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 11748, 410, 30488, 198, 6738, 410, 30488, 13, 22602, 13, 44374, 1330, 410, 30488, 3855, 6601, 30016, 198, 36392, 42, 62, 26947, 62, 13252, 2394, 796, 410, 30488, 3855, 6601, 30016, 3419,...
2.223541
2,022
import collections
from itertools import repeat
import torch
import torch.nn as nn
import torch.nn.utils.rnn as rnn_utils


_single = _ntuple(1)
_pair = _ntuple(2)
_triple = _ntuple(3)
_quadruple = _ntuple(4)


def prepare_rnn_seq(rnn_input, lengths, hx=None, masks=None, batch_first=False):
    '''
    Args:
        rnn_input: [seq_len, batch, input_size]: tensor containing the features of the input sequence.
        lengths: [batch]: tensor containing the lengthes of the input sequence
        hx: [num_layers * num_directions, batch, hidden_size]: tensor containing the initial hidden state for each element in the batch.
        masks: [seq_len, batch]: tensor containing the mask for each element in the batch.
        batch_first: If True, then the input and output tensors are provided as [batch, seq_len, feature].
    Returns:
    '''
    check_res = check_decreasing(lengths)
    if check_res is None:
        lens = lengths
        rev_order = None
    else:
        lens, order, rev_order = check_res
        batch_dim = 0 if batch_first else 1
        rnn_input = rnn_input.index_select(batch_dim, order)
        if hx is not None:
            # hack lstm
            if isinstance(hx, tuple):
                hx, cx = hx
                hx = hx.index_select(1, order)
                cx = cx.index_select(1, order)
                hx = (hx, cx)
            else:
                hx = hx.index_select(1, order)
    lens = lens.tolist()
    seq = rnn_utils.pack_padded_sequence(rnn_input, lens, batch_first=batch_first)
    if masks is not None:
        if batch_first:
            masks = masks[:, :lens[0]]
        else:
            masks = masks[:lens[0]]
    return seq, hx, rev_order, masks
[ 11748, 17268, 198, 6738, 340, 861, 10141, 1330, 9585, 198, 11748, 28034, 198, 11748, 28034, 13, 20471, 355, 299, 77, 198, 11748, 28034, 13, 20471, 13, 26791, 13, 81, 20471, 355, 374, 20471, 62, 26791, 628, 198, 62, 29762, 796, 4808, 4...
2.245431
766
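The row above uses _ntuple and check_decreasing without defining them. check_decreasing is sample-specific and not reconstructed here, but _ntuple has a conventional definition; this sketch mirrors the one in torch.nn.modules.utils.

import collections.abc
from itertools import repeat

def _ntuple(n):
    # broadcast a scalar to an n-tuple, pass iterables through unchanged
    def parse(x):
        if isinstance(x, collections.abc.Iterable):
            return tuple(x)
        return tuple(repeat(x, n))
    return parse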
import pandas as pandas_Pandas_Module


Script.main()
[ 11748, 19798, 292, 355, 19798, 292, 62, 47206, 292, 62, 26796, 628, 628, 198, 7391, 13, 12417, 3419 ]
3.055556
18
############################################################################# # # Copyright (c) 2008 by Casey Duncan and contributors # All Rights Reserved. # # This software is subject to the provisions of the MIT License # A copy of the license should accompany this distribution. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # ############################################################################# """Fire simulation using point sprites""" __version__ = '$Id$' import os from pyglet import image from pyglet.gl import * from lepton import Particle, ParticleGroup, default_system from lepton.renderer import PointRenderer from lepton.texturizer import SpriteTexturizer, create_point_texture from lepton.emitter import StaticEmitter from lepton.domain import Line from lepton.controller import Gravity, Lifetime, Movement, Fader, ColorBlender win = pyglet.window.Window(resizable=True, visible=False) win.clear() glEnable(GL_BLEND) glShadeModel(GL_SMOOTH) glBlendFunc(GL_SRC_ALPHA,GL_ONE) glDisable(GL_DEPTH_TEST) flame = StaticEmitter( rate=500, template=Particle( position=(300,25,0), velocity=(0,0,0), color=(1,1,1,1), ), position=Line((win.width/2 - 85, -15, 0), (win.width/2 + 85, -15, 0)), deviation=Particle(position=(10,0,0), velocity=(7,50,0), age=0.75) ) default_system.add_global_controller( Lifetime(6), Gravity((0,20,0)), Movement(), ColorBlender( [(0, (0,0,0.5,0)), (0.5, (0,0,0.5,0.2)), (0.75, (0,0.5,1,0.6)), (1.5, (1,1,0,0.2)), (2.7, (0.9,0.2,0,0.4)), (3.2, (0.6,0.1,0.05,0.2)), (4.0, (0.8,0.8,0.8,0.1)), (6.0, (0.8,0.8,0.8,0)), ] ), ) group = ParticleGroup(controllers=[flame], renderer=PointRenderer(64, SpriteTexturizer(create_point_texture(64, 5)))) win.set_visible(True) pyglet.clock.schedule_interval(default_system.update, (1.0/30.0)) pyglet.clock.set_fps_limit(None) if __name__ == '__main__': default_system.run_ahead(2, 30) pyglet.app.run()
[ 29113, 29113, 7804, 4242, 2, 198, 2, 198, 2, 15069, 357, 66, 8, 3648, 416, 21097, 18625, 290, 20420, 198, 2, 1439, 6923, 33876, 13, 198, 2, 198, 2, 770, 3788, 318, 2426, 284, 262, 8617, 286, 262, 17168, 13789, 198, 2, 317, 4866, ...
2.570552
815
# coding=utf-8 """ Command Line Interface ====================== """ import argparse import logging import os from os import path import sys from landspout import core, __version__ LOGGER = logging.getLogger('landspout') LOGGING_FORMAT = '[%(asctime)-15s] %(levelname)-8s %(name)-15s: %(message)s' def exit_application(message=None, code=0): """Exit the application displaying the message to info or error based upon the exit code :param str message: The exit message :param int code: The exit code (default: 0) """ log_method = LOGGER.error if code else LOGGER.info log_method(message.strip()) sys.exit(code) def parse_cli_arguments(): """Return the base argument parser for CLI applications. :return: :class:`~argparse.ArgumentParser` """ parser = argparse.ArgumentParser( 'landspout', 'Static website generation tool', formatter_class=argparse.ArgumentDefaultsHelpFormatter, conflict_handler='resolve') parser.add_argument('-s', '--source', metavar='SOURCE', help='Source content directory', default='content') parser.add_argument('-d', '--destination', metavar='DEST', help='Destination directory for built content', default='build') parser.add_argument('-t', '--templates', metavar='TEMPLATE DIR', help='Template directory', default='templates') parser.add_argument('-b', '--base-uri-path', action='store', default='/') parser.add_argument('--whitespace', action='store', choices=['all', 'single', 'oneline'], default='all', help='Compress whitespace') parser.add_argument('-n', '--namespace', type=argparse.FileType('r'), help='Load a JSON file of values to inject into the ' 'default rendering namespace.') parser.add_argument('-i', '--interval', type=int, default=3, help='Interval in seconds between file ' 'checks while watching or serving') parser.add_argument('--port', type=int, default=8080, help='The port to listen on when serving') parser.add_argument('--debug', action='store_true', help='Extra verbose debug logging') parser.add_argument('-v', '--version', action='version', version='%(prog)s {}'.format(__version__), help='output version information, then exit') parser.add_argument('command', nargs='?', choices=['build', 'watch', 'serve'], help='The command to run', default='build') return parser.parse_args() def validate_paths(args): """Ensure all of the configured paths actually exist.""" if not path.exists(args.destination): LOGGER.warning('Destination path "%s" does not exist, creating', args.destination) os.makedirs(path.normpath(args.destination)) for file_path in [args.source, args.templates]: if not path.exists(file_path): exit_application('Path {} does not exist'.format(file_path), 1) def main(): """Application entry point""" args = parse_cli_arguments() log_level = logging.DEBUG if args.debug else logging.INFO logging.basicConfig(level=log_level, format=LOGGING_FORMAT) LOGGER.info('Landspout v%s [%s]', __version__, args.command) validate_paths(args) landspout = core.Landspout(args) if args.command == 'build': landspout.build() elif args.command == 'watch': landspout.watch() elif args.command == 'serve': landspout.serve()
[ 2, 19617, 28, 40477, 12, 23, 198, 37811, 198, 21575, 6910, 26491, 198, 4770, 50155, 198, 198, 37811, 198, 11748, 1822, 29572, 198, 11748, 18931, 198, 11748, 28686, 198, 6738, 28686, 1330, 3108, 198, 11748, 25064, 198, 198, 6738, 8604, 7...
2.343173
1,626
import sys
sys.path.append('../')
import numpy as np
import tt

d = 30
n = 2 ** d
b = 1E3
h = b / (n + 1)
#x = np.arange(n)
#x = np.reshape(x, [2] * d, order = 'F')
#x = tt.tensor(x, 1e-12)
x = tt.xfun(2, d)
e = tt.ones(2, d)
x = x + e
x = x * h

sf = lambda x : np.sin(x) / x #Should be rank 2

y = tt.multifuncrs([x], sf, 1e-6, ['y0', tt.ones(2, d)])
#y1 = tt.tensor(sf(x.full()), 1e-8)

print "pi / 2 ~ ", tt.dot(y, tt.ones(2, d)) * h
#print (y - y1).norm() / y.norm()
[ 11748, 25064, 198, 17597, 13, 6978, 13, 33295, 10786, 40720, 11537, 198, 11748, 299, 32152, 355, 45941, 198, 11748, 256, 83, 198, 198, 67, 796, 1542, 198, 77, 796, 362, 12429, 288, 198, 65, 796, 352, 36, 18, 198, 71, 796, 275, 1220,...
1.830116
259
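The tt row above approximates the Dirichlet integral of sin(x)/x, which tends to pi/2, on a rank-structured grid. A dense-numpy sanity check of the same quadrature, with a smaller n since the grid is materialized here:

import numpy as np

b, n = 1e3, 2 ** 20
h = b / (n + 1)
x = (np.arange(n) + 1) * h          # grid points h, 2h, ..., nh
print(np.sum(np.sin(x) / x) * h)    # ~ pi / 2 = 1.5707...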
import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web import tornado.auth import tornado.escape import os.path import logging import sys import urllib import json from uuid import uuid4 from tornado.options import define, options define("port", default=8000, help="run on the given port", type=int) #to do - # check character set of inputs (not vital as 'block' added to each user). # scores? #------------------------------------------------------------------------------Main app code------------------------------------------- #----------------------------------------------------------status handlers------------------------- # these handle the asynch hooks from the pages and sending messages to the pages # a lot of shared code here - I'm sure this could be better! # message handlers - recieves messages from the pages (currently only control and client) # - template handlers ------------- pages that are actually called by the browser. if __name__ == '__main__': # tornado.options.parse_command_line() app = Application() if len(sys.argv) > 1: try: with open(sys.argv[1]) as json_data: app.gamefile = json.load(json_data) json_data.close() app.quiztype = app.gamefile["quiztype"] if "notes" in app.gamefile: app.notes = app.gamefile["notes"] if "questionarray" in app.gamefile: app.questionarray = app.gamefile["questionarray"] else: app.questionarray = "{}" if "answerarray" in app.gamefile: app.answerarray = app.gamefile["answerarray"] else: app.answerarray = "{}" except: print("not a valid json file, using defaults") set_defaults() else: print("no file given - using defaults") set_defaults() app.status.setQuizType(app.quiztype) http_server = tornado.httpserver.HTTPServer(app) http_server.listen(options.port) tornado.ioloop.IOLoop.instance().start()
[ 11748, 33718, 13, 5450, 18497, 198, 11748, 33718, 13, 1669, 11224, 198, 11748, 33718, 13, 25811, 198, 11748, 33718, 13, 12384, 198, 11748, 33718, 13, 18439, 198, 11748, 33718, 13, 41915, 198, 11748, 28686, 13, 6978, 198, 11748, 18931, 220...
2.591765
850
import matplotlib.pyplot as plt
import netCDF4
import numpy

nc = netCDF4.Dataset("data/ructemps.nc")
data = nc.variables["tmpc"][17, :, :]
nc.close()

(fig, ax) = plt.subplots(1, 1)
ax.imshow(numpy.flipud(data))
fig.savefig("test.png")
[ 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 11748, 2010, 34, 8068, 19, 198, 11748, 299, 32152, 198, 198, 10782, 796, 2010, 34, 8068, 19, 13, 27354, 292, 316, 7203, 7890, 14, 1356, 368, 862, 13, 10782, 4943, 198, 78...
2.133929
112
import matplotlib import matplotlib.pyplot as plt import numpy as np import math from matplotlib.ticker import FormatStrFormatter from matplotlib import scale as mscale from matplotlib import transforms as mtransforms # z = [0,0.1,0.3,0.9,1,2,5] z = [7.8, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1230] # thick = [20,40,20,60,37,32,21]ax1.set_xscale('log') # thick=[15.4, 18.2, 18.7, 19.2, 19.4, 19.5, 19.9, 20.1, 20.4, 20.5, 20.6, 20.7, 20.8, 20.7, 20.7, 20.6, 20.6, 20.6, 20.5, 20.5, 19.8] mrcnn=[17.7, 19.8, 20.0, 19.9, 20.2, 19.5, 19.1, 19.1] x_ticks = [0.001, 0.002, 0.004, 0.008, 0.01, 0.02, 0.04, 0.08] # plt.plot([1.0],[44.8], 'D', color = 'black') # plt.plot([0],[35.9], 'D', color = 'red') # plt.plot([1.0],[56.8], 'D', color = 'black') fig = plt.figure(figsize=(8,5)) ax1 = fig.add_subplot(111) matplotlib.rcParams.update({'font.size': 20}) ax1.plot(x_ticks, mrcnn, linestyle='dashed', marker='o', linewidth=2, c='k', label='mrcnn-r50-ag') # ax1.plot(z, htc, marker='o', linewidth=2, c='g', label='htc') # ax1.plot([1e-4],[15.4], 'D', color = 'green') # ax1.plot([1230],[19.8], 'D', color = 'red') plt.xlabel('calibration lr', size=16) plt.ylabel('bAP', size=16) # plt.gca().set_xscale('custom') ax1.set_xscale('log') ax1.set_xticks(x_ticks) # from matplotlib.ticker import ScalarFormatter # ax1.xaxis.set_major_formatter(ScalarFormatter()) # plt.legend(['calibration lr'], loc='best') plt.minorticks_off() plt.grid() plt.savefig('calibration_lr.eps', format='eps', dpi=1000) plt.show() # import numpy as np # import matplotlib.pyplot as plt # from scipy.interpolate import interp1d # y1=[35.9, 43.4, 46.1, 49.3, 50.3, 51.3, 51.4, 49.9, 49.5, 48.5, 44.8] # y2=[40.5, 48.2, 53.9 , 56.9, 57.8, 59.2, 58.3, 57.9, 57.5, 57.2, 56.8] # y3=[61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5] # x = np.linspace(0, 1, num=11, endpoint=True) # # f1 = interp1d(x, y1, kind='cubic') # f2 = interp1d(x, y2, kind='cubic') # f3 = interp1d(x, y3, kind='cubic') # xnew = np.linspace(0, 1, num=101, endpoint=True) # plt.plot(xnew, f3(xnew), '--', color='fuchsia') # plt.plot(xnew, f1(xnew), '--', color='blue') # plt.plot(xnew, f2(xnew), '--', color='green') # # plt.plot([0],[40.5], 'D', color = 'red') # plt.plot([1.0],[44.8], 'D', color = 'black') # plt.plot([0],[35.9], 'D', color = 'red') # plt.plot([1.0],[56.8], 'D', color = 'black') # plt.plot(x, y3, 'o', color = 'fuchsia') # plt.plot(x, y1, 'o', color = 'blue') # plt.plot(x, y2, 'o', color = 'green') # plt.plot([0],[40.5], 'D', color = 'red') # plt.plot([1.0],[44.8], 'D', color = 'black') # plt.plot([0],[35.9], 'D', color = 'red') # plt.plot([1.0],[56.8], 'D', color = 'black') # plt.legend(['teacher','0.25x', '0.5x', 'full-feature-imitation', 'only GT supervison'], loc='best') # plt.xlabel('Thresholding factor') # plt.ylabel('mAP') # plt.title('Resulting mAPs of varying thresholding factors') # #plt.legend(['0.5x']) # # plt.savefig('varying_thresh.eps', format='eps', dpi=1000) # plt.show()
[ 198, 11748, 2603, 29487, 8019, 198, 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 11748, 299, 32152, 355, 45941, 198, 11748, 10688, 198, 6738, 2603, 29487, 8019, 13, 83, 15799, 1330, 18980, 13290, 8479, 1436, 198, 6738, ...
2.049358
1,479
import discord
from discord.ext.commands import Bot

TOKEN = "<discordtoken>"

client = discord.Client()
bot = Bot(command_prefix="!")

bot.run(TOKEN)
[ 11748, 36446, 201, 198, 6738, 36446, 13, 2302, 13, 9503, 1746, 1330, 18579, 201, 198, 201, 198, 10468, 43959, 796, 33490, 15410, 585, 30001, 24618, 201, 198, 201, 198, 16366, 796, 36446, 13, 11792, 3419, 201, 198, 13645, 796, 18579, 7, ...
2.5625
64
import pandas as pd

# Global variable to set the base path to our dataset folder
base_url = '../dataset/'


def update_mailing_list_pandas(filename):
    """
    Your docstring documentation starts here.

    For more information on how to proper document your function, please refer
    to the official PEP8: https://www.python.org/dev/peps/pep-0008/#documentation-strings.
    """
    df = # Read your csv file with pandas
    return # Your logic to filter only rows with the `active` flag the return the number of rows

# Calling the function to test your code
print(update_mailing_list_pandas('mailing_list.csv'))
[ 11748, 19798, 292, 355, 279, 67, 201, 198, 201, 198, 2, 8060, 7885, 284, 900, 262, 2779, 3108, 284, 674, 27039, 9483, 201, 198, 8692, 62, 6371, 796, 705, 40720, 19608, 292, 316, 14, 6, 201, 198, 201, 198, 201, 198, 4299, 4296, 62,...
2.851528
229
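The blanks in the row above are intentional exercise stubs. One possible completion, assuming the csv carries a boolean `active` column as the comments suggest; the _solved suffix is added here to avoid shadowing the stub.

def update_mailing_list_pandas_solved(filename):
    df = pd.read_csv(base_url + filename)            # read the csv
    return len(df[df["active"] == True])             # count active rows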
#!/usr/bin/env python3 # "This service will check your statement that a directed graph you provide us admits an eulerian walk (of the specified type)"" from os import EX_TEMPFAIL from sys import stderr, exit import collections from multilanguage import Env, Lang, TALcolors from TALinputs import TALinput from euler_dir_lib import * # METADATA OF THIS TAL_SERVICE: args_list = [ ('walk_type',str), ('feedback',str), ('eulerian',bool), ('MAXN',int), ('MAXM',int), ] ENV =Env(args_list) TAc =TALcolors(ENV) LANG=Lang(ENV, TAc, lambda fstring: eval(f"f'{fstring}'")) MAXN = ENV['MAXN'] MAXM = ENV['MAXM'] # START CODING YOUR SERVICE: print(f"#? waiting for your directed graph.\nFormat: each line two numbers separated by space. On the first line the number of nodes (an integer n in the interval [1,{MAXN}]) and the number of arcs (an integer m in the interval [1,{MAXM}]). Then follow m lines, one for each arc, each with two numbers in the interval [0,n). These specify the tail node and the head node of the arc, in this order.\nAny line beggining with the '#' character is ignored.\nIf you prefer, you can use the 'TA_send_txt_file.py' util here to send us the lines of a file. Just plug in the util at the 'rtal connect' command like you do with any other bot and let the util feed in the file for you rather than acting by copy and paste yourself.") n, m = TALinput(int, 2, TAc=TAc) if n < 1: TAc.print(LANG.render_feedback("n-LB", f"# ERRORE: il numero di nodi del grafo deve essere almeno 1. Invece il primo dei numeri che hai inserito n={n}."), "red") exit(0) if m < 0: TAc.print(LANG.render_feedback("m-LB", f"# ERRORE: il numero di archi del grafo non pu essere negativo. Invece il secondo dei numeri che hai inserito m={m}."), "red") exit(0) if n > MAXN: TAc.print(LANG.render_feedback("n-UB", f"# ERRORE: il numero di nodi del grafo non pu eccedere {ENV['MAXN']}. Invece il primo dei numeri che hai inserito n={n}>{ENV['MAXN']}."), "red") exit(0) if m > MAXM: TAc.print(LANG.render_feedback("m-UB", f"# ERRORE: il numero di archi del grafo non pu eccedere {ENV['MAXM']}. 
Invece il secondo dei numeri che hai inserito n={n}>{ENV['MAXM']}."), "red") exit(0) g = Graph(int(n)) adj = [ [] for _ in range(n)] for i in range(m): head, tail = TALinput(int, 2, TAc=TAc) if tail >= n or head >= n or tail < 0 or head < 0: TAc.print(LANG.render_feedback("n-at-least-1", f"# ERRORE: entrambi gli estremi di un arco devono essere nodi del grafo, ossia numeri interi ricompresi nell'intervallo [0,{ENV['MAXN']}."), "red") exit(0) g.addEdge(int(head),int(tail)) adj[int(head)].append(int(tail)) eul = ENV['eulerian'] if eul == 1: if ENV['walk_type'] == "closed": answer1 = g.isEulerianCycle() if answer1 == eul: TAc.OK() if answer1 == True: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"green") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green") printCircuit(adj) exit(0) else: TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian cycle!"),"red") exit(0) else: TAc.NO() exit(0) if ENV['walk_type'] == "open": answer1 = g.isEulerianWalk() answer2 = g.isEulerianCycle() if answer1 == eul and answer2==False and answer1 ==True : TAc.OK() if answer1 == True: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"green") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green") printCircuit(adj) exit(0) else: TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian walk!"),"red") exit(0) else: TAc.NO() exit(0) if ENV['walk_type'] == "any": answer1 = g.isEulerianCycle() answer2 = g.isEulerianWalk() if answer1 == eul or answer2 == eul: TAc.OK() if answer1 == eul: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"green") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green") printCircuit(adj) exit(0) if answer2 == eul: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"green") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green") g.printEulerTour() exit(0) else: TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian walk/cycle!"),"red") exit(0) if eul == 0: if ENV['walk_type'] == "closed": answer1 = g.isEulerianCycle() if answer1 == eul: TAc.OK() else: TAc.NO() if answer1 == True: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"red") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red") printCircuit(adj) exit(0) exit(0) if ENV['walk_type'] == "open": answer1 = g.isEulerianWalk() answer2 = g.isEulerianCycle() if answer1 == eul: TAc.OK() else: TAc.NO() TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"red") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red") printCircuit(adj) exit(0) if ENV['walk_type'] == "any": answer1 = g.isEulerianCycle() answer2 = g.isEulerianWalk() if answer1 == True or answer2 == True: TAc.NO() if answer1 == True: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"red") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red") printCircuit(adj) 
exit(0) if answer2 == True: TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"red") if ENV['feedback'] == "with_YES_certificate": TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red") g.printEulerTour() exit(0) else: TAc.OK() exit(0)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 198, 2, 366, 1212, 2139, 481, 2198, 534, 2643, 326, 257, 7924, 4823, 345, 2148, 514, 15534, 281, 304, 18173, 666, 2513, 357, 1659, 262, 7368, 2099, 8, 15931, 198, 198, 6738, 2868...
2.004363
3,667
import sys
import argparse
from hgraph import *
from rdkit import Chem
from multiprocessing import Pool

if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    parser.add_argument('--ncpu', type=int, default=1)
    args = parser.parse_args()

    data = [mol for line in sys.stdin for mol in line.split()[:2]]
    data = list(set(data))

    batch_size = len(data) // args.ncpu + 1
    batches = [data[i : i + batch_size] for i in range(0, len(data), batch_size)]

    pool = Pool(args.ncpu)
    vocab_list = pool.map(process, batches)
    vocab = [(x,y) for vocab in vocab_list for x,y in vocab]
    vocab = list(set(vocab))

    for x,y in sorted(vocab):
        print(x, y)
[ 11748, 25064, 198, 11748, 1822, 29572, 220, 198, 6738, 289, 34960, 1330, 1635, 198, 6738, 374, 67, 15813, 1330, 12870, 198, 6738, 18540, 305, 919, 278, 1330, 19850, 198, 198, 361, 11593, 3672, 834, 6624, 366, 834, 12417, 834, 1298, 628,...
2.505415
277
# -*- coding: utf-8 -*-
""" test """
from __future__ import unicode_literals
from django.template.loader import get_template
from django.contrib import messages

# Create your views here.
from django.http import HttpResponse


def index(request):
    """ index """
    template = get_template('cornwall/index.html')
    messages.set_level(request, messages.DEBUG)
    list(messages.get_messages(request))# clear out the previous messages
    messages.add_message(request, messages.INFO, 'Hello world.')
    context = {'nbar': 'cornwall'}
    html = template.render(context, request)
    return HttpResponse(html)
[ 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 1332, 37227, 198, 6738, 11593, 37443, 834, 1330, 28000, 1098, 62, 17201, 874, 198, 6738, 42625, 14208, 13, 28243, 13, 29356, 1330, 651, 62, 28243, 198, 6738, 42625,...
3.075377
199
# coding = utf-8
# env    = python3.5.2
# author = lujianxin
# time   = 201x-xx-xx
# purpose= - - -
from django.urls import re_path
from . import views

urlpatterns = [
    # re_path(r'usercenter$', views.UserCenter.as_view()),
    re_path(r'details/(\d+)$', views.UserDetails.as_view()),
    re_path(r'login$', views.Login.as_view()),
    re_path(r'regist$', views.Regist.as_view()),
    re_path(r'logout$', views.Logout.as_view()),
    re_path(r'securecenter$', views.SecureCenter.as_view()),
    re_path(r'write_article$', views.WriteArticle.as_view()),
    re_path(r'change_art/(\d+)$', views.ChangeArt.as_view()),
    re_path(r'cpwd$', views.ModifyPwd.as_view()),
    re_path(r'findpwd$', views.FindPwd.as_view()),
    re_path(r'cpwdsafe$', views.ModifyPwdSafe.as_view()),
]

if __name__ == '__main__':
    pass
[ 2, 19617, 796, 3384, 69, 12, 23, 198, 2, 17365, 220, 220, 220, 796, 21015, 18, 13, 20, 13, 17, 198, 2, 1772, 796, 300, 23577, 666, 87, 259, 198, 2, 640, 220, 220, 796, 580, 87, 12, 5324, 12, 5324, 220, 198, 2, 4007, 28, 532,...
2.164491
383
import math

oxygen_rating = 0
co2_rating = 0
length = 0

n_bits = 12

common = [0] * n_bits
anti = [0] * n_bits
numbers = []

with open("data.txt", "r") as f:
    lines = f.readlines()
    length = len(lines)
    for line in lines:
        bitmap = list(line.strip("\n"))
        bitmap = [int(bit) for bit in bitmap]
        numbers.append(bitmap)
        #print(bitmap)
        for j, bit in enumerate(bitmap):
            common[j] += bit

# Let's find oxygen generator rating first
numbers_copy = [number for number in numbers]
for i in range(n_bits):
    # Update common
    common = new_bitmap(numbers)
    # if more 1s in bit i
    if common[i] >= len(numbers)/2:
        most_c = 1
    else:
        most_c = 0
    #print(f"In round {i+1}, most common: {most_c}")
    numbers[:] = [number for number in numbers if (number[i] == most_c)]
    #print(numbers)
    if len(numbers) < 2:
        break

oxygen_rating = int("".join(str(bit) for bit in numbers[0]), 2)
print("O2:",oxygen_rating)

for i in range(n_bits):
    # Update common
    common = new_bitmap(numbers_copy)
    # if more 1s in bit i
    if common[i] >= len(numbers_copy)/2:
        most_c = 1
    else:
        most_c = 0
    #print(f"In round {i+1}, most common: {most_c}")
    numbers_copy[:] = [number for number in numbers_copy if (number[i] != most_c)]
    #print(numbers_copy)
    if len(numbers_copy) < 2:
        break

co2_rating = int("".join(str(bit) for bit in numbers_copy[0]), 2)
print("CO2:", co2_rating)

print("Answer: ", oxygen_rating*co2_rating)
[ 11748, 10688, 201, 198, 201, 198, 23536, 5235, 62, 8821, 796, 657, 201, 198, 1073, 17, 62, 8821, 796, 657, 201, 198, 13664, 796, 657, 201, 198, 201, 198, 77, 62, 9895, 796, 1105, 201, 198, 201, 198, 11321, 796, 685, 15, 60, 1635, ...
2.074214
795
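The day-3 row above calls new_bitmap(), which the sample never defines. From its call sites it recomputes the per-column bit sums over the surviving numbers; a hypothetical implementation reusing the sample's n_bits global:

def new_bitmap(numbers):
    # HYPOTHETICAL: per-column sum of bits across the remaining numbers
    common = [0] * n_bits
    for number in numbers:
        for j, bit in enumerate(number):
            common[j] += bit
    return common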
import math import numpy as np from munch import Munch from transformers import GPT2LMHeadModel, GPT2TokenizerFast import torch from copy import deepcopy ######################################################################### ### compute perplexity ######################################################################### def compute_sent_perplexity( sentences, perplex_scorer, log=True, reduce="prod", is_normalize=False, is_cuda=True): """Compute the sentence perplexity. For filtering. Args: sentences ([type]): [description] perplex_scorer ([type]): [description] log (bool, optional): [description]. Defaults to True. reduce (str, optional): [description]. Defaults to "prod". is_normalize (bool, optional): [description]. Defaults to False. Returns: [type]: [description] """ scores = [] model, tokenizer = perplex_scorer.model, perplex_scorer.tokenizer outputs = _tokens_log_prob(sentences, model, tokenizer, is_cuda=is_cuda) for sent_log_prob, sent_ids, sent_tokens in outputs: score = reduce_perplex_prob(sent_log_prob, reduce=reduce, log=log) if is_normalize: score = normalize_score(score, len(sent_tokens)) scores.append(score) return scores def compute_delta_perplexity(edit_ops, perplex_scorer, is_normalize=False, is_cuda=True): """This is to compute the perplexity Args: edit_ops ([type]): [description] perplex_scorer ([type]): [description] is_normalize (bool, optional): [description]. Defaults to False. Returns: [type]: [description] """ tuples = [] #print(metadata.primary.acore.doc.text) #print(metadata.primary.bcore.doc.text) edit_ops = [o for o in edit_ops if o.op != "equal"] for op in edit_ops: aphrase, bphrase = (op.fromz_full, op.toz_full) if \ op.op == "insert" or op.op == "delete" else (op.fromz_core, op.toz_core) asent, bsent = aphrase.doc, bphrase.doc tuples += [(asent.text, aphrase.text), (bsent.text, bphrase.text)] #print(tuples) scores = compute_phrase_perplexity(tuples, perplex_scorer, is_normalize=is_normalize, is_cuda=is_cuda) #print(scores) paired_scores = [] for i in range(len(edit_ops)): # because of negative, it's i - i+1; lower the better. #print(scores[2*i]) #print(scores[2*i+1]) paired_scores.append(Munch( pr_sent=scores[2*i][0]-scores[2*i+1][0], pr_phrase=scores[2*i][1]-scores[2*i+1][1])) paired_scores = sorted(paired_scores, key=lambda x: ( max(x.pr_sent, x.pr_phrase)), reverse=True) # use the most ungrammar part as the return paired_scores[0]
[ 11748, 10688, 198, 11748, 299, 32152, 355, 45941, 198, 6738, 285, 3316, 1330, 337, 3316, 198, 6738, 6121, 364, 1330, 402, 11571, 17, 31288, 13847, 17633, 11, 402, 11571, 17, 30642, 7509, 22968, 198, 198, 11748, 28034, 198, 6738, 4866, 1...
2.460644
1,118
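The perplexity row above leans on _tokens_log_prob, reduce_perplex_prob and normalize_score, none of which appear in the sample. A minimal sketch of the token log-probability part alone, assuming a GPT-2-style causal LM as the imports suggest:

import torch

def _tokens_log_prob(sentences, model, tokenizer, is_cuda=True):
    device = "cuda" if is_cuda and torch.cuda.is_available() else "cpu"
    model = model.to(device).eval()
    outputs = []
    for sent in sentences:
        ids = tokenizer(sent, return_tensors="pt")["input_ids"].to(device)
        with torch.no_grad():
            logits = model(ids).logits
        # log P(token_t | tokens_<t): targets are shifted one step left
        log_probs = torch.log_softmax(logits[0, :-1], dim=-1)
        token_lp = log_probs.gather(1, ids[0, 1:, None]).squeeze(1)
        outputs.append(
            (token_lp, ids[0], tokenizer.convert_ids_to_tokens(ids[0].tolist())))
    return outputs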
from tdw.controller import Controller from tdw.tdw_utils import TDWUtils from tdw.add_ons.image_capture import ImageCapture from tdw.backend.paths import EXAMPLE_CONTROLLER_OUTPUT_PATH """ Get the _flow pass. """ c = Controller() object_id_0 = c.get_unique_id() object_id_1 = c.get_unique_id() object_id_2 = c.get_unique_id() object_id_3 = c.get_unique_id() object_names = {object_id_0: "small_table_green_marble", object_id_1: "rh10", object_id_2: "jug01", object_id_3: "jug05"} output_directory = EXAMPLE_CONTROLLER_OUTPUT_PATH.joinpath("flow") # Enable image capture for the _flow pass. print(f"Images will be saved to: {output_directory}") capture = ImageCapture(path=output_directory, pass_masks=["_flow"], avatar_ids=["a"]) c.add_ons.append(capture) commands = [TDWUtils.create_empty_room(12, 12), c.get_add_object(object_names[object_id_0], object_id=object_id_0), c.get_add_object(object_names[object_id_1], position={"x": 0.7, "y": 0, "z": 0.4}, rotation={"x": 0, "y": 30, "z": 0}, object_id=object_id_1), c.get_add_object(model_name=object_names[object_id_2], position={"x": -0.3, "y": 0.9, "z": 0.2}, object_id=object_id_2), c.get_add_object(object_names[object_id_3], position={"x": 0.3, "y": 0.9, "z": -0.2}, object_id=object_id_3), {"$type": "apply_force_to_object", "id": object_id_1, "force": {"x": 0, "y": 5, "z": -200}}] commands.extend(TDWUtils.create_avatar(position={"x": 2.478, "y": 1.602, "z": 1.412}, look_at={"x": 0, "y": 0.2, "z": 0}, avatar_id="a")) c.communicate(commands) for i in range(3): c.communicate([]) c.communicate({"$type": "terminate"})
[ 6738, 41560, 86, 13, 36500, 1330, 22741, 198, 6738, 41560, 86, 13, 8671, 86, 62, 26791, 1330, 13320, 54, 18274, 4487, 198, 6738, 41560, 86, 13, 2860, 62, 684, 13, 9060, 62, 27144, 495, 1330, 7412, 49630, 198, 6738, 41560, 86, 13, 18...
1.827183
1,111
# Copyright 2013 Google, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json

import modelCourse as model
import webapp2
from google.appengine.api import users

# The handler classes (QueryHandler, InsertHandler, DeleteHandler,
# UpdateHandler, GetUser) are defined elsewhere in the original module;
# only the routing table survives in this excerpt.
APP = webapp2.WSGIApplication([
    ('/rest/query', QueryHandler),
    ('/rest/insert', InsertHandler),
    ('/rest/delete', DeleteHandler),
    ('/rest/update', UpdateHandler),
    ('/rest/user', GetUser),
], debug=True)
[ 2, 15069, 2211, 3012, 11, 3457, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198, 2, 921, 743, ...
3.350554
271
from sys import platform

try:
    from yaml import CSafeLoader as Loader  # use the C loader when possible
except ImportError:
    from yaml import SafeLoader as Loader
import yaml

with open("config.yml") as f:
    config = yaml.load(f, Loader=Loader)  # load the config yaml

if platform in ("linux", "linux2", "win32"):
    import PySimpleGUI
elif (
    platform == "darwin"
):  # Have to use web/remi on MacOS as the normal tkinter version causes an OS error
    # TODO: Test on MacOS with tkinter; possibly figure out how to get it working.
    import PySimpleGUIWeb as PySimpleGUI
[ 6738, 25064, 1330, 3859, 198, 198, 28311, 25, 198, 220, 220, 220, 422, 331, 43695, 1330, 9429, 8635, 17401, 355, 8778, 263, 220, 1303, 779, 262, 327, 40213, 618, 1744, 198, 16341, 17267, 12331, 25, 198, 220, 220, 220, 422, 331, 43695,...
3.036269
193
#
# PySNMP MIB module CISCO-IETF-PW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-IETF-PW-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:43:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion")
CpwVcType, CpwGroupID, CpwVcIndexType, CpwOperStatus, CpwVcIDType = mibBuilder.importSymbols("CISCO-IETF-PW-TC-MIB", "CpwVcType", "CpwGroupID", "CpwVcIndexType", "CpwOperStatus", "CpwVcIDType")
ciscoExperiment, = mibBuilder.importSymbols("CISCO-SMI", "ciscoExperiment")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Counter32, MibIdentifier, experimental, ModuleIdentity, Unsigned32, NotificationType, IpAddress, TimeTicks, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, ObjectIdentity, Counter64, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "experimental", "ModuleIdentity", "Unsigned32", "NotificationType", "IpAddress", "TimeTicks", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Gauge32", "ObjectIdentity", "Counter64", "Integer32")
TruthValue, TimeStamp, StorageType, RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TimeStamp", "StorageType", "RowStatus", "TextualConvention", "DisplayString")
cpwVcMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 10, 106))
cpwVcMIB.setRevisions(('2004-03-17 12:00', '2003-02-26 12:00', '2002-05-26 12:00', '2002-01-30 12:00', '2001-11-07 12:00', '2001-07-11 12:00',))
if mibBuilder.loadTexts: cpwVcMIB.setLastUpdated('200403171200Z')
if mibBuilder.loadTexts: cpwVcMIB.setOrganization('Cisco Systems, Inc.')
cpwVcObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 1))
cpwVcNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 2))
cpwVcConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3))
cpwVcIndexNext = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcIndexNext.setStatus('current')
cpwVcTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2), )
if mibBuilder.loadTexts: cpwVcTable.setStatus('current')
cpwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcEntry.setStatus('current')
cpwVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 1), CpwVcIndexType())
if mibBuilder.loadTexts: cpwVcIndex.setStatus('current')
cpwVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 2), CpwVcType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcType.setStatus('current')
cpwVcOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("maintenanceProtocol", 2), ("other", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcOwner.setStatus('current')
cpwVcPsnType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("mpls", 1), ("l2tp", 2), ("ip", 3), ("mplsOverIp", 4), ("gre", 5), ("other", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPsnType.setStatus('current')
cpwVcSetUpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcSetUpPriority.setStatus('current')
cpwVcHoldingPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcHoldingPriority.setStatus('current')
cpwVcInboundMode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("loose", 1), ("strict", 2))).clone('loose')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcInboundMode.setStatus('current')
cpwVcPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 8), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPeerAddrType.setStatus('current')
cpwVcPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 9), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPeerAddr.setStatus('current')
cpwVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 10), CpwVcIDType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcID.setStatus('current')
cpwVcLocalGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 11), CpwGroupID()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalGroupID.setStatus('current')
cpwVcControlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 12), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcControlWord.setStatus('current')
cpwVcLocalIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalIfMtu.setStatus('current')
cpwVcLocalIfString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 14), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalIfString.setStatus('current')
cpwVcRemoteGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 15), CpwGroupID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteGroupID.setStatus('current')
cpwVcRemoteControlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noControlWord", 1), ("withControlWord", 2), ("notYetKnown", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcRemoteControlWord.setStatus('current')
cpwVcRemoteIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 17), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteIfMtu.setStatus('current')
cpwVcRemoteIfString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 18), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteIfString.setStatus('current')
cpwVcOutboundVcLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 19), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcOutboundVcLabel.setStatus('current')
cpwVcInboundVcLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 20), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcInboundVcLabel.setStatus('current')
cpwVcName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 21), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcName.setStatus('current')
cpwVcDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 22), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcDescr.setStatus('current')
cpwVcCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 23), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcCreateTime.setStatus('current')
cpwVcUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcUpTime.setStatus('current')
cpwVcAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcAdminStatus.setStatus('current')
cpwVcOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 26), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcOperStatus.setStatus('current')
cpwVcInboundOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 27), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcInboundOperStatus.setStatus('current')
cpwVcOutboundOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 28), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcOutboundOperStatus.setStatus('current')
cpwVcTimeElapsed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcTimeElapsed.setStatus('current')
cpwVcValidIntervals = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcValidIntervals.setStatus('current')
cpwVcRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 31), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcRowStatus.setStatus('current')
cpwVcStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 32), StorageType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcStorageType.setStatus('current')
cpwVcPerfCurrentTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3), )
if mibBuilder.loadTexts: cpwVcPerfCurrentTable.setStatus('current')
cpwVcPerfCurrentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcPerfCurrentEntry.setStatus('current')
cpwVcPerfCurrentInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentInHCPackets.setStatus('current')
cpwVcPerfCurrentInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentInHCBytes.setStatus('current')
cpwVcPerfCurrentOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentOutHCPackets.setStatus('current')
cpwVcPerfCurrentOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentOutHCBytes.setStatus('current')
cpwVcPerfIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4), )
if mibBuilder.loadTexts: cpwVcPerfIntervalTable.setStatus('current')
cpwVcPerfIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"), (0, "CISCO-IETF-PW-MIB", "cpwVcPerfIntervalNumber"))
if mibBuilder.loadTexts: cpwVcPerfIntervalEntry.setStatus('current')
cpwVcPerfIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96)))
if mibBuilder.loadTexts: cpwVcPerfIntervalNumber.setStatus('current')
cpwVcPerfIntervalValidData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalValidData.setStatus('current')
cpwVcPerfIntervalTimeElapsed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalTimeElapsed.setStatus('current')
cpwVcPerfIntervalInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalInHCPackets.setStatus('current')
cpwVcPerfIntervalInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalInHCBytes.setStatus('current')
cpwVcPerfIntervalOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalOutHCPackets.setStatus('current')
cpwVcPerfIntervalOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalOutHCBytes.setStatus('current')
cpwVcPerfTotalTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5), )
if mibBuilder.loadTexts: cpwVcPerfTotalTable.setStatus('current')
cpwVcPerfTotalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcPerfTotalEntry.setStatus('current')
cpwVcPerfTotalInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalInHCPackets.setStatus('current')
cpwVcPerfTotalInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalInHCBytes.setStatus('current')
cpwVcPerfTotalOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalOutHCPackets.setStatus('current')
cpwVcPerfTotalOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalOutHCBytes.setStatus('current')
cpwVcPerfTotalDiscontinuityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalDiscontinuityTime.setStatus('current')
cpwVcPerfTotalErrorPackets = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalErrorPackets.setStatus('current')
cpwVcIdMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7), )
if mibBuilder.loadTexts: cpwVcIdMappingTable.setStatus('current')
cpwVcIdMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcType"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcID"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingPeerAddrType"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingPeerAddr"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcIndex"))
if mibBuilder.loadTexts: cpwVcIdMappingEntry.setStatus('current')
cpwVcIdMappingVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 1), CpwVcType())
if mibBuilder.loadTexts: cpwVcIdMappingVcType.setStatus('current')
cpwVcIdMappingVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 2), CpwVcIDType())
if mibBuilder.loadTexts: cpwVcIdMappingVcID.setStatus('current')
cpwVcIdMappingPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 3), InetAddressType())
if mibBuilder.loadTexts: cpwVcIdMappingPeerAddrType.setStatus('current')
cpwVcIdMappingPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 4), InetAddress())
if mibBuilder.loadTexts: cpwVcIdMappingPeerAddr.setStatus('current')
cpwVcIdMappingVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 5), CpwVcIndexType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcIdMappingVcIndex.setStatus('current')
cpwVcPeerMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8), )
if mibBuilder.loadTexts: cpwVcPeerMappingTable.setStatus('current')
cpwVcPeerMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingPeerAddrType"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingPeerAddr"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcType"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcID"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcIndex"))
if mibBuilder.loadTexts: cpwVcPeerMappingEntry.setStatus('current')
cpwVcPeerMappingPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 1), InetAddressType())
if mibBuilder.loadTexts: cpwVcPeerMappingPeerAddrType.setStatus('current')
cpwVcPeerMappingPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 2), InetAddress())
if mibBuilder.loadTexts: cpwVcPeerMappingPeerAddr.setStatus('current')
cpwVcPeerMappingVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 3), CpwVcType())
if mibBuilder.loadTexts: cpwVcPeerMappingVcType.setStatus('current')
cpwVcPeerMappingVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 4), CpwVcIDType())
if mibBuilder.loadTexts: cpwVcPeerMappingVcID.setStatus('current')
cpwVcPeerMappingVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 5), CpwVcIndexType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPeerMappingVcIndex.setStatus('current')
cpwVcUpDownNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 9), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpwVcUpDownNotifEnable.setStatus('current')
cpwVcNotifRate = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 10), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpwVcNotifRate.setStatus('current')
cpwVcDown = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 106, 2, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"))
if mibBuilder.loadTexts: cpwVcDown.setStatus('current')
cpwVcUp = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 106, 2, 2)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"))
if mibBuilder.loadTexts: cpwVcUp.setStatus('current')
cpwVcGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1))
cpwVcCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 2))
cpwModuleCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 2, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcGroup"), ("CISCO-IETF-PW-MIB", "cpwVcPeformanceGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwModuleCompliance = cpwModuleCompliance.setStatus('current')
cpwVcGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcIndexNext"), ("CISCO-IETF-PW-MIB", "cpwVcType"), ("CISCO-IETF-PW-MIB", "cpwVcOwner"), ("CISCO-IETF-PW-MIB", "cpwVcPsnType"), ("CISCO-IETF-PW-MIB", "cpwVcSetUpPriority"), ("CISCO-IETF-PW-MIB", "cpwVcHoldingPriority"), ("CISCO-IETF-PW-MIB", "cpwVcInboundMode"), ("CISCO-IETF-PW-MIB", "cpwVcPeerAddrType"), ("CISCO-IETF-PW-MIB", "cpwVcPeerAddr"), ("CISCO-IETF-PW-MIB", "cpwVcID"), ("CISCO-IETF-PW-MIB", "cpwVcLocalGroupID"), ("CISCO-IETF-PW-MIB", "cpwVcControlWord"), ("CISCO-IETF-PW-MIB", "cpwVcLocalIfMtu"), ("CISCO-IETF-PW-MIB", "cpwVcLocalIfString"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteGroupID"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteControlWord"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteIfMtu"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteIfString"), ("CISCO-IETF-PW-MIB", "cpwVcOutboundVcLabel"), ("CISCO-IETF-PW-MIB", "cpwVcInboundVcLabel"), ("CISCO-IETF-PW-MIB", "cpwVcName"), ("CISCO-IETF-PW-MIB", "cpwVcDescr"), ("CISCO-IETF-PW-MIB", "cpwVcCreateTime"), ("CISCO-IETF-PW-MIB", "cpwVcUpTime"), ("CISCO-IETF-PW-MIB", "cpwVcAdminStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOutboundOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcInboundOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcTimeElapsed"), ("CISCO-IETF-PW-MIB", "cpwVcValidIntervals"), ("CISCO-IETF-PW-MIB", "cpwVcRowStatus"), ("CISCO-IETF-PW-MIB", "cpwVcStorageType"), ("CISCO-IETF-PW-MIB", "cpwVcUpDownNotifEnable"), ("CISCO-IETF-PW-MIB", "cpwVcNotifRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcGroup = cpwVcGroup.setStatus('current')
cpwVcPeformanceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 2)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalValidData"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalTimeElapsed"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalDiscontinuityTime"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalErrorPackets"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcPeformanceGroup = cpwVcPeformanceGroup.setStatus('current')
cpwVcMappingTablesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 3)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcIdMappingVcIndex"), ("CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcIndex"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcMappingTablesGroup = cpwVcMappingTablesGroup.setStatus('current')
cpwVcNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 4)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcUp"), ("CISCO-IETF-PW-MIB", "cpwVcDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cpwVcNotificationsGroup = cpwVcNotificationsGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-IETF-PW-MIB", cpwVcDown=cpwVcDown, cpwVcIdMappingVcType=cpwVcIdMappingVcType, cpwVcControlWord=cpwVcControlWord, cpwVcPerfIntervalValidData=cpwVcPerfIntervalValidData, cpwVcSetUpPriority=cpwVcSetUpPriority, cpwVcPsnType=cpwVcPsnType, cpwVcStorageType=cpwVcStorageType, cpwVcPeerMappingVcID=cpwVcPeerMappingVcID, cpwVcPeerMappingTable=cpwVcPeerMappingTable, cpwVcPerfTotalInHCBytes=cpwVcPerfTotalInHCBytes, PYSNMP_MODULE_ID=cpwVcMIB, cpwVcPerfIntervalTimeElapsed=cpwVcPerfIntervalTimeElapsed, cpwVcIdMappingPeerAddrType=cpwVcIdMappingPeerAddrType, cpwVcPeerAddrType=cpwVcPeerAddrType, cpwVcHoldingPriority=cpwVcHoldingPriority, cpwVcPerfTotalInHCPackets=cpwVcPerfTotalInHCPackets, cpwVcIndexNext=cpwVcIndexNext, cpwVcIdMappingTable=cpwVcIdMappingTable, cpwVcMappingTablesGroup=cpwVcMappingTablesGroup, cpwVcPeformanceGroup=cpwVcPeformanceGroup, cpwVcEntry=cpwVcEntry, cpwVcPeerAddr=cpwVcPeerAddr, cpwVcInboundVcLabel=cpwVcInboundVcLabel, cpwVcPerfTotalOutHCBytes=cpwVcPerfTotalOutHCBytes, cpwVcMIB=cpwVcMIB, cpwVcValidIntervals=cpwVcValidIntervals, cpwVcOwner=cpwVcOwner, cpwVcRemoteGroupID=cpwVcRemoteGroupID, cpwVcPerfIntervalTable=cpwVcPerfIntervalTable, cpwVcPeerMappingPeerAddr=cpwVcPeerMappingPeerAddr, cpwVcConformance=cpwVcConformance, cpwVcPerfIntervalOutHCPackets=cpwVcPerfIntervalOutHCPackets, cpwVcInboundOperStatus=cpwVcInboundOperStatus, cpwVcPerfCurrentTable=cpwVcPerfCurrentTable, cpwVcPerfTotalDiscontinuityTime=cpwVcPerfTotalDiscontinuityTime, cpwVcOutboundVcLabel=cpwVcOutboundVcLabel, cpwVcUp=cpwVcUp, cpwVcIdMappingVcID=cpwVcIdMappingVcID, cpwVcLocalIfString=cpwVcLocalIfString, cpwVcUpTime=cpwVcUpTime, cpwVcPeerMappingPeerAddrType=cpwVcPeerMappingPeerAddrType, cpwVcType=cpwVcType, cpwVcPeerMappingVcType=cpwVcPeerMappingVcType, cpwVcPerfIntervalEntry=cpwVcPerfIntervalEntry, cpwVcPerfIntervalNumber=cpwVcPerfIntervalNumber, cpwVcName=cpwVcName, cpwVcPerfIntervalOutHCBytes=cpwVcPerfIntervalOutHCBytes, cpwVcRemoteIfMtu=cpwVcRemoteIfMtu, cpwVcIdMappingPeerAddr=cpwVcIdMappingPeerAddr, cpwVcID=cpwVcID, cpwVcPerfIntervalInHCPackets=cpwVcPerfIntervalInHCPackets, cpwVcPerfTotalEntry=cpwVcPerfTotalEntry, cpwVcNotificationsGroup=cpwVcNotificationsGroup, cpwVcCreateTime=cpwVcCreateTime, cpwVcNotifRate=cpwVcNotifRate, cpwVcPerfCurrentInHCBytes=cpwVcPerfCurrentInHCBytes, cpwVcRemoteControlWord=cpwVcRemoteControlWord, cpwVcLocalIfMtu=cpwVcLocalIfMtu, cpwVcNotifications=cpwVcNotifications, cpwVcInboundMode=cpwVcInboundMode, cpwVcRemoteIfString=cpwVcRemoteIfString, cpwVcGroup=cpwVcGroup, cpwVcPerfTotalTable=cpwVcPerfTotalTable, cpwVcPerfTotalOutHCPackets=cpwVcPerfTotalOutHCPackets, cpwVcPeerMappingEntry=cpwVcPeerMappingEntry, cpwVcTable=cpwVcTable, cpwVcGroups=cpwVcGroups, cpwVcPerfIntervalInHCBytes=cpwVcPerfIntervalInHCBytes, cpwModuleCompliance=cpwModuleCompliance, cpwVcPerfCurrentOutHCPackets=cpwVcPerfCurrentOutHCPackets, cpwVcObjects=cpwVcObjects, cpwVcPeerMappingVcIndex=cpwVcPeerMappingVcIndex, cpwVcCompliances=cpwVcCompliances, cpwVcLocalGroupID=cpwVcLocalGroupID, cpwVcTimeElapsed=cpwVcTimeElapsed, cpwVcIndex=cpwVcIndex, cpwVcRowStatus=cpwVcRowStatus, cpwVcPerfTotalErrorPackets=cpwVcPerfTotalErrorPackets, cpwVcIdMappingEntry=cpwVcIdMappingEntry, cpwVcDescr=cpwVcDescr, cpwVcPerfCurrentEntry=cpwVcPerfCurrentEntry, cpwVcPerfCurrentInHCPackets=cpwVcPerfCurrentInHCPackets, cpwVcIdMappingVcIndex=cpwVcIdMappingVcIndex, cpwVcOperStatus=cpwVcOperStatus, cpwVcOutboundOperStatus=cpwVcOutboundOperStatus, cpwVcAdminStatus=cpwVcAdminStatus, cpwVcUpDownNotifEnable=cpwVcUpDownNotifEnable, cpwVcPerfCurrentOutHCBytes=cpwVcPerfCurrentOutHCBytes)
[ 2, 198, 2, 9485, 15571, 7378, 337, 9865, 8265, 36159, 8220, 12, 40, 22274, 12, 47, 54, 12, 8895, 33, 357, 4023, 1378, 16184, 76, 489, 8937, 13, 785, 14, 79, 893, 11632, 8, 198, 2, 7054, 45, 13, 16, 2723, 2393, 1378, 14, 14490, ...
2.299039
11,139
_base_ = [
    '../_base_/models/fpn_r50.py', '../_base_/datasets/onaho.py',
    '../_base_/default_runtime.py', '../_base_/schedules/schedule_160k.py'
]
model = dict(decode_head=dict(num_classes=2))
[ 62, 8692, 62, 796, 685, 198, 220, 220, 220, 705, 40720, 62, 8692, 62, 14, 27530, 14, 69, 21999, 62, 81, 1120, 13, 9078, 3256, 705, 40720, 62, 8692, 62, 14, 19608, 292, 1039, 14, 261, 17108, 13, 9078, 3256, 198, 220, 220, 220, 70...
2.105263
95
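A brief note on the config cell above: MMSegmentation resolves the `_base_` list by merging the referenced files into one config. A quick way to inspect the merged result is sketched below; it assumes an mmcv/MMSegmentation environment and that the cell is saved as, say, `fpn_onaho.py` (the filename is an assumption).

# Hedged sketch: inspect how the _base_ inheritance above resolves.
# Assumes mmcv is installed and the config was saved as 'fpn_onaho.py'.
from mmcv import Config

cfg = Config.fromfile('fpn_onaho.py')
print(cfg.pretty_text)  # the fully merged config, including num_classes=2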
""" Expression Dataset for analysis of matrix (RNASeq/microarray) data with annotations """ import pandas as PD import numpy as N from matplotlib import pylab as P from collections import OrderedDict from ast import literal_eval # from ..plot.matrix import matshow_clustered def read_bioinfo3_data(fname): """ read bioinfo3.table.dataset type of data """ fobj = open(fname) groups = OrderedDict() cnt = 0 for line in fobj: cnt += 1 if line[:2]=='#%': if line.startswith('#%groups:'): gname, members = line[len('#%groups:'):].split('=') gname = gname.strip() members = members.strip().split(',') groups[gname] = members datafields = line.strip().split('=')[1].strip().split(',') elif line.startswith('#%fields'): fields = line.strip().split('=')[1].strip().split(',') elif not line.strip(): continue # empty line else: break df = PD.read_table(fname, skiprows=cnt-1) f2g = {} for g,m in groups.items(): for f in m: f2g[f] = g df.columns = PD.MultiIndex.from_tuples([(x, f2g.get(x,'')) for x in df.columns], names=['samplename','group']) e = ExpressionSet(df) return e def read_multiindex_data(fname, tupleize=True, index_names = ['samplename','group']): """ read dataset table with MultiIndex in the header """ if not tupleize: df = PD.read_table(fname, header=range(len(index_names)), index_col=[0], tupleize_cols=False) e = ExpressionSet(df) return e df = PD.read_table(fname, index_col=0) df.columns = PD.MultiIndex.from_tuples(df.columns.map(literal_eval).tolist(), names=index_names) e = ExpressionSet(df) return e def read_grouped_table(fname, groupfn=lambda x: '_'.join(x.split('_')[:-1])): """ Read dataset whose group is encoded in the colname. Column 0 is index. """ df = PD.read_table(fname) f2g = {x:groupfn(x) for x in df.columns} df.columns = PD.MultiIndex.from_tuples([(x, f2g[x]) for x in df.columns], names=['samplename','group']) e = ExpressionSet(df) return e def concatenate(dic): """ dic: dict of DataFrames merge all using index and outer join """ keys = list(dic) d = dic[keys[0]].merge(dic[keys[1]], left_index=True, right_index=True, how='outer', suffixes=('.'+keys[0],'.'+keys[1])) for k in keys[2:]: d = d.merge(dic[k], left_index=True, right_index=True, how='outer', suffixes=('','.'+k)) return d
[ 37811, 220, 198, 220, 220, 220, 41986, 16092, 292, 316, 329, 3781, 286, 17593, 357, 49, 18293, 27363, 14, 24055, 18747, 8, 1366, 351, 37647, 198, 198, 37811, 198, 198, 11748, 19798, 292, 355, 14340, 198, 11748, 299, 32152, 355, 399, 1...
2.23479
1,167
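The readers in the cell above all converge on the same (samplename, group) MultiIndex column convention before wrapping the frame in `ExpressionSet` (defined elsewhere in that module). A toy demonstration of the convention using only pandas, with made-up sample names:

# Illustrative only: build the (samplename, group) MultiIndex the readers
# above produce, then select one group of columns.
import pandas as pd

df = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]], index=["geneA", "geneB"],
                  columns=["ctrl_1", "treat_1"])
df.columns = pd.MultiIndex.from_tuples(
    [("ctrl_1", "ctrl"), ("treat_1", "treat")],
    names=["samplename", "group"])
print(df.xs("treat", axis=1, level="group"))  # all columns in group "treat"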
from __future__ import annotations

from django_perf_rec.sql import sql_fingerprint
[ 6738, 11593, 37443, 834, 1330, 37647, 198, 198, 6738, 42625, 14208, 62, 525, 69, 62, 8344, 13, 25410, 1330, 44161, 62, 35461, 4798, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628, 628 ]
2.860465
43
from rest_framework import serializers
from user_auth_api.models import User

# User Serializer

# Register Serializer
[ 6738, 1334, 62, 30604, 1330, 11389, 11341, 198, 6738, 2836, 62, 18439, 62, 15042, 13, 27530, 1330, 11787, 198, 198, 2, 11787, 23283, 7509, 198, 198, 2, 17296, 23283, 7509 ]
3.933333
30
"""Main module.""" import json import os import re import shutil import subprocess import sys from pathlib import Path from typing import List, Optional import yaml ENV_FILE = "environment.yml"
[ 37811, 13383, 8265, 526, 15931, 198, 198, 11748, 33918, 198, 11748, 28686, 198, 11748, 302, 198, 11748, 4423, 346, 198, 11748, 850, 14681, 198, 11748, 25064, 198, 6738, 3108, 8019, 1330, 10644, 198, 6738, 19720, 1330, 7343, 11, 32233, 198...
3.327869
61
import sys
import rospy
import types

# from std_msgs.msg import String
from sensor_msgs.msg import Image
from cibr_img_processing.msg import Ints
from cv_bridge import CvBridge, CvBridgeError

# make int msgs
# TODO: get the img size from camera_info topics
[ 11748, 25064, 198, 11748, 686, 2777, 88, 198, 11748, 3858, 198, 198, 2, 6738, 14367, 62, 907, 14542, 13, 19662, 1330, 10903, 198, 6738, 12694, 62, 907, 14542, 13, 19662, 1330, 7412, 198, 6738, 269, 2889, 62, 9600, 62, 36948, 13, 19662...
3.072289
83
# DO NOT EDIT: File is generated by code generator.

from pokepay_partner_python_sdk.pokepay.request.request import PokepayRequest
from pokepay_partner_python_sdk.pokepay.response.shop_with_accounts import ShopWithAccounts
[ 2, 8410, 5626, 48483, 25, 9220, 318, 7560, 416, 2438, 17301, 13, 198, 198, 6738, 22620, 15577, 62, 3911, 1008, 62, 29412, 62, 21282, 74, 13, 35924, 15577, 13, 25927, 13, 25927, 1330, 41163, 15577, 18453, 198, 6738, 22620, 15577, 62, 3...
3.393939
66
from abc import abstractproperty

from ..backend_config.bucket_config import S3BucketConfig
from ..storage.helper import StorageHelper
[ 6738, 450, 66, 1330, 12531, 26745, 198, 198, 6738, 11485, 1891, 437, 62, 11250, 13, 27041, 316, 62, 11250, 1330, 311, 18, 33, 38811, 16934, 198, 6738, 11485, 35350, 13, 2978, 525, 1330, 20514, 47429, 628 ]
3.777778
36
import os
import unittest

import pandas as pd
from application.ParcelsParser import ParcelsParser


if __name__ == '__main__':
    unittest.main()
[ 11748, 28686, 198, 11748, 555, 715, 395, 198, 11748, 19798, 292, 355, 279, 67, 198, 6738, 3586, 13, 10044, 5276, 82, 46677, 1330, 2547, 5276, 82, 46677, 628, 198, 198, 361, 11593, 3672, 834, 6624, 705, 834, 12417, 834, 10354, 198, 220...
2.901961
51
import binTree
import queue

t = binTree.BinaryTree(1)
t.insertLeft(2)
t.insertRight(3)
t.getRightChild().insertLeft(5)
t.getRightChild().insertRight(6)
print(complete(t))  # `complete` should test whether the tree is a complete binary tree
[ 11748, 9874, 27660, 198, 11748, 16834, 198, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 628, 198, 83, 796, 9874, 27660, 13, 33, 3219, 27660, 7, 16, 8, 198, 83, 13, 28463, 18819, 7, 17, 8, 198, 83, 13, 28463, 1102...
2.341772
79
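The cell above never defines `complete`, and its `queue` import is otherwise unused, which suggests a level-order completeness check was intended. A possible implementation, written as a guess rather than the original code (it also assumes the tree exposes `getLeftChild`, mirroring the `getRightChild` call in the snippet):

# Hypothetical `complete`: a binary tree is complete if, in level order,
# no node appears after the first missing child.
import queue

def complete(tree):
    q = queue.Queue()
    q.put(tree)
    seen_gap = False  # becomes True once a missing child has been seen
    while not q.empty():
        node = q.get()
        for child in (node.getLeftChild(), node.getRightChild()):
            if child is None:
                seen_gap = True
            elif seen_gap:
                return False  # a node follows a gap -> not complete
            else:
                q.put(child)
    return True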
# -*- encoding: utf-8
"""
    Copyright (c) 2014, Philipp Krähenbühl
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are met:
        * Redistributions of source code must retain the above copyright
          notice, this list of conditions and the following disclaimer.
        * Redistributions in binary form must reproduce the above copyright
          notice, this list of conditions and the following disclaimer in the
          documentation and/or other materials provided with the distribution.
        * Neither the name of the Stanford University nor the
          names of its contributors may be used to endorse or promote products
          derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY Philipp Krähenbühl ''AS IS'' AND ANY
    EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    DISCLAIMED. IN NO EVENT SHALL Philipp Krähenbühl BE LIABLE FOR ANY
    DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
    ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from .gop import *
import numpy as np
from .util import *

LATEX_OUTPUT=True

for bnd in ['st','sf','mssf','ds']:
    # Load the dataset
    over_segs,segmentations,boxes = loadVOCAndOverSeg( "test", detector=bnd, year="2012" )
    has_box = [len(b)>0 for b in boxes]
    boxes = [np.vstack(b).astype(np.int32) if len(b)>0 else np.zeros((0,4),dtype=np.int32) for b in boxes]
    # Generate the proposals
    s = []
    s.append( (50,5,0.7) )    # ~250 props
    s.append( (100,5,0.75) )  # ~450 props
    s.append( (180,5,0.8) )   # ~650 props
    s.append( (200,7,0.85) )  # ~1100 props
    s.append( (250,10,0.9) )  # ~2200 props
    s.append( (290,20,0.9) )  # ~4400 props
    for N_S,N_T,iou in s:
        prop_settings = setupBaseline( N_S, N_T, iou )
        bo,b_bo,pool_s,box_pool_s = dataset.proposeAndEvaluate( over_segs, segmentations, boxes, proposals.Proposal( prop_settings ) )
        if LATEX_OUTPUT:
            print(( "Baseline %s ($%d$,$%d$) & %d & %0.3f & %0.3f & %0.3f & %0.3f & \\\\"%(bnd, N_S,N_T,np.mean(pool_s),np.mean(bo[:,0]),np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]), np.mean(bo[:,0]>=0.5), np.mean(bo[:,0]>=0.7) ) ))
        else:
            print(( "ABO        ", np.mean(bo[:,0]) ))
            print(( "cover      ", np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]) ))
            print(( "recall     ", np.mean(bo[:,0]>=0.5), "\t", np.mean(bo[:,0]>=0.6), "\t", np.mean(bo[:,0]>=0.7), "\t", np.mean(bo[:,0]>=0.8), "\t", np.mean(bo[:,0]>=0.9), "\t", np.mean(bo[:,0]>=1) ))
            print(( "# props    ", np.mean(pool_s) ))
            print(( "box ABO    ", np.mean(b_bo) ))
            print(( "box recall ", np.mean(b_bo>=0.5), "\t", np.mean(b_bo>=0.6), "\t", np.mean(b_bo>=0.7), "\t", np.mean(b_bo>=0.8), "\t", np.mean(b_bo>=0.9), "\t", np.mean(b_bo>=1) ))
            print(( "# box      ", np.mean(box_pool_s[~np.isnan(box_pool_s)]) ))
[ 2, 532, 9, 12, 21004, 25, 3384, 69, 12, 23, 198, 37811, 198, 220, 220, 220, 15069, 357, 66, 8, 1946, 11, 9844, 13685, 831, 65, 18519, 198, 220, 220, 220, 1439, 2489, 10395, 13, 198, 197, 198, 220, 220, 220, 2297, 396, 3890, 290,...
2.401159
1,381
import pickle
import pandas as pd

# cat aa ab ac > dataset.pkl from https://github.com/zhougr1993/DeepInterestNetwork
with open('dataset.pkl', 'rb') as f:
    train_set = pickle.load(f, encoding='bytes')
    test_set = pickle.load(f, encoding='bytes')
    cate_list = pickle.load(f, encoding='bytes')
    user_count, item_count, cate_count = pickle.load(f, encoding='bytes')

train_data = []
for sample in train_set:
    user_id = sample[0]
    item_id = sample[2]
    item_history = "^".join([str(i) for i in sample[1]])
    label = sample[3]
    cate_id = cate_list[item_id]
    cate_history = "^".join([str(i) for i in cate_list[sample[1]]])
    train_data.append([label, user_id, item_id, cate_id, item_history, cate_history])

train_df = pd.DataFrame(train_data, columns=['label', 'user_id', 'item_id', 'cate_id', 'item_history', 'cate_history'])
train_df.to_csv("train.csv", index=False)

test_data = []
for sample in test_set:
    user_id = sample[0]
    item_pair = sample[2]
    item_history = "^".join([str(i) for i in sample[1]])
    cate_history = "^".join([str(i) for i in cate_list[sample[1]]])
    test_data.append([1, user_id, item_pair[0], cate_list[item_pair[0]], item_history, cate_history])
    test_data.append([0, user_id, item_pair[1], cate_list[item_pair[1]], item_history, cate_history])

test_df = pd.DataFrame(test_data, columns=['label', 'user_id', 'item_id', 'cate_id', 'item_history', 'cate_history'])
test_df.to_csv("test.csv", index=False)
[ 11748, 2298, 293, 198, 11748, 19798, 292, 355, 279, 67, 198, 198, 2, 3797, 257, 64, 450, 936, 1875, 27039, 13, 79, 41582, 422, 3740, 1378, 12567, 13, 785, 14, 38536, 2164, 24465, 14, 29744, 19302, 26245, 198, 198, 4480, 1280, 10786, ...
2.414754
610
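One detail worth calling out in the row above: click histories are serialized as "^"-joined ID strings before being written to CSV, so any consumer has to split them back out. A tiny round-trip check of that encoding (illustrative only):

# Round-trip the "^"-joined history encoding used in train.csv/test.csv.
hist = "^".join(str(i) for i in [3, 17, 42])
assert [int(x) for x in hist.split("^")] == [3, 17, 42]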
from email_extras.settings import USE_GNUPG

if USE_GNUPG:
    from django.contrib import admin
    from email_extras.models import Key, Address
    from email_extras.forms import KeyForm

    # KeyAdmin and AddressAdmin are ModelAdmin classes defined elsewhere in
    # the original module; only the registrations survive in this excerpt.
    admin.site.register(Key, KeyAdmin)
    admin.site.register(Address, AddressAdmin)
[ 201, 198, 6738, 3053, 62, 2302, 8847, 13, 33692, 1330, 23210, 62, 16630, 52, 6968, 201, 198, 201, 198, 201, 198, 361, 23210, 62, 16630, 52, 6968, 25, 201, 198, 220, 220, 220, 422, 42625, 14208, 13, 3642, 822, 1330, 13169, 201, 198, ...
2.636364
110
# -----------------------------------------------------------------------------
#
# This file is the copyrighted property of Tableau Software and is protected
# by registered patents and other applicable U.S. and international laws and
# regulations.
#
# You may adapt this file and modify it to fit into your context and use it
# as a template to start your own projects.
#
# -----------------------------------------------------------------------------
import shutil

from pathlib import Path

from tableauhyperapi import HyperProcess, Telemetry, \
    Connection, CreateMode, \
    NOT_NULLABLE, NULLABLE, SqlType, TableDefinition, \
    Inserter, \
    escape_name, escape_string_literal, \
    TableName, Name, \
    HyperException

# The table is called "Extract" and will be created in the "Extract" schema.
# This has historically been the default table name and schema for extracts created by Tableau.
extract_table = TableDefinition(
    table_name=TableName("Extract", "Extract"),
    columns=[
        TableDefinition.Column(name='Order ID', type=SqlType.int(), nullability=NOT_NULLABLE),
        TableDefinition.Column(name='Ship Timestamp', type=SqlType.timestamp(), nullability=NOT_NULLABLE),
        TableDefinition.Column(name='Ship Mode', type=SqlType.text(), nullability=NOT_NULLABLE),
        TableDefinition.Column(name='Ship Priority', type=SqlType.int(), nullability=NOT_NULLABLE)
    ]
)


def run_insert_data_with_expressions():
    """
    An example of how to push down computations to Hyper during insertion with expressions.
    """
    print("EXAMPLE - Push down computations to Hyper during insertion with expressions")
    path_to_database = Path("orders.hyper")

    # Starts the Hyper Process with telemetry enabled to send data to Tableau.
    # To opt out, simply set telemetry=Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU.
    with HyperProcess(telemetry=Telemetry.SEND_USAGE_DATA_TO_TABLEAU) as hyper:

        # Creates a new Hyper file "orders.hyper".
        # Replaces the file with CreateMode.CREATE_AND_REPLACE if it already exists.
        with Connection(endpoint=hyper.endpoint,
                        database=path_to_database,
                        create_mode=CreateMode.CREATE_AND_REPLACE) as connection:

            connection.catalog.create_schema(schema=extract_table.table_name.schema_name)
            connection.catalog.create_table(table_definition=extract_table)

            # Hyper API's Inserter allows users to transform data during insertion.
            # To make use of data transformation during insertion, the inserter requires the following inputs:
            #   1. The connection to the Hyper instance containing the table.
            #   2. The table name or table definition into which data is inserted.
            #   3. A list of Inserter.ColumnMapping.
            #      This list informs the inserter how each column in the target table is transformed.
            #      The list must contain all the columns into which data is inserted.
            #      "Inserter.ColumnMapping" maps a valid SQL expression (if any) to a column in the target table.
            #      For example Inserter.ColumnMapping('target_column_name', f'{escape_name("colA")}*{escape_name("colB")}')
            #      The column "target_column" contains the product of "colA" and "colB" after successful insertion.
            #      The SQL expression string is optional in Inserter.ColumnMapping.
            #      For a column without any transformation only the column name is required.
            #      For example Inserter.ColumnMapping('no_data_transformation_column')
            #   4. The column definition of all input values provided to the Inserter.

            # The inserter definition contains the column definition for the values that are inserted.
            inserter_definition = [
                TableDefinition.Column(name='Order ID', type=SqlType.int(), nullability=NOT_NULLABLE),
                TableDefinition.Column(name='Ship Timestamp Text', type=SqlType.text(), nullability=NOT_NULLABLE),
                TableDefinition.Column(name='Ship Mode', type=SqlType.text(), nullability=NOT_NULLABLE),
                TableDefinition.Column(name='Ship Priority Text', type=SqlType.text(), nullability=NOT_NULLABLE)]

            # Column 'Order ID' is inserted into "Extract"."Extract" as-is.
            # Column 'Ship Timestamp' in "Extract"."Extract" of timestamp type is computed from
            # Column 'Ship Timestamp Text' of text type using 'to_timestamp()'.
            # Column 'Ship Mode' is inserted into "Extract"."Extract" as-is.
            # Column 'Ship Priority' in "Extract"."Extract" of integer type is computed from
            # Column 'Ship Priority Text' of text type using a 'CASE' statement.
            shipPriorityAsIntCaseExpression = f'CASE {escape_name("Ship Priority Text")} ' \
                                              f'WHEN {escape_string_literal("Urgent")} THEN 1 ' \
                                              f'WHEN {escape_string_literal("Medium")} THEN 2 ' \
                                              f'WHEN {escape_string_literal("Low")} THEN 3 END'

            column_mappings = [
                'Order ID',
                Inserter.ColumnMapping(
                    'Ship Timestamp',
                    f'to_timestamp({escape_name("Ship Timestamp Text")}, {escape_string_literal("YYYY-MM-DD HH24:MI:SS")})'),
                'Ship Mode',
                Inserter.ColumnMapping('Ship Priority', shipPriorityAsIntCaseExpression)
            ]

            # Data to be inserted.
            data_to_insert = [
                [399, '2012-09-13 10:00:00', 'Express Class', 'Urgent'],
                [530, '2012-07-12 14:00:00', 'Standard Class', 'Low']
            ]

            # Insert data into the "Extract"."Extract" table with expressions.
            with Inserter(connection, extract_table, column_mappings,
                          inserter_definition=inserter_definition) as inserter:
                inserter.add_rows(rows=data_to_insert)
                inserter.execute()

            print("The data was added to the table.")
        print("The connection to the Hyper file has been closed.")
    print("The Hyper process has been shut down.")


if __name__ == '__main__':
    try:
        run_insert_data_with_expressions()
    except HyperException as ex:
        print(ex)
        exit(1)
[ 2, 16529, 32501, 198, 2, 198, 2, 770, 2393, 318, 262, 33696, 3119, 286, 8655, 559, 10442, 290, 318, 6861, 198, 2, 416, 6823, 21216, 290, 584, 9723, 471, 13, 50, 13, 290, 3230, 3657, 290, 198, 2, 6647, 13, 198, 2, 198, 2, 921, ...
2.653537
2,361
import boto3
import argparse
import os, sys
[ 11748, 275, 2069, 18, 198, 11748, 1822, 29572, 220, 198, 11748, 28686, 11, 17597, 198 ]
2.933333
15
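The cell above ends after its imports. As a minimal sketch of how boto3 and argparse are typically combined, the continuation below lists the keys in a bucket; the flag name and the reliance on default AWS credentials are assumptions, not content from the original file.

# Hedged continuation sketch: parse a bucket name and list its object keys.
import argparse
import boto3

parser = argparse.ArgumentParser()
parser.add_argument("--bucket", required=True, help="S3 bucket to list")
args = parser.parse_args()

s3 = boto3.client("s3")  # uses the default credential chain
for obj in s3.list_objects_v2(Bucket=args.bucket).get("Contents", []):
    print(obj["Key"])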
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns

from sklearn.datasets import make_blobs
from sklearn.mixture import GaussianMixture
from sklearn.cluster import KMeans

from matplotlib.patches import Ellipse

# For reproducibility
np.random.seed(1000)

nb_samples = 300
nb_centers = 2


if __name__ == '__main__':
    # Create the dataset
    X, Y = make_blobs(n_samples=nb_samples, n_features=2, center_box=[-1, 1],
                      centers=nb_centers, cluster_std=[1.0, 0.6], random_state=1000)

    # Show the dataset
    sns.set()
    fig, ax = plt.subplots(figsize=(15, 9))
    ax.scatter(X[:, 0], X[:, 1], s=120)
    ax.set_xlabel(r'$x_0$', fontsize=14)
    ax.set_ylabel(r'$x_1$', fontsize=14)
    plt.show()

    # Train the model
    gm = GaussianMixture(n_components=2, random_state=1000)
    gm.fit(X)
    Y_pred = gm.fit_predict(X)

    print('Means: \n{}'.format(gm.means_))
    print('Covariance matrices: \n{}'.format(gm.covariances_))
    print('Weights: \n{}'.format(gm.weights_))

    m1 = gm.means_[0]
    m2 = gm.means_[1]
    c1 = gm.covariances_[0]
    c2 = gm.covariances_[1]
    we1 = 1 + gm.weights_[0]
    we2 = 1 + gm.weights_[1]

    # Eigendecompose the covariances
    w1, v1 = np.linalg.eigh(c1)
    w2, v2 = np.linalg.eigh(c2)

    nv1 = v1 / np.linalg.norm(v1)
    nv2 = v2 / np.linalg.norm(v2)

    print('Eigenvalues 1: \n{}'.format(w1))
    print('Eigenvectors 1: \n{}'.format(nv1))
    print('Eigenvalues 2: \n{}'.format(w2))
    print('Eigenvectors 2: \n{}'.format(nv2))

    a1 = np.arccos(np.dot(nv1[:, 1], [1.0, 0.0]) / np.linalg.norm(nv1[:, 1])) * 180.0 / np.pi
    a2 = np.arccos(np.dot(nv2[:, 1], [1.0, 0.0]) / np.linalg.norm(nv2[:, 1])) * 180.0 / np.pi

    # Perform K-Means clustering
    km = KMeans(n_clusters=2, random_state=1000)
    km.fit(X)
    Y_pred_km = km.predict(X)

    # Show the comparison of the results
    fig, ax = plt.subplots(1, 2, figsize=(22, 9), sharey=True)

    ax[0].scatter(X[Y_pred == 0, 0], X[Y_pred == 0, 1], s=80, marker='o', label='Gaussian 1')
    ax[0].scatter(X[Y_pred == 1, 0], X[Y_pred == 1, 1], s=80, marker='d', label='Gaussian 2')

    g1 = Ellipse(xy=m1, width=w1[1] * 3, height=w1[0] * 3, fill=False, linestyle='dashed', angle=a1, color='black',
                 linewidth=1)
    g1_1 = Ellipse(xy=m1, width=w1[1] * 2, height=w1[0] * 2, fill=False, linestyle='dashed', angle=a1, color='black',
                   linewidth=2)
    g1_2 = Ellipse(xy=m1, width=w1[1] * 1.4, height=w1[0] * 1.4, fill=False, linestyle='dashed', angle=a1,
                   color='black', linewidth=3)

    g2 = Ellipse(xy=m2, width=w2[1] * 3, height=w2[0] * 3, fill=False, linestyle='dashed', angle=a2, color='black',
                 linewidth=1)
    g2_1 = Ellipse(xy=m2, width=w2[1] * 2, height=w2[0] * 2, fill=False, linestyle='dashed', angle=a2, color='black',
                   linewidth=2)
    g2_2 = Ellipse(xy=m2, width=w2[1] * 1.4, height=w2[0] * 1.4, fill=False, linestyle='dashed', angle=a2,
                   color='black', linewidth=3)

    ax[0].set_xlabel(r'$x_0$', fontsize=16)
    ax[0].set_ylabel(r'$x_1$', fontsize=16)

    ax[0].add_artist(g1)
    ax[0].add_artist(g1_1)
    ax[0].add_artist(g1_2)
    ax[0].add_artist(g2)
    ax[0].add_artist(g2_1)
    ax[0].add_artist(g2_2)

    ax[0].set_title('Gaussian Mixture', fontsize=16)
    ax[0].legend(fontsize=16)

    ax[1].scatter(X[Y_pred_km == 0, 0], X[Y_pred_km == 0, 1], s=80, marker='o', label='Cluster 1')
    ax[1].scatter(X[Y_pred_km == 1, 0], X[Y_pred_km == 1, 1], s=80, marker='d', label='Cluster 2')

    ax[1].set_xlabel(r'$x_0$', fontsize=16)
    ax[1].set_title('K-Means', fontsize=16)
    ax[1].legend(fontsize=16)

    # Predict the probability of some sample points
    print('P([0, -2]=G1) = {:.3f} and P([0, -2]=G2) = {:.3f}'.format(*list(gm.predict_proba([[0.0, -2.0]]).squeeze())))
    print('P([1, -1]=G1) = {:.3f} and P([1, -1]=G2) = {:.3f}'.format(*list(gm.predict_proba([[1.0, -1.0]]).squeeze())))
    print('P([1, 0]=G1) = {:.3f} and P([1, 0]=G2) = {:.3f}'.format(*list(gm.predict_proba([[1.0, 0.0]]).squeeze())))

    plt.show()

    # Compute AICs, BICs, and log-likelihood
    n_max_components = 20

    aics = []
    bics = []
    log_likelihoods = []

    for n in range(1, n_max_components + 1):
        gm = GaussianMixture(n_components=n, random_state=1000)
        gm.fit(X)
        aics.append(gm.aic(X))
        bics.append(gm.bic(X))
        log_likelihoods.append(gm.score(X) * nb_samples)

    # Show the results
    fig, ax = plt.subplots(1, 3, figsize=(20, 6))

    ax[0].plot(range(1, n_max_components + 1), aics)
    ax[0].set_xticks(range(1, n_max_components + 1))
    ax[0].set_xlabel('Number of Gaussians', fontsize=14)
    ax[0].set_title('AIC', fontsize=14)

    ax[1].plot(range(1, n_max_components + 1), bics)
    ax[1].set_xticks(range(1, n_max_components + 1))
    ax[1].set_xlabel('Number of Gaussians', fontsize=14)
    ax[1].set_title('BIC', fontsize=14)

    ax[2].plot(range(1, n_max_components + 1), log_likelihoods)
    ax[2].set_xticks(range(1, n_max_components + 1))
    ax[2].set_xlabel('Number of Gaussians', fontsize=14)
    ax[2].set_title('Log-likelihood', fontsize=14)

    plt.show()
[ 11748, 299, 32152, 355, 45941, 198, 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 11748, 384, 397, 1211, 355, 3013, 82, 198, 198, 6738, 1341, 35720, 13, 19608, 292, 1039, 1330, 787, 62, 2436, 8158, 198, 6738, 1341, 357...
1.976235
2,651
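A small follow-up to the model-selection loop in the row above: the AIC/BIC curves it plots are usually reduced to a single choice with an argmin. A sketch, assuming the `bics` list built in the snippet:

# Illustrative only: pick the BIC-optimal component count from the loop above.
import numpy as np
best_n = int(np.argmin(bics)) + 1  # +1 because the range starts at 1
print(f"BIC-optimal number of Gaussians: {best_n}")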
__all__ = [
    'DEFAULT_FILTERS',
    'DEFAULT_XAR_FILTERS',
    'merge_image',
]

import contextlib
import logging
import tempfile
from pathlib import Path

from g1 import scripts
from g1.containers import models
from g1.containers import scripts as ctr_scripts

from . import utils

LOG = logging.getLogger(__name__)

DEFAULT_FILTERS = (
    # Do not leak any source codes to the application image.
    # Keep drydock path in sync with //bases:build.
    ('exclude', '/home/plumber/drydock'),
    ('exclude', '/home/plumber/.gradle'),
    ('exclude', '/home/plumber/.gsutil'),
    ('exclude', '/home/plumber/.python_history'),
    ('exclude', '/home/plumber/.vpython_cipd_cache'),
    ('exclude', '/home/plumber/.vpython-root'),
    ('exclude', '/home/plumber/.wget-hsts'),
    ('exclude', '/root/.cache'),
    ('exclude', '/usr/src'),
    # Include only relevant files under /etc.
    ('include', '/etc/'),
    # We use distro java at the moment.
    ('include', '/etc/alternatives/'),
    ('include', '/etc/alternatives/java'),
    ('include', '/etc/java*'),
    ('include', '/etc/java*/**'),
    ('include', '/etc/group'),
    ('include', '/etc/group-'),
    ('include', '/etc/gshadow'),
    ('include', '/etc/gshadow-'),
    ('include', '/etc/inputrc'),
    ('include', '/etc/ld.so.cache'),
    ('include', '/etc/passwd'),
    ('include', '/etc/passwd-'),
    ('include', '/etc/shadow'),
    ('include', '/etc/shadow-'),
    ('include', '/etc/ssl/'),
    ('include', '/etc/ssl/**'),
    ('include', '/etc/subgid'),
    ('include', '/etc/subgid-'),
    ('include', '/etc/subuid'),
    ('include', '/etc/subuid-'),
    ('include', '/etc/sudoers.d/'),
    ('include', '/etc/sudoers.d/**'),
    ('exclude', '/etc/**'),
    # Exclude distro binaries from application image (note that base
    # image includes a base set of distro binaries).
    ('exclude', '/bin'),
    ('exclude', '/sbin'),
    # We use distro java at the moment.
    ('include', '/usr/bin/'),
    ('include', '/usr/bin/java'),
    ('exclude', '/usr/bin/**'),
    ('exclude', '/usr/bin'),
    ('exclude', '/usr/sbin'),
    # Exclude headers.
    ('exclude', '/usr/include'),
    ('exclude', '/usr/local/include'),
    # Exclude distro systemd files.
    ('exclude', '/lib/systemd'),
    ('exclude', '/usr/lib/systemd'),
    # In general, don't exclude distro libraries since we might depend
    # on them, except these libraries.
    ('exclude', '/usr/lib/apt'),
    ('exclude', '/usr/lib/gcc'),
    ('exclude', '/usr/lib/git-core'),
    ('exclude', '/usr/lib/python*'),
    ('exclude', '/usr/lib/**/*perl*'),
    # Exclude these to save more space.
    ('exclude', '/usr/share/**'),
    ('exclude', '/var/**'),
)

# For XAR images, we only include a few selected directories, and
# exclude everything else.
#
# To support Python, we include our code under /usr/local in the XAR
# image (like our pod image).  An alternative is to use venv to install
# our codebase, but this seems to be too much effort; so we do not take
# this approach for now.
#
# We explicitly remove CPython binaries from /usr/local/bin so that the
# `env` command will not (and should not) resolve to them.
#
# We do not include /usr/bin/java (symlink to /etc/alternatives) for
# now.  If you want to use Java, you have to directly invoke it under
# /usr/lib/jvm/...
DEFAULT_XAR_FILTERS = (
    ('include', '/usr/'),
    ('include', '/usr/lib/'),
    ('exclude', '/usr/lib/**/*perl*'),
    ('include', '/usr/lib/jvm/'),
    ('include', '/usr/lib/jvm/**'),
    ('include', '/usr/lib/x86_64-linux-gnu/'),
    ('include', '/usr/lib/x86_64-linux-gnu/**'),
    ('include', '/usr/local/'),
    ('include', '/usr/local/bin/'),
    ('exclude', '/usr/local/bin/python*'),
    ('include', '/usr/local/bin/*'),
    ('include', '/usr/local/lib/'),
    ('include', '/usr/local/lib/**'),
    ('exclude', '**'),
)
[ 834, 439, 834, 796, 685, 198, 220, 220, 220, 705, 7206, 38865, 62, 46700, 51, 4877, 3256, 198, 220, 220, 220, 705, 7206, 38865, 62, 55, 1503, 62, 46700, 51, 4877, 3256, 198, 220, 220, 220, 705, 647, 469, 62, 9060, 3256, 198, 60, ...
2.639671
1,457
""" Space : O(1) Time : O(n) """
[ 37811, 198, 14106, 220, 220, 1058, 440, 7, 16, 8, 198, 7575, 220, 220, 220, 1058, 440, 7, 77, 8, 198, 37811, 198, 220, 220, 220, 220, 220, 220, 220, 220 ]
1.483871
31
# Created by Giuseppe Paolo
# Date: 27/08/2020

from gym.envs.registration import register

register(
    id='CollectBall-v0',
    entry_point='gym_collectball.envs:CollectBall'
)
[ 2, 15622, 416, 8118, 1904, 27768, 11243, 14057, 220, 198, 2, 7536, 25, 2681, 14, 2919, 14, 42334, 198, 6738, 11550, 13, 268, 14259, 13, 2301, 33397, 1330, 7881, 198, 198, 30238, 7, 198, 220, 4686, 11639, 31337, 23410, 12, 85, 15, 32...
2.777778
63
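For context on the registration row above: once the id is registered, the environment is created through the usual gym factory. A hedged usage sketch, assuming the gym_collectball package is installed:

# Importing the package runs the register() call above; then gym.make works.
import gym
import gym_collectball  # noqa: F401  (side effect: registers CollectBall-v0)

env = gym.make('CollectBall-v0')
obs = env.reset()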
import logging
import threading
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Queue

from foreverbull.worker.worker import WorkerHandler
from foreverbull_core.models.finance import EndOfDay
from foreverbull_core.models.socket import Request
from foreverbull_core.models.worker import Instance
from foreverbull_core.socket.client import ContextClient, SocketClient
from foreverbull_core.socket.exceptions import SocketClosed, SocketTimeout
from foreverbull_core.socket.router import MessageRouter
[ 11748, 18931, 198, 11748, 4704, 278, 198, 6738, 24580, 13, 69, 315, 942, 1330, 14122, 27201, 23002, 38409, 198, 6738, 18540, 305, 919, 278, 1330, 4670, 518, 198, 198, 6738, 8097, 16308, 13, 28816, 13, 28816, 1330, 35412, 25060, 198, 673...
4.100775
129
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `AffiliationSearch` module."""

from collections import namedtuple
from nose.tools import assert_equal, assert_true

import scopus

s = scopus.AffiliationSearch('af-id(60021784)', refresh=True)
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 51, 3558, 329, 4600, 35191, 15547, 18243, 63, 8265, 526, 15931, 198, 198, 6738, 17268, 1330, 3706, 83, 29291, 19...
2.931034
87
'''
'''
import struct
[ 7061, 6, 198, 7061, 6, 198, 11748, 2878, 198 ]
2.444444
9
# from math import hypot
import math

print('='*5, 'Right triangle calculation', '='*5)
cat_op = float(input('Enter the length of the opposite leg: '))
cat_adj = float(input('Enter the length of the adjacent leg: '))
hip = math.hypot(cat_op, cat_adj)
print(f'The hypotenuse of a right triangle whose legs are {cat_op:.2f} and {cat_adj:.2f} is {hip:.2f}.')
[ 2, 6738, 10688, 1330, 8813, 198, 11748, 10688, 198, 4798, 10786, 11639, 9, 20, 11, 705, 2601, 3129, 78, 491, 278, 43348, 1005, 782, 43348, 3256, 705, 11639, 9, 20, 8, 198, 9246, 62, 404, 796, 12178, 7, 15414, 10786, 19511, 578, 267,...
2.418301
153
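As a quick sanity check of the math.hypot call used above, the classic 3-4-5 right triangle:

# math.hypot(a, b) returns sqrt(a*a + b*b); for legs 3 and 4 that is 5.
import math
assert math.hypot(3.0, 4.0) == 5.0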
from random import randint

numeros = (randint(0, 10), randint(0, 10), randint(0, 10),
           randint(0, 10), randint(0, 10))
print('The five numbers are: ', end='')
for n in numeros:  # print the drawn numbers
    print(n, end=' ')
print(f'\nThe LARGEST number is {max(numeros)}')
print(f'The SMALLEST number is {min(numeros)}')
[ 6738, 4738, 1330, 43720, 600, 198, 77, 6975, 418, 796, 357, 25192, 600, 7, 15, 11, 838, 828, 43720, 600, 7, 15, 11, 838, 828, 43720, 600, 7, 15, 11, 838, 828, 43720, 600, 7, 15, 11, 838, 828, 43720, 600, 7, 15, 11, 838, 4008, ...
2.226277
137
#!/usr/bin/python3
# -*- coding: utf-8 -*-

"""
Maxwell Macros
v0.7.0

by Sam Neurohack
from /team/laser

Launchpad sets a "current path"
"""

from OSC3 import OSCServer, OSCClient, OSCMessage
import time
import numpy as np
import rtmidi
from rtmidi.midiutil import open_midiinput
from threading import Thread
from rtmidi.midiconstants import (CHANNEL_PRESSURE, CONTROLLER_CHANGE, NOTE_ON, NOTE_OFF,
                                  PITCH_BEND, POLY_PRESSURE, PROGRAM_CHANGE)
import os, json
import midi3

if os.uname()[1] == 'raspberrypi':
    pass

port = 8090
ip = "127.0.0.1"

mididest = 'Session 1'
djdest = 'Port'
midichannel = 1

computerIP = ['127.0.0.1', '192.168.2.95', '192.168.2.52', '127.0.0.1',
              '127.0.0.1', '127.0.0.1', '127.0.0.1', '127.0.0.1']
computer = 0

# store current value for computer 1
cc1 = [0] * 140

current = {
    "patch": 0,
    "prefixLeft": "/osc/left/X",
    "prefixRight": "/osc/right/X",
    "suffix": "/amp",
    "path": "/osc/left/X/curvetype",
    "pathLeft": "/osc/left/X/curvetype",
    "pathRight": "/osc/left/X/curvetype",
    "previousmacro": -1,
    "LeftCurveType": 0,
    "lfo": 1,
    "rotator": 1,
    "translator": 1
}

specificvalues = {
    # Sine: 0-32, Tri: 33-64, Square: 65-96, Line: 96-127
    "curvetype": {"sin": 0, "saw": 33, "squ": 95, "lin": 127},
    "freqlimit": {"1": 0, "4": 26, "16": 52, "32": 80, "127": 127},
    "amptype": {"constant": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
    "phasemodtype": {"linear": 0, "sin": 90},
    "phaseoffsettype": {"manual": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
    "ampoffsettype": {"manual": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
    "inversion": {"off": 0, "on": 127},
    "colortype": {"solid": 0, "lfo": 127},
    "modtype": {"sin": 0, "linear": 127},
    "switch": {"off": 0, "on": 127},
    "operation": {"+": 0, "-": 50, "*": 127}
}

#
# Maxwell CCs
#
# /cc cc number value

# Jog sends 127 to the left and 1 to the right:
# increase or decrease the current CC defined in the current path

# Parameter change : to left 127 / to right 0 or 1
# Change type : triggered only with midi value 127 on a CC event
# Left cue button 127 = on 0 = off
# Right cue button 127 = on 0 = off

# increase/decrease a CC
[ 2, 48443, 14629, 14, 8800, 14, 29412, 18, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 198, 37811, 198, 198, 11518, 4053, 4100, 4951, 198, 85, 15, 13, 22, 13, 15, 198, 198, 1525, 3409, 13782, 31153, 220, 1...
2.301397
1,002
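The `specificvalues` table in the snippet above packs named settings into 0-127 MIDI CC ranges. A small illustrative helper (not part of the original script) shows the lookup:

# Hypothetical helper: look up the CC value that encodes a named setting
# from the `specificvalues` table defined above.
def cc_for(parameter, name):
    """E.g. cc_for('curvetype', 'saw') -> 33."""
    return specificvalues[parameter][name]

# Per the table's own comment, 33 is the start of the Tri band (33-64):
assert cc_for('curvetype', 'saw') == 33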
import numpy as np
import matplotlib.pyplot as plt
from pylab import *
#import pyspeckit as ps
from scipy import io
from scipy import stats
from scipy.optimize import leastsq
#from lmfit import minimize, Parameters, Parameter, report_fit
#from lmfit.models import GaussianModel
import scipy.optimize as optimization
import matplotlib.ticker as ticker
import cmath as math
import pickle
import iminuit
import astropy.io.fits as pf
import os,glob
#import string,math,sys,fileinput,glob,time  #load modules
#from pylab import *
import subprocess as sub
import re
#from plot_components import get_ellipse_coords, ellipse_axis
import urllib2
from astropy import units as u
#from astropy.coordinates import SkyCoord

#FUNCTION TO READ THE HEADER AND TAKE IMPORTANT PARAMETERS AS
#cell
#BMAJ, BMIN, BPA
#date, freq and epoch

def atoi(text):
    ''' Convert digit runs to ints so that natural_keys sorts numerically '''
    return int(text) if text.isdigit() else text

def natural_keys(text):
    '''
    alist.sort(key=natural_keys) sorts in human order
    http://nedbatchelder.com/blog/200712/human_sorting.html
    (See Toothy's implementation in the comments)
    '''
    return [ atoi(c) for c in re.split('(\d+)', text) ]

def get_ellipse_coords(a=0.0, b=0.0, x=0.0, y=0.0, angle=0.0, k=2):
    """ Draws an ellipse using (360*k + 1) discrete points; based on pseudo code
    given at http://en.wikipedia.org/wiki/Ellipse
    k = 1 means 361 points (degree by degree)
    a = major axis distance,
    b = minor axis distance,
    x = offset along the x-axis
    y = offset along the y-axis
    angle = clockwise rotation [in degrees] of the ellipse;
        * angle=0  : the ellipse is aligned with the positive x-axis
        * angle=30 : rotated 30 degrees clockwise from positive x-axis
    """
    pts = np.zeros((360*k+1, 2))

    beta = -angle * np.pi/180.0
    sin_beta = np.sin(beta)
    cos_beta = np.cos(beta)
    alpha = np.radians(np.r_[0.:360.:1j*(360*k+1)])

    sin_alpha = np.sin(alpha)
    cos_alpha = np.cos(alpha)

    pts[:, 0] = x + (a * cos_alpha * cos_beta - b * sin_alpha * sin_beta)
    pts[:, 1] = y + (a * cos_alpha * sin_beta + b * sin_alpha * cos_beta)

    return pts
[ 11748, 299, 32152, 355, 45941, 198, 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 6738, 279, 2645, 397, 1330, 1635, 198, 2, 11748, 279, 893, 431, 694, 270, 355, 26692, 198, 6738, 629, 541, 88, 1330, 33245, 198, 6738, ...
2.581773
801
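A short usage sketch for the two helpers in the snippet above, showing the "human order" the docstring refers to:

# natural_keys sorts embedded numbers numerically rather than lexicographically:
labels = ['comp10', 'comp2', 'comp1']
labels.sort(key=natural_keys)
assert labels == ['comp1', 'comp2', 'comp10']  # plain sort() would give comp1, comp10, comp2

# get_ellipse_coords returns a (360*k+1, 2) array tracing the ellipse, e.g. a
# 2x1 ellipse centred at the origin, rotated 30 degrees clockwise:
pts = get_ellipse_coords(a=2.0, b=1.0, angle=30.0)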
# Copyright 2013 Daniel Narvaez # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil from distutils.sysconfig import parse_makefile from osbuild import config from osbuild import command _dist_builders = {} _dist_builders['autotools'] = _autotools_dist_builder
[ 2, 15069, 2211, 7806, 13596, 33353, 89, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 198, 2, 921...
3.682243
214
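The snippet above registers builder functions in a dict keyed by build-system name; `_autotools_dist_builder` itself is elided from the excerpt. A hypothetical dispatch through such a registry could look roughly like this (the key name and module shape are assumptions, not osbuild's actual API):

# Sketch only: dispatch a module description to the registered dist builder.
def build_dist(module):
    try:
        builder = _dist_builders[module['build-system']]
    except KeyError:
        raise RuntimeError('No dist builder for %r' % module)
    builder(module)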
# node to capture and communicate game status # written by Russell on 5/18
[ 2, 10139, 284, 8006, 290, 10996, 983, 3722, 198, 2, 3194, 416, 11563, 319, 642, 14, 1507, 628 ]
4.222222
18
""" """ import sqlite3 import numpy as np import math
[ 37811, 220, 198, 198, 37811, 198, 11748, 44161, 578, 18, 198, 11748, 299, 32152, 355, 45941, 220, 198, 11748, 10688, 220, 628, 628, 220, 220, 220, 220, 628, 198 ]
2.344828
29
from .lattice import skew_normal_density, center_density,\
    state_prices_from_offsets, densities_and_coefs_from_offsets, winner_of_many,\
    expected_payoff, densities_from_offsets, implicit_state_prices, densitiesPlot
import pandas as pd   # todo: get rid of this dependency
import numpy as np

RACING_L = 500
RACING_UNIT = 0.1
RACING_SCALE = 1.0
RACING_A = 1.0


def make_nan_2000(x):
    """ Longshots """
    if pd.isnull(x):
        return 2000.
    else:
        return x


def normalize(p):
    """ Naive renormalization of probabilities """
    S = sum(p)
    return [pr / S for pr in p]


def prices_from_dividends(dividends):
    """ Risk neutral probabilities using naive renormalization """
    return normalize([1. / make_nan_2000(x) for x in dividends])


def dividends_from_prices(prices):
    """ Australian style dividends """
    return [1. / d for d in normalize(prices)]


def racing_density(loc):
    """ A rough and ready distribution of performance distributions for one round """
    density = skew_normal_density(L=RACING_L, unit=RACING_UNIT, loc=0, scale=RACING_SCALE, a=RACING_A)
    return center_density(density)


def _assert_descending(xs):
    assert all(a >= b for a, b in zip(xs, xs[1:])), "offset_samples should be descending"


def dividend_implied_ability(dividends, density):
    """ Infer risk-neutral implied_ability from Australian style dividends
    :param dividends: [ 7.6, 12.0, ... ]
    :return: [ float ] Implied ability
    """
    state_prices = prices_from_dividends(dividends)
    implied_offsets_guess = [0 for _ in state_prices]
    L = len(density) // 2
    offset_samples = list(range(-L // 4, L // 4))[::-1]
    ability = implied_ability(prices=state_prices, density=density,
                              offset_samples=offset_samples,
                              implied_offsets_guess=implied_offsets_guess, nIter=3)
    return ability


def ability_implied_dividends(ability, density):
    """ Return betfair style prices
    :param ability:
    :return: [ 7.6, 12.3, ... ]
    """
    state_prices = state_prices_from_offsets(density=density, offsets=ability)
    return [1. / sp for sp in state_prices]


def implied_ability(prices, density, offset_samples=None, implied_offsets_guess=None,
                    nIter=3, verbose=False, visualize=False):
    """ Finds location translations of a fixed density so as to replicate given state prices for winning """
    L = len(density)
    if offset_samples is None:
        offset_samples = list(range(-L // 4, L // 4))[::-1]  # offset_samples should be descending
    else:
        _assert_descending(offset_samples)
    if implied_offsets_guess is None:
        implied_offsets_guess = list(range(len(prices)))

    # First guess at densities
    densities, coefs = densities_and_coefs_from_offsets(density, implied_offsets_guess)
    densityAllGuess, multiplicityAllGuess = winner_of_many(densities)
    densityAll = densityAllGuess.copy()
    multiplicityAll = multiplicityAllGuess.copy()
    guess_prices = [np.sum(expected_payoff(density, densityAll, multiplicityAll, cdf=None, cdfAll=None))
                    for density in densities]

    for _ in range(nIter):
        if visualize:
            # temporary hack to check progress of optimization
            densitiesPlot([densityAll] + densities, unit=0.1)
        implied_prices = implicit_state_prices(density=density, densityAll=densityAll,
                                               multiplicityAll=multiplicityAll, offsets=offset_samples)
        implied_offsets = np.interp(prices, implied_prices, offset_samples)
        densities = densities_from_offsets(density, implied_offsets)
        densityAll, multiplicityAll = winner_of_many(densities)
        guess_prices = [np.sum(expected_payoff(density, densityAll, multiplicityAll, cdf=None, cdfAll=None))
                        for density in densities]
        approx_prices = [np.round(pri, 3) for pri in prices]
        approx_guesses = [np.round(pri, 3) for pri in guess_prices]
        if verbose:
            print(list(zip(approx_prices, approx_guesses))[:5])

    return implied_offsets
[ 6738, 764, 75, 1078, 501, 1330, 43370, 62, 11265, 62, 43337, 11, 3641, 62, 43337, 11, 59, 198, 220, 220, 220, 1181, 62, 1050, 1063, 62, 6738, 62, 8210, 1039, 11, 29509, 871, 62, 392, 62, 1073, 891, 82, 62, 6738, 62, 8210, 1039, ...
2.505376
1,674
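A worked example of the dividend/state-price round trip defined in the snippet above. Dividends of 2.0, 4.0 and 4.0 have inverse prices 0.5, 0.25 and 0.25, which already sum to 1, so the naive renormalization leaves them unchanged:

assert prices_from_dividends([2.0, 4.0, 4.0]) == [0.5, 0.25, 0.25]
assert dividends_from_prices([0.5, 0.25, 0.25]) == [2.0, 4.0, 4.0]
# A missing (NaN) dividend is treated as a 2000/1 longshot by make_nan_2000.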
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import re
[ 2, 15069, 2211, 12, 1238, 2481, 13914, 45036, 3549, 2351, 4765, 11, 11419, 290, 584, 198, 2, 1338, 441, 4935, 34152, 13, 4091, 262, 1353, 12, 5715, 27975, 38162, 9947, 2393, 329, 3307, 13, 198, 2, 198, 2, 30628, 55, 12, 34156, 12, ...
3.42623
61
from aiohttp_micro.web.handlers.openapi import PayloadSchema, ResponseSchema from marshmallow import fields, post_load, Schema from wallet.core.entities.categories import CategoryFilters from wallet.web.schemas.abc import CollectionFiltersSchema
[ 6738, 257, 952, 4023, 62, 24055, 13, 12384, 13, 4993, 8116, 13, 9654, 15042, 1330, 7119, 2220, 27054, 2611, 11, 18261, 27054, 2611, 198, 6738, 22397, 42725, 1330, 7032, 11, 1281, 62, 2220, 11, 10011, 2611, 198, 198, 6738, 13008, 13, 7...
3.5
72
SConscript('Mkdocs/Common/SConscript.py') SConscript('Pandoc/Common/SConscript.py') SConscript('Doxygen/Common/SConscript.py')
[ 6173, 684, 6519, 10786, 44, 74, 31628, 14, 17227, 14, 6173, 684, 6519, 13, 9078, 11537, 198, 6173, 684, 6519, 10786, 47206, 420, 14, 17227, 14, 6173, 684, 6519, 13, 9078, 11537, 198, 6173, 684, 6519, 10786, 35, 23536, 5235, 14, 17227,...
2.54
50
#!/usr/bin/env python # -*- coding:utf-8 -*- # Author: Donny You(youansheng@gmail.com) # Some methods used by main methods. from __future__ import absolute_import from __future__ import division from __future__ import print_function import math import os from collections import OrderedDict import torch import torch.nn as nn from torch.nn.parallel.scatter_gather import gather as torch_gather from lib.extensions.parallel.data_parallel import DataParallelModel from lib.utils.tools.logger import Logger as Log from lib.utils.distributed import get_rank, is_distributed
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 40477, 12, 23, 532, 9, 12, 198, 2, 6434, 25, 2094, 3281, 921, 7, 5832, 504, 31753, 31, 14816, 13, 785, 8, 198, 2, 2773, 5050, 973, 416, 1388, 5050, 13...
3.32948
173
import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt import pickle import scipy.stats as stats data_path = '../data/het_average.dat' output_dir = '../figures/' # Configure matplotlib environment helvetica_scale_factor = 0.92 # rescale Helvetica to other fonts of same size mpl.rcParams['font.size'] = 10 * helvetica_scale_factor mpl.rcParams['font.family'] = 'sans-serif' mpl.rcParams['font.sans-serif'] = 'Helvetica Neue' mpl.rcParams['axes.titlesize'] = 12 * helvetica_scale_factor single_col_width = 3.43 # = 8.7 cm double_col_width = 7.01 # = 17.8 cm if __name__ == '__main__': with open(data_path, 'rb') as f_in: het_averages = pickle.load(f_in) plot_het_comparison(het_averages) ne_global = fit_Ne(het_averages, averaging='global') ne_local = fit_Ne(het_averages, averaging='local') print('Ne (global averaging): ', ne_global) print('Ne (local averaging): ', ne_local) print('Ne difference: ', 100 * (ne_global - ne_local) / ne_global, '%')
[ 11748, 299, 32152, 355, 45941, 198, 11748, 2603, 29487, 8019, 355, 285, 489, 198, 11748, 2603, 29487, 8019, 13, 9078, 29487, 355, 458, 83, 198, 11748, 2298, 293, 198, 11748, 629, 541, 88, 13, 34242, 355, 9756, 198, 198, 7890, 62, 6978...
2.551637
397
from random import random, choice, seed, shuffle, randint from math import ceil import copy target = [ 2, 2, 3, 1, 4, 5 ] consonants_base = [ 'p', 't', 'k', 'm', 'n' ] vowels = [ [ 'a', 'i', 'u' ], [ 'a', 'i', 'u', 'e', 'o' ], [ 'a', 'A', 'i', 'I', 'u', 'U', 'e', 'E', 'o', 'O' ] ] consonants_extra = [ 'b', 'd', 'j', 's', 'z', 'y', 'q', 'G', '?', 'N', 'r', 'f', 'v', 'T', 'D', 'S', 'Z', 'x', 'h', 'w', 'l', 'C' ] sibilants = [ ['s',], [ 's', 'S' ], ['s', 'S', 'f'] ] liquids = [ ['r'], ['l'], ['r','l'], ['w','y'], ['r','l','w','y'] ] orthography1 = { 'name':'nordic', 'j':'dz', 'y':'j', 'T':'th', 'D':'', 'S':'sh', 'Z':'zh', 'N':'ng', '?':"'", 'G':'q', 'C':'ch', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' } orthography2 = { 'name':'czech', 'T':'th', 'D':'th', 'S':'', 'Z':'', 'C':'', 'G':'q', 'N':'ng', '?':'-', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' } orthography3 = { 'name':'french', 'T':'th', 'D':'th', 'S':'ch', 'G':'gh', 'C':'tc', '?':"'", 'N':'ng', 'Z':'z', 'k':'c', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' } orthography4 = { 'name':'mexica', 'k':'c', 'G':'gh', 'N':'ng', 'T':'th', 'D':'th', 'S':'x', 'C':'ch', '?':"'", 'Z':'zh', 'A':'', 'E':'', 'I':'', 'O':'', 'U':'' } orthographies = ( orthography1, orthography2, orthography3, orthography4 ) syllables = ( [ 'CV', ], [ 'CV', 'V' ], [ 'CV', 'CVC' ], [ 'CV', 'CVC', 'V' ], [ 'CVC', ], [ 'CVC', 'CRVC', 'CV', 'CRV' ], [ 'CVC', 'CRVC', 'CVRC', 'CV', 'CRV' ], [ 'CVC', 'CRVC', 'CVCC', 'CRVCC', 'CV', 'CRV' ], [ 'CVC', 'CRVC', 'CVRC', 'CVCC', 'CRVCC', 'CV', 'CRV' ], [ 'CV', 'CVC', 'SCV', 'SCVC' ], [ 'CVC', 'CVCC', 'SVC', 'SVCC', 'CV', 'SCV' ], [ 'CVC', 'CVCC', 'CRVC', 'SCVC', 'SCRVC', 'CV', 'CRV', 'SCV', 'SCRV' ] ) government = [ 'Republic of ', 'Kingdom of ', 'Confederacy of ', 'Satrapy of ','Empire of ' ] ''' lang1 = language() for j in range(10): print('Language '+str(j+1)) for i in range(5): word = lang1.cityname() print(lang1.orthographic(word).title()) lang1 = lang1.derive() print(' ') '''
[ 6738, 4738, 1330, 4738, 11, 3572, 11, 9403, 11, 36273, 11, 43720, 600, 198, 6738, 10688, 1330, 2906, 346, 198, 11748, 4866, 198, 198, 16793, 796, 685, 362, 11, 362, 11, 513, 11, 352, 11, 604, 11, 642, 2361, 198, 198, 5936, 261, 11...
1.794643
1,232
from django import template from week.models import SidebarContentPage,SidebarImagePage register = template.Library()
[ 6738, 42625, 14208, 1330, 11055, 198, 6738, 1285, 13, 27530, 1330, 12075, 5657, 19746, 9876, 11, 24819, 5657, 5159, 9876, 198, 198, 30238, 796, 11055, 13, 23377, 3419, 198 ]
4.103448
29
#------------------------------------------------------------------------------- # A mock physics package to mess around with the CRKSPH corrections. #------------------------------------------------------------------------------- from Spheral1d import *
[ 2, 10097, 24305, 198, 2, 317, 15290, 11887, 5301, 284, 2085, 1088, 351, 262, 8740, 42, 4303, 39, 26251, 13, 198, 2, 10097, 24305, 198, 6738, 1338, 372, 282, 16, 67, 1330, 1635, 628 ]
7.529412
34
import os
import requests
import time
import uuid
import configparser
import datetime
import fbchat
import re

from fbchat import Client, ImageAttachment
from fbchat import FBchatException
from pathlib import Path

politeness_index = 0.5  # ;)
epoch = datetime.datetime(1970, 1, 1)

# Hack to get the login to work, see: https://github.com/fbchat-dev/fbchat/issues/615#issuecomment-716089816
fbchat._state.FB_DTSG_REGEX = re.compile(r'"name":"fb_dtsg","value":"(.*?)"')


def download_file_from_url(url, target_path):
    """
    Download image from a given URL to a specified target path.

    :param url: URL of file to download
    :param target_path: Local target path to save the file
    :type url: str
    :type target_path: str
    """
    if url is not None:
        r = requests.get(url)
        with open(target_path, 'wb') as f:
            print('\tDownloading image to {path}'.format(path=target_path))
            f.write(r.content)


def convert_date_to_epoch(date, as_int=True):
    """
    Convert a given date string to epoch (int in milliseconds)

    :param date: Date string (preferred format %Y-%m-%d)
    :param as_int: Return unix timestamp as an integer value, instead of a float
    :type date: str
    :type as_int: int
    :return: int
    """
    try:
        dt = datetime.datetime.strptime(date, '%Y-%m-%d')
        res = ((dt - epoch).total_seconds() * 1000.0)  # convert to milliseconds
        return int(res) if as_int else res
    except ValueError:
        return None


def convert_epoch_to_datetime(timestamp, dt_format='%Y-%m-%d_%H.%M.%S'):
    """
    Convert epoch (unix time in ms) to a datetime string

    :param timestamp: Unix time in ms
    :param dt_format: Format of datetime string
    :type timestamp: str
    :type dt_format: str
    :return:
    """
    s = int(timestamp) / 1000.0
    dt_str = datetime.datetime.fromtimestamp(s).strftime(dt_format)
    return dt_str


if __name__ == '__main__':
    config_path = Path('.') / 'config.ini'
    if os.path.exists(config_path) is False:
        raise Exception("Please create config.ini under this script's current directory")

    # Load config file
    config = configparser.ConfigParser()
    config.read(config_path)

    download_path = config.get('Download', 'path')
    if os.path.exists(download_path) is False:
        raise Exception("The path specified in download_path does not exist ({path}). Please specify a valid path in "
                        "config.ini".format(path=download_path))

    # Initialize FB Client
    fb_email = config.get('Credentials', 'email')
    fb_pw = config.get('Credentials', 'password')
    user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36"
    fb_client = Client(fb_email, fb_pw, user_agent=user_agent)

    # Search for latest threads
    thread_search_limit = int(config.get('Threads', 'search_limit'))
    thread_search_before = convert_date_to_epoch(config.get('Threads', 'before_date'))
    if thread_search_before is not None:
        threads = fb_client.fetchThreadList(limit=thread_search_limit, before=thread_search_before)
    else:
        threads = fb_client.fetchThreadList(limit=thread_search_limit)

    # Find correct thread for given user URL
    my_thread = None
    friend_url = config.get('Friend', 'url')
    for thread in threads:
        if hasattr(thread, 'url') and (thread.url == friend_url):
            my_thread = thread
            break

    # Get Messages for my_thread
    if my_thread is not None:
        thread_message_count = my_thread.message_count
        thread_message_name = my_thread.name
        print('Found {count} messages in thread with {friend_name}'.format(count=thread_message_count,
                                                                           friend_name=thread_message_name))

        message_before_date = config.get('Messages', 'before_date')
        message_search_limit = int(config.get('Messages', 'search_limit'))
        message_search_before = convert_date_to_epoch(message_before_date)
        if message_search_limit > thread_message_count:
            message_search_limit = thread_message_count
            print('\tWarning: Message search limit was greater than the total number of messages in thread.\n')
        if message_search_before is not None:
            messages = fb_client.fetchThreadMessages(my_thread.uid, limit=message_search_limit,
                                                     before=message_search_before)
            print('Searching for images in the {message_limit} messages sent before {before_date}...'.format(
                message_limit=message_search_limit, before_date=message_before_date))
        else:
            messages = fb_client.fetchThreadMessages(my_thread.uid, limit=message_search_limit)
            print('Searching for images in the last {message_limit} messages...'.format(
                message_limit=message_search_limit))

        sender_id = None
        if config.getboolean('Media', 'sender_only'):
            sender_id = my_thread.uid
            print('\tNote: Only images sent by {friend_name} will be downloaded (as specified by sender_only in your '
                  'config.ini)'.format(friend_name=thread_message_name))

        # Extract Image attachments' full-sized image signed URLs (along with their original file extension)
        total_count = 0
        skip_count = 0
        full_images = []
        last_message_date = None
        print('\n')
        extension_blacklist = str.split(config.get('Media', 'ext_blacklist'), ',')
        for message in messages:
            message_datetime = convert_epoch_to_datetime(message.timestamp)
            if len(message.attachments) > 0:
                if (sender_id is None) or (sender_id == message.author):
                    for attachment in message.attachments:
                        if isinstance(attachment, ImageAttachment):
                            try:
                                attachment_ext = str.lower(attachment.original_extension)
                                if attachment_ext not in extension_blacklist:
                                    full_images.append({
                                        'extension': attachment_ext,
                                        'timestamp': message_datetime,
                                        'full_url': fb_client.fetchImageUrl(attachment.uid)
                                    })
                                    print('+', sep=' ', end='', flush=True)
                                else:
                                    skip_count += 1
                                    print('-', sep=' ', end='', flush=True)
                                total_count += 1
                            except FBchatException:
                                pass  # ignore errors
            last_message_date = message_datetime

        # Download Full Images
        if len(full_images) > 0:
            images_count = len(full_images)
            print('\n\nFound a total of {total_count} images. Skipped {skip_count} images that had a blacklisted '
                  'extension'.format(total_count=total_count, skip_count=skip_count))
            print('Attempting to download {count} images...................\n'.format(count=images_count))
            for full_image in full_images:
                friend_name = str.lower(my_thread.name).replace(' ', '_')
                file_uid = str(uuid.uuid4())
                file_ext = full_image['extension']
                file_timestamp = full_image['timestamp']
                img_url = full_image['full_url']
                image_path = ''.join([download_path, '\\', 'fb-image-', file_uid, '-', friend_name, '-',
                                      file_timestamp, '.', file_ext])
                download_file_from_url(img_url, image_path)
                # Sleep half a second between file downloads to avoid getting flagged as a bot
                time.sleep(politeness_index)
        else:
            print('No images to download in the last {count} messages'.format(count=message_search_limit))

        # Reminder of last message found
        print('\nLast message scanned for image attachments was dated: {last_message_date}'.format(
            last_message_date=last_message_date))
    else:
        print('Thread not found for URL provided')
[ 11748, 28686, 198, 11748, 7007, 198, 11748, 640, 198, 11748, 334, 27112, 198, 11748, 4566, 48610, 198, 11748, 4818, 8079, 198, 11748, 277, 65, 17006, 198, 11748, 302, 198, 198, 6738, 277, 65, 17006, 1330, 20985, 11, 7412, 8086, 15520, 1...
2.251465
3,754
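A quick sanity check for the epoch helpers in the script above (the datetimes involved are naive, i.e. timezone-free):

assert convert_date_to_epoch('2020-01-01') == 1577836800000  # ms since 1970-01-01
assert convert_date_to_epoch('not-a-date') is None           # strptime failure returns None
# convert_epoch_to_datetime uses fromtimestamp(), so its output depends on the
# machine's local timezone; on a UTC machine:
#   convert_epoch_to_datetime(1577836800000) -> '2020-01-01_00.00.00'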
from django.contrib.auth.models import AbstractUser from django.db.models import (BooleanField, CASCADE, CharField, FloatField, IntegerField, ManyToManyField, Model, OneToOneField, PositiveSmallIntegerField) from django.contrib.postgres.fields import ArrayField from django.urls import reverse from django.utils.translation import ugettext_lazy as _ from django.contrib.auth.models import User
[ 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 27530, 1330, 27741, 12982, 198, 6738, 42625, 14208, 13, 9945, 13, 27530, 1330, 357, 46120, 13087, 15878, 11, 35106, 34, 19266, 11, 3178, 15878, 11, 48436, 15878, 11, 198, 220, 220, 220, ...
2.645349
172
import os,time ## File Variable (USER INPUT) ## ========================== ## if multiple files are being accessed to create movie... ## ...specify the beginning and ending of the file names... ## ...and the date list text file in the variables below ## Please use True or False to set whether multiple files will be accessed for movie file_is_variable = False ## If file_is_variable = True ## -------------------------- ## make sure to leave trailing slash '/' on 'path_to_files' path_to_files = '/path/to/files/' ## For series of files with similar prefixes (file_part1) and filetypes (file_part2) file_part1 = 'pre.fixes.' file_part2 = '.nc' ## location of file listing (with each entry on a new line) the variable part of the filename dates_list_text_file = '/path/to/file/variable_list.txt' ## If file_is_variable = False ## --------------------------- #file = '/path/to/single/file.nc' file = '/Users/Jon/Documents/other_projects/Aluie/visuals/1-12/mapdraw/sgs.nc' ## Variables (USER INPUT) ## ====================== ## all variable lists must be the same length ## set unused variables equal to '_empty_' ## if variable requires double-quotes on command line include them --> '" ... "' ## ----------------------------------------------------------------------------- data = 'sgsflux' #cannot be '_empty_' lat = 'u_lat' #cannot be '_empty_' lon = 'u_lon' #cannot be '_empty_' depth = 'w_dep,9' #cannot be '_empty_' mask = '-1e33,#000000' maxr = '100' #use for 'max' minr = '-100' #use for 'min' norm = '_empty_' colors = '"0:#0000AA,45:#0000FF,50:#FFFFFF,55:#FF0000,100:#AA0000"' clr_min_max = '_empty_' title = '_empty_' crop = '_empty_' lines = '_empty_' ## Sphere (for mapping onto Earth's spherical representation) ## ---------------------------------------------------------- ## For use of 'sphere' set to True. If not leave False. 
sphere_mapping = False
## Number of images (must match other variable list lengths from above)
sphere_frames = 3
## Start and stop points of sphere rotation (leave start/stop the same for no rotation in lat/lon)
sphere_lon_start = -10
sphere_lon_stop = 10
sphere_lat_start = -10
sphere_lat_stop = 10
## 'zoom' argument described in README file (leave False if zoom = 1)
zoom = 1.5

## Primary Variable (USER INPUT)
## =============================
## choose from the variables above
## specify without quotes
## if not a list will only output single result
## --------------------------------------------
primary_variable = file

## Save Location (USER INPUT)
## ==========================
## provide folder location (without filename(s))
## ---------------------------------------------
save = '/Users/Jon/Desktop/'

## Image Filename Prefix (USER INPUT)
## ==================================
## prefix for output filenames before auto-incremented counter
## -----------------------------------------------------------
file_prefix = 'img_'

## Image Counter Start (USER INPUT)
## ================================
## start of auto-incremented counter
## ---------------------------------
count_start = 0

## Image File Type (USER INPUT)
## ============================
## ex: '.png' or '.jpg'
## --------------------
img_type = '.png'

## Display Toggle (USER INPUT)
## ==========================
## toggle if each image displays in the loop
## use 'yes' or 'no' to control display preference
## -----------------------------------------------
display = 'no'

# # # # # # # # # # # # # # # # # # # # # # # # #
#    ---- NO USER INPUTS AFTER THIS POINT ----   #
# # # # # # # # # # # # # # # # # # # # # # # # #

## If 'file' is variable this establishes list of files to loop through (Do Not Alter)
## ===================================================================================
if file_is_variable:
    file1 = []
    file0 = open(dates_list_text_file,'r').read().splitlines()
    for line in file0:
        file1.append(str(path_to_files) + str(file_part1) + str(line) + str(file_part2))
    file = file1
    primary_variable = file

## Parsing of 'sphere' rotation inputs (Do Not Alter)
## ==================================================
if sphere_mapping:
    lon_step = ( sphere_lon_stop - sphere_lon_start ) / ( sphere_frames - 1 )
    lat_step = ( sphere_lat_stop - sphere_lat_start ) / ( sphere_frames - 1 )
    sphere = []
    for i in range(sphere_frames):
        sphere.append(str(sphere_lon_start + lon_step * i)+','+str(sphere_lat_start + lat_step * i))
    primary_variable = sphere

## Defining & Executing Command Expression (Do Not Alter)
## ======================================================
displayx = 'display ' + display
command = displayx
if title != '_empty_':
    titlex = ' title ' + str(title)
    command = command + titlex
if lines != '_empty_':
    linesx = ' lines ' + str(lines)
    command = command + linesx
if type(primary_variable) is list:
    loop_len = len(primary_variable)
else:
    loop_len = 1
for i in range(loop_len):
    savex = ' save ' + str(save) + str(file_prefix) + str(i + int(count_start)) + str(img_type)
    command = command + savex
    if type(file) is list:
        filei = file[i]
    else:
        filei = file
    if filei != '_empty_':
        filex = ' file ' + str(filei)
        command = command + filex
    if type(data) is list:
        datai = data[i]
    else:
        datai = data
    if datai != '_empty_':
        datax = ' data ' + str(datai)
        command = command + datax
    if type(lat) is list:
        lati = lat[i]
    else:
        lati = lat
    if lati != '_empty_':
        latx = ' lat ' + str(lati)
        command = command + latx
    if type(lon) is list:
        loni = lon[i]
    else:
        loni = lon
    if loni != '_empty_':
        lonx = ' lon ' + str(loni)
        command = command + lonx
    if type(depth) is list:
        depthi = depth[i]
    else:
        depthi = depth
    if depthi != '_empty_':
        depthx = ' depth ' + str(depthi)
        command = command + depthx
    if type(mask) is list:
        maski = mask[i]
    else:
        maski = mask
    if maski != '_empty_':
        maskx = ' mask ' + str(maski)
        command = command + maskx
    if type(maxr) is list:
        maxri = maxr[i]
    else:
        maxri = maxr
    if maxri != '_empty_':
        maxrx = ' max ' + str(maxri)
        command = command + maxrx
    if type(minr) is list:
        minri = minr[i]
    else:
        minri = minr
    if minri != '_empty_':
        minrx = ' min ' + str(minri)
        command = command + minrx
    if type(norm) is list:
        normi = norm[i]
    else:
        normi = norm
    if normi != '_empty_':
        normx = ' norm ' + str(normi)
        command = command + normx
    if type(crop) is list:
        cropi = crop[i]
    else:
        cropi = crop
    if cropi != '_empty_':
        cropx = ' crop ' + str(cropi)
        command = command + cropx
    if type(colors) is list:
        colorsi = colors[i]
    else:
        colorsi = colors
    if colorsi != '_empty_':
        colorsx = ' colors ' + str(colorsi)
        command = command + colorsx
    if type(clr_min_max) is list:
        clr_min_maxi = clr_min_max[i]
    else:
        clr_min_maxi = clr_min_max
    if clr_min_maxi != '_empty_':
        clr_min_maxx = ' clr_min_max ' + str(clr_min_maxi)
        command = command + clr_min_maxx
    if sphere_mapping:
        spherei = sphere[i]
        spherex = ' sphere ' + str(spherei)
        command = command + spherex
    if type(zoom) is list:
        zoomi = zoom[i]
    elif zoom:
        zoomi = zoom
    if zoom:
        zoomx = ' zoom ' + str(zoomi)
        command = command + zoomx
    time0 = time.time()
    os.system('python map.py ' + command)
    if display == 'no':
        print(str(i) + ' - ' + str(round((time.time() - time0), 2)) + ' sec')
[ 11748, 28686, 11, 2435, 198, 198, 2235, 9220, 35748, 357, 29904, 3268, 30076, 8, 198, 2235, 36658, 2559, 28, 198, 2235, 611, 3294, 3696, 389, 852, 17535, 284, 2251, 3807, 986, 198, 2235, 2644, 16684, 1958, 262, 3726, 290, 7464, 286, 2...
2.733618
2,808
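For reference, here is roughly the command the loop above assembles on its first iteration under the defaults shown in the snippet (line breaks added only for readability; '_empty_' variables such as norm, title and crop are simply skipped):

# python map.py display no save /Users/Jon/Desktop/img_0.png \
#     file /Users/Jon/Documents/other_projects/Aluie/visuals/1-12/mapdraw/sgs.nc \
#     data sgsflux lat u_lat lon u_lon depth w_dep,9 mask -1e33,#000000 \
#     max 100 min -100 \
#     colors "0:#0000AA,45:#0000FF,50:#FFFFFF,55:#FF0000,100:#AA0000" zoom 1.5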
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""gaetk2.tools.auth0.py - Tools for working with auth0

Created by Maximillian Dornseif on 2017-12-05.
Copyright 2017 HUDROA. MIT Licensed.
"""
from __future__ import unicode_literals

import logging

from google.appengine.api import memcache

from auth0.v3.authentication import GetToken
from auth0.v3.exceptions import Auth0Error
from auth0.v3.management import Auth0
from gaetk2.config import gaetkconfig

logger = logging.getLogger(__name__)


def get_auth0_access_token():
    """Get a Token for the Management-API."""
    ret = memcache.get('get_auth0_access_token()')
    if not ret:
        assert gaetkconfig.AUTH0_DOMAIN != '*unset*'
        assert gaetkconfig.AUTH0_CLIENT_ID != '*unset*'
        get_token = GetToken(gaetkconfig.AUTH0_DOMAIN)
        token = get_token.client_credentials(
            gaetkconfig.AUTH0_CLIENT_ID,
            gaetkconfig.AUTH0_CLIENT_SECRET,
            'https://{}/api/v2/'.format(gaetkconfig.AUTH0_DOMAIN))
        ret = token['access_token']
        memcache.set('get_auth0_access_token()', ret, token['expires_in'] / 2)
    return ret


def create_from_credential(credential):
    """Create an entry in the Auth0.DefaultDatabase for a credential."""
    if credential.external_uid:
        return
    if not credential.secret:
        return
    if not credential.email:
        return
    if not getattr(credential, 'name', None):
        credential.name = credential.text
    if not getattr(credential, 'name', None):
        credential.name = credential.org_designator

    auth0api = Auth0(gaetkconfig.AUTH0_DOMAIN, get_auth0_access_token())
    payload = {
        'connection': 'DefaultDatabase',
        'email': credential.email,
        'password': credential.secret,
        'user_id': credential.uid,
        'user_metadata': {
            'name': credential.name,
            'nickname': 'User for {}'.format(credential.org_designator)
        },
        'email_verified': True,
        'verify_email': False,
        'app_metadata': {
            'org_designator': credential.org_designator,
            'permissions': credential.permissions,
        }
    }
    newuser = None
    try:
        newuser = auth0api.users.create(payload)
    except Auth0Error as ex:
        if ex.status_code in [400, 409] and ex.message == 'The user already exists.':
            logger.info('The user already exists: %s %r %s', credential.uid, ex, payload)
            try:
                newuser = auth0api.users.get('auth0|{}'.format(credential.uid))
            except Exception:
                logger.warn('email collision? %s', credential.uid)
                # probably we have an E-Mail Address collision. This means
                # several Credentials with the same E-Mail Addresses.
                reply = auth0api.users.list(
                    connection='DefaultDatabase',
                    q='email:"{}"'.format(credential.email),
                    search_engine='v2')
                if reply['length'] > 0:
                    logger.info('reply=%s', reply)
                    other_uid = reply['users'][0]['user_id']
                    newuser = auth0api.users.get(other_uid)
                    # record the duplicate assignment in Auth0
                    if newuser.get('app_metadata'):
                        logger.debug('app_metadata=%r', newuser['app_metadata'])
                        altd = newuser['app_metadata'].get('org_designator_alt', [])
                        altd = list(set(altd + [credential.org_designator]))
                        altu = newuser['app_metadata'].get('uid_alt', [])
                        altu = list(set(altu + [credential.uid]))
                        logger.warn('updating duplicate Auth0 %s %s %s %s', altd, altu, other_uid, newuser)
                        auth0api.users.update(
                            other_uid,
                            {'app_metadata': {'org_designator_alt': altd, 'uid_alt': altu}})
        else:
            logger.error('%r newuser = %s %s', 'auth0|{}'.format(credential.uid), newuser, ex)
            raise
    except Exception:
        logger.warn('payload = %s', payload)
        raise

    if newuser is None or (newuser.get('error')):
        logger.warn('reply=%s payload = %s', newuser, payload)
        raise RuntimeError('Auth0 error: %s' % newuser)
    logger.info('new auth0 user %s', newuser)
    credential.meta['auth0_user_id'] = credential.external_uid = newuser['user_id']
    credential.put()
    return
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 198, 2, 532, 9, 12, 19617, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 37811, 4908, 316, 74, 17, 13, 31391, 13, 18439, 15, 13, 9078, 220, 20003, 329, 1762, 351, 6284, 15, 198, 198, 41972, ...
2.094975
2,169
# Definition for an interval. # class Interval: # def __init__(self, s=0, e=0): # self.start = s # self.end = e
[ 2, 30396, 329, 281, 16654, 13, 198, 2, 1398, 4225, 2100, 25, 198, 2, 220, 220, 220, 220, 825, 11593, 15003, 834, 7, 944, 11, 264, 28, 15, 11, 304, 28, 15, 2599, 198, 2, 220, 220, 220, 220, 220, 220, 220, 220, 2116, 13, 9688, ...
2.0625
64
#!/usr/bin/env python3

from sys import argv
from pathlib import Path
from re import compile as re_compile

PACKAGE_RE = re_compile("symbiflow-arch-defs-([a-zA-Z0-9_-]+)-([a-z0-9])")

with (Path(__file__).parent.parent.parent / 'packages.list').open('r') as rptr:
    for artifact in rptr.read().splitlines():
        m = PACKAGE_RE.match(artifact)
        assert m, f"Package name not recognized! {artifact}"
        package_name = m.group(1)
        if package_name == "install":
            package_name = "toolchain"
        with (Path("install") / f"symbiflow-{package_name}-latest").open("w") as wptr:
            wptr.write(
                'https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/'
                f'foss-fpga-tools/symbiflow-arch-defs/continuous/install/{argv[1]}/{artifact}'
            )
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 198, 198, 6738, 25064, 1330, 1822, 85, 198, 6738, 3108, 8019, 1330, 10644, 198, 6738, 302, 1330, 17632, 355, 302, 62, 5589, 576, 198, 198, 47, 8120, 11879, 62, 2200, 796, 302, 62, 558...
2.128788
396
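A quick illustration of what PACKAGE_RE in the snippet above captures, using a hypothetical artifact name:

m = PACKAGE_RE.match('symbiflow-arch-defs-install-x')
assert m.group(1) == 'install'  # the package name, which the script rewrites to "toolchain"
assert m.group(2) == 'x'        # the single trailing character matched by [a-z0-9]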
from django.conf.urls import url, include urlpatterns = [ url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^api/viet_ocr/', include('viet_ocr.api.urls', namespace="viet_ocr-api")), url(r'^api/post_process/', include('post_process.api.urls', namespace="post_process-api")), url(r'^api/pre_process/', include('pre_process.api.urls', namespace="pre_process-api")), url(r'^api/doc_ocr/', include('doc_ocr.api.urls', namespace="doc_ocr-api")), ]
[ 6738, 42625, 14208, 13, 10414, 13, 6371, 82, 1330, 19016, 11, 2291, 628, 198, 6371, 33279, 82, 796, 685, 198, 220, 220, 220, 19016, 7, 81, 6, 61, 15042, 12, 18439, 14, 3256, 2291, 10786, 2118, 62, 30604, 13, 6371, 82, 3256, 25745, ...
2.512563
199
import torch import torch.nn as nn import neuron.ops as ops from neuron.config import registry
[ 11748, 28034, 198, 11748, 28034, 13, 20471, 355, 299, 77, 198, 198, 11748, 43164, 13, 2840, 355, 39628, 198, 6738, 43164, 13, 11250, 1330, 20478, 628 ]
3.730769
26
from pyspark.sql import SparkSession from pyspark.sql.types import DateType from pyspark.sql.functions import col from pyspark.sql import types as t import sys from pyspark.sql.window import Window from pyspark.sql.functions import spark_partition_id from pyspark.sql import Row spark = SparkSession \ .builder \ .appName("Python Spark SQL basic example") \ .config("spark.some.config.option", "some-value") \ .getOrCreate() # https://dwbi.org/pages/75/methods-of-incremental-loading-in-data-warehouse customers = [ Row(1, "John", "Individual", "22-Mar-2012"), Row(2, "Ryan", "Individual", "22-Mar-2012"), Row(3, "Bakers", "Corporate", "23-Mar-2012"), ] sales = [ Row(1, 1, "White sheet (A4)", 100, 4.00, "22-Mar-2012"), Row(2, 1, "James Clip (Box)", 1, 2.50, "22-Mar-2012"), Row(3, 2, "Whiteboard Maker", 1, 2.00, "22-Mar-2012"), Row(4, 3, "Letter Envelop", 200, 75.00, "23-Mar-2012"), Row(5, 1, "Paper Clip", 12, 4.00, "23-Mar-2012"), ] batch = [ Row(1, "22-Mar-2012", "Success"), ] customersDF = spark.createDataFrame(customers, schema=["customer_id", "customer_name", "type", "entry_date"]) salesDF = spark.createDataFrame(sales, schema=["id", "customer_id", "product_description", "qty", "revenue", "sales_date"]) batchDF = spark.createDataFrame(batch, schema=["batch_id", "loaded_untill", "status"]) customersDF.createOrReplaceTempView("customers") salesDF.createOrReplaceTempView("sales") batchDF.createOrReplaceTempView("batch") _23_march_customers = spark.sql(""" select t.* from customers t where t.entry_date > (select nvl( max(b.loaded_untill), to_date("01-01-1900", "MM-DD-YYYY") ) from batch b where b.status = "Success") """) _23_march_sales = spark.sql(""" select t.* from sales t where t.sales_date > (select nvl( max(b.loaded_untill), to_date("01-01-1900", "MM-DD-YYYY") ) from batch b where b.status = "Success") """) print("customers table") _23_march_customers.show() print("sales table") _23_march_sales.show() # Incremental Data Load Patterns # https://www.youtube.com/watch?v=INuucWEg3sY # 1) Stage / left Outer Join (moving to another server, make a staging and left join, check null on right table, you know this data is new) # 2) Control Table # Load | Cust | Table | Date # Id | Table |Id |Date # 3) Change Data Capture # Source based incremental loading # https://support.timextender.com/hc/en-us/articles/115001301963-How-incremental-loading-works # The source table have a reliable natural or surrogate key and reliable incremental field such as "ModifiedDateTime" or "TimeStamp"
[ 6738, 279, 893, 20928, 13, 25410, 1330, 17732, 36044, 198, 6738, 279, 893, 20928, 13, 25410, 13, 19199, 1330, 7536, 6030, 198, 6738, 279, 893, 20928, 13, 25410, 13, 12543, 2733, 1330, 951, 198, 6738, 279, 893, 20928, 13, 25410, 1330, ...
2.138003
1,442
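The first pattern listed in the snippet's closing comments (stage, then outer join, then keep unmatched rows) maps directly onto a left anti join in PySpark. A sketch reusing the `spark` session above, with illustrative stand-in DataFrames for the staged extract and the already-loaded target:

# Pattern 1 sketch: rows in the staged extract with no match in the target
# table are the new rows to load incrementally.
staged = spark.createDataFrame([(1, "John"), (4, "New Co")], ["customer_id", "customer_name"])
target = spark.createDataFrame([(1, "John")], ["customer_id", "customer_name"])
new_rows = staged.join(target.select("customer_id"), on="customer_id", how="left_anti")
new_rows.show()  # only customer_id 4 remains; it is the new row to load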
from django.contrib import admin from .models import Asset # Register your models here. admin.site.register(Asset)
[ 6738, 42625, 14208, 13, 3642, 822, 1330, 13169, 198, 198, 6738, 764, 27530, 1330, 31433, 198, 198, 2, 17296, 534, 4981, 994, 13, 198, 28482, 13, 15654, 13, 30238, 7, 45869, 8 ]
3.625
32
from .mapper import ApiResponse, ApiResponseInterface from .mapper.types import Timestamp, AnyType __all__ = ['SendConfirmEmailResponse']
[ 6738, 764, 76, 11463, 1330, 5949, 72, 31077, 11, 5949, 72, 31077, 39317, 198, 6738, 764, 76, 11463, 13, 19199, 1330, 5045, 27823, 11, 4377, 6030, 198, 198, 834, 439, 834, 796, 37250, 25206, 18546, 2533, 15333, 31077, 20520, 628, 198 ]
3.439024
41
from django.shortcuts import render, redirect from django.contrib import messages from .models import Contact from django.contrib.auth.decorators import login_required
[ 6738, 42625, 14208, 13, 19509, 23779, 1330, 8543, 11, 18941, 198, 6738, 42625, 14208, 13, 3642, 822, 1330, 6218, 198, 6738, 764, 27530, 1330, 14039, 198, 6738, 42625, 14208, 13, 3642, 822, 13, 18439, 13, 12501, 273, 2024, 1330, 17594, 6...
3.333333
54
# # (C) Copyright 2003,2004 Hewlett-Packard Development Company, L.P. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. # # Author: Martin Pool <mbp@hp.com> # """ tupletree - Convert XML DOM objects to and from tuple trees. DOM is the standard in-memory representation of XML documents, but it is very cumbersome for some types of processing where XML encodes object structures rather than text documents. Direct mapping to Python classes may not be a good match either. tupletrees may be created from an in-memory DOM using dom_to_tupletree(), or from a string using xml_to_tupletree(). Since the Python XML libraries deal mostly with Unicode strings they are also returned here. If plain Strings are passed in they will be converted by xmldom. Each node of the tuple tree is a Python 4-tuple, corresponding to an XML Element (i.e. <tag>): (NAME, ATTRS, CONTENTS, None) The NAME is the name of the element. The ATTRS are a name-value hash of element attributes. The CONTENTS is a list of child elements. The fourth element is reserved. """ import xml.dom.minidom from pywbemReq.cim_types import is_text __all__ = ['dom_to_tupletree', 'xml_to_tupletree'] def dom_to_tupletree(node): """Convert a DOM object to a pyRXP-style tuple tree. Each element is a 4-tuple of (NAME, ATTRS, CONTENTS, None). Very nice for processing complex nested trees. """ if node.nodeType == node.DOCUMENT_NODE: # boring; pop down one level return dom_to_tupletree(node.firstChild) assert node.nodeType == node.ELEMENT_NODE name = node.nodeName attrs = {} contents = [] for child in node.childNodes: if child.nodeType == child.ELEMENT_NODE: contents.append(dom_to_tupletree(child)) elif child.nodeType == child.TEXT_NODE: assert is_text(child.nodeValue), \ "text node %s is not a string" % repr(child) contents.append(child.nodeValue) elif child.nodeType == child.CDATA_SECTION_NODE: contents.append(child.nodeValue) else: raise RuntimeError("can't handle %s" % child) for i in range(node.attributes.length): attr_node = node.attributes.item(i) attrs[attr_node.nodeName] = attr_node.nodeValue # XXX: Cannot yet handle comments, cdata, processing instructions and # other XML batshit. # it's so easy in retrospect! return name, attrs, contents, None def xml_to_tupletree(xml_string): """Parse XML straight into tupletree.""" dom_xml = xml.dom.minidom.parseString(xml_string) return dom_to_tupletree(dom_xml)
[ 2, 198, 2, 357, 34, 8, 15069, 5816, 11, 15724, 30446, 15503, 12, 11869, 446, 7712, 5834, 11, 406, 13, 47, 13, 198, 2, 198, 2, 770, 5888, 318, 1479, 3788, 26, 345, 460, 17678, 4163, 340, 290, 14, 273, 198, 2, 13096, 340, 739, 2...
2.916444
1,125
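A worked example of the 4-tuple shape documented in the module docstring above:

tt = xml_to_tupletree('<CLASSNAME NAME="CIM_Foo">text</CLASSNAME>')
assert tt == ('CLASSNAME', {'NAME': 'CIM_Foo'}, ['text'], None)
# (NAME, ATTRS, CONTENTS, None): element name, attribute dict, child list, reserved slot.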
#!/usr/bin/env python3
import unicodedata
[ 2, 48443, 14629, 14, 8800, 14, 24330, 21015, 18, 9, 198, 11748, 28000, 9043, 1045 ]
2.8
15
# Say you have an array for which the ith element is the price of a given stock on day i. # Design an algorithm to find the maximum profit. You may complete at most two transactions. # Note: You may not engage in multiple transactions at the same time # (i.e., you must sell the stock before you buy again). # Example 1: # Input: [3,3,5,0,0,3,1,4] # Output: 6 # Explanation: Buy on day 4 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3. # Then buy on day 7 (price = 1) and sell on day 8 (price = 4), profit = 4-1 = 3. # Example 2: # Input: [1,2,3,4,5] # Output: 4 # Explanation: Buy on day 1 (price = 1) and sell on day 5 (price = 5), profit = 5-1 = 4. # Note that you cannot buy on day 1, buy on day 2 and sell them later, as you are # engaging multiple transactions at the same time. You must sell before buying again. # Example 3: # Input: [7,6,4,3,1] # Output: 0 # Explanation: In this case, no transaction is done, i.e. max profit = 0.
[ 2, 13816, 345, 423, 281, 7177, 329, 543, 262, 340, 71, 5002, 318, 262, 2756, 286, 257, 1813, 4283, 319, 1110, 1312, 13, 198, 2, 8495, 281, 11862, 284, 1064, 262, 5415, 7630, 13, 921, 743, 1844, 379, 749, 734, 8945, 13, 198, 2, 5...
2.797753
356
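One standard O(n) time, O(1) space solution to the problem stated above (not necessarily the intended one): track the best cash position after each of the four possible actions.

def max_profit_two_transactions(prices):
    # Best cash after: first buy, first sell, second buy, second sell.
    buy1 = buy2 = float('-inf')
    sell1 = sell2 = 0
    for p in prices:
        buy1 = max(buy1, -p)           # buy the first share as cheaply as possible
        sell1 = max(sell1, buy1 + p)   # sell it at the best margin so far
        buy2 = max(buy2, sell1 - p)    # reinvest the first profit in a second share
        sell2 = max(sell2, buy2 + p)   # sell the second share
    return sell2

assert max_profit_two_transactions([3, 3, 5, 0, 0, 3, 1, 4]) == 6
assert max_profit_two_transactions([1, 2, 3, 4, 5]) == 4
assert max_profit_two_transactions([7, 6, 4, 3, 1]) == 0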
import robotconn.rpc.baxterrobot.baxter_server_pb2 as bxtsp import robotconn.rpc.baxterrobot.baxter_server_pb2_grpc as bxtspgc import grpc import pickle import numpy as np if __name__=="__main__": import time bc = BaxterClient(host = "10.1.0.24:18300") # tic = time.time() # imgx = hcc.getimgbytes() # toc = time.time() # td = toc-tic # tic = time.time() # imgxs = hcc.getimgstr() # toc = time.time() # td2 = toc-tic # print(td, td2) angle_rgt = bc.bxt_get_jnts("rgt") # print angle_rgt # print(angle_rgt[-1]) # # # angle_rgt[-1] = angle_rgt[-1] - 50.0 # # bc.bxt_movejnts(angle_rgt) print(bc.bxt_get_jnts(armname="rgt")) print(bc.bxt_get_jnts(armname="lft")) import cv2 as cv cv.imshow("w",bc.bxt_get_image("head_camera")) cv.waitKey(0) # print bc.bxt_get_jnts("rgt") # print(eval("a="+bc.bxt_get_jnts()))
[ 11748, 9379, 37043, 13, 81, 14751, 13, 65, 40864, 305, 13645, 13, 65, 40864, 62, 15388, 62, 40842, 17, 355, 275, 742, 2777, 198, 11748, 9379, 37043, 13, 81, 14751, 13, 65, 40864, 305, 13645, 13, 65, 40864, 62, 15388, 62, 40842, 17, ...
1.955032
467
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Copies files from CIPD to a specified directory.

By default, Pigweed installs packages from a manifest file to a CIPD
subdirectory as part of environment setup. This script will copy files from
this directory into a specified output directory.

Here's an example of how to use this script:

Let's say you have a package with a static library:

CIPD path: `pigweed/third_party/libsomething`
Files:
    ./libsomething/include/something.h
    ./libsomething/libsomething.a

And this package was referenced in my_project_packages.json, which was provided
as a --cipd-package-file in your bootstrap script.

To copy the static library to $PW_PROJECT_ROOT/static_libraries, you'd have an
invocation something like this:

copy_from_cipd --package-name=pigweed/third_party/libsomething \
               --manifest=$PW_PROJECT_ROOT/tools/my_project_packages.json \
               --file=libsomething/libsomething.a \
               --out=$PW_PROJECT_ROOT/static_libraries
"""

import argparse
import json
import logging
import os
import shutil
import subprocess
import sys
from pathlib import Path

import pw_env_setup.cipd_setup.update

logger = logging.getLogger(__name__)

if __name__ == '__main__':
    logging.basicConfig()
    main()
[ 2, 15069, 33448, 383, 23097, 39054, 46665, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 345, 743, 407, 198, 2, 779, 428, 2393, 2845, 287, 11846, 351, 262, 13789, 13, 921, 743, ...
3.246429
560
""" Copyright 2018 Globo.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from flask_restplus import reqparse search_query_parser = reqparse.RequestParser() search_query_parser.add_argument( 'page', type=int, required=False, default=1, help='Page number' ) search_query_parser.add_argument( 'per_page', type=int, required=False, default=10, help='Items number per page' ) search_query_parser.add_argument( 'query', type=str, required=False, default='[[{"field":"name","operator":"LIKE","value":""}]]', help='Query' ) execute_query_parser = reqparse.RequestParser() execute_query_parser.add_argument( 'variable', type=str, required=False, help='Variable' )
[ 37811, 198, 220, 220, 15069, 2864, 2671, 20391, 13, 785, 628, 220, 220, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 220, 220, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 262, 1...
3.007229
415
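A usage sketch for the parsers defined above: inside a flask-restplus resource, `parse_args()` turns the query string into a dict of typed values. The resource class and its contents are hypothetical; only the parser calls come from the snippet.

from flask_restplus import Resource

class SearchResource(Resource):
    def get(self):
        args = search_query_parser.parse_args()
        page, per_page, query = args['page'], args['per_page'], args['query']
        ...  # run the search with the parsed paging and query values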
# -*- encoding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from os import fork import sys import click from loguru import logger from auto_pull_request.pull_request import Auto from auto_pull_request import __version__ # Creates a GitHub pull-request.
[ 198, 2, 532, 9, 12, 21004, 25, 3384, 69, 12, 23, 532, 9, 12, 198, 2, 198, 2, 49962, 739, 262, 24843, 13789, 11, 10628, 362, 13, 15, 357, 1169, 366, 34156, 15341, 198, 2, 345, 743, 407, 779, 428, 2393, 2845, 287, 11846, 351, 26...
3.64455
211
""" fit1d package is designed to provide an organized toolbox for different types of 1D fits that can be performed. It is easy to add new fits and other functionalities """ from abc import ABC, abstractmethod import numpy as np from typing import List,Tuple from fit1d.common.model import Model, ModelMock from fit1d.common.outlier import OutLier from fit1d.common.fit_data import FitData class Fit1DMock(Fit1D): """ Mock class. Used only for tests """
[ 37811, 198, 11147, 16, 67, 5301, 318, 3562, 284, 2148, 281, 8389, 2891, 3524, 329, 1180, 3858, 286, 198, 16, 35, 11414, 326, 460, 307, 6157, 13, 198, 1026, 318, 2562, 284, 751, 649, 11414, 290, 584, 10345, 871, 198, 37811, 198, 198,...
3.422222
135