index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
65,434 | kamils224/STXNext_training_program | refs/heads/main | /stx_training_program/celery.py | from __future__ import absolute_import
import os

from celery import Celery

# DJANGO_SETTINGS_MODULE must be set before the Celery app loads Django settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stx_training_program.settings")

# Get the base REDIS URL, default to redis' default local instance.
BASE_REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379")

app = Celery("stx_training_program")

# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings", namespace="CELERY")

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

# Broker comes from the environment, overriding any CELERY_BROKER_URL setting.
app.conf.broker_url = BASE_REDIS_URL

# this allows you to schedule items in the Django admin.
app.conf.beat_scheduler = "django_celery_beat.schedulers.DatabaseScheduler"
| {"/api_projects/models.py": ["/stx_training_program/celery.py", "/api_projects/tasks.py"], "/api_projects/serializers.py": ["/api_projects/models.py"], "/api_projects/urls.py": ["/api_projects/views.py"], "/api_accounts/views.py": ["/api_accounts/serializers.py", "/api_accounts/utils.py"], "/api_projects/views.py": ["/api_projects/models.py", "/api_projects/serializers.py", "/api_projects/permissions.py"], "/api_projects/admin.py": ["/api_projects/models.py"], "/api_accounts/urls.py": ["/api_accounts/views.py"], "/api_projects/tests.py": ["/api_projects/models.py"], "/api_projects/permissions.py": ["/api_projects/models.py"], "/api_accounts/serializers.py": ["/api_accounts/utils.py"], "/api_projects/tasks.py": ["/api_projects/models.py"]} |
65,435 | kamils224/STXNext_training_program | refs/heads/main | /api_projects/tests.py | from typing import Dict
from datetime import datetime
from django.contrib.auth import get_user_model
from rest_framework.test import APITestCase
from rest_framework.reverse import reverse_lazy, reverse
from rest_framework import status
from api_projects.models import Project, Issue
User = get_user_model()
class ProjectsTest(APITestCase):
    """API tests for project list/detail/create/update/delete endpoints.

    Uses JWT auth: each test logs a user in via the token endpoint and
    attaches the access token to the test client.
    """

    OBTAIN_TOKEN_URL = reverse_lazy("api_accounts:token_obtain_pair")
    PROJECT_LIST = "api_projects:project-list"
    PROJECT_DETAILS = "api_projects:project-detail"

    def _init_db(self) -> None:
        """Create owners, members, one outsider, three projects and one issue."""
        # NOTE: It's better option to create some test fixtures in future
        self.owners = [
            {"email": "project_owner1@example.com", "password": "password000"},
            {"email": "project_owner2@example.com", "password": "password999"},
        ]
        self.no_project_users = [
            {"email": "no_project_user@example.com", "password": "passwordxxx"},
        ]
        self.members = [
            {"email": "member_owner1@example.com", "password": "password111"},
            {"email": "member_owner2@example.com", "password": "password222"},
            {"email": "member_owner3@example.com", "password": "password333"},
        ]
        self.users = [
            User.objects.create_user(**user)
            for user in self.owners + self.no_project_users
        ]
        members = [User.objects.create_user(**member) for member in self.members]
        # Accounts must be active for the token endpoint to issue JWTs.
        User.objects.all().update(is_active=True)
        project_1 = Project.objects.create(
            name="Project1 with members", owner=self.users[0]
        )
        project_1.members.add(*members)
        Project.objects.create(name="Project1 without members", owner=self.users[0])
        Project.objects.create(name="Project2 empty", owner=self.users[1])
        example_date = datetime(2030, 10, 10, hour=12, minute=30)
        Issue.objects.create(
            title="Issue 1",
            description="Desc...",
            owner=members[0],
            project=project_1,
            due_date=example_date,
        )

    def setUp(self):
        self._init_db()

    def _login_user(self, user: Dict[str, str]) -> None:
        """Obtain a JWT for *user* and attach it to the test client."""
        response = self.client.post(self.OBTAIN_TOKEN_URL, user, format="json")
        access_token = response.data["access"]
        self.client.credentials(HTTP_AUTHORIZATION=f"Bearer {access_token}")

    def test_get_projects(self):
        """Listing requires auth; users only see projects they own or belong to."""
        url = reverse(self.PROJECT_LIST)
        # anonymous user
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        # logged in as owner
        user = self.owners[0]
        self._login_user(user)
        expected_count = Project.objects.filter(owner__email=user["email"]).count()
        response = self.client.get(url)
        self.assertEqual(len(response.data), expected_count)
        # logged in as member
        user = self.members[0]
        self._login_user(user)
        expected_count = Project.objects.filter(members__email=user["email"]).count()
        response = self.client.get(url)
        self.assertEqual(len(response.data), expected_count)
        # logged in as user without projects
        self._login_user(self.no_project_users[0])
        expected_count = 0
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), expected_count)

    def test_get_project_details(self):
        """The owner sees the detail view (with issues); another owner gets 404."""
        user_1 = self.owners[0]
        user_2 = self.owners[1]
        project = Project.objects.filter(owner__email=user_1["email"]).first()
        projects_init_count = Project.objects.count()  # NOTE(review): unused
        url = reverse(self.PROJECT_DETAILS, kwargs={"pk": project.pk})
        self._login_user(user_1)
        response_ok = self.client.get(url)
        self._login_user(user_2)
        response_bad = self.client.get(url)
        self.assertEqual(response_ok.status_code, status.HTTP_200_OK)
        self.assertEqual(response_bad.status_code, status.HTTP_404_NOT_FOUND)
        issues_count = Issue.objects.filter(project=project).count()
        response_issues = response_ok.data["issues"]
        self.assertEqual(len(response_issues), issues_count)

    def test_create_project(self):
        """Anonymous creation is rejected; an authenticated user gains one project."""
        url = reverse(self.PROJECT_LIST)
        new_project = {"name": "New project"}
        response_bad = self.client.post(url, new_project)
        user = self.no_project_users[0]
        self._login_user(user)
        expected_count = Project.objects.filter(owner__email=user["email"]).count() + 1
        response_ok = self.client.post(url, new_project)
        current_projects_count = Project.objects.filter(
            owner__email=user["email"]
        ).count()
        self.assertEqual(response_bad.status_code, status.HTTP_401_UNAUTHORIZED)
        self.assertEqual(response_ok.status_code, status.HTTP_201_CREATED)
        self.assertEqual(current_projects_count, expected_count)

    def test_update_project(self):
        """Only the owner may rename a project; another owner gets 404."""
        user_1 = self.owners[0]
        user_2 = self.owners[1]
        project = Project.objects.filter(owner__email=user_1["email"]).first()
        projects_init_count = Project.objects.count()  # NOTE(review): unused
        url = reverse(self.PROJECT_DETAILS, kwargs={"pk": project.pk})
        new_name = "new name"
        self._login_user(user_1)
        response_ok = self.client.put(url, {"name": new_name})
        self._login_user(user_2)
        response_bad = self.client.put(url, {"name": new_name})
        self.assertEqual(response_ok.status_code, status.HTTP_200_OK)
        self.assertEqual(response_ok.data["name"], new_name)
        self.assertEqual(response_bad.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_project(self):
        """Anonymous delete changes nothing; the owner's delete removes one project."""
        user = self.owners[0]
        project = Project.objects.filter(owner__email=user["email"]).first()
        projects_init_count = Project.objects.count()
        url = reverse(self.PROJECT_DETAILS, kwargs={"pk": project.pk})
        response_bad = self.client.delete(url)
        projects_count_non_auth_delete = Project.objects.count()
        self._login_user(user)
        response_ok = self.client.delete(url)
        projects_count_delete = Project.objects.count()
        self.assertEqual(projects_count_non_auth_delete, projects_init_count)
        self.assertEqual(response_bad.status_code, status.HTTP_401_UNAUTHORIZED)
        self.assertEqual(projects_count_delete, projects_init_count - 1)
        self.assertEqual(response_ok.status_code, status.HTTP_204_NO_CONTENT)
| {"/api_projects/models.py": ["/stx_training_program/celery.py", "/api_projects/tasks.py"], "/api_projects/serializers.py": ["/api_projects/models.py"], "/api_projects/urls.py": ["/api_projects/views.py"], "/api_accounts/views.py": ["/api_accounts/serializers.py", "/api_accounts/utils.py"], "/api_projects/views.py": ["/api_projects/models.py", "/api_projects/serializers.py", "/api_projects/permissions.py"], "/api_projects/admin.py": ["/api_projects/models.py"], "/api_accounts/urls.py": ["/api_accounts/views.py"], "/api_projects/tests.py": ["/api_projects/models.py"], "/api_projects/permissions.py": ["/api_projects/models.py"], "/api_accounts/serializers.py": ["/api_accounts/utils.py"], "/api_projects/tasks.py": ["/api_projects/models.py"]} |
65,436 | kamils224/STXNext_training_program | refs/heads/main | /api_projects/permissions.py | from rest_framework.permissions import BasePermission, SAFE_METHODS
from api_projects.models import Project, Issue, IssueAttachment
class IsOwner(BasePermission):
    """
    Object-level permission to only allow owners of an object to edit it.
    """

    def has_object_permission(self, request, view, obj):
        # Instance must have an attribute named `owner`.
        user = request.user
        if isinstance(obj, Project):
            return obj.owner == user
        if isinstance(obj, Issue):
            # The issue's author or the owning project's owner may edit.
            return obj.owner == user or obj.project.owner == user
        if isinstance(obj, IssueAttachment):
            return obj.issue.owner == user or obj.issue.project.owner == user
        # Previously fell through and returned None implicitly; deny any
        # unknown object type explicitly (same falsy outcome, clearer contract).
        return False
class MemberReadOnly(BasePermission):
    """
    Object-level permission to only allow members of an object to view it.
    """

    def has_object_permission(self, request, view, obj):
        # Instance must have an attribute named `members`.
        if request.method not in SAFE_METHODS:
            return False
        return request.user in obj.members.all()
class IsProjectMember(BasePermission):
    """
    Checks if current user is member of the project.
    """

    def has_object_permission(self, request, view, obj):
        # Instance must have an attribute named `project`.
        if isinstance(obj, Issue):
            return obj.project in request.user.projects.all()
        if isinstance(obj, Project):
            return obj in request.user.projects.all()
        if isinstance(obj, IssueAttachment):
            return obj.issue.project in request.user.projects.all()
        # Previously returned None implicitly for unhandled types; deny explicitly.
        return False
| {"/api_projects/models.py": ["/stx_training_program/celery.py", "/api_projects/tasks.py"], "/api_projects/serializers.py": ["/api_projects/models.py"], "/api_projects/urls.py": ["/api_projects/views.py"], "/api_accounts/views.py": ["/api_accounts/serializers.py", "/api_accounts/utils.py"], "/api_projects/views.py": ["/api_projects/models.py", "/api_projects/serializers.py", "/api_projects/permissions.py"], "/api_projects/admin.py": ["/api_projects/models.py"], "/api_accounts/urls.py": ["/api_accounts/views.py"], "/api_projects/tests.py": ["/api_projects/models.py"], "/api_projects/permissions.py": ["/api_projects/models.py"], "/api_accounts/serializers.py": ["/api_accounts/utils.py"], "/api_projects/tasks.py": ["/api_projects/models.py"]} |
65,437 | kamils224/STXNext_training_program | refs/heads/main | /api_accounts/serializers.py | from django.core.validators import MinLengthValidator
from django.contrib.auth import get_user_model
from django.utils.http import urlsafe_base64_decode
from django.utils.encoding import force_bytes, force_text
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import serializers
from api_accounts.models import User
from api_accounts.utils import VerificationTokenGenerator
class UserRegistrationSerializer(serializers.ModelSerializer):
    """Serializer for sign-up: accepts email plus a write-only password (min 8 chars)."""

    password = serializers.CharField(validators=[MinLengthValidator(8)])

    class Meta:
        model = User
        fields = ["email", "password"]
        extra_kwargs = {"password": {"required": True, "write_only": True}}

    def create(self, validated_data):
        """Create and return a new user from the validated payload."""
        return User.objects.create_user(
            validated_data["email"], validated_data["password"]
        )
class UserSerializer(serializers.ModelSerializer):
    """Read-oriented serializer exposing only a user's id and email."""

    class Meta:
        model = User
        fields = ["id", "email"]
class ActivateAccountSerializer(serializers.Serializer):
    """Validates the uid/token pair from an account-activation link."""

    uid = serializers.CharField()
    token = serializers.CharField()

    def validate(self, data) -> User:
        """
        Overloaded validation checks if uid and token are correct and returns corresponding User object.
        """
        # NOTE(review): returning the User (not the cleaned-data dict) deviates
        # from the usual Serializer.validate() contract; callers must expect
        # validated_data to be the User instance — confirm call sites.
        uid = data["uid"]
        token = data["token"]
        # Shadows the module-level User import; resolves the active user model.
        User = get_user_model()
        try:
            # NOTE(review): force_text was removed in Django 4 (use force_str
            # when upgrading) — TODO confirm the target Django version.
            uid = force_text(urlsafe_base64_decode(uid))
            user = User.objects.get(pk=uid)
        except (ObjectDoesNotExist, ValueError):
            raise serializers.ValidationError("Given user does not exist")
        activation_token = VerificationTokenGenerator()
        if not activation_token.check_token(user, token):
            raise serializers.ValidationError("Given token is wrong")
        return user
| {"/api_projects/models.py": ["/stx_training_program/celery.py", "/api_projects/tasks.py"], "/api_projects/serializers.py": ["/api_projects/models.py"], "/api_projects/urls.py": ["/api_projects/views.py"], "/api_accounts/views.py": ["/api_accounts/serializers.py", "/api_accounts/utils.py"], "/api_projects/views.py": ["/api_projects/models.py", "/api_projects/serializers.py", "/api_projects/permissions.py"], "/api_projects/admin.py": ["/api_projects/models.py"], "/api_accounts/urls.py": ["/api_accounts/views.py"], "/api_projects/tests.py": ["/api_projects/models.py"], "/api_projects/permissions.py": ["/api_projects/models.py"], "/api_accounts/serializers.py": ["/api_accounts/utils.py"], "/api_projects/tasks.py": ["/api_projects/models.py"]} |
65,438 | kamils224/STXNext_training_program | refs/heads/main | /api_projects/tasks.py | from celery import shared_task
from django.apps import apps
from django.core.mail import send_mail
@shared_task
def send_issue_notification(email: str, subject: str, message: str) -> None:
    """Celery task: e-mail *message* under *subject* to a single recipient."""
    recipients = [email]
    # from_email=None falls back to the project's DEFAULT_FROM_EMAIL setting.
    send_mail(subject, message, None, recipient_list=recipients, fail_silently=False)
@shared_task
def notify_issue_deadline(pk: int, email: str, subject: str, message: str) -> None:
    """Notify *email* when issue *pk* is still open past its deadline.

    NOTE(review): the `subject` and `message` parameters are currently
    ignored — hard-coded text is sent instead. Confirm whether callers
    rely on that before changing it.
    """
    # to prevent circular imports
    from api_projects.models import Issue
    # NOTE(review): exclude() with two kwargs only skips issues that are BOTH
    # unassigned AND done; if the intent is "skip unassigned OR done", this
    # should be two chained .exclude() calls — verify against requirements.
    if issue := Issue.objects.filter(pk=pk).exclude(assigne=None, status=Issue.Status.DONE).first():
        # Called synchronously inside this task (not .delay()).
        send_issue_notification(
            email,
            "Issue deadline",
            f"The {issue.title} is not finished after deadline!",
        )
        # Presumably removes the scheduled task record tied to this issue — confirm model.
        issue.issue_task.delete()
| {"/api_projects/models.py": ["/stx_training_program/celery.py", "/api_projects/tasks.py"], "/api_projects/serializers.py": ["/api_projects/models.py"], "/api_projects/urls.py": ["/api_projects/views.py"], "/api_accounts/views.py": ["/api_accounts/serializers.py", "/api_accounts/utils.py"], "/api_projects/views.py": ["/api_projects/models.py", "/api_projects/serializers.py", "/api_projects/permissions.py"], "/api_projects/admin.py": ["/api_projects/models.py"], "/api_accounts/urls.py": ["/api_accounts/views.py"], "/api_projects/tests.py": ["/api_projects/models.py"], "/api_projects/permissions.py": ["/api_projects/models.py"], "/api_accounts/serializers.py": ["/api_accounts/utils.py"], "/api_projects/tasks.py": ["/api_projects/models.py"]} |
65,439 | shubham860/React-django | refs/heads/master | /project/api/admin.py | from django.contrib import admin
from .models import employee
from django.db import models
class EmployeeAdmin(admin.ModelAdmin):
    """Admin configuration for the employee model."""

    fieldsets = [
        ("Content", {'fields': ["firstname", "lastname", "emp_id"]}),
    ]


# BUG FIX: the admin class was previously also named `employee`, shadowing the
# model imported above — so admin.site.register(employee) registered the
# ModelAdmin class as a model. Register the model with its admin class instead.
admin.site.register(employee, EmployeeAdmin)
| {"/project/api/admin.py": ["/project/api/models.py"], "/project/api/views.py": ["/project/api/models.py"]} |
65,440 | shubham860/React-django | refs/heads/master | /project/api/models.py | from django.db import models
class employee(models.Model):
    """A single employee record (note: class name breaks PascalCase convention,
    but is kept because admin.py and views.py import it by this name)."""

    firstname = models.CharField(max_length=200)
    lastname = models.CharField(max_length=200)
    emp_id = models.IntegerField()

    def __str__(self):
        # Admin/list displays show the first name only.
        return self.firstname
| {"/project/api/admin.py": ["/project/api/models.py"], "/project/api/views.py": ["/project/api/models.py"]} |
65,441 | shubham860/React-django | refs/heads/master | /project/api/views.py | from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from .models import employee
from .serializer import employeeSerializer
class employeeList(APIView):
    """List all employees (GET) or create a new one (POST)."""

    def get(self, request):
        """Return every employee serialized as JSON."""
        employees = employee.objects.all()
        serializer = employeeSerializer(employees, many=True)
        return Response(serializer.data)

    def post(self, request):
        """Create an employee from the request payload.

        BUG FIX: the original handler was an unimplemented stub (`pass`)
        returning None, which made DRF raise a server error on POST.
        """
        serializer = employeeSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| {"/project/api/admin.py": ["/project/api/models.py"], "/project/api/views.py": ["/project/api/models.py"]} |
65,442 | sannahan/ravintolasovellus | refs/heads/main | /routes.py | from app import app
from db import db
import users
import restaurants
import reviews
import tags
from flask import redirect, render_template, request, session
@app.route("/")
def index():
tag_list = tags.get_tags()
return render_template("index.html", tags=tag_list)
@app.route("/login", methods=["POST"])
def login():
username = request.form["username"]
password = request.form["password"]
if users.login(username, password):
return redirect("/")
else:
tag_list = tags.get_tags()
return render_template("index.html", errormessage="Väärä tunnus tai salasana.", tags=tag_list)
@app.route("/logout")
def logout():
users.logout()
return redirect("/")
@app.route("/signup", methods=["GET","POST"])
def signup():
if request.method == "GET":
return render_template("signup.html")
if request.method == "POST":
errormessage = ""
username = request.form["username"]
if len(username) < 1:
errormessage = "Rekisteröinti ei onnistunut. Käyttäjänimi ei saa olla tyhjä"
password = request.form.getlist("password")
if len(password[0]) < 1:
errormessage = "Rekisteröinti ei onnistunut. Salasana ei saa olla tyhjä"
if password[0] != password[1]:
errormessage = "Rekisteröinti ei onnistunut. Salasanat eivät täsmää."
role = request.form["role"]
if role != "1" and role != "2":
errormessage = "Rekisteröinti ei onnistunut. Tuntematon rooli."
if len(errormessage) > 0:
return render_template("signup.html", errormessage=errormessage)
if users.signup(username, password[0], role):
return redirect("/")
else:
return render_template("signup.html", errormessage="Rekisteröinti ei onnistunut. Valitse toinen käyttäjätunnus.")
@app.route("/map")
def map():
info = restaurants.get_info_for_map()
return render_template("map.html", info=info)
@app.route("/addrestaurant", methods=["GET","POST"])
def add_restaurant():
days_of_the_week = ["Maanantai", "Tiistai", "Keskiviikko", "Torstai", "Perjantai", "Lauantai", "Sunnuntai"]
if not users.is_admin():
return render_template("forbidden.html", message="Sinulla ei ole oikeutta nähdä tätä sivua")
if request.method == "GET":
return render_template("add_restaurant.html", days=days_of_the_week)
if request.method == "POST":
errormessage = ""
name = request.form["name"]
if len(name) < 1:
errormessage = "Lisääminen ei onnistunut. Ravintolan nimi ei saa olla tyhjä"
description = request.form["description"]
if len(description) < 1:
errormessage = "Lisääminen ei onnistunut. Ravintolan kuvaus ei saa olla tyhjä"
if len(name) > 500 or len(description) > 500:
errormessage = "Lisääminen ei onnistunut. Nimen ja kuvauksen tulee olla alle 500 merkkiä"
address = request.form["address"]
if len(address) < 1:
errormessage = "Lisääminen ei onnistunut. Ravintolan osoite ei saa olla tyhjä"
if len(errormessage) > 0:
return render_template("add_restaurant.html", errormessage=errormessage, days=days_of_the_week)
opening_times = {}
for day in days_of_the_week:
key = days_of_the_week.index(day)
status = request.form["closed_" + day]
if status == "closed":
opening_times[key] = ("kiinni", "kiinni")
elif status == "open":
opening = request.form["opening_" + day]
closing = request.form["closing_" + day]
opening_times[key] = (opening, closing)
check_csfr(request.form["csrf_token"], users.get_csrf())
if restaurants.add_restaurant(name, description, address, opening_times):
return redirect("/")
else:
return render_template("add_restaurant.html", errormessage="Lisääminen ei onnistu. Onhan ravintolalla oikea osoite?", days=days_of_the_week)
@app.route("/removerestaurant", methods=["GET","POST"])
def remove_restaurant():
if not users.is_admin():
return render_template("forbidden.html", message="Sinulla ei ole oikeutta nähdä tätä sivua")
if request.method == "GET":
restaurantnames = restaurants.get_list()
return render_template("remove_restaurant.html", restaurants=restaurantnames)
if request.method == "POST":
restaurant_id = request.form["restaurant_to_be_removed"]
check_csfr(request.form["csrf_token"], users.get_csrf())
restaurants.remove_restaurant(restaurant_id)
return redirect("/")
@app.route("/restaurant/<int:id>", methods=["GET","POST"])
def restaurant(id):
info = restaurants.get_info(id)
reviews_list = reviews.get_list(id)
if request.method == "POST":
check_csfr(request.form["csrf_token"], users.get_csrf())
if "lisays" in request.form:
stars = int(request.form["stars"])
comment = request.form["comment"]
if len(comment) > 500:
return render_template("restaurant.html", errormessage="Arvostelun tulee olla alle 500 merkkiä", info=info[0], open=info[1], id=id, reviews=reviews_list)
user_id = users.get_id()
reviews.add_review(id, user_id, stars, comment)
if "poisto" in request.form:
review_id = request.form["review_id"]
reviews.remove_review(review_id)
info = restaurants.get_info(id)
reviews_list = reviews.get_list(id)
return render_template("restaurant.html", info=info[0], open=info[1], id=id, reviews=reviews_list)
@app.route("/restaurantlist")
def restaurantlist():
restaurant_list = restaurants.get_list_based_on_reviews()
return render_template("restaurantlist.html", restaurants=restaurant_list)
@app.route("/search", methods=["GET"])
def search():
query = request.args["query"]
restaurant_list = restaurants.search(query)
return render_template("restaurantlist.html", restaurants=restaurant_list)
@app.route("/tagsearch", methods=["GET"])
def tagsearch():
tag = request.args["tag_list"]
restaurant_list = tags.searchtag(tag)
return render_template("restaurantlist.html", restaurants=restaurant_list)
@app.route("/tags", methods=["GET","POST"])
def tagging():
if not users.is_admin():
return render_template("forbidden.html", message="Sinulla ei ole oikeutta nähdä tätä sivua")
restaurants_list = restaurants.get_list()
tags_list = tags.get_tags()
if request.method == "GET":
return render_template("tags.html", tags=tags_list, restaurants=restaurants_list)
if request.method == "POST":
written_tag = request.form["tag"]
list_tag = request.form["existing_tag"]
if is_empty(written_tag) and is_empty(list_tag):
return render_template("tags.html", errormessage="Et lisännyt tägiä", tags=tags_list, restaurants=restaurants_list)
elif not is_empty(written_tag) and not is_empty(list_tag):
return render_template("tags.html", errormessage="Lisää yksi tägi kerrallaan", tags=tags_list, restaurants=restaurants_list)
else:
tag_to_be_added = ""
if is_empty(written_tag):
tag_to_be_added = list_tag
else:
tag_to_be_added = written_tag
if len(tag_to_be_added) > 50:
return render_template("tags.html", errormessage="Tägi on liian pitkä. Sen tulee olla alle 50 merkkiä", tags=tags_list, restaurants=restaurants_list)
if "selected_restaurants" in request.form:
restaurants_to_be_added = request.form.getlist("selected_restaurants")
check_csfr(request.form["csrf_token"], users.get_csrf())
tags.add_tags(restaurants_to_be_added, tag_to_be_added)
else:
return render_template("tags.html", errormessage="Et antanut ravintoloita", tags=tags_list, restaurants=restaurants_list)
return redirect("/")
def is_empty(word):
    """Return True when *word* contains no characters."""
    return not word
def check_csfr(from_site, from_session):
    """Abort with 403 when the submitted CSRF token does not match the session's.

    BUG FIX: `abort` was never imported, so a token mismatch raised NameError
    (HTTP 500) instead of responding 403. The flask import block now provides it.
    NOTE: the "csfr" spelling is kept because every route calls it by this name.
    """
    if from_site != from_session:
        abort(403)
65,443 | sannahan/ravintolasovellus | refs/heads/main | /restaurants.py | from db import db
from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent="my_test_app")
def add_restaurant(name, description, address, opening_times):
    """Insert a restaurant and its seven opening-time rows in one transaction.

    opening_times maps weekday index 0-6 to an (opening, closing) pair.
    Returns True on success; False when the address cannot be geocoded or
    a geocoder/database error occurs.
    """
    try:
        # Reject addresses the geocoder cannot resolve.
        if geolocator.geocode(address) is None:
            return False
        sql = "INSERT INTO restaurants (name, description, address, visible) VALUES (:name, :description, :address, 1) RETURNING id"
        result = db.session.execute(sql, {"name": name, "description": description, "address": address})
        restaurant_id = result.fetchone()[0]
        # Loop-invariant statement hoisted out of the loop.
        sql = "INSERT INTO opening_times (restaurant_id, day, opening, closing) VALUES (:restaurant_id, :day, :opening, :closing)"
        for day in range(7):
            opening, closing = opening_times[day]
            db.session.execute(sql, {"restaurant_id": restaurant_id, "day": day, "opening": opening, "closing": closing})
        db.session.commit()
        return True
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate.
        return False
def get_info_for_map():
    """Collect [id, name, latitude, longitude, description] for each visible restaurant."""
    sql = "SELECT id, name, address, description FROM restaurants WHERE visible=1"
    rows = db.session.execute(sql).fetchall()
    info_for_map = []
    for restaurant_id, name, address, description in rows:
        location = geolocator.geocode(address)
        info_for_map.append([restaurant_id, name, location.latitude, location.longitude, description])
    return info_for_map
def get_info(id):
    """Return the (details row, opening-times list) pair for one restaurant."""
    details = get_restaurant_details(id)
    hours = get_opening_times(id)
    return details, hours
def get_restaurant_details(id):
    """Fetch (name, description, address) of a visible restaurant, or None."""
    sql = "SELECT name, description, address FROM restaurants WHERE id=:id AND visible=1"
    return db.session.execute(sql, {"id": id}).fetchone()
def get_opening_times(id):
    """Build [day_abbrev, opening(, closing)] rows for one restaurant.

    Closed days (opening == "kiinni") get a two-element row, open days three.
    """
    sql = "SELECT day, opening, closing FROM opening_times WHERE restaurant_id=:id"
    rows = db.session.execute(sql, {"id": id}).fetchall()
    days_of_the_week = ["Ma", "Ti", "Ke", "To", "Pe", "La", "Su"]
    hours = []  # renamed from `open`, which shadowed the builtin
    for day, opening, closing in rows:
        if opening == "kiinni":
            hours.append([days_of_the_week[day], opening])
        else:
            hours.append([days_of_the_week[day], opening, closing])
    return hours
def get_list():
    """All visible restaurants as (name, id) rows."""
    sql = "SELECT name, id FROM restaurants WHERE visible=1"
    return db.session.execute(sql).fetchall()
def remove_restaurant(id):
    """Soft-delete: clear the visible flag instead of removing the row."""
    db.session.execute("UPDATE restaurants SET visible=0 WHERE id=:id", {"id": id})
    db.session.commit()
def search(query):
    """Substring search over descriptions of visible restaurants."""
    sql = "SELECT name, id FROM restaurants WHERE description LIKE :query AND visible=1"
    pattern = "%" + query + "%"
    return db.session.execute(sql, {"query": pattern}).fetchall()
def get_list_based_on_reviews():
    """Visible restaurants ordered by average star rating, best first."""
    sql = "SELECT r.name, r.id FROM restaurants AS r LEFT JOIN (SELECT restaurant_id, AVG(stars) as a FROM reviews GROUP BY restaurant_id) AS x ON r.id = x.restaurant_id WHERE r.visible = 1 ORDER BY x.a DESC"
    rows = db.session.execute(sql)
    return rows.fetchall()
65,444 | sannahan/ravintolasovellus | refs/heads/main | /reviews.py | from db import db
def add_review(restaurant_id, user_id, stars, comment):
    """Insert a visible review stamped with the current database time."""
    sql = "INSERT INTO reviews (restaurant_id, user_id, stars, comment, visible, sent_at) VALUES (:restaurant_id, :user_id, :stars, :comment, 1, NOW())"
    params = {"restaurant_id": restaurant_id, "user_id": user_id, "stars": stars, "comment": comment}
    db.session.execute(sql, params)
    db.session.commit()
def get_list(id):
    """Visible reviews for one restaurant, newest first, joined with reviewer name."""
    sql = "SELECT R.stars, R.comment, U.username, R.id, R.sent_at FROM reviews AS R, users AS U WHERE R.restaurant_id=:id AND R.user_id=U.id AND R.visible=1 ORDER BY R.id DESC"
    return db.session.execute(sql, {"id": id}).fetchall()
def remove_review(id):
    """Soft-delete a review by clearing its visible flag."""
    db.session.execute("UPDATE reviews SET visible=0 WHERE id=:id", {"id": id})
    db.session.commit()
65,445 | sannahan/ravintolasovellus | refs/heads/main | /users.py | from werkzeug.security import check_password_hash, generate_password_hash
from flask import session
from db import db
import secrets
def login(username, password):
    """Check credentials; on success populate the session and return True."""
    sql = "SELECT username, password, role, id FROM users WHERE username=:username"
    row = db.session.execute(sql, {"username": username}).fetchone()
    # Guard clauses: unknown user or wrong password -> failure.
    if row == None:
        return False
    if not check_password_hash(row[1], password):
        return False
    session["username"] = row[0]
    session["userrole"] = row[2]
    session["user_id"] = row[3]
    # Fresh CSRF token per login.
    session["csrf_token"] = secrets.token_hex(16)
    return True
def signup(username, password, role):
    """Create a user with a hashed password, then log them straight in.

    Returns False when the insert fails (typically a duplicate username).
    """
    hash_value = generate_password_hash(password)
    try:
        sql = "INSERT INTO users (username, password, role) VALUES (:username, :password, :role)"
        db.session.execute(sql, {"username": username, "password": hash_value, "role": role})
        db.session.commit()
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate.
        return False
    return login(username, password)
def logout():
    """Remove all authentication data from the session.

    BUG FIX: uses pop() with a default so logging out an already-anonymous
    session no longer raises KeyError (the old `del session[...]` did).
    """
    for key in ("username", "userrole", "user_id", "csrf_token"):
        session.pop(key, None)
def get_id():
    """Return the logged-in user's id, or None for anonymous sessions."""
    return session.get("user_id")

def get_csrf():
    """Return the session's CSRF token, or None when not logged in."""
    return session.get("csrf_token")

def is_admin():
    """True when the session's role is 2 (administrator)."""
    return session.get("userrole") == 2
| {"/routes.py": ["/users.py", "/restaurants.py", "/reviews.py", "/tags.py"]} |
65,446 | sannahan/ravintolasovellus | refs/heads/main | /tags.py | from db import db
def get_tags():
    """Distinct tag names (GROUP BY acts as DISTINCT here)."""
    sql = "SELECT tag FROM tags GROUP BY tag"
    return db.session.execute(sql).fetchall()
def add_tags(restaurants, tag):
    """Attach *tag* to every restaurant id in *restaurants*; one final commit.

    The loop-invariant INSERT statement is hoisted out of the loop.
    """
    sql = "INSERT INTO tags (restaurant_id, tag) VALUES (:restaurant_id, :tag)"
    for restaurant in restaurants:
        db.session.execute(sql, {"restaurant_id": restaurant, "tag": tag})
    db.session.commit()
def searchtag(tag):
    """Visible restaurants carrying the given tag, as (name, id) rows."""
    sql = "SELECT r.name, r.id FROM restaurants AS r, tags AS t WHERE r.id = t.restaurant_id AND r.visible = 1 AND t.tag=:tag"
    return db.session.execute(sql, {"tag": tag}).fetchall()
| {"/routes.py": ["/users.py", "/restaurants.py", "/reviews.py", "/tags.py"]} |
65,448 | gtmeier/pyscript | refs/heads/master | /pyscript/circle.py | from . import Shape
class Circle(Shape):
    """A circle rendered as a PostScript arc, sized by its radius."""

    def __init__(self, radius):
        self._radius = radius

    def _get_postscript(self, center):
        """Emit PostScript stroking a full 0-360 degree arc around *center*."""
        arc = f"{center.x} {center.y} {self._radius} 0 360 arc"
        return self._join_lines("newpath", arc, "stroke")

    def _get_width(self):
        # Bounding-box width is the diameter.
        return 2 * self._radius

    def _get_height(self):
        # A circle is exactly as tall as it is wide.
        return self._get_width()
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,449 | gtmeier/pyscript | refs/heads/master | /tests/test_triangle.py | import unittest
from pyscript import Triangle
# TODO: tests for _get_height, export_postscript
class TriangleTestCase(unittest.TestCase):
    """Unit tests for Triangle construction, side count and width reporting."""

    def test_side_length_0(self):
        tri = Triangle(0)
        self.assertEqual(tri._side_length, 0)

    def test_side_length_1(self):
        tri = Triangle(1)
        self.assertEqual(tri._side_length, 1)

    def test_side_length_54(self):
        tri = Triangle(54)
        self.assertEqual(tri._side_length, 54)

    def test_num_sides_3(self):
        tri = Triangle(1)
        self.assertEqual(tri._num_sides, 3)

    def test_get_width_1(self):
        tri = Triangle(1)
        self.assertEqual(tri._get_width(), 1)

    def test_get_width_39(self):
        tri = Triangle(39)
        self.assertEqual(tri._get_width(), 39)

    def test_get_width_71(self):
        tri = Triangle(71)
        self.assertEqual(tri._get_width(), 71)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,450 | gtmeier/pyscript | refs/heads/master | /tests/test_scaled.py | from pyscript import ScaledShape, Rectangle
from shape_test_case import ShapeTestCase
# TODO: tests for export_postscript
class ScaledShapeTestCase(ShapeTestCase):
    """Unit tests for ScaledShape's scaled bounding-box dimensions."""

    @staticmethod
    def _scaled_rect(width, height):
        # A width x height rectangle scaled by 2 horizontally, 3 vertically.
        return ScaledShape(Rectangle(width, height), 2, 3)

    def test_get_width_0(self):
        self.assertEqual(self._scaled_rect(0, 5)._get_width(), 0)

    def test_get_width_1(self):
        self.assertEqual(self._scaled_rect(1, 5)._get_width(), 2)

    def test_get_width_37(self):
        self.assertEqual(self._scaled_rect(37, 5)._get_width(), 74)

    def test_get_height_0(self):
        self.assertEqual(self._scaled_rect(5, 0)._get_height(), 0)

    def test_get_height_1(self):
        self.assertEqual(self._scaled_rect(5, 1)._get_height(), 3)

    def test_get_height_37(self):
        self.assertEqual(self._scaled_rect(5, 37)._get_height(), 111)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,451 | gtmeier/pyscript | refs/heads/master | /tests/test_rotated.py | from pyscript import RotatedShape, Rectangle
from shape_test_case import ShapeTestCase
# TODO: tests for export_postscript
class RotatedShapeTestCase(ShapeTestCase):
    """Unit tests for RotatedShape: accepted angles and swapped dimensions."""

    @staticmethod
    def _rotated(angle, width=10, height=20):
        # Convenience factory: a width x height rectangle rotated by *angle*.
        return RotatedShape(Rectangle(width, height), angle)

    def test_create_90(self):
        self._rotated(90)

    def test_create_180(self):
        self._rotated(180)

    def test_create_270(self):
        self._rotated(270)

    def test_value_error_0(self):
        self.assertRaises(ValueError, self._rotated, 0)

    def test_value_error_negative(self):
        self.assertRaises(ValueError, self._rotated, -90)

    def test_value_error_45(self):
        self.assertRaises(ValueError, self._rotated, 45)

    def test_value_error_100(self):
        self.assertRaises(ValueError, self._rotated, 100)

    def test_value_error_360(self):
        self.assertRaises(ValueError, self._rotated, 360)

    def test_value_error_720(self):
        self.assertRaises(ValueError, self._rotated, 720)

    def test_get_width_90(self):
        # A quarter turn swaps width and height.
        self.assertEqual(self._rotated(90, 20, 30)._get_width(), 30)

    def test_get_width_180(self):
        # A half turn leaves the bounding box unchanged.
        self.assertEqual(self._rotated(180, 20, 30)._get_width(), 20)

    def test_get_width_270(self):
        self.assertEqual(self._rotated(270, 20, 30)._get_width(), 30)

    def test_get_height_90(self):
        self.assertEqual(self._rotated(90, 20, 30)._get_height(), 20)

    def test_get_height_180(self):
        self.assertEqual(self._rotated(180, 20, 30)._get_height(), 30)

    def test_get_height_270(self):
        self.assertEqual(self._rotated(270, 20, 30)._get_height(), 20)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,452 | gtmeier/pyscript | refs/heads/master | /pyscript/__init__.py | from .point import Point
from .shape import Shape
from .circle import Circle
from .rectangle import Rectangle
from .spacer import Spacer
from .polygon import Polygon
from .square import Square
from .triangle import Triangle
from .scaled import ScaledShape
from .rotated import RotatedShape
from .layered import LayeredShapes
from .vertical import VerticalShapes
from .horizontal import HorizontalShapes
from .fractals import sierpinski_triangle
from .fractals import sierpinski_triangle_pages
from .fractals import write_postscript
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,453 | gtmeier/pyscript | refs/heads/master | /tests/test_horizontal.py | from pyscript import HorizontalShapes, Circle, Rectangle, Point
from shape_test_case import ShapeTestCase
class HorizontalShapesTestCase(ShapeTestCase):
    """Unit tests for HorizontalShapes sizing and PostScript export."""

    # Mixed row used by both multi-shape dimension tests.
    _MIXED = (
        Circle(1),
        Rectangle(5, 10),
        Circle(21),
        Rectangle(0, 1),
        Rectangle(3, 9),
    )

    def test_get_width_no_shapes(self):
        self.assertEqual(HorizontalShapes()._get_width(), 0)

    def test_get_width_single_shape(self):
        # A circle's width is its diameter.
        self.assertEqual(HorizontalShapes(Circle(3))._get_width(), 6)

    def test_get_width_multiple_shapes(self):
        # Widths add up horizontally: 2 + 5 + 42 + 0 + 3.
        self.assertEqual(HorizontalShapes(*self._MIXED)._get_width(), 52)

    def test_get_height_no_shapes(self):
        self.assertEqual(HorizontalShapes()._get_height(), 0)

    def test_get_height_single_shape(self):
        self.assertEqual(HorizontalShapes(Circle(3))._get_height(), 6)

    def test_get_height_multiple_shapes(self):
        # Height is that of the tallest member: the radius-21 circle.
        self.assertEqual(HorizontalShapes(*self._MIXED)._get_height(), 42)

    def test_export_postscript_circles_half_off_page(self):
        row = HorizontalShapes(*(Circle(r) for r in (10, 20, 30, 20, 10)))
        self._test_export_postscript(row, Point(0, 30))

    def test_export_postscript_circles_on_page(self):
        row = HorizontalShapes(*(Circle(r) for r in (10, 20, 30, 20, 10)))
        self._test_export_postscript(row, Point(90, 30))

    def test_export_postscript_circles_and_rectangles(self):
        members = [
            Circle(10),
            Rectangle(20, 20),
            Circle(20),
            Rectangle(40, 40),
            Circle(30),
            Rectangle(120, 60),
            Circle(20),
            Rectangle(80, 40),
            Circle(10),
            Rectangle(40, 20),
            Rectangle(20, 40),
        ]
        self._test_export_postscript(HorizontalShapes(*members), Point(300, 200))
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,454 | gtmeier/pyscript | refs/heads/master | /tests/test_square.py | import unittest
from pyscript import Square
# TODO: tests for export_postscript
class SquareTestCase(unittest.TestCase):
    """Unit tests for Square: stored side length and bounding-box size."""

    def test_side_length_0(self):
        self.assertEqual(Square(0)._side_length, 0)

    def test_side_length_1(self):
        self.assertEqual(Square(1)._side_length, 1)

    def test_side_length_54(self):
        self.assertEqual(Square(54)._side_length, 54)

    def test_num_sides_4(self):
        # A square is always a four-sided polygon.
        self.assertEqual(Square(1)._num_sides, 4)

    def test_width_1(self):
        self.assertEqual(Square(1)._get_width(), 1)

    def test_width_23(self):
        self.assertEqual(Square(23)._get_width(), 23)

    def test_width_59(self):
        self.assertEqual(Square(59)._get_width(), 59)

    def test_height_31(self):
        self.assertEqual(Square(31)._get_height(), 31)

    def test_height_79(self):
        self.assertEqual(Square(79)._get_height(), 79)

    def test_height_131(self):
        self.assertEqual(Square(131)._get_height(), 131)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,455 | gtmeier/pyscript | refs/heads/master | /weird-snowperson.py | #!/usr/bin/env python3
# Laura originally created this shape for use in an automated test, but it also
# serves as a nice demonstration of the pyscript module.
from pyscript import (
Point, Rectangle, Spacer, Square, Circle, HorizontalShapes, VerticalShapes,
LayeredShapes, ScaledShape, RotatedShape, Triangle, Polygon
)
if __name__ == "__main__":
    # Building blocks reused (by reference) in the composite below.
    base_circle = Circle(80)
    rectangle = Rectangle(100, 60)
    spacer = Spacer(40, 40)
    square = Square(80)
    # The "snowperson": three circles at 100%, 75% and 50% of the base size,
    # the two smaller ones layered with a decoration (a 5-sided polygon, and
    # a triangle rotated 180 degrees).
    # NOTE(review): assumes VerticalShapes stacks its arguments vertically in
    # the order given -- confirm against pyscript/vertical.py.
    vertical_shapes = VerticalShapes(
        base_circle,
        LayeredShapes(
            ScaledShape(base_circle, 0.75, 0.75), Polygon(5, 20)
        ),
        LayeredShapes(
            ScaledShape(base_circle, 0.5, 0.5), RotatedShape(Triangle(20), 180)
        )
    )
    # Symmetric row: rectangle, gap and square flank the circle stack on
    # each side (the same shape objects are reused for both sides).
    shape = HorizontalShapes(
        rectangle,
        spacer,
        square,
        vertical_shapes,
        square,
        spacer,
        rectangle
    )
    # Render the whole composite to a PostScript file in the working dir.
    shape.export_postscript(
        center=Point(305, 300), filename="weird-snowperson.ps"
    )
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,456 | gtmeier/pyscript | refs/heads/master | /tests/test_spacer.py | import unittest
from pyscript import Spacer
# TODO: tests for export_postscript
class SpacerTestCase(unittest.TestCase):
    """Unit tests for Spacer's reported width and height."""

    def test_get_width_0(self):
        self.assertEqual(Spacer(0, 5)._get_width(), 0)

    def test_get_width_1(self):
        self.assertEqual(Spacer(1, 5)._get_width(), 1)

    def test_get_width_37(self):
        self.assertEqual(Spacer(37, 5)._get_width(), 37)

    def test_get_height_0(self):
        self.assertEqual(Spacer(5, 0)._get_height(), 0)

    def test_get_height_1(self):
        self.assertEqual(Spacer(5, 1)._get_height(), 1)

    def test_get_height_37(self):
        self.assertEqual(Spacer(5, 37)._get_height(), 37)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,457 | gtmeier/pyscript | refs/heads/master | /pyscript/spacer.py | from . import Shape
# TODO: maybe subclass from Rectangle
class Spacer(Shape):
    """An invisible shape reserving a fixed width x height area.

    Useful for inserting blank space between shapes in a layout.
    """

    def __init__(self, width, height):
        """Remember the rectangular area this spacer occupies."""
        self._width, self._height = width, height

    def _get_postscript(self, center):
        # Nothing is drawn; emit only a PostScript comment so the spacer's
        # position is visible when reading the generated file.
        return self._join_lines(
            f"% spacer centered at ({center.x}, {center.y})"
        )

    def _get_width(self):
        return self._width

    def _get_height(self):
        return self._height
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,458 | gtmeier/pyscript | refs/heads/master | /pyscript/triangle.py | from . import Polygon
class Triangle(Polygon):
    """An equilateral triangle: a three-sided regular polygon."""

    def __init__(self, side_length):
        """Create a triangle whose sides are ``side_length`` units long.

        Note: parameter renamed from camelCase ``sideLength`` for PEP 8 and
        for consistency with the ``side_len`` naming used elsewhere in the
        package; every caller in the repository passes it positionally.
        """
        super().__init__(3, side_length)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,459 | gtmeier/pyscript | refs/heads/master | /pyscript/fractals.py | from . import Triangle, RotatedShape, Point
# TODO: replace the functions defined below with a SierpinskiTriangle class
# derived from Shape
# http://jwilson.coe.uga.edu/emat6680/parsons/mvp6690/essay1/sierpinski.html
def write_postscript(postscript_code, filename):
    """Write *postscript_code* to *filename*, replacing any existing file.

    The previous version opened the file with mode ``"w+"`` (read/write),
    but the handle is only ever written to, so plain write mode is used,
    with an explicit encoding for cross-platform determinism.
    """
    with open(filename, "w", encoding="utf-8") as output_file:
        output_file.write(postscript_code)
def sierpinski_triangle_pages(side_len, center, max_recursion_depth):
    """Return PostScript with one page per recursion depth, from 0 up to
    *max_recursion_depth* inclusive, separated by ``showpage`` operators."""
    pages = [
        sierpinski_triangle(side_len, center, depth)
        for depth in range(max_recursion_depth + 1)
    ]
    return "\nshowpage\n\n".join(pages)
# TODO: try to reduce size of postscript code (becomes unreasonably large at
# higher recursion depths)
def sierpinski_triangle(side_len, center, recursion_depth):
    """Return PostScript code drawing a Sierpinski triangle.

    *side_len* is the outer triangle's side, *center* its center point, and
    *recursion_depth* how many times the inverted-triangle pattern recurses.
    """
    half_side = side_len / 2
    # y-coordinate of the outer triangle's base edge.
    base_y = center.y - Triangle(side_len)._get_height() / 2
    # The inverted-triangle pattern is anchored half an inner-triangle
    # height above that base, horizontally centered.
    anchor = Point(
        center.x, base_y + Triangle(half_side)._get_height() / 2
    )
    pieces = _inverted_triangle_pattern(half_side, anchor, recursion_depth)
    return _export_multiple_shapes(*pieces)
def _inverted_triangle_pattern(side_len, center, recursion_depth):
    # Recursively build the Sierpinski arrangement of inverted triangles:
    # one 180-degree-rotated triangle at *center*, plus three half-size
    # copies of the whole pattern placed above, below-left and below-right.
    # Returns a flat tuple of (shape, center_point) pairs, this level's
    # triangle first, then the upper, left and right sub-patterns in order.
    assert recursion_depth >= 0
    triangle = RotatedShape(Triangle(side_len), 180)
    if recursion_depth == 0:
        # Base case: just the single inverted triangle at this level.
        return ((triangle, center), )
    small_triangle_side_len = side_len / 2
    small_triangle_height = Triangle(small_triangle_side_len)._get_height()

    def pattern(center):
        # One half-size sub-pattern, one recursion level shallower.
        return _inverted_triangle_pattern(
            small_triangle_side_len, center, recursion_depth - 1
        )
    # Sub-pattern anchor points relative to this triangle's center.
    upper_pattern_center = Point(
        center.x, center.y + 1.5 * small_triangle_height
    )
    left_pattern_center = Point(
        center.x - side_len / 2, center.y - small_triangle_height / 2
    )
    right_pattern_center = Point(
        center.x + side_len / 2, left_pattern_center.y
    )
    upper_pattern = pattern(upper_pattern_center)
    left_pattern = pattern(left_pattern_center)
    right_pattern = pattern(right_pattern_center)
    return ((triangle, center), *upper_pattern, *left_pattern, *right_pattern)
def _export_multiple_shapes(*shape_center_pairs):
return "\n".join(
shape._get_postscript(center) for shape, center in shape_center_pairs
)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,460 | gtmeier/pyscript | refs/heads/master | /tests/test_all_shapes.py | from pyscript import (
Point, Rectangle, Spacer, Square, Circle, HorizontalShapes, VerticalShapes,
LayeredShapes, ScaledShape, RotatedShape, Triangle, Polygon
)
from shape_test_case import ShapeTestCase
class AllShapesTestCase(ShapeTestCase):
    """Integration test combining every shape type into one composite."""

    # Central column: a large circle stacked with two layered
    # circle/decoration pairs at 75% and 50% scale.
    _circle = Circle(80)
    _column = VerticalShapes(
        _circle,
        LayeredShapes(
            ScaledShape(_circle, 0.75, 0.75), Polygon(5, 20)
        ),
        LayeredShapes(
            ScaledShape(_circle, 0.5, 0.5),
            RotatedShape(Triangle(20), 180)
        )
    )
    # Symmetric row around the column; the same shape objects are reused
    # on both sides.
    _rect = Rectangle(100, 60)
    _gap = Spacer(40, 40)
    _sq = Square(80)
    _composite = HorizontalShapes(_rect, _gap, _sq, _column, _sq, _gap, _rect)

    def test_get_width(self):
        self.assertEqual(self._composite._get_width(), 600)

    def test_get_height(self):
        self.assertEqual(self._composite._get_height(), 360)

    def test_export_postscript_all_shapes(self):
        self._test_export_postscript(self._composite, Point(305, 300))
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,461 | gtmeier/pyscript | refs/heads/master | /pyscript/rotated.py | from . import Shape, Point
class RotatedShape(Shape):
    """Decorator shape drawing another shape rotated by 90, 180 or 270 degrees."""

    def __init__(self, shape, rotation_angle):
        """Wrap *shape* so it is drawn rotated by *rotation_angle* degrees.

        Raises:
            ValueError: if *rotation_angle* is not 90, 180 or 270.
        """
        if rotation_angle not in (90, 180, 270):
            # Was a bare ValueError(); include the offending value so the
            # caller can tell what went wrong.
            raise ValueError(
                f"rotation_angle must be 90, 180 or 270, got {rotation_angle!r}"
            )
        self._shape = shape
        self._rotation_angle = rotation_angle
        # A quarter turn (90/270) swaps the bounding box's width and
        # height; a half turn (180) leaves it unchanged.
        if self._rotation_angle in (90, 270):
            self._width = self._shape._get_height()
            self._height = self._shape._get_width()
        else:
            assert self._rotation_angle == 180
            self._width = self._shape._get_width()
            self._height = self._shape._get_height()

    def _get_postscript(self, center):
        # Draw the wrapped shape at the origin of a translated + rotated
        # coordinate system, bracketed by gsave/grestore so the transform
        # does not leak into subsequent drawing.
        shape_postscript = self._shape._get_postscript(Point(0, 0))
        return self._join_lines(
            "gsave",
            f"{center.x} {center.y} translate ",
            f"{self._rotation_angle} rotate\n",
            f"{shape_postscript}",
            "grestore"
        )

    def _get_width(self):
        return self._width

    def _get_height(self):
        return self._height
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,462 | gtmeier/pyscript | refs/heads/master | /tests/test_layered.py | from pyscript import LayeredShapes, Circle, Rectangle
from shape_test_case import ShapeTestCase
# TODO: tests for export_postscript
class LayeredShapesTestCase(ShapeTestCase):
    """Checks bounding-box math for LayeredShapes.

    Layered shapes share one center, so the bounding box is simply the
    widest width and tallest height among the children.
    """

    def test_get_width_no_shapes(self):
        # With nothing layered, the bounding box collapses to zero.
        self.assertEqual(LayeredShapes()._get_width(), 0)

    def test_get_width_single_shape(self):
        self.assertEqual(LayeredShapes(Circle(3))._get_width(), 6)

    def test_get_width_multiple_shapes(self):
        children = (
            Circle(1),
            Rectangle(5, 10),
            Circle(21),
            Rectangle(0, 1),
            Rectangle(3, 9),
        )
        # Widest child is the radius-21 circle (diameter 42).
        self.assertEqual(LayeredShapes(*children)._get_width(), 42)

    def test_get_height_no_shapes(self):
        self.assertEqual(LayeredShapes()._get_height(), 0)

    def test_get_height_single_shape(self):
        self.assertEqual(LayeredShapes(Rectangle(1, 5))._get_height(), 5)

    def test_get_height_multiple_shapes(self):
        children = (
            Circle(1),
            Rectangle(5, 10),
            Rectangle(0, 1),
            Rectangle(3, 9),
        )
        # Tallest child is the 5x10 rectangle.
        self.assertEqual(LayeredShapes(*children)._get_height(), 10)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,463 | gtmeier/pyscript | refs/heads/master | /pyscript/point.py | from collections import namedtuple
# Immutable 2-D point; field access by name (p.x, p.y) or by index.
Point = namedtuple("Point", ["x", "y"])
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,464 | gtmeier/pyscript | refs/heads/master | /tests/test_circle.py | import unittest
from pyscript import Circle, Point
class CircleTestCase(unittest.TestCase):
    """Checks Circle bounding-box sizes and generated PostScript."""

    def test_get_width_0(self):
        self.assertEqual(Circle(0)._get_width(), 0)

    def test_get_width_1(self):
        # Width is the diameter: twice the radius.
        self.assertEqual(Circle(1)._get_width(), 2)

    def test_get_width_37(self):
        self.assertEqual(Circle(37)._get_width(), 74)

    def test_get_height_0(self):
        self.assertEqual(Circle(0)._get_height(), 0)

    def test_get_height_1(self):
        # Height is the diameter: twice the radius.
        self.assertEqual(Circle(1)._get_height(), 2)

    def test_get_height_37(self):
        self.assertEqual(Circle(37)._get_height(), 74)

    # TODO: store known-good code in a file
    def test_get_postscript_80_80_80(self):
        expected = (
            "newpath\n"
            "80 80 80 0 360 arc\n"
            "stroke\n"
        )
        self.assertEqual(Circle(80)._get_postscript(Point(80, 80)), expected)

    # TODO: store known-good code in a file
    def test_get_postscript_20_160_40(self):
        expected = (
            "newpath\n"
            "20 160 40 0 360 arc\n"
            "stroke\n"
        )
        self.assertEqual(Circle(40)._get_postscript(Point(20, 160)), expected)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,465 | gtmeier/pyscript | refs/heads/master | /pyscript/polygon.py | from math import sin, cos, pi
from . import Shape
# TODO: tests
class Polygon(Shape):
    """A regular polygon with ``num_sides`` sides of length ``side_length``.

    The bounding-box width/height are precomputed once in the constructor.
    The closed-form expressions differ by side count: odd, even but not
    divisible by 4, or divisible by 4.
    """

    def __init__(self, num_sides, side_length):
        self._num_sides = num_sides
        self._side_length = side_length
        self._set_width_height()

    def _set_width_height(self):
        # Dispatch to the bounding-box formula matching the side count.
        if self._num_sides % 2 != 0:
            self._set_width_height_odd()
        elif self._num_sides % 4 != 0:
            self._set_width_height_even()
        else:
            self._set_width_height_divisible_by_4()

    def _set_width_height_odd(self):
        # Odd side count: one vertex points up, one edge is at the bottom.
        n = self._num_sides
        self._width = (
            self._side_length * sin(pi * (n - 1) / (2 * n)) / sin(pi / n)
        )
        self._height = (
            self._side_length * (1 + cos(pi / n)) / (2 * sin(pi / n))
        )

    def _set_width_height_divisible_by_4(self):
        # Opposite edges align with both axes, so the box is square.
        n = self._num_sides
        self._width = self._height = (
            self._side_length * cos(pi / n) / sin(pi / n)
        )

    def _set_width_height_even(self):
        # Even but not divisible by 4: vertices span the width,
        # parallel edges span the height.
        n = self._num_sides
        self._width = self._side_length / sin(pi / n)
        self._height = self._side_length * cos(pi / n) / sin(pi / n)

    def _get_postscript(self, center):
        """Emit a PostScript for-loop that strokes the polygon outline."""
        interior_angle = (self._num_sides - 2) * 180 / self._num_sides
        turn_angle = 180 - interior_angle
        # Shift so the bounding box (not the first vertex) is centered.
        offset_x = - self._side_length / 2
        offset_y = - self._get_height() / 2
        return self._join_lines(
            "gsave",
            f"{offset_x} {offset_y} translate",
            "newpath",
            f"{center.x} {center.y} moveto",
            f"1 1 {self._num_sides - 1} {{",
            f"  {self._side_length} 0 rlineto",
            f"  {turn_angle} rotate",
            "} for",
            "closepath",
            "stroke",
            "grestore"
        )

    def _get_width(self):
        return self._width

    def _get_height(self):
        return self._height
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,466 | gtmeier/pyscript | refs/heads/master | /pyscript/scaled.py | from . import Shape, Point
class ScaledShape(Shape):
    """Wrapper that draws another shape scaled by per-axis factors.

    The wrapped shape is rendered at the origin inside a gsave/grestore
    pair, after translating to *center* and applying a PostScript scale.
    """

    def __init__(self, shape, scale_factor_x, scale_factor_y):
        self._scale_factor_x = scale_factor_x
        self._scale_factor_y = scale_factor_y
        self._shape = shape

    def _get_postscript(self, center):
        inner = self._shape._get_postscript(Point(0, 0))
        # NOTE: the trailing space after "translate " and the extra "\n"
        # after "scale" are preserved from the original output format.
        return self._join_lines(
            "gsave",
            f"{center.x} {center.y} translate ",
            f"{self._scale_factor_x} {self._scale_factor_y} scale\n",
            inner,
            "grestore"
        )

    def _get_width(self):
        return self._scale_factor_x * self._shape._get_width()

    def _get_height(self):
        return self._scale_factor_y * self._shape._get_height()
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,467 | gtmeier/pyscript | refs/heads/master | /pyscript/layered.py | from . import Shape
class LayeredShapes(Shape):
    """Draws all child shapes on top of one another at a shared center."""

    def __init__(self, *shapes):
        self._shapes = shapes

    def _get_postscript(self, center):
        # Every child is rendered at the same center point.
        rendered = [child._get_postscript(center) for child in self._shapes]
        return "\n".join(rendered)

    def _get_width(self):
        widths = (child._get_width() for child in self._shapes)
        return max(widths, default=0)

    def _get_height(self):
        heights = (child._get_height() for child in self._shapes)
        return max(heights, default=0)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,468 | gtmeier/pyscript | refs/heads/master | /tests/test_rectangle.py | import unittest
from pyscript import Rectangle, Point
class RectangleTestCase(unittest.TestCase):
    """Checks Rectangle bounding-box sizes and generated PostScript."""

    def test_get_width_0(self):
        self.assertEqual(Rectangle(0, 5)._get_width(), 0)

    def test_get_width_1(self):
        self.assertEqual(Rectangle(1, 5)._get_width(), 1)

    def test_get_width_37(self):
        self.assertEqual(Rectangle(37, 5)._get_width(), 37)

    def test_get_height_0(self):
        self.assertEqual(Rectangle(5, 0)._get_height(), 0)

    def test_get_height_1(self):
        self.assertEqual(Rectangle(5, 1)._get_height(), 1)

    def test_get_height_37(self):
        self.assertEqual(Rectangle(5, 37)._get_height(), 37)

    # TODO: store known-good code in a file
    def test_get_postscript(self):
        expected = (
            "newpath\n"
            "80.0 60.0 moveto\n"
            "40 0 rlineto\n"
            "0 80 rlineto\n"
            "-40 0 rlineto\n"
            "closepath\n"
            "stroke\n"
        )
        self.assertEqual(
            Rectangle(40, 80)._get_postscript(Point(100, 100)), expected
        )
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,469 | gtmeier/pyscript | refs/heads/master | /tests/shape_test_case.py | import inspect
import os
import unittest
class ShapeTestCase(unittest.TestCase):
    """Shared base for shape tests that compare exported PostScript files.

    Subclasses call ``_test_export_postscript`` from test methods named
    ``test_export_postscript_<case>``; the expected output is read from
    ``tests/postscript-code/<ShapeName>/<case>.ps``.
    """
    # Directory holding the known-good PostScript files.
    _postscript_code_path = os.path.join('tests', 'postscript-code')
    # Scratch file each test exports to; removed again in tearDown().
    _test_file_path = os.path.join(_postscript_code_path, 'test.ps')
    def tearDown(self):
        """Delete the scratch export file left behind by a test, if any."""
        if os.path.exists(self._test_file_path):
            assert os.path.isfile(self._test_file_path)
            os.remove(self._test_file_path)
    def _test_export_postscript(self, shape, center):
        """Export *shape* at *center*, then diff the file against known-good code."""
        # Use plain assert because this is a precondition, not a test
        # assertion.
        assert not os.path.exists(self._test_file_path)
        shape.export_postscript(center=center, filename=self._test_file_path)
        self.assertTrue(os.path.isfile(self._test_file_path))
        actual_code = self._get_actual_export_code()
        expected_code = self._get_expected_export_code()
        self.assertEqual(actual_code, expected_code)
    def _get_actual_export_code(self):
        """Read back the PostScript the shape just exported."""
        with open(self._test_file_path, 'r') as test_file:
            return test_file.read()
    def _get_expected_export_code(self):
        """Read the known-good PostScript for the currently running test."""
        export_file_path = os.path.join(
            self._postscript_code_path,
            self._get_test_case_name(),  # TODO: compute once, in the constructor
            self._get_current_test_name() + '.ps'
        )
        with open(export_file_path, 'r') as export_file:
            return export_file.read()
    def _get_test_case_name(self):
        """Return the subclass name with its 'TestCase' suffix stripped."""
        test_case_suffix = 'TestCase'
        test_case_full_name = type(self).__name__
        assert test_case_full_name.endswith(test_case_suffix)
        test_case_name_len = len(test_case_full_name) - len(test_case_suffix)
        return test_case_full_name[:test_case_name_len]
    def _get_current_test_name(self):
        """Derive the case name from the running test method's name.

        Walks exactly three frames up the call stack (this helper <-
        _get_expected_export_code <- _test_export_postscript <- the test
        method), so it only works when reached through that exact call
        chain -- fragile by design; see the TODO above.
        """
        test_prefix = 'test_export_postscript_'
        current_test_full_name = (
            inspect.currentframe().f_back.f_back.f_back.f_code.co_name
        )
        assert current_test_full_name.startswith(test_prefix)
        return current_test_full_name[len(test_prefix):]
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,470 | gtmeier/pyscript | refs/heads/master | /pyscript/shape.py | from abc import ABC, abstractmethod
from . import Point
class Shape(ABC):
    """Abstract base class for shapes that render themselves as PostScript.

    Concrete subclasses implement ``_get_postscript`` (drawing code
    centered on a point) plus ``_get_width``/``_get_height`` (bounding-box
    size); this base class supplies file export and shared helpers.
    """

    def export_postscript(
            self, center=Point(0, 0), show_center=False, filename="shape.ps"):
        """Write a complete PostScript program for this shape to *filename*.

        :param center: page point the shape is centered on.
        :param show_center: when True, also draw a small filled dot at
            *center* for debugging.
        :param filename: output path; the file is created or truncated.
        """
        postscript_code = self._get_toplevel_postscript(center, show_center)
        # "w" rather than the previous "w+": the file is only written,
        # never read back, so read-write access was unnecessary.
        with open(filename, "w") as output_file:
            output_file.write(postscript_code)

    def _get_toplevel_postscript(self, center, show_center):
        """Assemble shape code, the optional center marker, and showpage."""
        postscript_code = self._get_postscript(center) + "\n"
        if show_center:
            postscript_code += self._show_center(center) + "\n"
        return postscript_code + "showpage\n"

    @abstractmethod
    def _get_postscript(self, center):
        """Return PostScript drawing code centered at *center*."""

    @abstractmethod
    def _get_width(self):
        """Return the width of the shape's bounding box."""

    @abstractmethod
    def _get_height(self):
        """Return the height of the shape's bounding box."""

    @staticmethod
    def _show_center(center):
        """Return PostScript marking *center* with a small filled dot."""
        return "\n".join((
            "% Show center for debugging purposes.",
            "newpath",
            f"{center.x} {center.y} 2 0 360 arc",
            "fill"
        )) + "\n"

    @staticmethod
    def _join_lines(*lines):
        """Join *lines* with newlines and add a trailing newline."""
        return "\n".join(lines) + "\n"
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,471 | gtmeier/pyscript | refs/heads/master | /tests/test_vertical.py | from pyscript import VerticalShapes, Circle, Rectangle
from shape_test_case import ShapeTestCase
# TODO: tests for export_postscript
class VerticalShapesTestCase(ShapeTestCase):
    """Checks bounding-box math for VerticalShapes.

    A vertical stack is as wide as its widest child and as tall as the
    sum of all child heights.  (Local variables were previously
    copy-pasted as ``horizontal_shapes``; renamed to match the class
    under test.)
    """
    def test_get_width_no_shapes(self):
        vertical_shapes = VerticalShapes()
        self.assertEqual(vertical_shapes._get_width(), 0)
    def test_get_width_single_shape(self):
        vertical_shapes = VerticalShapes(Circle(3))
        self.assertEqual(vertical_shapes._get_width(), 6)
    def test_get_width_multiple_shapes(self):
        # Widest child is the radius-21 circle (diameter 42).
        vertical_shapes = VerticalShapes(
            Circle(1),
            Rectangle(5, 10),
            Circle(21),
            Rectangle(0, 1),
            Rectangle(3, 9)
        )
        self.assertEqual(vertical_shapes._get_width(), 42)
    def test_get_height_no_shapes(self):
        vertical_shapes = VerticalShapes()
        self.assertEqual(vertical_shapes._get_height(), 0)
    def test_get_height_single_shape(self):
        vertical_shapes = VerticalShapes(Circle(3))
        self.assertEqual(vertical_shapes._get_height(), 6)
    def test_get_height_multiple_shapes(self):
        # Height is the sum of all child heights.
        vertical_shapes = VerticalShapes(
            Circle(1),
            Rectangle(5, 10),
            Circle(21),
            Rectangle(0, 1),
            Rectangle(3, 9)
        )
        self.assertEqual(vertical_shapes._get_height(), 2 + 10 + 42 + 1 + 9)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,472 | gtmeier/pyscript | refs/heads/master | /pyscript/vertical.py | from . import Shape, Point
class VerticalShapes(Shape):
    """Stacks child shapes bottom-to-top along a shared vertical axis."""

    def __init__(self, *shapes):
        self._shapes = shapes

    def _get_postscript(self, center):
        rendered = []
        # Start at the bottom edge of the whole stack's bounding box.
        y_cursor = center.y - self._get_height() / 2
        for child in self._shapes:
            half_height = child._get_height() / 2
            # Advance to the child's own center, render, then advance
            # past the child's top edge.
            y_cursor += half_height
            rendered.append(child._get_postscript(Point(center.x, y_cursor)))
            y_cursor += half_height
        return "\n".join(rendered)

    def _get_width(self):
        widths = (child._get_width() for child in self._shapes)
        return max(widths, default=0)

    def _get_height(self):
        return sum(child._get_height() for child in self._shapes)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,473 | gtmeier/pyscript | refs/heads/master | /pyscript/rectangle.py | from . import Shape
class Rectangle(Shape):
    """An axis-aligned rectangle with a fixed width and height."""

    def __init__(self, width, height):
        self._width = width
        self._height = height

    def _get_postscript(self, center):
        """Stroke the rectangle's outline, centered at *center*."""
        w = self._get_width()
        h = self._get_height()
        # Path starts at the bottom-left corner; closepath draws the
        # final (left) edge back to it.
        left = center.x - w / 2
        bottom = center.y - h / 2
        return self._join_lines(
            "newpath",
            f"{left} {bottom} moveto",
            f"{w} 0 rlineto",
            f"0 {h} rlineto",
            f"{-w} 0 rlineto",
            "closepath",
            "stroke"
        )

    def _get_width(self):
        return self._width

    def _get_height(self):
        return self._height
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,474 | gtmeier/pyscript | refs/heads/master | /pyscript/square.py | from . import Polygon
class Square(Polygon):
    """A square: a regular 4-sided polygon with the given side length.

    A square's bounding-box width and height equal its side length
    exactly, so the general Polygon formulas (which carry float rounding
    noise) are overridden to return the exact value.
    """
    def __init__(self, sideLength):
        super().__init__(4, sideLength)
    def _get_width(self):
        """Return the exact side length (== bounding-box width)."""
        from math import isclose
        # Sanity-check against the general Polygon formula.  isclose()
        # replaces the old `round(...) == side` comparison, which broke
        # for non-integer side lengths (rounding to an int can never
        # equal e.g. 2.5).
        assert isclose(super()._get_width(), self._side_length)
        return self._side_length
    def _get_height(self):
        """Return the exact side length (== bounding-box height)."""
        from math import isclose
        assert isclose(super()._get_height(), self._side_length)
        return self._side_length
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,475 | gtmeier/pyscript | refs/heads/master | /setup.py | import setuptools
# Minimal packaging metadata for the pyscript drawing library.
_METADATA = dict(
    name='pyscript',
    packages=['pyscript'],
    python_requires='>=3.6.8',
)
setuptools.setup(**_METADATA)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,476 | gtmeier/pyscript | refs/heads/master | /pyscript/horizontal.py | from . import Shape, Point
class HorizontalShapes(Shape):
    """Compound shape that lays its children out side by side, left to
    right, each vertically centered on the layout's center line."""

    def __init__(self, *shapes):
        self._shapes = shapes

    def _get_postscript(self, center):
        # Walk a cursor from the left edge, placing each child at its
        # own horizontal center.
        cursor = center.x - self._get_width() / 2
        pieces = []
        for child in self._shapes:
            half = child._get_width() / 2
            pieces.append(child._get_postscript(Point(cursor + half, center.y)))
            cursor += half + half
        return "\n".join(pieces)

    def _get_width(self):
        # Children are packed with no gaps, so their widths simply add up.
        return sum(child._get_width() for child in self._shapes)

    def _get_height(self):
        # The tallest child determines the row height; 0 for an empty row.
        return max((child._get_height() for child in self._shapes), default=0)
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,477 | gtmeier/pyscript | refs/heads/master | /sierpinski.py | #!/usr/bin/env python3
from pyscript import sierpinski_triangle_pages, write_postscript, Point
if __name__ == "__main__":
    # Depth-8 Sierpinski triangle, side length 400, centered at (250, 350),
    # rendered as PostScript pages and written to disk.
    pages = sierpinski_triangle_pages(400, Point(250, 350), 8)
    write_postscript(pages, "sierpinski.ps")
| {"/pyscript/circle.py": ["/pyscript/__init__.py"], "/tests/test_triangle.py": ["/pyscript/__init__.py"], "/tests/test_scaled.py": ["/pyscript/__init__.py"], "/tests/test_rotated.py": ["/pyscript/__init__.py"], "/pyscript/__init__.py": ["/pyscript/point.py", "/pyscript/shape.py", "/pyscript/circle.py", "/pyscript/rectangle.py", "/pyscript/spacer.py", "/pyscript/polygon.py", "/pyscript/square.py", "/pyscript/triangle.py", "/pyscript/scaled.py", "/pyscript/rotated.py", "/pyscript/layered.py", "/pyscript/vertical.py", "/pyscript/horizontal.py", "/pyscript/fractals.py"], "/tests/test_horizontal.py": ["/pyscript/__init__.py"], "/tests/test_square.py": ["/pyscript/__init__.py"], "/weird-snowperson.py": ["/pyscript/__init__.py"], "/tests/test_spacer.py": ["/pyscript/__init__.py"], "/pyscript/spacer.py": ["/pyscript/__init__.py"], "/pyscript/triangle.py": ["/pyscript/__init__.py"], "/pyscript/fractals.py": ["/pyscript/__init__.py"], "/tests/test_all_shapes.py": ["/pyscript/__init__.py"], "/pyscript/rotated.py": ["/pyscript/__init__.py"], "/tests/test_layered.py": ["/pyscript/__init__.py"], "/tests/test_circle.py": ["/pyscript/__init__.py"], "/pyscript/polygon.py": ["/pyscript/__init__.py"], "/pyscript/scaled.py": ["/pyscript/__init__.py"], "/pyscript/layered.py": ["/pyscript/__init__.py"], "/tests/test_rectangle.py": ["/pyscript/__init__.py"], "/pyscript/shape.py": ["/pyscript/__init__.py"], "/tests/test_vertical.py": ["/pyscript/__init__.py"], "/pyscript/vertical.py": ["/pyscript/__init__.py"], "/pyscript/rectangle.py": ["/pyscript/__init__.py"], "/pyscript/square.py": ["/pyscript/__init__.py"], "/pyscript/horizontal.py": ["/pyscript/__init__.py"], "/sierpinski.py": ["/pyscript/__init__.py"]} |
65,478 | GLIMS-RGI/rgitools | refs/heads/master | /notebooks/dem_statistics/create_dem_example_images.py | import papermill as pm
import os
import os.path as path
# these names and RGIids are taken from the "Examples" on this page: https://rgitools.readthedocs.io/en/latest/dems.html
# Maps RGI v6 glacier IDs to the short, human-readable directory names used
# for the example images under docs/_static/dems_examples/ (see loop below).
name_mapping = {
    'RGI60-11.00897': 'hef',
    'RGI60-11.01827': 'oberaletsch',
    'RGI60-01.10689': 'columbia',
    'RGI60-06.00477': 'iceland',
    'RGI60-05.10137': 'greenland',
    'RGI60-03.02489': 'devon',
    'RGI60-16.02207': 'shallap',
    'RGI60-19.02274': 'nordenskjoeld',
    'RGI60-19.00124': 'alexander',
    'RGI60-19.01251': 'gillock',
    'RGI60-03.00251': 'dobbin',
    'RGI60-15.02578': 'thana',
    'RGI60-07.01114': 'tellbreen',
    'RGI60-08.01126': 'nigards',
    'RGI60-18.00854': 'olivine',
    'RGI60-09.00552': 'lenin',
    'RGI60-19.00783': 'balleny_islands',
    'RGI60-19.00792': 'queen_maud_land',
    'RGI60-19.01405': 'pine_island_bay'
}
# Create the comparison plots for each example glacier in the dictionary
# above and save them into their corresponding directory.
for rgiid, glacier_name in name_mapping.items():
    # Figures end up next to the docs, under _static/dems_examples/<name>.
    plot_dir = path.abspath(
        path.join(os.getcwd(), '../../docs/_static/dems_examples/', glacier_name))
    pm.execute_notebook(
        'dem_comparison_for_rgitopo_docs.ipynb',
        '/dev/null',  # the executed notebook itself is not kept
        parameters=dict(rgi_id=rgiid, plot_dir=plot_dir),
    )
65,479 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/cli/compute_hypsometries.py | import os
import sys
from glob import glob
import argparse
import multiprocessing as mp
from rgitools import funcs
def _set_oggm_params(cfg):
    """Default OGGM parameter setup: disable OGGM's own multiprocessing
    (this script drives its own process pool instead)."""
    cfg.PARAMS['use_multiprocessing'] = False
def run(input_dir=None, output_dir=None, *, replace_str=None,
        oggm_working_dir='', set_oggm_params=None,
        n_processes=None):
    """Computes the hypsometries for an entire RGI directory.

    Parameters
    ----------
    input_dir : str
        path to the RGI directory
    output_dir : str
        path to the output directory
    replace_str : callable
        a function to call on the file's basename. A good example is:
        ``replace_str=lambda x : x.replace('rgi60', 'rgi61')``
    oggm_working_dir : str, optional
        path to the folder where oggm will write its GlacierDirectories.
        Default is to use a temporary folder (not recommended)
    set_oggm_params : callable
        a function which sets the OGGM params on cfg. The default is to
        turn multiprocessing off.
    n_processes : int, optional
        the number of processors to use
    """
    if set_oggm_params is None:
        set_oggm_params = _set_oggm_params

    # Collect the per-region shapefiles, skipping the 'Regions' overview.
    pattern = os.path.join(input_dir, '*', '*_rgi*_*.shp')
    rgi_shps = sorted(s for s in glob(pattern) if 'Regions' not in s)

    funcs.mkdir(output_dir)

    # Mirror the input layout below output_dir, applying replace_str to
    # both directory and file basenames.
    out_paths = []
    log_names = []
    for shp in rgi_shps:
        region_dir = os.path.basename(os.path.dirname(shp))
        base = os.path.basename(shp)
        if replace_str:
            region_dir = replace_str(region_dir)
            base = replace_str(base)
        region_dir = os.path.join(output_dir, region_dir)
        funcs.mkdir(region_dir)
        base = base.replace('.shp', '')
        out_paths.append(os.path.join(region_dir, base))
        log_names.append(base)

    # One task per shapefile; chunksize=1 keeps scheduling fine-grained.
    n = len(rgi_shps)
    with mp.Pool(n_processes) as pool:
        pool.starmap(funcs.mappable_func,
                     zip([funcs.hypsometries] * n,
                         rgi_shps, out_paths, log_names,
                         [set_oggm_params] * n,
                         [oggm_working_dir] * n),
                     chunksize=1)
def parse_args(args):
    """Parse and validate CLI arguments.

    Parameters
    ----------
    args : list of str
        raw command line arguments (typically ``sys.argv[1:]``)

    Returns
    -------
    dict
        keyword arguments ready to be passed to :py:func:`run`

    Raises
    ------
    ValueError
        if a required argument is missing or ``--replace-str`` does not
        get exactly two values
    """
    # CLI args
    description = 'Computes the hypsometries for an entire RGI directory.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--input-dir', type=str,
                        help='the rgi directory to process.')
    parser.add_argument('--output-dir', type=str,
                        help='the directory where to write the processed '
                             'files.')
    # Fixed help text: it was a copy-paste of --output-dir's description.
    parser.add_argument('--oggm-working-dir', type=str,
                        help='the folder where OGGM will write its '
                             'GlacierDirectories.')
    parser.add_argument('--replace-str', nargs='*', type=str,
                        help='a string to change on the file basename. '
                             'A good example is: --replace-str rgi60 rgi61')
    parser.add_argument('--n-processes', type=int,
                        help='Number of processors to use.')
    args = parser.parse_args(args)

    if not args.input_dir:
        raise ValueError('--input-dir is required!')
    if not args.output_dir:
        raise ValueError('--output-dir is required!')

    if args.replace_str:
        if len(args.replace_str) != 2:
            raise ValueError('--replace-str needs two values!')
        s1, s2 = args.replace_str

        def replace_str(x):
            return x.replace(s1, s2)
    else:
        replace_str = None

    # Normalize a missing --oggm-working-dir (argparse yields None) to ''
    # so it matches run()'s documented default.
    return dict(input_dir=args.input_dir, output_dir=args.output_dir,
                replace_str=replace_str, n_processes=args.n_processes,
                oggm_working_dir=args.oggm_working_dir or '')
def main():
    """Console-script entry point: parse CLI args and launch :func:`run`."""
    cli_kwargs = parse_args(sys.argv[1:])
    run(**cli_kwargs)
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,480 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/cli/zip_rgi_dir.py | import os
import sys
import shutil
import tempfile
import argparse
from rgitools import funcs
def run(input_dir, output_file):
    """Zips an RGI directory and makes it look like a real one.

    Parameters
    ----------
    input_dir : str
        path to the RGI directory
    output_file : str
        path to the output file (without zip ending!)
    """
    # Stage a copy of the directory in a temp folder: plain files are
    # copied, every sub-directory is turned into a .zip archive.
    base_name = os.path.basename(input_dir)
    staging_root = tempfile.mkdtemp()
    staging_dir = os.path.join(staging_root, base_name)
    funcs.mkdir(staging_dir, reset=True)
    for entry in os.listdir(input_dir):
        src = os.path.join(input_dir, entry)
        dst = os.path.join(staging_dir, entry)
        if os.path.isfile(src):
            shutil.copy(src, dst)
        else:
            shutil.make_archive(dst, 'zip', src)

    # Archive the staged layout itself, then clean up the temp folder.
    shutil.make_archive(output_file, 'zip', staging_dir)
    shutil.rmtree(staging_root)
def parse_args(args):
    """Parse and validate CLI arguments.

    Parameters
    ----------
    args : list of str
        raw command line arguments (typically ``sys.argv[1:]``)

    Returns
    -------
    dict
        keyword arguments for :py:func:`run`

    Raises
    ------
    ValueError
        if ``--input-dir`` or ``--output-file`` is missing
    """
    # CLI args. Fixed description: it previously read "Computes the
    # intersects for an entire RGI directory." — copy-pasted from the
    # compute_intersects tool.
    description = 'Zips an RGI directory and makes it look like a real one.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--input-dir', type=str,
                        help='the rgi directory to process.')
    parser.add_argument('--output-file', type=str,
                        help='path to the output file (without zip ending!)')
    args = parser.parse_args(args)

    if not args.input_dir:
        raise ValueError('--input-dir is required!')
    if not args.output_file:
        raise ValueError('--output-file is required!')

    # All good
    return dict(input_dir=args.input_dir, output_file=args.output_file)
def main():
    """Console-script entry point."""
    options = parse_args(sys.argv[1:])
    run(**options)
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,481 | GLIMS-RGI/rgitools | refs/heads/master | /notebooks/dem_statistics/dem_post_quality_per_region.py | # This script has originally been created by Matthias Dusch(https://github.com/matthiasdusch) and got modified
# for the creation of dems_v2 statistics
import os
import pandas as pd
import geopandas as gpd
import numpy as np
import matplotlib.pyplot as plt
from oggm import utils, cfg
from my_dem_funcs import dem_barplot
import statistics_paths
# Shared paths/suffix for the statistics scripts (see statistics_paths.py).
wd = statistics_paths.wd
post = statistics_paths.post
sfx = statistics_paths.sfx

# Output folders for the generated figures and csv tables.
os.makedirs(os.path.join(post, 'out/images'), exist_ok=True)
os.makedirs(os.path.join(post, 'out/tables'), exist_ok=True)

cfg.initialize()
cfg.PATHS['working_dir'] = wd

# dataframe for all areas (per-region quality tables, concatenated below)
dfall = pd.DataFrame()

# dataframe for statistic: one column per DEM source plus region + count
cols = utils.DEM_SOURCES.copy()
cols.sort()
cols = ['RGI region', '# total'] + cols
dfstat = pd.DataFrame([], columns=cols)

# statistic on subregions
dfsub = dfstat.copy()

# rgi region & subregion file - depending on the RGI version (6.0, 6.x ..)
# chosen for the RGI Topo Dataset creation this folder- and filenames have
# to be adapted
regions = gpd.read_file(os.path.join(cfg.PATHS['rgi_dir'], 'RGIV62',
                                     '00_rgi62_regions',
                                     '00_rgi62_O1Regions.shp'))
subregs = gpd.read_file(os.path.join(cfg.PATHS['rgi_dir'], 'RGIV62',
                                     '00_rgi62_regions',
                                     '00_rgi62_O2Regions.shp'))
fig0, ax0 = plt.subplots(1, 1, figsize=[10, 10])

# Per-region loop: one barplot + availability statistics per RGI region.
for reg in np.arange(1, 20):
    fig, ax = plt.subplots(1, 1, figsize=[10, 10])
    regstr = '{:02.0f}'.format(reg)
    # quality table produced earlier (one column per DEM source)
    quality = pd.read_hdf(os.path.join(post, 'rgi_{}.h5'.format(regstr + sfx)),
                          'quality')
    regname = regions.loc[regions['RGI_CODE'].astype('int') == reg, 'FULL_NAME'].iloc[0]
    dem_barplot(quality, ax,
                title='RGI region {}: {} ({:.0f} glaciers)'.
                format(regstr, regname, len(quality)))
    fig.tight_layout()
    fig.savefig(os.path.join(post, 'out/images/',
                             'barplot_rgi{}.png'.format(regstr + sfx)))
    # dfall = dfall.append(quality)
    dfall = pd.concat([dfall, quality])

    # FULL REGION: percentage of glaciers with > 90% valid DEM pixels,
    # rendered as strings ('--' where the percentage rounds to zero)
    total = len(quality)
    good = (quality > 0.9).sum()
    # out = good / total
    out = (good / total * 100).dropna().astype(int)
    outstr = out.astype(str)
    outstr.loc[out != 0] += '%'
    outstr.loc[out == 0] = '--'
    outstr['# total'] = total
    # the row label doubles as a Sphinx cross-reference in the docs tables
    dfstat.loc[':ref:`{0}: {1}<rgi{0}>`'.format(regstr, regname)] = outstr

    # take care of subregions
    regdf = gpd.read_file(utils.get_rgi_region_file(regstr))
    sregs = np.unique(regdf.O2Region)
    # For greenland we omit connectivity level 2. As this has also been done
    # when generating the data with the prepo_levels cli, it also has to be
    # done here.
    if regstr == '05':
        regdf = regdf.loc[regdf['Connect'] != 2]
    for sreg in sregs:
        ids = regdf.loc[regdf.O2Region == sreg, 'RGIId'].values
        subq = quality.loc[ids]
        # SUBREGIONS: same percentage statistic, per O2 subregion
        total = len(subq)
        good = (subq > 0.9).sum()
        out = (good / total * 100).dropna().astype(int)
        outstr = out.astype(str)
        outstr.loc[out != 0] += '%'
        outstr.loc[out == 0] = '--'
        outstr['# total'] = total
        subregstr = '-{:02.0f}'.format(int(sreg))
        subregname = subregs.loc[subregs.RGI_CODE == regstr + subregstr].\
            FULL_NAME.iloc[0]
        dfsub.loc['{}: {}'.format(regstr + subregstr, subregname)] = outstr

# FULL RGI: same statistic over all regions combined
total = len(dfall)
good = (dfall > 0.9).sum()
out = (good / total * 100).dropna().astype(int)
outstr = out.astype(str)
outstr.loc[out != 0] += '%'
outstr.loc[out == 0] = '--'
outstr['# total'] = total
dfstat.loc['All RGI regions'] = outstr
dfsub.sort_index(inplace=True)

# integer for number of glaciers; the index is duplicated into the
# 'RGI region' column so the csv keeps the label despite index=False
dfstat['# total'] = dfstat['# total'].astype(int)
dfstat['RGI region'] = dfstat.index
dfsub['# total'] = dfsub['# total'].astype(int)
dfsub['RGI region'] = dfsub.index

# write csv files for RST readthedocs
dfstat.to_csv(os.path.join(post, 'out/tables/', 'dem_allrgi{}.csv'.format(sfx)),
              index=False)

# write subregion tables:
for reg in np.arange(1, 20):
    regstr = '{:02.0f}'.format(reg)
    sub = dfsub.loc[dfsub.index.str.contains('{}-'.format(regstr))]
    sub.to_csv(os.path.join(post, 'out/tables/', 'dem_rgi{}.csv'.format(regstr + sfx)),
               index=False)

# make and save the summary plot over all regions
dem_barplot(dfall, ax0,
            title='All RGI regions ({:.0f} glaciers)'.format(len(dfall)))
fig0.tight_layout()
fig0.savefig(os.path.join(post, 'out/images/',
                          'barplot_allregions{}.png'.format(sfx)))
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,482 | GLIMS-RGI/rgitools | refs/heads/master | /notebooks/dem_statistics/post_all_dems.py | # This script has originally been created by Matthias Dusch(https://github.com/matthiasdusch) and got modified
# for the creation of dems_v2 statistics
import os
import pandas as pd
import geopandas as gpd
import numpy as np
import matplotlib.pyplot as plt
from oggm.cli.prepro_levels import run_prepro_levels
from oggm import utils, cfg, GlacierDirectory
from oggm.workflow import execute_entity_task
from my_dem_funcs import (check_all_dems_per_gdir, gdirs_from_tar_files,
get_dem_area)
import statistics_paths
def parse_logfile(path, df=None):
    """Parse per-glacier ``.ERROR`` log files into a DEM availability table.

    Every file in *path* must be named ``<rgi_id>.ERROR``. For each glacier
    all DEM sources start out as available (True); sources that raised an
    ``InvalidDEMError`` in the log are flagged False.

    Parameters
    ----------
    path : str
        directory containing the ``<rgi_id>.ERROR`` files
    df : pd.DataFrame, optional
        table to update; a fresh one (one column per OGGM DEM source) is
        created when omitted

    Returns
    -------
    pd.DataFrame
        availability flags, indexed by RGI id, one column per DEM source

    Raises
    ------
    RuntimeError
        if a file in *path* does not follow the ``<rgi_id>.ERROR`` naming
    """
    # df passed or new one?
    if df is None:
        df = pd.DataFrame([], columns=utils.DEM_SOURCES)

    for lf in os.listdir(path):
        # get rgi id from file name; fail loudly on unexpected files
        # (was a bare ``raise RuntimeError`` with no message)
        if '.ERROR' in lf:
            rgi = lf.split('.ERROR')[0]
        else:
            raise RuntimeError('Unexpected file in log directory: ' + lf)

        # read logfile (';'-separated OGGM log lines)
        lfdf = pd.read_csv(os.path.join(path, lf), delimiter=';', header=None,
                           skipinitialspace=True)

        # set all DEMs to True
        df.loc[rgi, :] = True

        # loop over dems and set erroneous ones to False
        # (removed a leftover debug print of every log line here)
        for _, dem in lfdf.iterrows():
            if dem[2] == 'InvalidDEMError':
                # field 3 looks like 'source <DEM>'; take the DEM name
                df.loc[rgi, dem[3].split()[1]] = False
            if 'HTTPSConnect' in dem[3]:
                # connection problems are worth surfacing separately
                print(rgi)
    return df
def parse_logfiles(path):
    """Walk *path* for OGGM ``log.txt`` files and tabulate DEM successes.

    Returns a DataFrame (indexed by RGI id, one column per DEM source)
    with True wherever a 'DEM SOURCE' log entry was found.
    """
    df = pd.DataFrame([], columns=utils.DEM_SOURCES)
    for root, dirs, files in os.walk(path):
        if 'log.txt' not in files:
            continue
        logfile = os.path.join(root, 'log.txt')
        # read logfile
        lfdf = pd.read_csv(logfile, delimiter=';', header=None,
                           skipinitialspace=True)
        # flag every successfully used DEM source; the log field looks
        # like '<rgi_id>,DEM SOURCE,<dem_name>'
        for _, line in lfdf.iterrows():
            if 'DEM SOURCE' not in line[1]:
                continue
            fields = line[1].split(',')
            df.loc[fields[0], fields[2]] = True
    return df
def hgt_barplot(df1, df2, title='', savepath=None):
    """Side-by-side bar plot of mean elevations per DEM source.

    Parameters
    ----------
    df1 : pd.Series
        mean elevation per DEM source over the glaciated area
    df2 : pd.Series
        mean elevation per DEM source over the full map area
    title : str
        plot title
    savepath : str, optional
        if given, the figure is also written to this path
    """
    fig, ax = plt.subplots(figsize=[10, 7])
    ax.bar(df1.index, df1.values, width=-0.4, align='edge',
           label='glaciated area (all DEMs >0.9 quality)', color='C0')
    # Fixed legend label: the closing parenthesis was missing.
    ax.bar(df2.index, df2.values, width=0.4, align='edge', color='C1',
           label='full area (all DEMs >0.9 quality)')
    ax.set_ylabel('elevation [m]')
    ax.set_title(title)
    ax.legend(loc=3)
    fig.tight_layout()
    if savepath is not None:
        fig.savefig(savepath)
# Shared paths/suffix for the statistics scripts (see statistics_paths.py).
wd = statistics_paths.wd
post = statistics_paths.post
sfx = statistics_paths.sfx
prepro_path = statistics_paths.prepro_path

os.makedirs(post, exist_ok=True)

# OGGM needs an initialized config with a working directory.
cfg.initialize()
cfg.PATHS['working_dir'] = wd

# per-region DEM area totals, filled in by the loop below
dfarea = pd.DataFrame([], index=np.arange(1, 20), columns=['demarea'])
# Per-region loop: compute DEM metrics for every glacier, plot mean
# heights, and store the metric tables as HDF.
for reg in np.arange(1, 20):
    regstr = '{:02.0f}'.format(reg)
    try:
        rgidf = gpd.read_file(utils.get_rgi_region_file(regstr, version='6'))
        gdirs = [GlacierDirectory(rgiid) for rgiid in rgidf.RGIId]
        print('from gdir')
    except Exception:
        # Narrowed from a bare ``except:`` which would also swallow
        # KeyboardInterrupt/SystemExit. Fall back to the tar archives.
        gdirs = gdirs_from_tar_files(prepro_path, rgi_region=regstr)
        print('from tar')

    # one six-row metric table per glacier, concatenated, then split by
    # metric into separate frames
    dfreg = execute_entity_task(check_all_dems_per_gdir, gdirs)
    dfreg = pd.concat(dfreg)
    quality = dfreg.loc[dfreg['metric'] == 'quality',
                        dfreg.columns != 'metric']
    hgt = dfreg.loc[dfreg['metric'] == 'meanhgt',
                    dfreg.columns != 'metric']
    qualityglc = dfreg.loc[dfreg['metric'] == 'quality_glc',
                           dfreg.columns != 'metric']
    hgtglc = dfreg.loc[dfreg['metric'] == 'meanhgt_glc',
                       dfreg.columns != 'metric']
    rgh = dfreg.loc[dfreg['metric'] == 'roughness',
                    dfreg.columns != 'metric']
    rghglc = dfreg.loc[dfreg['metric'] == 'roughness_glc',
                       dfreg.columns != 'metric']

    # keep only glaciers whose DEMs have > 90% valid pixels
    hgt_good = (hgt[(quality > 0.9)].dropna(axis=1, how='all').
                dropna(axis=0, how='any'))
    hgtglc_good = (hgtglc[(qualityglc > 0.9)].dropna(axis=1, how='all').
                   dropna(axis=0, how='any'))

    hgt_barplot(hgt_good.mean(), hgtglc_good.mean(),
                title=('Mean height of RGI region {} (#{:.0f} full area, ' +
                       '#{:.0f} glaciated area)').format(regstr,
                                                         len(hgt_good),
                                                         len(hgtglc_good)),
                savepath=os.path.join(post, 'rgi_hgt_%s.png' % (regstr + sfx)))

    # total DEM-mask area of the region (an unused rgi_area sum over
    # gdir.rgi_area_km2 was removed here)
    dem_area = np.sum(execute_entity_task(get_dem_area, gdirs))
    dfarea.loc[reg, 'demarea'] = dem_area

    # store each metric under its own key in the per-region HDF file
    quality.to_hdf(os.path.join(post, 'rgi_%s.h5' % (regstr + sfx)),
                   mode='a', key='quality')
    qualityglc.to_hdf(os.path.join(post, 'rgi_%s.h5' % (regstr + sfx)),
                      mode='a', key='quality_glc')
    hgt.to_hdf(os.path.join(post, 'rgi_%s.h5' % (regstr + sfx)),
               mode='a', key='mhgt')
    hgtglc.to_hdf(os.path.join(post, 'rgi_%s.h5' % (regstr + sfx)),
                  mode='a', key='mhgt_glc')
    rgh.to_hdf(os.path.join(post, 'rgi_%s.h5' % (regstr + sfx)),
               mode='a', key='roughness')
    rghglc.to_hdf(os.path.join(post, 'rgi_%s.h5' % (regstr + sfx)),
                  mode='a', key='roughness_glc')

dfarea.to_hdf(os.path.join(post, 'dem_area{}.h5'.format(sfx)), key='demarea')
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,483 | GLIMS-RGI/rgitools | refs/heads/master | /notebooks/dem_statistics/my_dem_funcs.py | # This script has been created by Matthias Dusch(https://github.com/matthiasdusch)
import os
import tarfile
import logging
import numpy as np
import pandas as pd
import rasterio
from oggm import utils, GlacierDirectory, entity_task
# Module logger
log = logging.getLogger(__name__)
def dem_quality(gdir, demfile):
    """Quality check based on oggm.simple_glacier_masks.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier in question
    demfile : str
        path to a specific DEM tif-file

    Returns
    -------
    nanpercent : float
        fraction of *valid* (finite) grid points among all grid points.
        (Docstring fixed: it previously said "how many grid points are
        NaN", but the code counts finite points — callers treat values
        > 0.9 as good.)
    nanpercent_glc : float
        fraction of valid grid points among all glaciated grid points
    meanhgt : float
        mean elevation of grid points
    meanhgt_glc : float
        mean elevation of glaciated grid points
    roughness : float
        standard deviation of 2d slope of all grid points
    roughness_glc : float
        standard deviation of 2d slope of all glaciated grid points
    """
    # open tif-file:
    with rasterio.open(demfile, 'r', driver='GTiff') as ds:
        dem = ds.read(1).astype(rasterio.float32)
        nx = ds.width
        ny = ds.height
        dx = ds.transform[0]

    # the DEM grid must match the glacier directory's grid exactly
    assert nx == gdir.grid.nx
    assert ny == gdir.grid.ny
    assert dx == gdir.grid.dx

    # open glacier mask
    with rasterio.open(gdir.get_filepath('glacier_mask'),
                       'r', driver='GTiff') as ds:
        mask = ds.read(1).astype(rasterio.int16)

    # set nodata values to NaN (np.nan: the np.NaN alias was removed in
    # NumPy 2.0; the glaciated branch below already used np.nan)
    min_z = -999.
    dem[dem <= min_z] = np.nan

    isfinite = np.isfinite(dem)
    isfinite_glc = np.isfinite(dem[np.where(mask)])

    # fraction of valid points over the full grid and the glaciated part
    nanpercent = np.sum(isfinite) / (nx * ny)
    nanpercent_glc = np.sum(isfinite_glc) / mask.sum()

    # calculate mean elevation of all and glaciated area
    meanhgt = np.nanmean(dem)
    meanhgt_glc = np.nanmean(dem[np.where(mask)])

    # roughness: std of the 2d slope field over the full grid ...
    sy, sx = np.gradient(dem, dx)
    slope = np.arctan(np.sqrt(sy**2 + sx**2))
    roughness = np.nanstd(slope)

    # ... and over the glaciated area only
    dem_glc = np.where(mask, dem, np.nan)
    sy, sx = np.gradient(dem_glc, dx)
    slope = np.arctan(np.sqrt(sy**2 + sx**2))
    roughness_glc = np.nanstd(slope)

    return (nanpercent, nanpercent_glc, meanhgt, meanhgt_glc, roughness,
            roughness_glc)
@entity_task(log)
def get_dem_area(gdir):
    """Read glacier_mask.tif and compute the glacier area it encodes.

    Parameters
    ----------
    gdir : GlacierDirectory
        the glacier in question

    Returns
    -------
    float
        glacier area in km2
    """
    mask_path = gdir.get_filepath('glacier_mask')
    with rasterio.open(mask_path, 'r', driver='GTiff') as ds:
        mask = ds.read(1).astype(ds.profile['dtype'])
    # number of masked pixels times the pixel area (dx in m -> km2)
    return mask.sum() * gdir.grid.dx**2 * 1e-6
def gdirs_from_tar_files(path, rgi_region=None):
    """Collect GlacierDirectories from the tar archives below *path*.

    *path* contains one folder per RGI region (folder names ending in the
    two-digit region number); each folder holds tar files of pre-processed
    GlacierDirectories.
    """
    gdirs = []
    for region_folder in os.listdir(path):
        # only do the requested rgi_region, if any
        if (rgi_region is not None) and (region_folder[-2:] != rgi_region):
            continue
        region_path = os.path.join(path, region_folder)
        for tar_name in os.listdir(region_path):
            with tarfile.open(os.path.join(region_path, tar_name), 'r') as archive:
                for member in archive:
                    if member.isdir():
                        continue
                    # NOTE(review): the RGI id is sliced out of the member
                    # name at [-21:-7] — assumes a fixed naming scheme with
                    # the 14-char RGI id before a 7-char suffix; confirm.
                    tar_base = os.path.join(region_path, member.path)
                    gdirs.append(GlacierDirectory(member.name[-21:-7],
                                                  from_tar=tar_base))
    return gdirs
@entity_task(log)
def check_all_dems_per_gdir(gdir):
    """Compute quality/elevation/roughness metrics for every DEM of a glacier.

    DEMs must live in subfolders of the GlacierDirectory, one per source.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier in question

    Returns
    -------
    pd.DataFrame
        six rows (one per metric), indexed by the RGI id, with one column
        per possible DEM source
    """
    # dataframe for results: one row per metric
    df = pd.DataFrame([], index=[gdir.rgi_id]*6,
                      columns=['metric'] + utils.DEM_SOURCES)
    df.iloc[0]['metric'] = 'quality'
    df.iloc[1]['metric'] = 'quality_glc'
    df.iloc[2]['metric'] = 'meanhgt'
    df.iloc[3]['metric'] = 'meanhgt_glc'
    df.iloc[4]['metric'] = 'roughness'
    df.iloc[5]['metric'] = 'roughness_glc'

    logfile = os.path.join(gdir.dir, 'log.txt')
    # read logfile; specify names because log entries have different size
    lfdf = pd.read_csv(logfile, delimiter=';', header=None,
                       skipinitialspace=True, names=[0, 1, 2, 3])

    # collect the DEM sources that were downloaded successfully
    # (an unused ``rgi`` local was removed from this loop)
    dem2test = []
    for _, line in lfdf.iterrows():
        if ('DEM SOURCE' in line[1]) and ('SUCCESS' in line[2]):
            dem2test.append(line[1].split(',')[2])

    # score each available DEM
    for dem in dem2test:
        demfile = os.path.join(gdir.dir, dem, 'dem.tif')
        qual, qualglc, hgt, hgt_glc, rgh, rgh_glc = dem_quality(gdir, demfile)
        df.loc[df.metric == 'quality', dem] = qual
        df.loc[df.metric == 'quality_glc', dem] = qualglc
        df.loc[df.metric == 'meanhgt', dem] = hgt
        df.loc[df.metric == 'meanhgt_glc', dem] = hgt_glc
        df.loc[df.metric == 'roughness', dem] = rgh
        df.loc[df.metric == 'roughness_glc', dem] = rgh_glc

    return df
def dem_barplot(df, ax, title=''):
    """Bar plot: number of glaciers per DEM source with >= 90% valid pixels.

    Parameters
    ----------
    df : pd.DataFrame
        per-glacier quality values, one column per DEM source
    ax : matplotlib axis
        the axis to draw onto
    title : str
        plot title
    """
    good_counts = (df > 0.9).sum().sort_index()
    ax.bar(good_counts.index, good_counts.values, width=0.8, align='center',
           color='C0', label='DEM with >= 90% valid pixels')
    ax.set_ylabel('# number of glaciers')
    # scale the y-axis to the total number of glaciers
    ax.set_ylim([0, len(df)])
    ax.set_xticklabels(good_counts.index, rotation=75)
    ax.set_title(title)
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,484 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/funcs.py | import os
import shutil
import logging
from functools import wraps
import time
import tempfile
import numpy as np
import pandas as pd
import geopandas as gpd
import shapely.geometry as shpg
from shapely.ops import linemerge
import networkx as nx
from salem import wgs84
from oggm.utils import haversine, compile_glacier_statistics
from shapely.geometry import mapping
# Interface
from oggm.utils import get_demo_file, mkdir # noqa: F401
# Remove all previous handlers associated with the root logger object, so
# that the custom handler installed below is the only one in effect.
for handler in logging.root.handlers[:]:
    logging.root.removeHandler(handler)

# Recipe
# https://stackoverflow.com/questions/7003898/
# using-functools-wraps-with-a-logging-decorator
class CustomFormatter(logging.Formatter):
    """Formatter that lets log calls override the reported function name.

    When a record carries a ``name_override`` attribute (passed via the
    ``extra=`` argument of a log call), its value replaces ``funcName``
    in the formatted output.
    """

    def format(self, record):
        try:
            record.funcName = record.name_override
        except AttributeError:
            # no override requested: keep the record's own funcName
            pass
        return super().format(record)
# Single stream handler with the custom formatter on this module's logger.
handler = logging.StreamHandler()
# NOTE(review): ``format`` shadows the builtin of the same name at module
# level; left unrenamed since later code in this module may reference it.
format = CustomFormatter('%(asctime)s: %(name)s.%(funcName)s: %(message)s',
                         datefmt='%Y-%m-%d %H:%M:%S')
handler.setFormatter(format)
logger = logging.getLogger(__name__)
logger.addHandler(handler)
def mappable_func(*args):
    """Wrapper to unpack kwargs and pass them to args[0].

    Expected layout: (func, rgi_df, to_file, job_id[, set_oggm_params,
    oggm_working_dir]) -- the 6-argument form is only used by the
    hypsometries pipeline.
    """
    func, rgi_df = args[0], args[1]
    call_kwargs = {'to_file': args[2], 'job_id': args[3]}
    if len(args) == 6:
        # horrible workaround for compute hypsometries
        call_kwargs['set_oggm_params'] = args[4]
        call_kwargs['oggm_working_dir'] = args[5]
    return func(rgi_df, **call_kwargs)
def io_logger(func):
    """Decorator for common IO and logging logic.

    Pops ``job_id`` and reads ``to_file`` from the keyword arguments,
    refuses to overwrite an existing ``to_file``, reads any positional
    string argument as a shapefile path (non-strings are copied so the
    caller's frame is never mutated), calls ``func``, writes a
    GeoDataFrame result to ``to_file`` if requested, and logs start/end
    times under the decorated function's name.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        job_id = kwargs.pop('job_id', '')
        if job_id:
            start_time = time.time()
            logger.info('Starting job %s ...' % job_id,
                        extra={'name_override': func.__name__})
        to_file = kwargs.get('to_file', '')
        if to_file:
            # Fail early rather than after a potentially long computation
            if os.path.exists(to_file):
                raise RuntimeError("Won't overwrite existing file: " +
                                   to_file)
        nargs = []
        for rgi_df in args:
            if isinstance(rgi_df, str):
                # A path to a file
                rgi_df = gpd.read_file(rgi_df)
            else:
                # Copy so the decorated function can modify freely
                rgi_df = rgi_df.copy()
            nargs.append(rgi_df)
        out_file = func(*nargs, **kwargs)
        # Write and return -- only if expected output
        if isinstance(out_file, gpd.GeoDataFrame):
            out_file.crs = wgs84.srs
            if to_file:
                out_file.to_file(to_file)
        if job_id:
            m, s = divmod(time.time() - start_time, 60)
            logger.info('Job {} done in '
                        '{} m {} s!'.format(job_id, int(m), round(s)),
                        extra={'name_override': func.__name__})
        return out_file
    return wrapper
def _multi_to_poly(geometry, rid=''):
    """Sometimes an RGI geometry is a multipolygon: this should not happen.

    Parameters
    ----------
    geometry : shpg.Polygon or shpg.MultiPolygon
        the geometry to check
    rid : str, optional
        the glacier ID (for logging)

    Returns
    -------
    the corrected geometry
    """
    if 'Multi' in geometry.type:
        parts = np.array(geometry)
        for p in parts:
            assert p.type == 'Polygon'
        # Sort the parts by area, largest first
        areas = np.array([p.area for p in parts])
        parts = parts[np.argsort(areas)][::-1]
        areas = areas[np.argsort(areas)][::-1]
        # First case (e.g. RGIV4):
        # let's assume that one poly is exterior and that
        # the other polygons are in fact interiors
        exterior = parts[0].exterior
        interiors = []
        was_interior = 0
        for p in parts[1:]:
            if parts[0].contains(p):
                interiors.append(p.exterior)
                was_interior += 1
        if was_interior > 0:
            # We are done here, good
            geometry = shpg.Polygon(exterior, interiors)
        else:
            # This happens for bad geometries. We keep the largest
            geometry = parts[0]
            # Warn only when a discarded part was a sizable fraction
            # (more than a quarter) of the kept geometry's area
            if np.any(areas[1:] > (areas[0] / 4)):
                logger.warning('Geometry {} lost quite a chunk.'.format(rid))
    if geometry.type != 'Polygon':
        raise RuntimeError('Geometry {} is not a Polygon.'.format(rid))
    return geometry
@io_logger
def check_geometries(rgi_df, to_file='', job_id=''):
    """Checks and (when possible) corrects the RGI geometries.

    It adds a new column to the data: ``check_geom``, a str:

    - 'WARN:WasMultiPolygon' when the entity was a MultiPolygon instead of
      Polygon.
    - 'WARN:WasInvalid' when the entity wasn't valid but is now corrected.
    - 'ERR:isInvalid' when the entity isn't valid and cannot be corrected

    Parameters
    ----------
    rgi_df : str or geopandas.GeoDataFrame
        the RGI shapefile
    to_file : str, optional
        set to a valid path to write the file on disk
    job_id : str, optional
        if you want to log what happens, give a name to this job

    Returns
    -------
    a geopandas.GeoDataFrame
    """
    for i, s in rgi_df.iterrows():
        geometry = s.geometry
        rgi_df.loc[i, 'check_geom'] = ''
        if geometry.type != 'Polygon':
            # MultiPolygons are reduced to a single Polygon
            geometry = _multi_to_poly(geometry, rid=s.RGIId)
            msg = 'WARN:WasMultiPolygon;'
            logger.debug('{}: '.format(s.RGIId) + msg)
            rgi_df.loc[i, 'check_geom'] = rgi_df.loc[i, 'check_geom'] + msg
        if not geometry.is_valid:
            # The zero-buffer trick often repairs self-intersections
            geometry = geometry.buffer(0)
            if geometry.type != 'Polygon':
                raise RuntimeError('Geometry cannot be corrected: '
                                   '{}'.format(s.RGIId))
            msg = 'WARN:WasInvalid;' if geometry.is_valid else 'ERR:isInvalid'
            logger.debug('{}: '.format(s.RGIId) + msg)
            rgi_df.loc[i, 'check_geom'] = rgi_df.loc[i, 'check_geom'] + msg
        rgi_df.loc[i, 'geometry'] = geometry
    return rgi_df
@io_logger
def compute_intersects(rgi_df, to_file='', job_id=''):
    """Computes the intersection geometries between glaciers.

    The output is a shapefile with three columns:

    - ``RGIId_1`` and ``RGIId_2``: the RGIIds of the two intersecting entities
    - ``geometry``: the intersection geometry (LineString or MultiLineString)

    Parameters
    ----------
    rgi_df : str or geopandas.GeoDataFrame
        the RGI shapefile
    to_file : str, optional
        set to a valid path to write the file on disk
    job_id : str, optional
        if you want to log what happens, give a name to this job

    Returns
    -------
    a geopandas.GeoDataFrame
    """
    gdf = rgi_df.copy()
    out_cols = ['RGIId_1', 'RGIId_2', 'geometry']
    out = gpd.GeoDataFrame(columns=out_cols)
    for _, major in gdf.iterrows():
        # Exterior only
        major_poly = major.geometry.exterior
        # sort by distance to the current glacier
        gdf['dis'] = haversine(major.CenLon, major.CenLat,
                               gdf.CenLon, gdf.CenLat)
        gdfs = gdf.sort_values(by='dis')
        # Keep glaciers which are close enough and actually intersect
        gdfs = gdfs.loc[gdfs.dis < 200000]
        gdfs = gdfs.loc[gdfs.RGIId != major.RGIId]
        gdfs = gdfs.loc[gdfs.intersects(major_poly)]
        for _, neighbor in gdfs.iterrows():
            # Already computed from the neighbor's own pass?
            # Bugfix: the previous check used `x in series`, which tests
            # the Series *index* (a plain integer index here), so it
            # never matched and every pair was written twice. Test the
            # recorded (neighbor, major) pair explicitly instead.
            if ((out.RGIId_1 == neighbor.RGIId) &
                    (out.RGIId_2 == major.RGIId)).any():
                continue
            # Exterior only
            # Buffer is needed for numerical reasons
            # 1e-4 seems reasonable although it should be dependant on loc
            neighbor_poly = neighbor.geometry.exterior.buffer(1e-4)
            # Go
            mult_intersect = major_poly.intersection(neighbor_poly)
            # Handle the different kind of geometry output
            if isinstance(mult_intersect, shpg.Point):
                continue
            if isinstance(mult_intersect, shpg.linestring.LineString):
                mult_intersect = shpg.MultiLineString([mult_intersect])
            if len(mult_intersect.geoms) == 0:
                continue
            mult_intersect = [m for m in mult_intersect.geoms if
                              not isinstance(m, shpg.Point)]
            if len(mult_intersect) == 0:
                continue
            # Simplify the geometries if possible
            try:
                mult_intersect = linemerge(mult_intersect)
            except IndexError:
                pass
            # Add each line to the output file
            if isinstance(mult_intersect, shpg.linestring.LineString):
                mult_intersect = shpg.MultiLineString([mult_intersect])
            for line in mult_intersect.geoms:
                assert isinstance(line, shpg.linestring.LineString)
                # Filter the very small ones
                if line.length < 1e-3:
                    continue
                line = gpd.GeoDataFrame([[major.RGIId, neighbor.RGIId, line]],
                                        columns=out_cols)
                out = pd.concat([out, line])
    # Index and merge
    out.reset_index(inplace=True, drop=True)
    return out
def find_clusters(intersects_df):
    """Given a list of interlinked entities, find the glacier clusters.

    Parameters
    ----------
    intersects_df : str or geopandas.GeoDataFrame
        the RGI intersects shapefile

    Returns
    -------
    a dict which keys are the first RGIId of the cluster and the values
    are the list of this cluster's RGIId's
    """
    # Accept a path as well as an already opened frame
    if isinstance(intersects_df, str):
        intersects_df = gpd.read_file(intersects_df)
    # Each intersect row is an edge of an undirected glacier graph;
    # the clusters are that graph's connected components.
    # https://en.wikipedia.org/wiki/Connected_component_%28graph_theory%29
    graph = nx.Graph()
    graph.add_edges_from(zip(intersects_df.RGIId_1.values,
                             intersects_df.RGIId_2.values))
    clusters = dict()
    for component in nx.connected_components(graph):
        members = sorted(component)
        clusters[members[0]] = members
    return clusters
@io_logger
def merge_clusters(rgi_df, intersects_df, keep_all=True, to_file='',
                   job_id=''):
    """Selects the glacier clusters out of an RGI file and merges them.

    The output is an RGI shapefile with an additional column: ``OrigIds``,
    which contains a string of the cluster's original RGIIds, separated
    with a comma.

    Parameters
    ----------
    rgi_df : str or geopandas.GeoDataFrame
        the RGI shapefile
    intersects_df : str or geopandas.GeoDataFrame
        the RGI intersects shapefile
    keep_all : bool, default: True
        Whether to keep the single glaciers in the output shapefile as well
    to_file : str, optional
        set to a valid path to write the file on disk
    job_id : str, optional
        if you want to log what happens, give a name to this job

    Returns
    -------
    a geopandas.GeoDataFrame
    """
    # Find the clusters first
    clusters = find_clusters(intersects_df)
    # Tag each cluster member with the full list of original ids
    rgi_df['OrigIds'] = ''
    for k, c in clusters.items():
        if len(c) > 1:
            rgi_df.loc[rgi_df.RGIId.isin(c), 'OrigIds'] = ';'.join(c)
    # Add single glaciers
    if keep_all:
        d1 = rgi_df.loc[rgi_df.OrigIds == '']
    else:
        d1 = gpd.GeoDataFrame()
    # Compute the merged geometries
    rgi_df = rgi_df.loc[rgi_df.OrigIds != '']
    d2 = rgi_df.dissolve(by='OrigIds')
    # Process attributes: total area, overall min/max elevations
    gb = rgi_df[['OrigIds', 'Area', 'Zmax', 'Zmin']].groupby('OrigIds')
    d2['Area'] = gb.sum()['Area']
    d2['Zmax'] = gb.max()['Zmax']
    d2['Zmin'] = gb.min()['Zmin']
    # The centroid of the merged geometry becomes the new center point
    centers = [g.centroid.xy for g in d2.geometry]
    d2['CenLat'] = [c[1][0] for c in centers]
    d2['CenLon'] = [c[0][0] for c in centers]
    # dummy index and merge
    d2.reset_index(inplace=True)
    out = pd.concat([d1, d2], sort=False)
    out = out.sort_values(by='RGIId')
    # Bugfix: the result of reset_index was previously discarded (no
    # assignment), leaving the duplicated original index labels in place.
    out = out.reset_index(drop=True)
    return out
def _feature(ind, rowobj):
    """Turn one table row into a GeoJSON-like Feature mapping."""
    properties = {key: value for key, value in rowobj.items()
                  if key != 'geometry'}
    return {'id': str(ind),
            'type': 'Feature',
            'properties': properties,
            'geometry': mapping(rowobj['geometry'])}
@io_logger
def hypsometries(rgi_df, to_file='', job_id='', oggm_working_dir='',
                 set_oggm_params=None):
    """
    Create hypsometries for glacier geometries using the best available DEM.

    We use the same convention as documented in RGIV6: bins of size 50,
    from 0 m a.s.l. to max elevation in 50 m bins.
    The DEM choice and grid resolution is managed by OGGM.

    Parameters
    ----------
    rgi_df : str or geopandas.GeoDataFrame
        the RGI shapefile
    to_file : str, optional
        set to a valid path to write the file on disk
        For this task: the file name should have no ending, as two files
        are written to disk
    job_id : str, optional
        if you want to log what happens, give a name to this job
    oggm_working_dir: str, optional
        path to the folder where oggm will write its GlacierDirectories.
        Default is to use a temporary folder (not recommended)
    set_oggm_params : callable, optional
        a function which sets the desired OGGM parameters

    Returns
    -------
    (df, out_gdf) : the hypsometry table and the updated RGI frame
    """
    if to_file:
        # Two files are written (<to_file>.shp and <to_file>_hypso.csv),
        # so the given path must not carry an extension itself.
        _, ext = os.path.splitext(to_file)
        if ext != '':
            raise ValueError('to_file should not have an extension!')
        if os.path.exists(to_file + '.csv'):
            raise RuntimeError("Won't overwrite existing file: " +
                               to_file + '.csv')
        if os.path.exists(to_file + '.shp'):
            raise RuntimeError("Won't overwrite existing file: " +
                               to_file + '.shp')
    # Local import: oggm is heavy and only needed for this task
    from oggm import cfg, workflow, tasks
    cfg.initialize()
    if set_oggm_params is not None:
        set_oggm_params(cfg)
    del_dir = False
    if not oggm_working_dir:
        del_dir = True
        oggm_working_dir = tempfile.mkdtemp()
    cfg.PATHS['working_dir'] = oggm_working_dir
    # Get the DEM job done by OGGM
    cfg.PARAMS['use_intersects'] = False
    cfg.PARAMS['continue_on_error'] = True
    cfg.PARAMS['use_multiprocessing'] = False
    gdirs = workflow.init_glacier_directories(rgi_df)
    workflow.execute_entity_task(tasks.define_glacier_region, gdirs)
    workflow.execute_entity_task(tasks.simple_glacier_masks, gdirs,
                                 write_hypsometry=True)
    compile_glacier_statistics(gdirs,
                               filesuffix='_{}'.format(gdirs[0].rgi_region))
    out_gdf = rgi_df.copy().set_index('RGIId')
    # Nominal glaciers (status 2) keep their RGI-provided attributes;
    # RGIFlag (V6) or Status (V5) encodes this depending on the version.
    try:
        is_nominal = np.array([int(s[0]) == 2 for s in out_gdf.RGIFlag])
    except AttributeError:
        is_nominal = np.array([int(s) == 2 for s in out_gdf.Status])
    cols = ['Zmed', 'Zmin', 'Zmax', 'Slope', 'Aspect']
    # np.NaN was removed in NumPy 2.0 -- use the canonical np.nan alias
    out_gdf.loc[~is_nominal, cols] = np.nan
    df = pd.DataFrame()
    for gdir in gdirs:
        rid = gdir.rgi_id
        df.loc[rid, 'RGIId'] = gdir.rgi_id
        df.loc[rid, 'GLIMSId'] = gdir.glims_id
        df.loc[rid, 'Area'] = gdir.rgi_area_km2
        if not gdir.has_file('hypsometry') or gdir.is_nominal:
            continue
        idf = pd.read_csv(gdir.get_filepath('hypsometry')).iloc[0]
        for c in idf.index:
            # Only the purely numeric column names are elevation bins
            try:
                int(c)
            except ValueError:
                continue
            df.loc[rid, c] = idf[c]
        out_gdf.loc[rid, 'Zmed'] = idf.loc['Zmed']
        out_gdf.loc[rid, 'Zmin'] = idf.loc['Zmin']
        out_gdf.loc[rid, 'Zmax'] = idf.loc['Zmax']
        out_gdf.loc[rid, 'Slope'] = idf.loc['Slope']
        out_gdf.loc[rid, 'Aspect'] = idf.loc['Aspect']
    out_gdf = out_gdf.reset_index()
    df = df.reset_index(drop=True)
    # Bins are in per-mille; rows not summing to 1000 are marked invalid
    bdf = df[df.columns[3:]].fillna(0).astype(int)
    ok = bdf.sum(axis=1)
    bdf.loc[ok < 1000, :] = -9
    df[df.columns[3:]] = bdf
    # Sort columns
    df = df[np.append(df.columns[:3], sorted(df.columns[3:]))]
    if del_dir:
        shutil.rmtree(oggm_working_dir)
    # replace io write
    if to_file:
        out_gdf.crs = wgs84.srs
        out_gdf.to_file(to_file + '.shp')
        df.to_csv(to_file + '_hypso.csv', index=False)
    # NOTE(review): out_gdf was already reset above, so this second
    # reset_index adds an extra 'index' column -- kept for backward
    # compatibility with existing callers.
    return df, out_gdf.reset_index()
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,485 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/cli/correct_geometries.py | import os
import sys
from glob import glob
import argparse
import multiprocessing as mp
from rgitools import funcs
def run(input_dir=None, output_dir=None, *, replace_str=None,
        n_processes=None):
    """Corrects the geometries for an entire RGI directory.

    Parameters
    ----------
    input_dir : str
        path to the RGI directory
    output_dir : str
        path to the output directory
    replace_str : callable
        a function to call on the file's basename. A good example is:
        ``replace_str=lambda x : x.replace('rgi60', 'rgi61')``
    n_processes : int, optional
        the number of processors to use
    """
    # Collect the regional RGI shapefiles (skip the 'Regions' overview file)
    fp = '*_rgi*_*.shp'
    rgi_shps = list(glob(os.path.join(input_dir, "*", fp)))
    rgi_shps = sorted([r for r in rgi_shps if 'Regions' not in r])
    funcs.mkdir(output_dir)
    out_paths = []
    log_names = []
    for rgi_shp in rgi_shps:
        # Mirror the per-region directory layout in the output directory,
        # optionally renaming directory and file with replace_str
        odir = os.path.basename(os.path.dirname(rgi_shp))
        if replace_str:
            odir = replace_str(odir)
        odir = os.path.join(output_dir, odir)
        funcs.mkdir(odir)
        bn = os.path.basename(rgi_shp)
        if replace_str:
            bn = replace_str(bn)
        of = os.path.join(odir, bn)
        out_paths.append(of)
        log_names.append(bn)
    # One task per shapefile; mappable_func unpacks (func, path, out, log)
    with mp.Pool(n_processes) as p:
        p.starmap(funcs.mappable_func,
                  zip([funcs.check_geometries] * len(rgi_shps),
                      rgi_shps, out_paths, log_names),
                  chunksize=1)
def parse_args(args):
    """Parse and validate the CLI arguments.

    Returns a kwargs dict suitable for :func:`run`; raises ValueError
    on missing or malformed required arguments.
    """
    description = 'Corrects the geometries for an entire RGI directory.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--input-dir', type=str,
                        help='the rgi directory to process.')
    parser.add_argument('--output-dir', type=str,
                        help='the directory where to write the processed '
                             'files.')
    parser.add_argument('--replace-str', nargs='*', type=str,
                        help='a string to change on the file basename. '
                             'A good example is: --replace-str rgi60 rgi61')
    parser.add_argument('--n-processes', type=int,
                        help='Number of processors to use.')
    ns = parser.parse_args(args)
    if not ns.input_dir:
        raise ValueError('--input-dir is required!')
    if not ns.output_dir:
        raise ValueError('--output-dir is required!')
    replace_str = None
    if ns.replace_str:
        if len(ns.replace_str) != 2:
            raise ValueError('--replace-str needs two values!')
        old, new = ns.replace_str

        def replace_str(x):
            return x.replace(old, new)
    return dict(input_dir=ns.input_dir, output_dir=ns.output_dir,
                replace_str=replace_str, n_processes=ns.n_processes)
def main():
    """Script entry point"""
    # Skip argv[0] (the program name) before parsing
    run(**parse_args(sys.argv[1:]))
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,486 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/tests/test_rgitools.py | """All rgitools tests.
We use the pytest package to run them.
"""
import os
import shutil
from distutils.version import LooseVersion
import pytest
import pandas as pd
import geopandas as gpd
import numpy as np
from numpy.testing import assert_equal, assert_allclose
import rgitools
from rgitools import funcs
from rgitools.funcs import get_demo_file, mkdir
def get_iceland_df(reduced=False):
    """Read the demo Iceland ice cap file, optionally cropped to a subset."""
    data = gpd.read_file(get_demo_file('RGI6_icecap.shp'))
    if not reduced:
        return data
    # Keep only the south-western corner of the ice cap
    in_box = (data.CenLon < -19.45) & (data.CenLat < 63.7)
    return data.loc[in_box]
def test_install():
    """Smoke test: version metadata is present and sane.

    NOTE(review): distutils (LooseVersion) was removed in Python 3.12;
    consider porting to packaging.version for newer interpreters.
    """
    assert LooseVersion(rgitools.__version__) >= LooseVersion('0.0.0')
    assert rgitools.__isreleased__ in [False, True]
def test_correct_geometries(tmpdir):
    """check_geometries runs on the ice cap data and writes its output."""
    # Simple ice cap
    test_of = os.path.join(str(tmpdir), 'interfile.shp')
    df = get_iceland_df(reduced=True)
    out = funcs.check_geometries(df.copy(), to_file=test_of, job_id='test')
    assert len(out) == len(df)
    assert os.path.exists(test_of)
    assert np.all(out.check_geom == '')
    # All
    test_of = os.path.join(str(tmpdir), 'interfile2.shp')
    df = get_iceland_df()
    out = funcs.check_geometries(df.copy(), to_file=test_of, job_id='test')
    assert len(out) == len(df)
    assert os.path.exists(test_of)
    # Bugfix: np.all() on a generator returns the (always truthy)
    # generator object itself, so the assert could never fail.
    assert all(g.is_valid for g in out.geometry)
def test_correct_geometries_cli_args(tmpdir):
    """Argument parsing of the correct_geometries CLI entry point."""
    from rgitools.cli import correct_geometries
    kwargs = correct_geometries.parse_args(['--input-dir', 'dd1',
                                            '--output-dir', 'dd2',
                                            ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_dir'] == 'dd2'
    assert kwargs['replace_str'] is None
    assert kwargs['n_processes'] is None
    kwargs = correct_geometries.parse_args(['--input-dir', 'dd1',
                                            '--output-dir', 'dd2',
                                            '--replace-str', 'r1', 'r2',
                                            '--n-processes', '8',
                                            ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_dir'] == 'dd2'
    assert kwargs['n_processes'] == 8
    assert kwargs['replace_str']('1r1') == '1r2'
    # Missing/malformed required arguments must raise
    with pytest.raises(ValueError):
        correct_geometries.parse_args([])
    with pytest.raises(ValueError):
        correct_geometries.parse_args(['--input-dir', 'dd1'])
    with pytest.raises(ValueError):
        correct_geometries.parse_args(['--input-dir', 'dd1',
                                       '--output-dir', 'dd2',
                                       '--replace-str', 'r1',
                                       ]
                                      )
def test_correct_geometries_cli(tmpdir):
    """The correct_geometries CLI writes a corrected regional shapefile."""
    from rgitools.cli import correct_geometries
    rgi_dir = os.path.join(str(tmpdir), 'RGIV60')
    rgi_reg_dir = os.path.join(str(tmpdir), 'RGIV60', '06_rgi60_Iceland')
    mkdir(rgi_reg_dir)
    for e in ['.shp', '.prj', '.dbf', '.shx']:
        shutil.copyfile(get_demo_file('RGI6_icecap' + e),
                        os.path.join(rgi_reg_dir, '06_rgi60_Iceland' + e))
    out_dir = os.path.join(str(tmpdir), 'RGIV61')
    def replace(s):
        return s.replace('rgi60', 'rgi61')
    correct_geometries.run(rgi_dir, out_dir, replace_str=replace)
    outf = os.path.join(out_dir, '06_rgi61_Iceland', '06_rgi61_Iceland.shp')
    assert os.path.exists(outf)
    # All
    df = get_iceland_df()
    out = gpd.read_file(outf)
    assert len(out) == len(df)
    # Bugfix: np.all() on a generator object is always truthy; use all().
    assert all(g.is_valid for g in out.geometry)
    assert np.any(out.check_geom != '')
def test_intersects(tmpdir):
    """compute_intersects finds intersects for every glacier of the cap."""
    # Simple ice cap
    df = get_iceland_df(reduced=True)
    test_of = os.path.join(str(tmpdir), 'interfile.shp')
    out = funcs.compute_intersects(df, to_file=test_of, job_id='test')
    assert len(out) >= len(df)
    assert os.path.exists(test_of)
    # All elements should have an intersect with something
    all_ids = np.append(out.RGIId_1.values, out.RGIId_2.values)
    all_ids = np.sort(np.unique(all_ids))
    assert_equal(np.sort(np.unique(df.RGIId.values)), all_ids)
def test_intersects_cli_args(tmpdir):
    """Argument parsing of the compute_intersects CLI entry point."""
    from rgitools.cli import compute_intersects
    kwargs = compute_intersects.parse_args(['--input-dir', 'dd1',
                                            '--output-dir', 'dd2',
                                            ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_dir'] == 'dd2'
    assert kwargs['n_processes'] is None
    kwargs = compute_intersects.parse_args(['--input-dir', 'dd1',
                                            '--output-dir', 'dd2',
                                            '--n-processes', '8',
                                            ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_dir'] == 'dd2'
    assert kwargs['n_processes'] == 8
    # Missing required arguments must raise
    with pytest.raises(ValueError):
        compute_intersects.parse_args([])
    with pytest.raises(ValueError):
        compute_intersects.parse_args(['--input-dir', 'dd1'])
def test_intersects_cli(tmpdir):
    """The compute_intersects CLI writes the regional intersects file."""
    from rgitools.cli import compute_intersects
    rgi_dir = os.path.join(str(tmpdir), 'RGIV60')
    rgi_reg_dir = os.path.join(str(tmpdir), 'RGIV60', '06_rgi60_Iceland')
    mkdir(rgi_reg_dir)
    for e in ['.shp', '.prj', '.dbf', '.shx']:
        shutil.copyfile(get_demo_file('RGI6_icecap' + e),
                        os.path.join(rgi_reg_dir, '06_rgi60_Iceland' + e))
    out_dir = os.path.join(str(tmpdir), 'RGIV60_intersects')
    compute_intersects.run(rgi_dir, out_dir)
    assert os.path.exists(os.path.join(out_dir, '06_rgi60_Iceland',
                                       'intersects_06_rgi60_Iceland.shp'))
def test_find_clusters():
    """find_clusters groups linked entities into connected components."""
    # Simple ice cap
    df = get_iceland_df(reduced=True)
    idf = funcs.compute_intersects(df)
    # Add dummy entries for testing. DataFrame.append was removed in
    # pandas 2.0; build the extra rows separately and concatenate.
    extra = pd.DataFrame([{'RGIId_1': 'd1', 'RGIId_2': 'd2'},
                          {'RGIId_1': 'd1', 'RGIId_2': 'd3'}])
    idf = pd.concat([idf, extra], ignore_index=True)
    out = funcs.find_clusters(idf)
    assert len(out) == 2
    assert len(out['d1']) == 3
def test_merge_clusters():
    """merge_clusters dissolves a glacier cluster and keeps singletons.

    NOTE(review): uses ``DataFrame.append``, which was removed in
    pandas 2.0 -- this test requires pandas < 2 (or a careful port to
    ``pd.concat`` that preserves the GeoDataFrame type).
    """
    # Simple ice cap
    df = get_iceland_df(reduced=True)
    # Save the area for testing later
    area_ref = df.Area.sum()
    # Add dummy entries for testing
    from shapely.affinity import translate
    idf = df.iloc[0].copy()
    idf['geometry'] = translate(idf.geometry, xoff=0.15, yoff=0.0)
    idf['RGIId'] = 'd1'
    df = df.append(idf, ignore_index=True)
    idf = df.iloc[1].copy()
    idf['geometry'] = translate(idf.geometry, xoff=0.15, yoff=0.01)
    idf['RGIId'] = 'd2'
    df = df.append(idf, ignore_index=True)
    # Intersects and go
    idf = funcs.compute_intersects(df)
    out = funcs.merge_clusters(df, idf)
    assert len(out) == 3
    assert_allclose(out.iloc[0].Area, area_ref)
    # The translated duplicates must survive the merge unchanged
    s1 = df.iloc[-2]
    s2 = out.loc[out.RGIId == 'd1'].iloc[0]
    assert_equal(s1.CenLat, s2.CenLat)
    assert_equal(s1.CenLon, s2.CenLon)
    assert s1.geometry.equals(s2.geometry)
def test_merge_clusters_all():
    """merge_clusters on the full ice cap yields valid simple polygons."""
    # All
    df = get_iceland_df()
    # Intersects and go
    idf = funcs.compute_intersects(df)
    out = funcs.merge_clusters(df, idf)
    # Bugfix: np.all() on a generator returns the (always truthy)
    # generator object, so these asserts could never fail; use all().
    assert all(g.is_valid for g in out.geometry)
    assert all(g.type == 'Polygon' for g in out.geometry)
def test_zip_cli_args(tmpdir):
    """Argument parsing of the zip_rgi_dir CLI entry point."""
    from rgitools.cli import zip_rgi_dir
    kwargs = zip_rgi_dir.parse_args(['--input-dir', 'dd1',
                                     '--output-file', 'dd2',
                                     ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_file'] == 'dd2'
    # Missing required arguments must raise
    with pytest.raises(ValueError):
        zip_rgi_dir.parse_args([])
    with pytest.raises(ValueError):
        zip_rgi_dir.parse_args(['--input-dir', 'dd1'])
def test_zip_cli(tmpdir):
    """zip_rgi_dir CLI zips a two-region RGI directory.

    NOTE(review): ``outf`` equals ``rgi_dir``, so the final existence
    check is trivially satisfied by the input directory itself --
    presumably a distinct output path (e.g. ``rgi_61.zip``) was
    intended; confirm against zip_rgi_dir.run().
    """
    from rgitools.cli import zip_rgi_dir
    rgi_dir = os.path.join(str(tmpdir), 'rgi_61')
    outf = os.path.join(str(tmpdir), 'rgi_61')
    regdirs = ['06_rgi61_Iceland', '07_rgi61_Scandinavia']
    for regdir in regdirs:
        rgi_reg_dir = os.path.join(rgi_dir, regdir)
        mkdir(rgi_reg_dir)
        for e in ['.shp', '.prj', '.dbf', '.shx']:
            shutil.copyfile(get_demo_file('RGI6_icecap' + e),
                            os.path.join(rgi_reg_dir, '01_rgi61_Iceland' + e))
    zip_rgi_dir.run(rgi_dir, outf)
    assert os.path.exists(outf)
def test_hypsometry(tmpdir):
    """funcs.hypsometries reproduces the RGI hypsometry attributes."""
    from oggm.utils import rmsd
    rgi_df = gpd.read_file(get_demo_file('rgi_oetztal.shp'))
    rgi_df = rgi_df.loc[['_d' not in rid for rid in rgi_df.RGIId]]
    outf = os.path.join(str(tmpdir), 'rgi_62')
    # Make if fail somewhere
    from shapely.affinity import translate
    # NOTE(review): iloc[0, -1] assumes 'geometry' is the last column
    # of the demo file -- confirm against its schema.
    geo = rgi_df.iloc[0, -1]
    rgi_df.iloc[0, -1] = translate(geo, xoff=10)
    rgi_df.loc[1, 'RGIFlag'] = '2909'
    def set_oggm_params(cfg):
        cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
        cfg.PARAMS['use_multiprocessing'] = False
    df, gdf = funcs.hypsometries(rgi_df, set_oggm_params=set_oggm_params,
                                 to_file=outf)
    # Failed (0) and nominal (1) glaciers carry the -9 invalid marker
    assert np.all(df.loc[0, df.columns[3:]] == -9)
    assert np.all(df.loc[1, df.columns[3:]] == -9)
    assert not np.isfinite(gdf.loc[0, 'Aspect'])
    assert gdf.loc[1, 'Aspect'] == rgi_df.loc[1, 'Aspect']
    df = df.iloc[2:]
    assert np.all(df[df.columns[3:]].sum(axis=1) == 1000)
    gdf = gdf.iloc[2:]
    rgi_df = rgi_df.iloc[2:]
    assert rmsd(gdf['Zmed'], rgi_df['Zmed']) < 25
    assert rmsd(gdf['Zmin'], rgi_df['Zmin']) < 25
    assert rmsd(gdf['Zmax'], rgi_df['Zmax']) < 25
    assert rmsd(gdf['Slope'], rgi_df['Slope']) < 2
    # For aspect test for cos / sin because of 0 360 thing
    us = np.cos(np.deg2rad(gdf.Aspect))
    ref = np.cos(np.deg2rad(rgi_df.Aspect))
    assert rmsd(us, ref) < 0.3
    us = np.sin(np.deg2rad(gdf.Aspect))
    ref = np.sin(np.deg2rad(rgi_df.Aspect))
    assert rmsd(us, ref) < 0.3
    # Same checks again on the files written to disk
    df = pd.read_csv(outf + '_hypso.csv')
    gdf = gpd.read_file(outf + '.shp')
    assert np.all(df.loc[0, df.columns[3:]] == -9)
    assert np.all(df.loc[1, df.columns[3:]] == -9)
    assert not np.isfinite(gdf.loc[0, 'Aspect'])
    df = df.iloc[2:]
    assert np.all(df[df.columns[3:]].sum(axis=1) == 1000)
    gdf = gdf.iloc[2:]
    assert rmsd(gdf['Zmed'], rgi_df['Zmed']) < 25
    assert rmsd(gdf['Zmin'], rgi_df['Zmin']) < 25
    assert rmsd(gdf['Zmax'], rgi_df['Zmax']) < 25
    assert rmsd(gdf['Slope'], rgi_df['Slope']) < 2
    # For aspect test for cos / sin because of 0 360 thing
    us = np.cos(np.deg2rad(gdf.Aspect))
    ref = np.cos(np.deg2rad(rgi_df.Aspect))
    assert rmsd(us, ref) < 0.3
    us = np.sin(np.deg2rad(gdf.Aspect))
    ref = np.sin(np.deg2rad(rgi_df.Aspect))
    assert rmsd(us, ref) < 0.3
def set_oggm_params(cfg):
    """Point OGGM at the demo DEM (module-level variant for the CLI test)."""
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
def test_correct_hypsometries_cli_args(tmpdir):
    """Argument parsing of the compute_hypsometries CLI entry point."""
    from rgitools.cli import compute_hypsometries
    kwargs = compute_hypsometries.parse_args(['--input-dir', 'dd1',
                                              '--output-dir', 'dd2',
                                              ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_dir'] == 'dd2'
    assert kwargs['replace_str'] is None
    assert kwargs['n_processes'] is None
    kwargs = compute_hypsometries.parse_args(['--input-dir', 'dd1',
                                              '--output-dir', 'dd2',
                                              '--replace-str', 'r1', 'r2',
                                              '--n-processes', '8',
                                              ])
    assert kwargs['input_dir'] == 'dd1'
    assert kwargs['output_dir'] == 'dd2'
    assert kwargs['n_processes'] == 8
    assert kwargs['replace_str']('1r1') == '1r2'
    # Missing/malformed required arguments must raise
    with pytest.raises(ValueError):
        compute_hypsometries.parse_args([])
    with pytest.raises(ValueError):
        compute_hypsometries.parse_args(['--input-dir', 'dd1'])
    with pytest.raises(ValueError):
        compute_hypsometries.parse_args(['--input-dir', 'dd1',
                                         '--output-dir', 'dd2',
                                         '--replace-str', 'r1',
                                         ]
                                        )
def test_hypsometries_cli(tmpdir):
    """End-to-end: correct geometries, then compute hypsometries via CLI."""
    from rgitools.cli import compute_hypsometries, correct_geometries
    rgi_dir = os.path.join(str(tmpdir), 'RGIV60')
    rgi_reg_dir = os.path.join(str(tmpdir), 'RGIV60', '11_rgi60_Europe')
    mkdir(rgi_reg_dir)
    for e in ['.shp', '.prj', '.dbf', '.shx']:
        shutil.copyfile(get_demo_file('rgi_oetztal' + e),
                        os.path.join(rgi_reg_dir, '11_rgi60_Europe' + e))
    tmp_dir = os.path.join(str(tmpdir), 'RGIV61')
    def replace(s):
        return s.replace('rgi60', 'rgi61')
    correct_geometries.run(rgi_dir, tmp_dir, replace_str=replace)
    outf = os.path.join(tmp_dir, '11_rgi61_Europe', '11_rgi61_Europe.shp')
    assert os.path.exists(outf)
    # All
    df = gpd.read_file(get_demo_file('rgi_oetztal.shp'))
    out = gpd.read_file(outf)
    assert len(out) == len(df)
    # Bugfix: np.all() on a generator is always truthy; use builtin all().
    assert all(g.is_valid for g in out.geometry)
    assert np.any(out.check_geom != '')
    out_dir = os.path.join(str(tmpdir), 'RGIV62')
    def replace(s):
        return s.replace('rgi61', 'rgi62')
    compute_hypsometries.run(tmp_dir, out_dir,
                             replace_str=replace,
                             set_oggm_params=set_oggm_params)
    outf = os.path.join(out_dir, '11_rgi62_Europe', '11_rgi62_Europe.shp')
    assert os.path.exists(outf)
    # (A third, byte-identical copy of this csv check was removed: it
    # was a copy-paste duplicate and tested nothing new.)
    outf = os.path.join(out_dir, '11_rgi62_Europe',
                        '11_rgi62_Europe_hypso.csv')
    assert os.path.exists(outf)
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,487 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/cli/compute_intersects.py | import os
import sys
from glob import glob
import argparse
import multiprocessing as mp
from rgitools import funcs
def run(input_dir=None, output_dir=None, *, n_processes=None):
    """Computes the intersects for an entire RGI directory.

    Parameters
    ----------
    input_dir : str
        path to the RGI directory
    output_dir : str
        path to the output directory
    n_processes : int, optional
        the number of processors to use
    """
    # Collect the regional RGI shapefiles (skip the 'Regions' overview file)
    fp = '*_rgi*_*.shp'
    rgi_shps = list(glob(os.path.join(input_dir, "*", fp)))
    rgi_shps = sorted([r for r in rgi_shps if 'Regions' not in r])
    funcs.mkdir(output_dir)
    out_paths = []
    log_names = []
    for rgi_shp in rgi_shps:
        # Mirror the per-region directory layout in the output directory
        odir = os.path.basename(os.path.dirname(rgi_shp))
        odir = os.path.join(output_dir, odir)
        funcs.mkdir(odir)
        bn = os.path.basename(rgi_shp)
        of = os.path.join(odir, 'intersects_' + bn)
        out_paths.append(of)
        log_names.append(bn)
    # One task per shapefile; mappable_func unpacks (func, path, out, log)
    with mp.Pool(n_processes) as p:
        p.starmap(funcs.mappable_func,
                  zip([funcs.compute_intersects] * len(rgi_shps),
                      rgi_shps, out_paths, log_names),
                  chunksize=1)
def parse_args(args):
    """Parse and validate the CLI arguments.

    Returns a kwargs dict suitable for :func:`run`; raises ValueError
    when a required argument is missing.
    """
    description = 'Computes the intersects for an entire RGI directory.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--input-dir', type=str,
                        help='the rgi directory to process.')
    parser.add_argument('--output-dir', type=str,
                        help='the directory where to write the processed '
                             'files.')
    parser.add_argument('--n-processes', type=int,
                        help='Number of processors to use.')
    ns = parser.parse_args(args)
    # Both directories are mandatory
    for value, flag in [(ns.input_dir, '--input-dir'),
                        (ns.output_dir, '--output-dir')]:
        if not value:
            raise ValueError(flag + ' is required!')
    return dict(input_dir=ns.input_dir, output_dir=ns.output_dir,
                n_processes=ns.n_processes)
def main():
    """Script entry point"""
    # Skip argv[0] (the program name) before parsing
    run(**parse_args(sys.argv[1:]))
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,488 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/tests/conftest.py | import pytest
from oggm.utils import _downloads
from oggm.tests.conftest import secure_url_retrieve
@pytest.fixture(autouse=True)
def patch_url_retrieve(monkeypatch):
    # Route every OGGM download through the checksum-verified retriever;
    # autouse so that all tests in the session are covered.
    monkeypatch.setattr(_downloads, 'oggm_urlretrieve', secure_url_retrieve)
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,489 | GLIMS-RGI/rgitools | refs/heads/master | /setup.py | """Setup file for the rgitools package.
Adapted from the Python Packaging Authority template.
"""
from setuptools import setup, find_packages # Always prefer setuptools
from codecs import open # To use a consistent encoding
from os import path, walk
import sys, warnings, importlib, re
# Package version and metadata constants used by setup() below.
MAJOR = 0
MINOR = 0
MICRO = 2
ISRELEASED = False
VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
QUALIFIER = ''
DISTNAME = 'rgitools'
LICENSE = 'BSD-3-Clause'
AUTHOR = 'rgitools contributors'
AUTHOR_EMAIL = 'fabien.maussion@uibk.ac.at'
URL = ''
CLASSIFIERS = [
    # How mature is this project? Common values are
    # 3 - Alpha 4 - Beta 5 - Production/Stable
    # Bugfix: '4 - Alpha' is not a valid trove classifier.
    'Development Status :: 4 - Beta',
    # Indicate who your project is intended for
    'Intended Audience :: Science/Research',
    'License :: OSI Approved :: BSD License',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.5',
    # Bugfix: a missing comma here used to fuse the 3.6 and 3.7 entries
    # into a single invalid classifier string.
    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
]
DESCRIPTION = ('Python tools for processing and analyzing files from the '
               'Randolph Glacier Inventory')
LONG_DESCRIPTION = """
Python tools for processing and analyzing files from the Randolph Glacier
Inventory.
"""
# code to extract and write the version copied from pandas
FULLVERSION = VERSION
write_version = True
if not ISRELEASED:
    import subprocess
    FULLVERSION += '.dev'
    # Try the platform-dependent git executable names in turn
    pipe = None
    for cmd in ['git', 'git.cmd']:
        try:
            pipe = subprocess.Popen(
                [cmd, "describe", "--always", "--match", "v[0-9]*"],
                stdout=subprocess.PIPE)
            (so, serr) = pipe.communicate()
            if pipe.returncode == 0:
                break
        except OSError:
            # This candidate is not installed -- try the next one.
            # (Was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            pass
    if pipe is None or pipe.returncode != 0:
        # no git, or not in git dir
        if path.exists('rgitools/version.py'):
            warnings.warn("WARNING: Couldn't get git revision, using existing "
                          "rgitools/version.py")
            write_version = False
        else:
            warnings.warn("WARNING: Couldn't get git revision, using generic "
                          "version string")
    else:
        # have git, in git dir, but may have used a shallow clone (travis)
        rev = so.strip()
        # makes distutils blow up on Python 2.7
        if sys.version_info[0] >= 3:
            rev = rev.decode('ascii')
        if not rev.startswith('v') and re.match("[a-zA-Z0-9]{7,9}", rev):
            # partial clone, manually construct version string
            # this is the format before we started using git-describe
            # to get an ordering on dev version strings.
            rev = "v%s.dev-%s" % (VERSION, rev)
        # Strip leading v from tags format "vx.y.z" to get the version string
        FULLVERSION = rev.lstrip('v').replace(VERSION + '-', VERSION + '+')
else:
    FULLVERSION += QUALIFIER
def write_version_py(filename=None):
    """Write the generated version module with full/short version and flag.

    Parameters
    ----------
    filename : str, optional
        Target path; defaults to ``rgitools/version.py`` next to this script.
    """
    cnt = """\
version = '%s'
short_version = '%s'
isreleased = %s
"""
    if not filename:
        filename = path.join(path.dirname(__file__), 'rgitools', 'version.py')
    # IMPROVED: a with-statement replaces the manual try/finally close
    with open(filename, 'w') as a:
        a.write(cnt % (FULLVERSION, VERSION, ISRELEASED))


if write_version:
    write_version_py()
def check_dependencies(package_names):
    """Check if packages can be imported; print a warning if some are not.

    Parameters
    ----------
    package_names : iterable of str
        Importable module names to probe. Never raises for a missing one.
    """
    not_met = []
    for n in package_names:
        try:
            # IMPROVED: the imported module was bound to an unused `_`;
            # we only care whether the import succeeds.
            importlib.import_module(n)
        except ImportError:
            not_met.append(n)
    # IMPROVED: truthiness instead of `len(...) != 0`
    if not_met:
        errmsg = "Warning: the following packages could not be found: "
        print(errmsg + ', '.join(not_met))
# Hard requirements: warn (but do not fail) when any of these are missing.
req_packages = [
    'numpy', 'scipy', 'pyproj', 'geopandas', 'pytest',
]
check_dependencies(req_packages)
def file_walk(top, remove=''):
    """
    Returns a generator of files from the top of the tree, removing
    the given prefix from the root/file result.
    """
    # Normalize forward slashes to the platform separator before walking.
    top = top.replace('/', path.sep)
    remove = remove.replace('/', path.sep)
    for root, _dirs, filenames in walk(top):
        for fname in filenames:
            yield path.join(root, fname).replace(remove, '')
# Declarative package metadata; dependencies are deliberately not pinned here
# (see install_requires below) -- check_dependencies() warns instead.
setup(
    # Project info
    name=DISTNAME,
    version=FULLVERSION,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    # The project's main homepage.
    url=URL,
    # Author details
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    # License
    license=LICENSE,
    classifiers=CLASSIFIERS,
    # What does your project relate to?
    keywords=['geosciences', 'glaciers', 'gis'],
    # We are a python 3 only shop
    python_requires='>=3.4',
    # Find packages automatically
    packages=find_packages(exclude=['docs']),
    # Decided not to let pip install the dependencies, this is too brutal
    install_requires=[],
    # additional groups of dependencies here (e.g. development dependencies).
    extras_require={},
    # data files that need to be installed
    package_data={},
    # Old
    data_files=[],
    # Executable scripts
    entry_points={
        'console_scripts': [
            ('rgitools_correct_geometries = '
             'rgitools.cli.correct_geometries:main'),
            ('rgitools_compute_intersects = '
             'rgitools.cli.compute_intersects:main'),
            ('rgitools_compute_hypsometries = '
             'rgitools.cli.compute_hypsometries:main'),
            ('rgitools_zip_rgi_dir = '
             'rgitools.cli.zip_rgi_dir:main'),
        ],
    },
)
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,490 | GLIMS-RGI/rgitools | refs/heads/master | /notebooks/dem_statistics/statistics_paths.py | # these paths have to be set according to your local system in order to run the creation of the DEM statistics
# and shouldn't be changed in between the execution of the separate scripts
# NOTE: the placeholder values below must be replaced with real local paths.
# path to your preprocessed RGI data
prepro_path = '/PATH/TO/PREPROCESSED/DATA/RGI62/b_010/L1'
# path to directory that should be used as workdir by OGGM
wd = 'PATH/TO/YOUR/OGGM_WORKDIR'
# directory where the generated h5-files, barplots and CSV-files will be saved
post = 'PATH/TO/THE/POSTPROCESSING/FOLDER'
# suffix that is added to the output filenames of this run
sfx = '_v2' | {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,491 | GLIMS-RGI/rgitools | refs/heads/master | /rgitools/__init__.py | # flake8: noqa
# Re-raise a friendlier error when the generated version module is missing,
# which happens when the package has not been installed (e.g. `pip install -e .`).
try:
    from .version import version as __version__
    from .version import isreleased as __isreleased__
except ImportError:
    raise ImportError('rgitools is not properly installed. If you are running '
                      'from the source directory, please instead create a '
                      'new virtual environment (using conda or virtualenv) '
                      'and then install it in-place by running: '
                      'pip install -e .')
| {"/rgitools/cli/compute_hypsometries.py": ["/rgitools/__init__.py"], "/rgitools/cli/zip_rgi_dir.py": ["/rgitools/__init__.py"], "/rgitools/cli/correct_geometries.py": ["/rgitools/__init__.py"], "/rgitools/tests/test_rgitools.py": ["/rgitools/__init__.py", "/rgitools/funcs.py"], "/rgitools/cli/compute_intersects.py": ["/rgitools/__init__.py"]} |
65,495 | eodreports/OandaHT | refs/heads/master | /OandaHT_model.py | import math
import numpy as np
from scipy.optimize import minimize
class HFmodel:
    """Inventory-aware quoting model (reservation price + optimal spread)."""

    def __init__(self, sigma):
        self.sigma = sigma
        self.gamma = 0          # risk-aversion; set by calib()
        self.k = 1.5            # order-arrival intensity parameter
        self.mid_rev_price = None

    def calc(self, s, q, t, T):
        """Set the reservation price around mid `s` for inventory `q` over [t, T]."""
        inventory_skew = q * self.gamma * self.sigma ** 2 * (T - t)
        self.mid_rev_price = s - inventory_skew

    def calib(self, sprd):
        """Fit gamma so the model spread matches the observed spread `sprd`."""
        result = minimize(self.obj_func, [3], args=(sprd),
                          method='nelder-mead',
                          options={'xtol': 1e-8, 'disp': False})
        self.gamma = result.x[0]
        # print 'gamma: ' + str(self.gamma)

    def obj_func(self, gamma, sprd):
        """Squared gap between observed spread and model spread at `gamma`."""
        model_spread = 2 / gamma * math.log(1 + gamma / self.k)
        return (sprd - model_spread) ** 2

    def get_mid_rev_price(self):
        return self.mid_rev_price

    def _half_spread(self):
        # Optimal half-spread around the reservation price.
        return 1 / self.gamma * math.log(1 + self.gamma / self.k)

    def get_opt_bid(self, prec):
        return round(self.mid_rev_price - self._half_spread(), prec)

    def get_opt_ask(self, prec):
        return round(self.mid_rev_price + self._half_spread(), prec)
class SABRcalib:
    """Calibrates SABR-style parameters (alpha, beta, rho, nu) from a price history.

    Uses a GARCH(1,1) fit for the ATM volatility path, then backs out alpha,
    nu and rho from the joint price/alpha increments.
    """
    def __init__(self, beta, T):
        self.T=T #T for future use
        self.alpha=0
        self.beta=beta
        self.rho=0
        self.nu=0
        self.vol_atm=None
        self.garch_para=None
        self.para=None
    def calib(self, hist_price):
        """Estimate all parameters from `hist_price` (sequence of prices).

        Note: indexing assumes len(hist_price) == len(returns) + 1, so
        hist_price[i+1] lines up with the i-th return's fitted volatility.
        """
        ret=price2ret(hist_price)
        T=len(ret)
        hist_alpha = np.empty(T)
        d_w1=np.empty(T-1)
        d_w2=np.empty(T-1)
        #calibrate garch model
        vol_obj=garch(ret)
        vol_obj.estimation()
        self.garch_para=vol_obj.theta
        self.vol_atm=vol_obj.get_fitted_vol()
        # alpha_i = vol_i * F_i^(1-beta), i.e. invert the SABR backbone
        for i in range(0,T):
            hist_alpha[i]=self.vol_atm[i]*math.pow(hist_price[i+1], 1-self.beta)
        self.alpha=hist_alpha[-1]
        ret_alpha=price2ret(hist_alpha)
        # nu: volatility of the alpha process
        self.nu=np.std(ret_alpha)
        hist_price_tmp=hist_price[1:]
        # rho: correlation of the two normalized Brownian increments
        for i in range(1,T):
            d_w1[i-1]=(hist_price_tmp[i]-hist_price_tmp[i-1])/(hist_alpha[i-1]*pow(hist_price_tmp[i-1],self.beta))
            d_w2[i-1]=(hist_alpha[i]-hist_alpha[i-1])/(hist_alpha[i-1]*self.nu)
        self.rho=np.corrcoef(d_w1, d_w2)[0, 1]
        self.para = self.alpha, self.beta, self.rho, self.nu
    def get_para(self):
        # (alpha, beta, rho, nu) tuple; None until calib() has run
        return self.para
class garch:
def __init__(self, data):
self.data=data
self.theta=None
def logfunc(self, theta):
c, a, b=theta
ret=self.data
T = len(ret)
ret=ret-np.mean(ret)
h = np.empty(T)
h[0] = np.var(ret)
logfunc=0
for i in range(1, T):
h[i] = c + a*ret[i-1]**2 + b*h[i-1] # GARCH(1,1) model
logfunc+=-0.5*math.log(h[i])-0.5*ret[i]**2/h[i]
return -logfunc
def estimation(self):
x0=[0.5,0.1,0.85]
lb=0.0001
bnds=[(0,10), (lb,1), (lb,1)]
result = minimize(self.logfunc, x0, method='L-BFGS-B', bounds=bnds, options={'maxiter':99999999, 'disp': False})
self.theta=result.x
def get_fitted_vol(self):
ret=self.data
c, a, b=self.theta
T = len(ret)
ret=ret-np.mean(ret)
h = np.empty(T)
vol = np.empty(T)
h[0] = np.var(ret)
vol[0]=math.sqrt(h[0])
for i in range(1, T):
h[i] = c + a*ret[i-1]**2 + b*h[i-1]
vol[i]=math.sqrt(h[i])
return vol*math.sqrt(262)
def price2ret(price):
    """Log returns of consecutive prices: r_i = ln(p_i / p_{i-1}).

    Returns an empty list for zero or one price (no consecutive pair).
    """
    # IMPROVED: comprehension over zipped consecutive pairs replaces the
    # index-based loop-and-append.
    return [math.log(curr / prev) for prev, curr in zip(price, price[1:])]
| {"/OandaHT_main.py": ["/OandaHT_function.py"]} |
65,496 | eodreports/OandaHT | refs/heads/master | /OandaHT_function.py | from pyoanda import Order, Client, PRACTICE
import time
import datetime
import threading
import csv
import numpy as np
from OandaHT_model import *
class HFtrading:
    """Market-making loop for one instrument on the Oanda practice API.

    NOTE(review): this file is Python 2 (print statements); the hard-coded
    log directory only exists on the original author's machine.
    """
    def __init__(self, underlying, set_obj):
        """Open a log file, connect to Oanda, and calibrate SABR on 5y of daily data."""
        run_time=time.strftime("%Y%m%d_%H%M%S")
        self.underlying=underlying
        self.set_obj=set_obj
        self.mid_price=0
        self.vol=None
        log_dir='/Users/MengfeiZhang/Desktop/tmp'
        self.f=open(log_dir+'/'+self.underlying+'_hf_log_'+run_time+'.txt','w')
        self.weekday=None
        self.now=None
        self.client=None
        self.q=0
        self.max_inventory=set_obj.get_max_inventory()
        # JPY pairs quote with 3 decimals, the rest with 5
        if ('JPY' in self.underlying)==True:
            self.prec=3
        else:
            self.prec=5
        #connect
        self.connect()
        sabr_calib=SABRcalib(0.5, 1.0/52)
        sabr_calib.calib(self.get_hist_data(262*5))
        self.SABRpara=sabr_calib.get_para()
    def connect(self):
        """Create the API client; on any failure, wait 5s and retry forever."""
        try:
            self.client = Client(
                environment=PRACTICE,
                account_id=self.set_obj.get_account_id(),
                access_token=self.set_obj.get_token()
            )
            print self.underlying+' connection succeeded...'
        except:
            print self.underlying+' connection failed...'
            time.sleep(5)
            self.connect()
    def get_mid_price(self):
        """Current (ask+bid)/2; logs and returns None on API errors."""
        try:
            price_resp=self.client.get_prices(instruments=self.underlying, stream=False) #, stream=True
            price_resp=price_resp['prices'][0]
            return (price_resp['ask']+price_resp['bid'])/2
        except Exception as err:
            print >>self.f, err
    def get_atm_vol(self):
        """SABR backbone ATM vol: alpha * F^(beta-1)."""
        return self.SABRpara[0]*self.get_mid_price()**(self.SABRpara[1]-1)
    def get_hist_data(self, hist_len):
        """List of the last `hist_len` daily midpoint closes."""
        hist_resp=self.client.get_instrument_history(
            instrument=self.underlying,
            candle_format="midpoint",
            granularity="D",
            count=hist_len,
        )
        price=[]
        for i in range(0,len(hist_resp['candles'])):
            price.append(hist_resp['candles'][i]['closeMid'])
        return price
    def get_hist_vol(self):
        """Std-dev of close-to-close changes over the last 100 5-second candles."""
        hist_resp=self.client.get_instrument_history(
            instrument=self.underlying,
            candle_format="midpoint",
            granularity="S5",
            count=100,
        )
        ret_tmp=[]
        for i in range(1,len(hist_resp['candles'])):
            ret_tmp.append(hist_resp['candles'][i]['closeMid']-hist_resp['candles'][i-1]['closeMid'])
        return np.std(ret_tmp)
    def get_live_sprd(self):
        """Current ask-bid spread; logs the error and returns 0 on failure."""
        try:
            price_resp=self.client.get_prices(instruments=self.underlying, stream=False) #, stream=True
            price_resp=price_resp['prices'][0]
            return price_resp['ask']-price_resp['bid']
        except Exception as err:
            print >>self.f, err
            return 0
    def get_current_inventory(self):
        """Position expressed as a fraction of max_inventory."""
        return float(self.get_position())/self.max_inventory
    def get_position(self):
        """Signed position in units (buy positive, sell negative).

        NOTE(review): the broad except silently maps both 'no position' and
        genuine API errors to 0.
        """
        try:
            resp=self.client.get_position(instrument=self.underlying)
            if resp['side']=='buy':
                return resp['units']
            elif resp['side']=='sell':
                return -resp['units']
        except Exception as err:
            return 0
    def load_data(self):
        """Refresh mid price, clock, inventory and short-horizon volatility."""
        self.mid_price=self.get_mid_price()
        self.weekday=datetime.datetime.today().weekday()
        self.now=datetime.datetime.now()
        self.q=self.get_current_inventory()
        #self.vol=self.get_atm_vol()
        self.vol=self.get_hist_vol()
    def start(self):
        """One quoting cycle: refresh data, requote both sides, reschedule itself."""
        self.load_data()
        if (int(self.weekday)==4 and int(self.now.hour)>=17): #Friday 5pm
            print 'market closed...'
            return None
        model=HFmodel(self.vol)
        model.calib(self.get_live_sprd())
        model.calc(self.mid_price, self.q, 0, 1)
        print >> self.f, 'market mid price: '+str(self.mid_price)
        print >> self.f, 'model reservation price: '+str(model.get_mid_rev_price())
        print >> self.f, 'model bid price: '+str(model.get_opt_bid(self.prec))
        print >> self.f, 'model ask price: '+str(model.get_opt_ask(self.prec))
        print >> self.f, 'gamma: '+str(model.gamma)
        print >> self.f, 'inventory: '+str(self.q)
        print >> self.f, 'volatility (5s): '+str(self.vol)
        try:
            print 'heartbeat('+self.underlying+') '+str(self.now)+'...'
            #close all outstanding orders
            resp_order=self.client.get_orders(instrument=self.underlying)
            for order in resp_order['orders']:
                resp_close_order=self.client.close_order(order_id=order['id'])
            expiry_order=self.now + datetime.timedelta(days=1)
            expiry_order=expiry_order.isoformat('T') + "Z"
            order_ask = Order(
                instrument=self.underlying,
                units=self.set_obj.get_trade_size(),
                side="sell",
                type="limit",
                price=model.get_opt_ask(self.prec),
                expiry=expiry_order,
            )
            order_bid = Order(
                instrument=self.underlying,
                units=self.set_obj.get_trade_size(),
                side="buy",
                type="limit",
                price=model.get_opt_bid(self.prec),
                expiry=expiry_order,
            )
            #place order; quote one side only when an inventory limit is reached
            try:
                if self.q>=1: #long limit reached
                    resp_order_ask = self.client.create_order(order=order_ask)
                elif self.q<=-1: #short limit reached
                    resp_order_bid = self.client.create_order(order=order_bid)
                else:
                    resp_order_ask = self.client.create_order(order=order_ask)
                    resp_order_bid = self.client.create_order(order=order_bid)
            except Exception as err:
                print err
                if ('halt' in str(err))==True:
                    print 'market closed...'
                    return None
                else:
                    print "cannot place order..."
            time.sleep(self.set_obj.get_timer())
        except Exception as error:
            print error
            print self.underlying+' disconnected, try to reconnect '+str(self.now)+'...'
            self.connect()
        threading.Timer(1, self.start).start()
class set:
    """Runtime settings plus credentials loaded from a CSV login file.

    NOTE(review): the class name shadows the builtin `set`; kept because
    callers (OandaHT_main) construct it by this name.
    """

    def __init__(self, timer, trade_size, max_inventory, login_file):
        self.timer = timer
        self.trade_size = trade_size
        self.max_inventory = max_inventory
        # Rows 1..4 of the CSV map to these attributes; extra rows are ignored.
        attr_by_row = {1: 'account_id', 2: 'token',
                       3: 'email_login', 4: 'email_pwd'}
        source = open(login_file, 'r')
        try:
            for row_no, row in enumerate(csv.reader(source), start=1):
                if row_no in attr_by_row:
                    setattr(self, attr_by_row[row_no], row[0])
        finally:
            source.close()

    def get_timer(self):
        return self.timer

    def get_account_id(self):
        return str(self.account_id)

    def get_token(self):
        return str(self.token)

    def get_email_login(self):
        return str(self.email_login)

    def get_email_pwd(self):
        return str(self.email_pwd)

    def get_trade_size(self):
        return self.trade_size
def get_max_inventory(self):
return self.max_inventory | {"/OandaHT_main.py": ["/OandaHT_function.py"]} |
65,497 | eodreports/OandaHT | refs/heads/master | /OandaHT_main.py | from OandaHT_function import *
login_file='/Users/MengfeiZhang/Desktop/tmp/login_info.csv'
set_obj=set(60, 5000, 50000, login_file)
HFobj1=HFtrading("EUR_USD", set_obj)
HFobj2=HFtrading("USD_JPY", set_obj)
HFobj3=HFtrading("GBP_USD", set_obj)
HFobj4=HFtrading("AUD_USD", set_obj)
HFobj5=HFtrading("NZD_USD", set_obj)
HFobj6=HFtrading("USD_CHF", set_obj)
HFobj7=HFtrading("USD_CAD", set_obj)
HFobj8=HFtrading("EUR_CHF", set_obj)
hf_vet=[HFobj1, HFobj2, HFobj3, HFobj4, HFobj5, HFobj6, HFobj7, HFobj8]
#start trading
threads=[]
for hf in hf_vet:
    # BUG FIX: `target=hf.start()` called start() synchronously in the main
    # thread and handed Thread its return value (None) as the target, so no
    # work ever ran in the threads; `args` must also be a tuple, not None.
    threads.append(threading.Thread(target=hf.start, args=()))
for thread in threads:
    thread.start()
for thread in threads:
thread.join() | {"/OandaHT_main.py": ["/OandaHT_function.py"]} |
65,498 | AlexeyKozlov/Flights | refs/heads/master | /test/test_buy_tickets.py | # -*- coding: utf-8 -*-
import pytest
from fixture.application import Application
from model.flight import Flight
@pytest.fixture
def app(request):
    """Provide an Application wrapping a live WebDriver; quit it on teardown."""
    fixture = Application()
    request.addfinalizer(fixture.destroy)
    return fixture
def test_buy_tickets(app):
    """End-to-end booking flow: login, search, pick flights, pay, logout."""
    app.session.login(username="alexey", password="lolo")
    app.fill_flight_details()
    app.choose_flight()
    app.fill_reservation(Flight(name="Alexey", lastname="Kozlov", name2="Alexey's", lastname2="Wife"))
    app.session.logout()
def test_buy_tickets_2(app):
    """Same booking flow as test_buy_tickets with a different passenger pair."""
    app.session.login(username="alexey", password="lolo")
    app.fill_flight_details()
    app.choose_flight()
    app.fill_reservation(Flight(name="Vania", lastname="Taiwania", name2="Zozo", lastname2="Koleso"))
    app.session.logout()
| {"/test/test_buy_tickets.py": ["/fixture/application.py", "/model/flight.py"], "/fixture/application.py": ["/fixture/session.py"]} |
65,499 | AlexeyKozlov/Flights | refs/heads/master | /fixture/application.py | from selenium.webdriver.firefox.webdriver import WebDriver
from fixture.session import SessionHelper
class Application:
    """Page-driver for the demo flight-booking site (newtours.demoaut.com).

    NOTE(review): the long XPaths are recorded selectors tied to the site's
    table layout; any markup change will break them.
    """
    def __init__(self):
        """Start Firefox with a 60s implicit wait and attach session helpers."""
        self.wd = WebDriver()
        self.wd.implicitly_wait(60)
        self.session = SessionHelper(self)
    def fill_reservation(self, flight):
        """Fill the reservation form (passengers, card, billing) and buy."""
        wd = self.wd
        self.click_book_flight()
        # first passenger
        wd.find_element_by_name("passFirst0").click()
        wd.find_element_by_name("passFirst0").clear()
        wd.find_element_by_name("passFirst0").send_keys(flight.name)
        wd.find_element_by_name("passLast0").click()
        wd.find_element_by_name("passLast0").clear()
        wd.find_element_by_name("passLast0").send_keys(flight.lastname)
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[4]/td/table/tbody/tr[2]/td[3]/select//option[1]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[4]/td/table/tbody/tr[2]/td[3]/select//option[1]").click()
        # second passenger
        wd.find_element_by_name("passFirst1").click()
        wd.find_element_by_name("passFirst1").clear()
        wd.find_element_by_name("passFirst1").send_keys(flight.name2)
        wd.find_element_by_name("passLast1").click()
        wd.find_element_by_name("passLast1").clear()
        wd.find_element_by_name("passLast1").send_keys(flight.lastname2)
        # credit card type, number and expiry
        wd.find_element_by_name("creditnumber").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td/table/tbody/tr[2]/td[1]/select//option[3]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td/table/tbody/tr[2]/td[1]/select//option[3]").click()
        wd.find_element_by_name("creditnumber").click()
        wd.find_element_by_name("creditnumber").clear()
        wd.find_element_by_name("creditnumber").send_keys("111222333444")
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td/table/tbody/tr[2]/td[3]/select[1]//option[2]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td/table/tbody/tr[2]/td[3]/select[1]//option[2]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td/table/tbody/tr[2]/td[3]/select[2]//option[3]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td/table/tbody/tr[2]/td[3]/select[2]//option[3]").click()
        # cardholder name
        wd.find_element_by_name("cc_frst_name").click()
        wd.find_element_by_name("cc_frst_name").clear()
        wd.find_element_by_name("cc_frst_name").send_keys("Alexey")
        wd.find_element_by_name("cc_mid_name").click()
        wd.find_element_by_name("cc_mid_name").clear()
        wd.find_element_by_name("cc_mid_name").send_keys("Kozlov")
        wd.find_element_by_name("cc_mid_name").click()
        wd.find_element_by_name("cc_mid_name").clear()
        wd.find_element_by_name("cc_mid_name").send_keys()
        wd.find_element_by_name("cc_last_name").click()
        wd.find_element_by_name("cc_last_name").clear()
        wd.find_element_by_name("cc_last_name").send_keys("Kozlov")
        # billing address
        wd.find_element_by_name("billAddress1").click()
        wd.find_element_by_name("billAddress1").clear()
        wd.find_element_by_name("billAddress1").send_keys("9 Autumn St")
        wd.find_element_by_name("billAddress2").click()
        wd.find_element_by_name("billAddress2").send_keys("")
        wd.find_element_by_name("billCity").click()
        wd.find_element_by_name("billCity").clear()
        wd.find_element_by_name("billCity").send_keys("Somerville")
        wd.find_element_by_name("billState").click()
        wd.find_element_by_name("billState").clear()
        wd.find_element_by_name("billState").send_keys("MA")
        wd.find_element_by_name("billZip").click()
        wd.find_element_by_name("billZip").clear()
        wd.find_element_by_name("billZip").send_keys("02145")
        wd.find_element_by_name("delAddress1").click()
        # "same as billing address" checkbox, then submit the purchase
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[15]/td[2]/input").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[15]/td[2]/input").click()
        wd.find_element_by_name("buyFlights").click()
    def click_book_flight(self):
        """Confirm the selected flights and proceed to the reservation form."""
        wd = self.wd
        wd.find_element_by_name("reserveFlights").click()
    def choose_flight(self):
        """Pick one outbound and one return flight from the results tables."""
        wd = self.wd
        self.click_select_flight()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table[1]/tbody/tr[5]/td[1]/input").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table[1]/tbody/tr[5]/td[1]/input").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table[2]/tbody/tr[5]/td[1]/input").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table[2]/tbody/tr[5]/td[1]/input").click()
    def click_select_flight(self):
        """Submit the flight-search form."""
        wd = self.wd
        wd.find_element_by_name("findFlights").click()
    def fill_flight_details(self):
        """Fill the flight-finder form (trip type, airports, dates, class)."""
        wd = self.wd
        wd.find_element_by_name("tripType").click()
        wd.find_element_by_xpath(
            "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[3]/td[2]/b/select//option[2]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[4]/td[2]/select//option[4]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[4]/td[2]/select//option[4]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[6]/td[2]/select//option[5]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[6]/td[2]/select//option[5]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td[2]/select[1]//option[11]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td[2]/select[1]//option[11]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td[2]/select[2]//option[2]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[7]/td[2]/select[2]//option[2]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[9]/td[2]/font/font/input[1]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[9]/td[2]/font/font/input[1]").click()
        if not wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[10]/td[2]/select//option[3]").is_selected():
            wd.find_element_by_xpath(
                "//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr/td[2]/table/tbody/tr[5]/td/form/table/tbody/tr[10]/td[2]/select//option[3]").click()
    def open_home_page(self):
        """Navigate to the site's sign-on page."""
        wd = self.wd
        wd.get("http://newtours.demoaut.com/mercurysignon.php")
def destroy(self):
self.wd.quit() | {"/test/test_buy_tickets.py": ["/fixture/application.py", "/model/flight.py"], "/fixture/application.py": ["/fixture/session.py"]} |
65,500 | AlexeyKozlov/Flights | refs/heads/master | /model/flight.py |
class Flight:
    """Booking passenger details: primary passenger and companion names."""

    def __init__(self, name, lastname, name2, lastname2):
        self.name, self.lastname = name, lastname
        self.name2, self.lastname2 = name2, lastname2
| {"/test/test_buy_tickets.py": ["/fixture/application.py", "/model/flight.py"], "/fixture/application.py": ["/fixture/session.py"]} |
65,501 | AlexeyKozlov/Flights | refs/heads/master | /fixture/session.py |
class SessionHelper:
    """Login helpers driving the site's sign-on form via Selenium."""
    def __init__(self, app):
        self.app=app
    def login(self, username, password):
        """Open the sign-on page and submit the given credentials."""
        wd = self.app.wd
        self.app.open_home_page()
        wd.find_element_by_name("userName").click()
        wd.find_element_by_name("userName").clear()
        wd.find_element_by_name("userName").send_keys(username)
        wd.find_element_by_name("password").click()
        wd.find_element_by_name("password").clear()
        wd.find_element_by_name("password").send_keys(password)
        wd.find_element_by_name("login").click()
def logout(self):
wd = self.app.wd
wd.find_element_by_xpath(
"//div/table/tbody/tr/td[2]/table/tbody/tr[4]/td/table/tbody/tr[1]/td[2]/table/tbody/tr[7]/td/table/tbody/tr/td[2]/a/img").click() | {"/test/test_buy_tickets.py": ["/fixture/application.py", "/model/flight.py"], "/fixture/application.py": ["/fixture/session.py"]} |
65,515 | jbauza/todolist | refs/heads/master | /todo/urls.py | from django.conf.urls import include, url, patterns
from rest_framework.authtoken import views
# Project URLs: delegate /todo/ to the todolist app and expose DRF's token view.
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8 and
# removed in 1.10 -- confirm the project's Django version before upgrading.
urlpatterns = patterns('',
    url(r'^todo/', include('todolist.urls')),
    url(r'^todo/get_token/', views.obtain_auth_token),
)
| {"/todolist/serializers.py": ["/todolist/models.py"], "/todolist/tests.py": ["/todolist/models.py"], "/todolist/views.py": ["/todolist/models.py", "/todolist/serializers.py"]} |
65,516 | jbauza/todolist | refs/heads/master | /todolist/serializers.py | from rest_framework import serializers
from rest_framework.authtoken.models import Token
from django.contrib.auth.models import User
from todolist.models import Task
class TaskSerializer(serializers.ModelSerializer):
    """Serializes a Task as its (name, status) pair."""
    class Meta:
        model = Task
        fields = ('name','status')
class UserSerializer(serializers.ModelSerializer):
    """Serializes user credentials for registration.

    NOTE(review): ModelSerializer.save() assigns the raw password field
    directly, so the password is presumably stored unhashed -- verify, and
    consider User.objects.create_user() / set_password().
    """
    class Meta:
        model = User
        fields=('username','password')
    def get_token(self):
        """Return the auth-token key of the user named in this serializer's data."""
        user = User.objects.get(username=self.data['username'])
        return Token.objects.get(user=user).key
| {"/todolist/serializers.py": ["/todolist/models.py"], "/todolist/tests.py": ["/todolist/models.py"], "/todolist/views.py": ["/todolist/models.py", "/todolist/serializers.py"]} |
65,517 | jbauza/todolist | refs/heads/master | /todolist/tests.py | from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase, force_authenticate
from rest_framework import status
from django.contrib.auth.models import User
from todolist.models import Task
class User_Registration(APITestCase):
    """API test for the open registration endpoint."""
    def test_user_registration(self):
        """POST /register_user creates exactly one user and returns 201."""
        url = reverse('register_user')
        data = {'username':'test','password':'test'}
        response = self.client.post(url,data,format='json')
        self.assertEqual(response.status_code,status.HTTP_201_CREATED)
        self.assertEqual(User.objects.count(),1)
        self.assertEqual(User.objects.get().username, 'test')
class Task_Test(APITestCase):
    """API tests for creating, listing and resolving tasks."""
    def test_add_task(self):
        """POST /add_task creates a task for an authenticated user."""
        url = reverse('add_task')
        data = {'name':'test_task'}
        user = User.objects.create(username='test')
        self.client.force_authenticate(user=user)
        response = self.client.post(url,data,format='json')
        self.assertEqual(response.status_code,status.HTTP_201_CREATED)
        self.assertEqual(Task.objects.count(),1)
        self.assertEqual(Task.objects.get().name, 'test_task')
    def test_todolist(self):
        """GET /todolist returns 200 for an authenticated user."""
        url = reverse('todolist')
        user = User.objects.create(username='test')
        self.client.force_authenticate(user=user)
        response = self.client.get(url)
        self.assertEqual(response.status_code,status.HTTP_200_OK)
    def test_resolve_task(self):
        """POST /resolve_task/<name> flips the task's status to resolved."""
        task_name = 'test_task'
        url = '/todo/resolve_task/'+task_name
        user = User.objects.create(username='test')
        task = Task.objects.create(name=task_name)
        self.assertEqual(task.status, False)
        self.client.force_authenticate(user=user)
        response = self.client.post(url)
        self.assertEqual(response.status_code,status.HTTP_200_OK)
        self.assertEqual(Task.objects.get().status, True)
| {"/todolist/serializers.py": ["/todolist/models.py"], "/todolist/tests.py": ["/todolist/models.py"], "/todolist/views.py": ["/todolist/models.py", "/todolist/serializers.py"]} |
65,518 | jbauza/todolist | refs/heads/master | /todolist/models.py | from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from django.conf import settings
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    """post_save signal: create a DRF auth token for every newly created user."""
    if created:
        Token.objects.create(user=instance)
class Task(models.Model):
    """A single to-do item."""
    name = models.CharField(max_length=50)
    status = models.BooleanField(default=False) # True = resolved, False = pending
| {"/todolist/serializers.py": ["/todolist/models.py"], "/todolist/tests.py": ["/todolist/models.py"], "/todolist/views.py": ["/todolist/models.py", "/todolist/serializers.py"]} |
65,519 | jbauza/todolist | refs/heads/master | /todolist/views.py | from rest_framework.permissions import IsAuthenticated
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from todolist.models import Task
from todolist.serializers import TaskSerializer, UserSerializer
# Registers a user and returns the corresponding auth token
@api_view(['POST'])
@permission_classes(())
def register_user(request):
    """Create a user from POSTed credentials and return the new user's token.

    Open endpoint (empty permission_classes); responds 201 with the token
    string on success, 400 on invalid data.
    """
    if request.method == 'POST':
        serializer = UserSerializer(data=request.DATA)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.get_token(),status=status.HTTP_201_CREATED)
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET','POST'])
def todolist(request):
    """List all tasks.

    Declared for GET and POST, but only GET is handled; a POST falls through
    and returns an implicit None -- NOTE(review): creation presumably lives
    in add_task instead, confirm before narrowing the allowed methods.
    """
    if request.method == 'GET':
        tasks = Task.objects.all()
        serializer = TaskSerializer(tasks,many=True)
        return Response(serializer.data)
@api_view(['POST'])
def add_task(request):
    """Create a task from POSTed data; 201 with the task on success, 400 otherwise."""
    if request.method == 'POST':
        serializer = TaskSerializer(data=request.DATA)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST)
@api_view(['POST'])
def resolve_task(request,name):
    """Mark the task with the given name as resolved and return it.

    Responds 400 when no task with that name exists (NOTE(review): 404 would
    be the conventional status) or when re-serialization fails.
    """
    try:
        task = Task.objects.get(name=name)
    except Task.DoesNotExist:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    if request.method == 'POST':
        if task.status == False:
            task.status = True # resolved state
            task.save()
        serializer = TaskSerializer(data={'name':task.name,'status':task.status})
        if serializer.is_valid():
            return Response(serializer.data)
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET','PUT','DELETE'])
def task_details(request,pk):
    """Retrieve (GET), update (PUT) or delete (DELETE) a single task by pk.

    Responds 400 when the task does not exist or the PUT payload is invalid,
    204 after a successful delete.
    """
    try:
        task = Task.objects.get(pk=pk)
    except Task.DoesNotExist:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    if request.method == 'GET':
        serializer = TaskSerializer(task)
        return Response(serializer.data)
    if request.method == 'PUT':
        serializer = TaskSerializer(task,data=request.DATA)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        else:
            # BUG FIX: DRF exposes validation errors as `.errors` (plural);
            # `.error` raised AttributeError on any invalid PUT payload.
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        task.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
| {"/todolist/serializers.py": ["/todolist/models.py"], "/todolist/tests.py": ["/todolist/models.py"], "/todolist/views.py": ["/todolist/models.py", "/todolist/serializers.py"]} |
65,520 | jbauza/todolist | refs/heads/master | /todolist/urls.py | from django.conf.urls import patterns, include, url
# App URLs for the todolist views.
# NOTE(review): patterns() with string view names was deprecated in Django 1.8
# and removed in 1.10 -- confirm the project's Django version before upgrading.
urlpatterns = patterns('todolist.views',
    url(r'^todolist/$','todolist',name='todolist'),
    url(r'^add_task/$','add_task',name='add_task'),
    url(r'^register_user/$','register_user',name='register_user'),
    url(r'^resolve_task/(?P<name>\w+)$','resolve_task',name='resolve_task'),
    url(r'^todolist/(?P<pk>[0-9]+)$','task_details',name='task_details')
)
| {"/todolist/serializers.py": ["/todolist/models.py"], "/todolist/tests.py": ["/todolist/models.py"], "/todolist/views.py": ["/todolist/models.py", "/todolist/serializers.py"]} |
65,522 | matthewpoyner/trainernats | refs/heads/main | /memberships/urls.py | from django.urls import path
from . import views
# Memberships app routes: the membership page and per-order history lookup.
urlpatterns = [
    path('', views.membership, name='membership'),
    path('order_history/<order_number>', views.order_history, name='order_history')
]
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,523 | matthewpoyner/trainernats | refs/heads/main | /tnsclasses/models.py | from django.db import models
class Day(models.Model):
    """Weekday on which a class runs, with an optional display name."""
    day = models.CharField(max_length=254)
    friendly_name = models.CharField(
        max_length=254,
        null=True,
        blank=True
    )
    def __str__(self):
        return self.day
    def get_friendly_name(self):
        return self.friendly_name
class Class_Type(models.Model):
    """Category of a fitness class, with an optional display name."""
    class_type = models.CharField(
        max_length=254,
        null=True,
        blank=True
    )
    friendly_name = models.CharField(
        max_length=254,
        null=True,
        blank=True
    )
    def __str__(self):
        return self.class_type
class TNS_Class(models.Model):
    # Scheduled day; SET_NULL keeps the class if the Day row is deleted.
    day = models.ForeignKey(
        'Day',
        null=True,
        blank=True,
        on_delete=models.SET_NULL
    )
    class_type = models.ForeignKey(
        'Class_Type',
        null=True,
        blank=True,
        on_delete=models.SET_NULL
    )
    # NOTE(review): max_length is ignored by TimeField (see migration 0002,
    # which preserves it as-is).
    class_time = models.TimeField(
        max_length=30,
        null=True,
        blank=True
    )
    class_name = models.CharField(max_length=254)
    # Short blurb shown on the classes summary page.
    class_description = models.TextField()
    # NOTE(review): null=True with blank=False is unusual — forms require a
    # value even though the DB allows NULL; confirm intent.
    class_more_detail = models.TextField(null=True,
                                         blank=False
                                         )
    price = models.DecimalField(
        max_digits=6,
        decimal_places=2,
    )
    # Either a hosted URL or an uploaded image may be used; both optional.
    image_url = models.URLField(
        max_length=1024,
        null=True,
        blank=True
    )
    image = models.ImageField(null=True, blank=True)

    def __str__(self):
        return self.class_name
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,524 | matthewpoyner/trainernats | refs/heads/main | /tnsclasses/views.py | from django.shortcuts import render, get_object_or_404, redirect, reverse
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from .models import TNS_Class, Day
from .forms import TNS_ClassForm
# Create your views here.
def all_classes(request):
    """Render the page listing every available class."""
    context = {
        'all_classes': TNS_Class.objects.all(),
    }
    return render(request, 'tnsclasses/classes.html', context)
def class_detail(request, theclass_id):
    """Render the detail page for one class, 404 if the pk is unknown."""
    context = {
        'theclass': get_object_or_404(TNS_Class, pk=theclass_id),
    }
    return render(request, 'tnsclasses/class_detail.html', context)
@login_required
def add_class(request):
    """Let a superuser create a new class via TNS_ClassForm."""
    # Only site administrators may add classes.
    if not request.user.is_superuser:
        messages.error(request, 'Only site administrators can add a new class')
        return redirect(reverse('home'))

    if request.method != 'POST':
        form = TNS_ClassForm()
    else:
        form = TNS_ClassForm(request.POST, request.FILES)
        if form.is_valid():
            theclass = form.save()
            messages.success(request, 'Successfully added a new class!')
            return redirect(reverse('class_detail', args=[theclass.id]))
        # Fall through and re-render the bound form with its errors.
        messages.error(request, 'Could not add this new class - please ensure the form is valid.')

    return render(request, 'tnsclasses/add_class.html', {'form': form})
@login_required
def edit_class(request, theclass_id):
    """Edit an existing class (superusers only).

    GET renders the bound form; POST validates and saves, redirecting to
    the detail page on success. Fixes two defects in the original: the
    info banner crashed with AttributeError when ``theclass.day`` was
    NULL (the FK allows null), and the success message wrongly said
    "product" instead of "class".
    """
    if not request.user.is_superuser:
        messages.error(request, 'Sorry only site admin can do that')
        return redirect(reverse('home'))

    theclass = get_object_or_404(TNS_Class, pk=theclass_id)
    if request.method == 'POST':
        form = TNS_ClassForm(request.POST, request.FILES, instance=theclass)
        if form.is_valid():
            form.save()
            messages.success(request, 'Successfully updated class!')
            return redirect(reverse('class_detail', args=[theclass.id]))
        else:
            messages.error(request, 'Failed to update class. Please see errors in red.')
    else:
        form = TNS_ClassForm(instance=theclass)

    # day is nullable — guard so the banner never dereferences None.
    day_name = theclass.day.friendly_name if theclass.day else ''
    messages.info(request, f'You are editing {theclass.class_name} {day_name} {theclass.class_time}')
    template = 'tnsclasses/edit_class.html'
    context = {
        'form': form,
        'theclass': theclass,
    }
    return render(request, template, context)
@login_required
def delete_class_confirmation(request, theclass_id):
    """Show the confirmation page before a class is deleted."""
    # Same superuser gate as the other admin-only views.
    if not request.user.is_superuser:
        messages.error(request, 'Sorry only site admin can do that')
        return redirect(reverse('home'))
    context = {'theclass': get_object_or_404(TNS_Class, pk=theclass_id)}
    return render(request, 'tnsclasses/delete_class_confirmation_page.html', context)
@login_required
def delete_class(request, theclass_id):
    """ Delete a class from the site """
    # Superuser gate, mirroring the other admin-only views.
    if not request.user.is_superuser:
        messages.error(request, 'Sorry only site admin can do that')
        return redirect(reverse('home'))
    theclass = get_object_or_404(TNS_Class, pk=theclass_id)
    theclass.delete()
    messages.success(request, 'Class deleted!')
    # NOTE(review): this re-renders the listing instead of redirecting, so a
    # browser refresh re-requests the delete URL; a redirect to the classes
    # view would be the conventional post-action pattern. The 'template'
    # context key also appears unused by anything visible here.
    all_classes = TNS_Class.objects.all()
    template = 'tnsclasses/classes.html'
    context = {
        'all_classes': all_classes,
        'template': template,
    }
    return render(request, template, context)
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,525 | matthewpoyner/trainernats | refs/heads/main | /tnsclasses/migrations/0002_auto_20210215_1457.py | # Generated by Django 3.1.2 on 2021-02-15 01:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: re-declares TNS_Class.class_time as a nullable
    TimeField. Do not hand-edit applied migrations; add a new one instead.
    """

    dependencies = [
        ('tnsclasses', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tns_class',
            name='class_time',
            field=models.TimeField(blank=True, max_length=30, null=True),
        ),
    ]
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,526 | matthewpoyner/trainernats | refs/heads/main | /memberships/views.py | from django.shortcuts import render, get_object_or_404
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from .models import UserMembership
from .forms import UserMembershipForm
from checkout.models import Order
@login_required
def membership(request):
    """Display and update the logged-in user's membership details.

    Fix: the original unconditionally rebuilt ``form`` after handling a
    POST, which replaced an invalid bound form with a fresh one and
    silently discarded its validation errors. The bound form is now kept
    on the invalid path so errors render next to their fields.
    """
    membership = get_object_or_404(UserMembership, user=request.user)

    if request.method == 'POST':
        form = UserMembershipForm(request.POST, instance=membership)
        if form.is_valid():
            form.save()
            messages.success(
                request,
                'Your details have been updated successfully'
            )
            # Re-bind to the saved instance so the page shows fresh data.
            form = UserMembershipForm(instance=membership)
        else:
            messages.error(
                request,
                'Unable to update info - please check for errors'
            )
    else:
        form = UserMembershipForm(instance=membership)

    orders = membership.orders.all()
    template = 'memberships/membership.html'
    context = {
        'form': form,
        'orders': orders,
        'on_membership_page': True
    }
    return render(request, template, context)
def order_history(request, order_number):
    """Re-render a past order confirmation from the membership page."""
    order = get_object_or_404(Order, order_number=order_number)
    banner = (
        f'This is a past order confirmation for order number {order_number}.'
        'A confirmation email was sent on the order date'
    )
    messages.info(request, banner)
    # Reuse the checkout success template; from_membership tweaks its copy.
    context = {
        'order': order,
        'from_membership': True,
    }
    return render(request, 'checkout/checkout_success.html', context)
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,527 | matthewpoyner/trainernats | refs/heads/main | /tnsclasses/admin.py | from django.contrib import admin
from .models import Day, TNS_Class, Class_Type
# Register your models here.
class DayAdmin(admin.ModelAdmin):
    # Show the friendly name plus pk; order by pk so weekdays keep their
    # insertion order rather than alphabetical order.
    list_display = (
        'friendly_name',
        'pk',
    )
    ordering = ('pk',)
class Class_TypeAdmin(admin.ModelAdmin):
    # Single-column changelist for class categories.
    list_display = (
        'class_type',
    )
class TNS_ClassAdmin(admin.ModelAdmin):
    # Key scheduling/pricing fields at a glance in the changelist.
    list_display = (
        'class_name',
        'day',
        'class_description',
        'price',
        'class_time',
    )
# Expose the class-scheduling models in the Django admin.
admin.site.register(Day, DayAdmin)
admin.site.register(Class_Type, Class_TypeAdmin)
admin.site.register(TNS_Class, TNS_ClassAdmin)
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,528 | matthewpoyner/trainernats | refs/heads/main | /cart/contexts.py | from decimal import Decimal
from django.conf import settings
from django.shortcuts import get_object_or_404
from tnsclasses.models import TNS_Class
def cart_contents(request):
    """Context processor: expose the session cart's items and totals.

    The session cart maps item ids to quantities; each entry is resolved
    to its TNS_Class (404 if it vanished) and totals are accumulated.
    """
    session_cart = request.session.get('cart', {})
    cart_items = []
    total = 0
    product_count = 0

    for item_id, quantity in session_cart.items():
        product = get_object_or_404(TNS_Class, pk=item_id)
        total += quantity * product.price
        product_count += quantity
        cart_items.append({
            'item_id': item_id,
            'quantity': quantity,
            'product': product,
        })

    # No delivery/discount logic yet, so the grand total equals the total.
    return {
        'cart_items': cart_items,
        'total': total,
        'product_count': product_count,
        'grand_total': total,
    }
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,529 | matthewpoyner/trainernats | refs/heads/main | /tnsclasses/forms.py | from django import forms
from .models import TNS_Class, Day
class TNS_ClassForm(forms.ModelForm):
    """ModelForm for TNS_Class with friendlier day choices, placeholders,
    and a shared widget class.

    Improvements over the original: placeholders are table-driven instead
    of six repeated assignments, and the final loop iterates
    ``self.fields.values()`` since the dict key was unused.
    """

    class Meta:
        model = TNS_Class
        fields = '__all__'

    image = forms.ImageField(label='Image', required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Replace the default Day __str__ choices with the friendly names.
        days = Day.objects.all()
        self.fields['day'].choices = [
            (d.id, d.get_friendly_name()) for d in days
        ]
        placeholders = {
            'class_time': 'Enter time (hh:mm)',
            'class_name': 'Enter class name',
            'class_description': 'Enter a brief class description - displayed in the classes summary',
            'class_more_detail': 'Enter a full description of the class - displayed on the class details page',
            'price': 'Enter price to 2 decimal places',
            'image_url': 'Enter an image URL',
        }
        for field_name, text in placeholders.items():
            self.fields[field_name].widget.attrs['placeholder'] = text
        # Uniform spacing class on every widget.
        for field in self.fields.values():
            field.widget.attrs['class'] = 'mb-2'
| {"/tnsclasses/views.py": ["/tnsclasses/models.py", "/tnsclasses/forms.py"], "/tnsclasses/admin.py": ["/tnsclasses/models.py"], "/cart/contexts.py": ["/tnsclasses/models.py"], "/tnsclasses/forms.py": ["/tnsclasses/models.py"]} |
65,530 | mohsseha/simple-kfp | refs/heads/master | /comp_2/__init__.py | #WARNING: do not include any dependencies that would not work in a standard python3 installation
#(this is because the kfp pipeline builder does not have access to the packages defined in this docker image)
def run(input1: float, random_str:str) -> float:
    """Demo kfp component: runs a Clojure snippet and echoes input1.

    Only works inside the comp_2 docker image (needs `clj` on PATH).
    random_str is accepted but deliberately unused; input1 is returned
    unchanged so downstream ops can chain on it.
    """
    # all imports should be here:
    from typing import NamedTuple
    import os
    import comp_2.example as eg # this can't be "seen" by the pipeline code
    print(f"running in comp_2, input1 was {input1} only works if I'm running in the clojure container")
    print(f"I'm doing nothing with the {random_str}")
    # NOTE(review): os.system's exit status is ignored, so a failing clj
    # invocation would go unnoticed.
    os.system('clj -e "(+ 1 1)"') # the clojure container has a python3 interpreter installed
    print(f"calling a function with external dependencies {eg.ex_func()}")
return input1 | {"/comp_2/__init__.py": ["/comp_2/example.py"], "/pipeline.py": ["/comp_1/__init__.py", "/comp_2/__init__.py"]} |
65,531 | mohsseha/simple-kfp | refs/heads/master | /pipeline.py | # if running on a notebook you may need to install a few things:
# !pip3 install gitpython kfp
import datetime
import kfp as kfp
import kfp.components as comp
import git
repo = git.Repo(search_parent_directories=True)
sha = repo.head.object.hexsha  # current commit; used to pin component images
# Modeled after https://github.com/kubeflow/pipelines/blob/master/samples/core/lightweight_component/lightweight_component.ipynb
# General note debugging a pipeline is a pain because many fields don't
# exist until run-time
import comp_1 as comp_1
import comp_2 as comp_2
#might as well keep components a common variable in case you want to write multiple pipelines
# Each op runs its function in a docker image tagged with this repo's SHA,
# so the container contents always match the checked-out code.
comp_1_op=comp.func_to_container_op(comp_1.run,base_image=f"docker.io/mohsseha/comp_1:{sha}")
comp_2_op=comp.func_to_container_op(comp_2.run,base_image=f"docker.io/mohsseha/comp_2:{sha}")
import kfp.dsl as dsl
@dsl.pipeline(
    name='Simple Calculation pipeline',
    description='simple example that composes a couple of ops with different source packages'
)
def experiment_pipeline(
    in_1=3.1,
    in_2=323.1,
    username='random_username',
):
    """Chain comp_1 into comp_2: comp_2 consumes comp_1's 'result' output."""
    #Passing pipeline parameters to operation:
    comp_1_task=comp_1_op(in_1,in_2)
    #Passing a task output reference as operation arguments
    #For an operation with a single return value, the output reference can be accessed using `task.output` or `task.outputs['output_name']` syntax
    comp_2_task = comp_2_op(comp_1_task.outputs['result'], username)
    # This print fires while the DSL builds the graph, not when the run executes.
    print(f"this is run @ compile time not runtime {comp_2_task.output}")
#Specify pipeline argument values
args = {
    "in_1":3.1,
    "in_2":323.1,
    "username": 'random_username'
}
# Timestamped run name so repeated submissions are distinguishable.
now=datetime.datetime.now().strftime("%Y-%m-%d%H:%M:%S")
# compiling is optional; you really should not be doing it regularly
kfp.compiler.Compiler().compile(experiment_pipeline,"experiment_pipeline.yaml")
#Submit a pipeline run
#kfp.Client().create_run_from_pipeline_func(experiment_pipeline, arguments=args,run_name=now,experiment_name="simple_Poc")
| {"/comp_2/__init__.py": ["/comp_2/example.py"], "/pipeline.py": ["/comp_1/__init__.py", "/comp_2/__init__.py"]} |
65,532 | mohsseha/simple-kfp | refs/heads/master | /comp_2/example.py | #this is a regular python file you can import from wherever you want
import bs4
def ex_func():
    """Return prettified HTML for a deliberately malformed snippet.

    Passing an explicit parser fixes bs4's GuessedAtParserWarning and
    makes the output deterministic — without it, bs4 picks the "best"
    installed parser (e.g. lxml vs html.parser), which can vary between
    machines and change the prettified result.
    """
    soup = bs4.BeautifulSoup("<p>Some<b>bad<i>HTML", "html.parser")
    return soup.prettify()
| {"/comp_2/__init__.py": ["/comp_2/example.py"], "/pipeline.py": ["/comp_1/__init__.py", "/comp_2/__init__.py"]} |
65,533 | mohsseha/simple-kfp | refs/heads/master | /comp_1/__init__.py | #WARNING: do not include any dependencies that would not work in a standard python3 installation
#(this is because the kfp pipeline builder does not have access to the packages defined in this docker image)
from typing import NamedTuple
def run(input1: float, input2: float) -> NamedTuple('Cmp1Output', [('input1', float), ('input2', float), ('result', float)]):
    """Multiply the two inputs and return (input1, input2, result).

    Imports live inside the function because kfp serializes only the
    function body into the container op. Fixes the typo in the original
    log line ("inptu 2").
    """
    from collections import namedtuple

    print(f"running in comp_1, input1 was {input1} and input 2 = {input2}")
    Cmp1Output = namedtuple('Cmp1Output', ['input1', 'input2', 'result'])
    return Cmp1Output(input1, input2, input1 * input2)
| {"/comp_2/__init__.py": ["/comp_2/example.py"], "/pipeline.py": ["/comp_1/__init__.py", "/comp_2/__init__.py"]} |
65,535 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /multilabelMetrics/examplebasedranking.py | import numpy as np
from multilabelMetrics.functions import rankingMatrix, relevantIndexes, irrelevantIndexes
def oneError(y_test, probabilities):
    """One Error: fraction of samples whose top-ranked label is irrelevant.

    The label with the smallest rank (argmin of the ranking row) is the
    model's most confident prediction; it counts as an error when the
    ground truth marks it 0.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
    probabilities : sparse or dense matrix (n_samples, n_labels)

    Returns
    =======
    float : One Error
    """
    ranking = rankingMatrix(probabilities)
    n_samples = y_test.shape[0]
    errors = 0.0
    for row in range(n_samples):
        top_label = np.argmin(ranking[row, :])
        if y_test[row, top_label] == 0:
            errors += 1.0
    return float(errors) / float(n_samples)
def coverage(y_test, probabilities):
    """Coverage: average ranking depth needed to reach every relevant label.

    For each sample take the worst (largest) rank among its relevant
    labels, average over samples, then subtract 1 per the standard
    coverage definition.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
    probabilities : sparse or dense matrix (n_samples, n_labels)

    Returns
    =======
    float : coverage
    """
    ranking = rankingMatrix(probabilities)
    n_samples, n_labels = y_test.shape
    total_depth = 0.0
    for i in range(n_samples):
        deepest = 0.0
        for j in range(n_labels):
            if y_test[i, j] == 1 and ranking[i, j] > deepest:
                deepest = ranking[i, j]
        total_depth += deepest
    return float(total_depth) / float(n_samples) - 1.0
def averagePrecision(y_test, probabilities):
    """
    Average Precision
    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Matrix of labels used in the test phase
    probabilities: sparse or dense matrix (n_samples, n_labels)
        Probability of being into a class or not per each label
    Returns
    =======
    averageprecision : float
        Average Precision
    """
    averageprecision = 0.0
    averageprecisionsummatory = 0.0
    ranking = rankingMatrix(probabilities)
    for i in range(y_test.shape[0]):
        relevantVector = relevantIndexes(y_test, i)
        for j in range(y_test.shape[1]):
            average = 0.0
            if y_test[i, j] == 1:
                # Count relevant labels ranked at or above label j.
                for k in range(y_test.shape[1]):
                    if(y_test[i,k] == 1):
                        if ranking[i,k] <= ranking[i,j]:
                            average += 1.0
                # Guard: rank 0 would divide by zero.
                if ranking[i,j] != 0:
                    averageprecisionsummatory += average/ranking[i,j]
        # A sample with no relevant labels counts as a perfect score.
        if len(relevantVector) == 0:
            averageprecision += 1.0
        else:
            averageprecision += averageprecisionsummatory/float(len(relevantVector))
        # Reset the per-sample accumulator before the next sample.
        averageprecisionsummatory = 0.0
    averageprecision /= y_test.shape[0]
    return averageprecision
def rankingLoss(y_test, probabilities):
    """
    Ranking Loss
    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Matrix of labels used in the test phase
    probabilities: sparse or dense matrix (n_samples, n_labels)
        Probability of being into a class or not per each label
    Returns
    =======
    rankingloss : float
        Ranking Loss
    """
    rankingloss = 0.0
    for i in range(y_test.shape[0]):
        relevantVector = relevantIndexes(y_test, i)
        irrelevantVector = irrelevantIndexes(y_test, i)
        loss = 0.0
        # Count (relevant, irrelevant) label pairs ordered wrongly, i.e.
        # where the irrelevant label scores at least as high.
        for j in range(y_test.shape[1]):
            if y_test[i,j] == 1:
                for k in range(y_test.shape[1]):
                    if y_test[i,k] == 0:
                        if float(probabilities[i,j]) <= float(probabilities[i,k]):
                            loss += 1.0
        # Normalize by the number of pairs; samples that are all-relevant
        # or all-irrelevant contribute nothing.
        if len(relevantVector) != 0 and len(irrelevantVector) != 0:
            rankingloss += loss/float(len(relevantVector)*len(irrelevantVector))
    rankingloss /= y_test.shape[0]
return rankingloss | {"/multilabelMetrics/examplebasedranking.py": ["/multilabelMetrics/functions.py"], "/testUCM.py": ["/multilabelMetrics/examplebasedclassification.py", "/multilabelMetrics/examplebasedranking.py"], "/trainUCM.py": ["/MLFNet/MLFNet_GC.py"]} |
65,536 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /multilabelMetrics/examplebasedclassification.py | def subsetAccuracy1(y_test, predictions):
    """
    The subset accuracy evaluates the fraction of correctly classified examples
    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Matrix of labels used in the test phase
    predictions: sparse or dense matrix (n_samples, n_labels)
        Matrix of predicted labels given by our model
    Returns
    =======
    subsetaccuracy : float
        Subset Accuracy of our model
    """
    subsetaccuracy = 0.0
    for i in range(y_test.shape[0]):
        same = True
        for j in range(y_test.shape[1]):
            # A single differing label disqualifies the whole example.
            if y_test[i,j] != predictions[i,j]:
                same = False
                break
        if same:
            subsetaccuracy += 1.0
    return subsetaccuracy/y_test.shape[0]
def hammingLoss(y_test, predictions):
    """Fraction of misclassified instance-label pairs.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Ground-truth label matrix.
    predictions : sparse or dense matrix (n_samples, n_labels)
        Predicted label matrix.

    Returns
    =======
    float : Hamming loss, averaged over samples.
    """
    n_samples, n_labels = y_test.shape
    total = 0.0
    for i in range(n_samples):
        mismatches = 0.0
        for j in range(n_labels):
            if int(y_test[i, j]) != int(predictions[i, j]):
                mismatches += 1.0
        total += mismatches / n_labels
    return total / n_samples
def accuracy1(y_test, predictions):
    """Example-based (Jaccard) accuracy averaged over samples.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Ground-truth label matrix.
    predictions : sparse or dense matrix (n_samples, n_labels)
        Predicted label matrix.

    Returns
    =======
    float : mean |Y ∩ h(x)| / |Y ∪ h(x)| over samples (0 when the union
            is empty).
    """
    n_samples, n_labels = y_test.shape
    score = 0.0
    for i in range(n_samples):
        union = 0.0
        intersection = 0.0
        for j in range(n_labels):
            truth = int(y_test[i, j]) == 1
            pred = int(predictions[i, j]) == 1
            if truth or pred:
                union += 1
            if truth and pred:
                intersection += 1
        if union != 0:
            score += float(intersection / union)
    return float(score / n_samples)
def precision1(y_test, predictions):
    """Example-based precision: |Y ∩ h(x)| / |h(x)|, averaged over samples.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Ground-truth label matrix.
    predictions : sparse or dense matrix (n_samples, n_labels)
        Predicted label matrix.

    Returns
    =======
    float : precision (samples with no predicted labels contribute 0).
    """
    n_samples, n_labels = y_test.shape
    total = 0.0
    for i in range(n_samples):
        predicted_count = 0.0
        hits = 0.0
        for j in range(n_labels):
            predicted_count += int(predictions[i, j])
            if int(y_test[i, j]) == 1 and int(predictions[i, j]) == 1:
                hits += 1
        if predicted_count != 0:
            total += float(hits / predicted_count)
    return float(total / n_samples)
def recall1(y_test, predictions):
    """Example-based recall: |Y ∩ h(x)| / |Y|, averaged over samples.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Ground-truth label matrix.
    predictions : sparse or dense matrix (n_samples, n_labels)
        Predicted label matrix.

    Returns
    =======
    float : recall (samples with no relevant labels contribute 0).
    """
    n_samples, n_labels = y_test.shape
    total = 0.0
    for i in range(n_samples):
        relevant_count = 0.0
        hits = 0.0
        for j in range(n_labels):
            relevant_count += int(y_test[i, j])
            if y_test[i, j] == 1 and int(predictions[i, j]) == 1:
                hits += 1
        if relevant_count != 0:
            total += float(hits / relevant_count)
    return total / n_samples
def fbeta1(y_test, predictions, beta=1):
    """F-beta score built from example-based precision and recall.

    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Ground-truth label matrix.
    predictions : sparse or dense matrix (n_samples, n_labels)
        Predicted label matrix.
    beta : weighting of recall vs precision (default 1 → F1).

    Returns
    =======
    float : F-beta (0 when both precision and recall are 0).
    """
    pr = precision1(y_test, predictions)
    re = recall1(y_test, predictions)
    beta_sq = pow(beta, 2)
    numerator = float((1 + beta_sq) * pr * re)
    denominator = float(beta_sq * pr + re)
    if denominator == 0:
        return 0.0
    return numerator / denominator
| {"/multilabelMetrics/examplebasedranking.py": ["/multilabelMetrics/functions.py"], "/testUCM.py": ["/multilabelMetrics/examplebasedclassification.py", "/multilabelMetrics/examplebasedranking.py"], "/trainUCM.py": ["/MLFNet/MLFNet_GC.py"]} |
65,537 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /testUCM.py | #!/usr/bin/env python
# coding: utf-8
from models.mobilenet_v2 import MobileNetV2
from efficientnet.efficientnet.model import EfficientNetB3, EfficientNetB2,get_dropout,get_swish
from keras.utils.generic_utils import get_custom_objects
from keras import backend as K
from keras.layers import Activation
def swish_activation(x):
    # Swish activation: x * sigmoid(x). Kept for compatibility with models
    # saved against this name.
    return (K.sigmoid(x) * x)
# get_custom_objects().update({'swish': Activation(swish_activation)})
# Register the names the saved model was trained with so load_model can
# deserialize them.
# NOTE(review): get_swish/get_dropout are factory functions imported from
# the efficientnet package; wrapping them directly in Activation (rather
# than their return values) looks suspect — confirm it matches how the
# model was saved.
get_custom_objects().update({'swish': Activation(get_swish)})
get_custom_objects().update({'FixedDropout': Activation(get_dropout)})
import numpy as np
from multilabelMetrics import *
import scipy.io as scio
import imageio
from keras.models import load_model
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import SGD, Adam, Nadam, RMSprop, Adagrad
from sklearn.metrics import hamming_loss, multilabel_confusion_matrix, precision_recall_fscore_support, \
balanced_accuracy_score, recall_score, fbeta_score
from keras.callbacks import ReduceLROnPlateau, EarlyStopping
from sklearn.metrics import classification_report, confusion_matrix, precision_score,f1_score
from sklearn.metrics import accuracy_score
import keras.backend as K
####################################
from multilabelMetrics.examplebasedclassification import subsetAccuracy1, hammingLoss, recall1, precision1, accuracy1,fbeta1
from multilabelMetrics.examplebasedranking import rankingLoss, oneError, coverage, averagePrecision
from multilabelMetrics.labelbasedclassification import accuracyMacro, accuracyMicro, precisionMacro, precisionMicro, \
recallMacro, recallMicro
def cal_base(y_true, y_pred):
    """Return (TP, TN, FP, FN) summed over the whole tensor, after rounding
    both inputs into {0, 1}."""
    y_pred_positive = K.round(K.clip(y_pred, 0, 1))
    y_pred_negative = 1 - y_pred_positive
    y_positive = K.round(K.clip(y_true, 0, 1))
    y_negative = 1 - y_positive
    # Element-wise products pick out each confusion-matrix cell.
    TP = K.sum(y_positive * y_pred_positive)
    TN = K.sum(y_negative * y_pred_negative)
    FP = K.sum(y_negative * y_pred_positive)
    FN = K.sum(y_positive * y_pred_negative)
    return TP, TN, FP, FN
def accuracy(y_true, y_pred):
    # (TP + TN) / all label slots; epsilon guards against division by zero.
    TP, TN, FP, FN = cal_base(y_true, y_pred)
    ACC = (TP + TN) / (TP + FP + FN + TN + K.epsilon())
    return ACC

def sensitivity(y_true, y_pred):
    """ recall """
    TP, TN, FP, FN = cal_base(y_true, y_pred)
    SE = TP/(TP + FN + K.epsilon())
    return SE

def precision(y_true, y_pred):
    # TP / predicted-positive.
    TP, TN, FP, FN = cal_base(y_true, y_pred)
    PC = TP/(TP + FP + K.epsilon())
    return PC

def specificity(y_true, y_pred):
    # TN / actual-negative (true negative rate).
    TP, TN, FP, FN = cal_base(y_true, y_pred)
    SP = TN / (TN + FP + K.epsilon())
    return SP

def f1_socre(y_true, y_pred):
    # Harmonic mean of precision and recall. NOTE(review): the name is a
    # typo ("socre") but must stay — load_model resolves it by this exact
    # name through custom_objects.
    SE = sensitivity(y_true, y_pred)
    PC = precision(y_true, y_pred)
    F1 = 2 * SE * PC / (SE + PC + K.epsilon())
    return F1
####################################
# precision
def P(y_true, y_pred):
    # Precision with a 0.20 decision threshold (not the usual 0.5).
    true_positives = K.sum(K.cast(K.greater(K.clip(y_true * y_pred, 0, 1), 0.20), 'float32'))
    pred_positives = K.sum(K.cast(K.greater(K.clip(y_pred, 0, 1), 0.20), 'float32'))
    precision = true_positives / (pred_positives + K.epsilon())
    return precision

# recall
def R(y_true, y_pred):
    # Recall with the same 0.20 threshold.
    true_positives = K.sum(K.cast(K.greater(K.clip(y_true * y_pred, 0, 1), 0.20), 'float32'))
    poss_positives = K.sum(K.cast(K.greater(K.clip(y_true, 0, 1), 0.20), 'float32'))
    recall = true_positives / (poss_positives + K.epsilon())
    return recall

# f-measure
def F(y_true, y_pred):
    # NOTE(review): no epsilon here — yields 0/0 when both P and R are 0.
    p_val = P(y_true, y_pred)
    r_val = R(y_true, y_pred)
    f_val = 2 * p_val * r_val / (p_val + r_val)
    return f_val
# Accuracies:
def findMetrics(yTrue, yPred):
    """Compute overall, example-based (per row) and label-based (per column)
    precision/recall/F-scores and accuracies from binary label matrices;
    prints a summary and returns every metric as a tuple."""
    # precision overall
    positive_predictions = np.count_nonzero(yPred)  # denominator
    true_positives = np.sum(np.logical_and(yTrue == 1, yPred == 1))  # numerator
    if positive_predictions == 0:
        precision = 0
    else:
        precision = true_positives / positive_predictions
    # recall overall
    # NOTE(review): assumes yTrue has at least one positive; divides by zero otherwise.
    relevant_positives = np.count_nonzero(yTrue)  # denominator
    recall = true_positives / relevant_positives
    # F Measure overall
    numerator = precision * recall
    denominator = precision + recall
    if denominator == 0:
        f_measure = 0
    else:
        f_measure = (2 * numerator) / denominator
    # precision per row/column
    positive_predictions_row = np.count_nonzero(yPred, axis=1)  # denominators, one per sample (example-based)
    positive_predictions_col = np.count_nonzero(yPred, axis=0)  # denominators, one per label (label-based)
    true_positives_row = np.sum(np.logical_and(yTrue == 1, yPred == 1), axis=1)  # numerators
    true_positives_col = np.sum(np.logical_and(yTrue == 1, yPred == 1), axis=0)  # numerators
    positive_predictions_row = positive_predictions_row.astype('float')
    positive_predictions_col = positive_predictions_col.astype('float')
    true_positives_row = true_positives_row.astype('float')
    true_positives_col = true_positives_col.astype('float')
    # Safe element-wise division: entries with a zero denominator yield 0.
    precision_per_row = np.true_divide(true_positives_row, positive_predictions_row,
                                       out=np.zeros_like(true_positives_row), where=positive_predictions_row != 0)
    precision_per_col = np.true_divide(true_positives_col, positive_predictions_col,
                                       out=np.zeros_like(true_positives_col), where=positive_predictions_col != 0)
    avrg_precision_row = np.mean(precision_per_row)
    avrg_precision_col = np.mean(precision_per_col)
    # multi_label accuracy overall
    accuracy2 = true_positives / (np.sum(np.logical_or(yTrue == 1, yPred == 1)))  # OA (overall Jaccard accuracy)
    acc2_denominator_row = np.sum(np.logical_or(yTrue == 1, yPred == 1), axis=1)
    acc2_denominator_row = acc2_denominator_row.astype('float')
    accuracy2_row = np.true_divide(true_positives_row, acc2_denominator_row, out=np.zeros_like(true_positives_row),
                                   where=acc2_denominator_row != 0)
    avrg_acc2_row = np.mean(accuracy2_row)  # avrg_acc2_row is the example-based accuracy
    # recall per row/column
    relevant_positives_row = np.count_nonzero(yTrue, axis=1)  # denominators
    relevant_positives_col = np.count_nonzero(yTrue, axis=0)  # denominators
    relevant_positives_row = relevant_positives_row.astype('float')
    relevant_positives_col = relevant_positives_col.astype('float')
    recall_per_row = np.true_divide(true_positives_row, relevant_positives_row, out=np.zeros_like(true_positives_row),
                                    where=relevant_positives_row != 0)
    recall_per_col = np.true_divide(true_positives_col, relevant_positives_col, out=np.zeros_like(true_positives_col),
                                    where=relevant_positives_col != 0)
    avrg_recall_row = np.mean(recall_per_row)
    avrg_recall_col = np.mean(recall_per_col)
    # F Measure per row (F1 and F2 from the averaged row precision/recall)
    numerator_row = avrg_precision_row * avrg_recall_row
    denominator_row = avrg_precision_row + avrg_recall_row
    if denominator_row == 0:
        f1_measure_row = 0
        f2_measure_row = 0
    else:
        f1_measure_row = (2 * numerator_row) / denominator_row
        f2_measure_row = ((5 * numerator_row) / ((4 * avrg_precision_row) + (avrg_recall_row)))
    print("Accuracy is :: " + str(avrg_acc2_row))
    print("F1 Score is :: " + str(f1_measure_row))
    print("F2 Score is :: " + str(f2_measure_row))
    print("Precision row :: " + str(avrg_precision_row))
    print("Recall row :: " + str(avrg_recall_row))
    print("Precision column :: " + str(avrg_precision_col))
    print("Recall column :: " + str(avrg_recall_col))
    return accuracy2, precision, recall, f_measure, avrg_precision_row, avrg_recall_row, f1_measure_row, f2_measure_row, avrg_precision_col, avrg_recall_col, avrg_acc2_row
# different threshold values
# def thresholding1(test_set, test_labels):
# model = load_model('重新整理的model/UCM/VGG1222.hdf5', custom_objects={'P': P,'R': R, 'F':F, 'precision':precision,'f1_socre':f1_socre, 'sensitivity':sensitivity,'specificity':specificity})
# out = model.predict(test_set)
# out = np.array(out)
# # threshold = np.arange(0.1,0.9,0.05)
# # for t in threshold:
# for i in range(1):
# # Thresholding function
# threshold = np.arange(0.1, 0.9, 0.01)
#
# acc = []
# accuracies = []
# otsu = []
#
# best_threshold = np.zeros(out.shape[1])
#
# for i in range(out.shape[1]):
# y_prob = np.array(out[:, i])
#
# for j in threshold:
# y_pred = [1 if prob >= j else 0 for prob in y_prob]
# # acc.append( matthews_corrcoef(test_labels[:,i],y_pred))
# # acc.append( fbeta_score(test_labels[:,i],y_pred,beta=1))
# acc.append(accuracy_score(test_labels[:, i], y_pred))
#
# acc = np.array(acc)
# index = np.where(acc == acc.max())
# accuracies.append(acc.max())
# best_threshold[i] = threshold[index[0][0]]
# acc = []
#
# # best_threshold=[0.45]*17
# print('best_threshold:',best_threshold)
#
# # y_pred = np.array([[1 if out[i, j] >= best_threshold[j] else 0 for j in range(test_labels.shape[1])] for i in
# # range(len(test_labels))])
#
# y_pred = np.array([[1 if out[i, j] >= 0.5 else 0 for j in range(test_labels.shape[1])] for i in
# range(len(test_labels))])
#
# print('hamming_loss:',hamming_loss(test_labels, y_pred))
#
#
# x = findMetrics(test_labels, y_pred)
# print(x)
# print(' Classification Report:\n', classification_report(test_labels, y_pred), '\n')
def thresholding1(test_set, test_labels):
    """Load the trained model, binarize its predictions at 0.5, and print
    every example-based and label-based evaluation metric.

    custom_objects must contain every non-standard name the model was
    saved with (including the misspelled 'f1_socre').
    """
    model = load_model('generatedmodel/mobilenetv2.hdf5', custom_objects={'P': P,'R': R, 'F':F, 'precision':precision,'f1_socre':f1_socre, 'sensitivity':sensitivity,'specificity':specificity})
    out = model.predict(test_set)
    out = np.array(out)
    # Fixed 0.5 threshold per label (the per-label tuned thresholds are in
    # the commented-out variant above).
    y_pred = np.array([[1 if out[i, j] >= 0.5 else 0 for j in range(test_labels.shape[1])] for i in
                       range(len(test_labels))])
    print('hamming_loss:',hamming_loss(test_labels, y_pred))
    x = findMetrics(test_labels, y_pred)
    print(x)
    print(' Classification Report:\n', classification_report(test_labels, y_pred), '\n')
    print("另一个评价结果,examplebased")
    # NOTE(review): the ranking metrics below (rankingLoss, oneError,
    # coverage, averagePrecision) document a probabilities argument but are
    # fed the binarized y_pred rather than the raw scores 'out' — confirm.
    print('rankingLoss',rankingLoss(test_labels, y_pred))
    print('subsetAccuracy',subsetAccuracy1(test_labels, y_pred))
    print('hammingLoss', hammingLoss(test_labels, y_pred))
    print('accuracy1', accuracy1(test_labels, y_pred))
    print('precision1', precision1(test_labels, y_pred))
    print('recall1', recall1(test_labels, y_pred))
    print('fbeta1', fbeta1(test_labels, y_pred))
    print('oneError', oneError(test_labels, y_pred))
    print('coverage', coverage(test_labels, y_pred))
    print('averagePrecision', averagePrecision(test_labels, y_pred))
    print("label based")
    print('accuracyMacro', accuracyMacro(test_labels, y_pred))
    print('accuracyMicro', accuracyMicro(test_labels, y_pred))
    print('precisionMacro', precisionMacro(test_labels, y_pred))
    print('precisionMicro', precisionMicro(test_labels, y_pred))
    print('recallMacro', recallMacro(test_labels, y_pred))
    print('recallMicro', recallMicro(test_labels, y_pred))
OBSERVATIONS_FILE = 'UcmImages.npy' # The file containing the data samples.
LABELS_FILE = 'UcmLabels.npy' # The file containing the labels.
TESTING_DATA_NUM = 420  # number of samples held out as the test split
images = np.load(OBSERVATIONS_FILE)
labels = np.load(LABELS_FILE)
# Deterministic shuffle (seed 42) so the same 420-sample test split is
# reproduced on every run; images and labels are permuted identically.
random_indices = np.arange(images.shape[0])
np.random.seed(42)
np.random.shuffle(random_indices)
labels = labels[random_indices]
images = images[random_indices]
test_set = images[:TESTING_DATA_NUM]
test_labels = labels[:TESTING_DATA_NUM]
print('shape',test_labels.shape)
thresholding1(test_set, test_labels)
| {"/multilabelMetrics/examplebasedranking.py": ["/multilabelMetrics/functions.py"], "/testUCM.py": ["/multilabelMetrics/examplebasedclassification.py", "/multilabelMetrics/examplebasedranking.py"], "/trainUCM.py": ["/MLFNet/MLFNet_GC.py"]} |
65,538 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /models/othermodel.py |
import numpy as np
import scipy.io as scio
import scipy.ndimage as im
import imageio
import matplotlib.pyplot as plt
import keras
from keras import models
from keras import layers
from keras import optimizers
from keras import applications
from keras import backend as K
from keras.preprocessing.image import ImageDataGenerator
from keras.models import load_model, Model
from keras.applications.xception import Xception
from keras.applications.resnet50 import ResNet50
from keras.applications.inception_v3 import InceptionV3
from keras.applications.inception_resnet_v2 import InceptionResNetV2
from keras.layers import Dense, Activation, Flatten, Conv2D, RepeatVector
from keras.layers import GlobalAveragePooling2D, BatchNormalization, ZeroPadding2D, UpSampling2D
from keras.models import Sequential
from keras.layers import Reshape, Add, Multiply, Lambda, AveragePooling2D
from keras.layers import concatenate
from keras.layers import MaxPooling2D, Dropout, Input, MaxPool2D
from keras.optimizers import SGD, Adam, Nadam, RMSprop, Adagrad
from keras.regularizers import l2
from keras.callbacks import ModelCheckpoint
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import hamming_loss
from keras.layers import LSTM
from keras.layers import TimeDistributed
from keras.layers import Bidirectional
from keras.applications.vgg16 import VGG16
from keras.preprocessing import image
from keras.applications.vgg16 import preprocess_input
from keras.layers.advanced_activations import PReLU
from keras.activations import linear as linear_activation
from keras import initializers
from keras.callbacks import ReduceLROnPlateau, EarlyStopping
from sklearn.metrics import fbeta_score
from sklearn.metrics import classification_report,confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.metrics import f1_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from keras.utils import to_categorical
def VGGNET():
    """Build a VGG16-based multi-label classifier with 17 sigmoid outputs.

    The ImageNet-pretrained convolutional base is kept fully trainable;
    features are flattened and fed to a single dense sigmoid layer.

    Returns: an uncompiled keras `Sequential` model.
    """
    vgg_model = VGG16(include_top=False, weights='imagenet', input_shape=(256, 256, 3))
    # FIX: the loop variable was named `layers`, shadowing the
    # `keras.layers` module imported at file level; renamed to `layer`.
    for layer in vgg_model.layers:
        layer.trainable = True
    model = Sequential()
    model.add(vgg_model)
    model.add(Flatten(name='flatten_1'))
    model.add(Dense(17, activation='sigmoid', name='dense_1'))
    return model
def CA_VGG_LSTM():
    """VGG16-based class-attention network with an LSTM aggregation head.

    Reuses every pretrained VGG16 layer except the last conv block,
    rebuilds block 5, maps features to 17 per-class activation maps
    (28x28, per the Reshape below), and lets an LSTM read the flattened
    maps before the 17-way sigmoid output.

    Returns: an uncompiled keras `Sequential` model.
    """
    vgg_model = VGG16(include_top=False, weights='imagenet', input_shape=(247, 242, 3))
    model = Sequential()
    # Copy all pretrained layers except the final conv block.
    # FIX: dropped the unused `layer_type` local computed inside the loop.
    for layer in tuple(vgg_model.layers[:-5]):
        model.add(layer)
    model.add(Conv2D(512, (3, 3), activation='relu', name='block5_conv1'))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2'))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3'))
    # 1x1 conv -> one activation map per class.
    model.add(Conv2D(17, kernel_size=(1, 1), strides=(1, 1), kernel_initializer='glorot_uniform'))
    model.add(Reshape((17, 28*28), input_shape=(28, 28, 17)))
    model.add(LSTM(17, input_shape=(17, 28*28), activation='tanh', kernel_initializer=initializers.RandomUniform(minval=-0.1, maxval=0.1, seed=None)))
    model.add(Dense(17, activation='sigmoid'))
    return model
def CA_VGG_BILSTM():
    """Bidirectional-LSTM variant of CA_VGG_LSTM (merge_mode='sum').

    Same backbone and class-activation-map head as CA_VGG_LSTM, but the
    flattened maps are read by a Bidirectional LSTM whose two directions
    are summed.

    Returns: an uncompiled keras `Sequential` model.
    """
    vgg_model = VGG16(include_top=False, weights='imagenet', input_shape=(247, 242, 3))
    model = Sequential()
    # Copy all pretrained layers except the final conv block.
    # FIX: dropped the unused `layer_type` local computed inside the loop.
    for layer in tuple(vgg_model.layers[:-5]):
        model.add(layer)
    model.add(Conv2D(512, (3, 3), activation='relu', name='block5_conv1'))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2'))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3'))
    # 1x1 conv -> one activation map per class.
    model.add(Conv2D(17, kernel_size=(1, 1), strides=(1, 1), kernel_initializer='glorot_uniform'))
    model.add(Reshape((17, 28*28), input_shape=(28, 28, 17)))
    model.add(Bidirectional(LSTM(17, input_shape=(17, 28*28), activation='tanh', kernel_initializer=initializers.RandomUniform(minval=-0.1, maxval=0.1, seed=None)), merge_mode='sum'))
    model.add(Dense(17, activation='sigmoid'))
    return model
def GoogLeNet():
    """InceptionV3 backbone (ImageNet weights, no top) with a global
    average pool, a 1024-unit ReLU layer and 17 sigmoid outputs.

    Returns: an uncompiled keras functional `Model`.
    """
    backbone = InceptionV3(include_top=False, weights='imagenet', input_shape=(256, 256, 3))
    features = GlobalAveragePooling2D()(backbone.output)
    hidden = Dense(1024, activation='relu')(features)
    predictions = Dense(17, activation='sigmoid')(hidden)
    return Model(inputs=backbone.input, outputs=predictions)
def ResNet50():
    """ResNet50 backbone (ImageNet weights, no top) with a global average
    pool, a 1024-unit ReLU layer and 17 sigmoid outputs.

    NOTE(review): this function shadows the `ResNet50` imported from
    `keras.applications.resnet50` at the top of the file, which is why the
    backbone is fetched through the `applications` module path instead.

    Returns: an uncompiled keras functional `Model`.
    """
    backbone = applications.resnet50.ResNet50(weights='imagenet', include_top=False, input_shape=(256, 256, 3))
    features = GlobalAveragePooling2D()(backbone.output)
    hidden = Dense(1024, activation='relu')(features)
    predictions = Dense(17, activation='sigmoid')(hidden)
    return Model(inputs=backbone.input, outputs=predictions)
def residual_block(input, input_channels=None, output_channels=None, kernel_size=(3, 3), stride=1):
    """Pre-activation bottleneck residual block (BN -> ReLU -> Conv x3).

    input: 4-D feature tensor.
    input_channels: bottleneck width; defaults to output_channels // 4.
    output_channels: channel count of the output; defaults to the input's
        last dimension (old-TF `.value` shape access).
    kernel_size: size of the middle spatial convolution.
    stride: spatial stride of the middle convolution and of the 1x1
        projection shortcut when one is needed.
    Returns a tensor with `output_channels` channels.
    """
    if output_channels is None:
        output_channels = input.get_shape()[-1].value
    if input_channels is None:
        input_channels = output_channels // 4
    strides = (stride, stride)
    # Bottleneck: 1x1 reduce -> kxk spatial -> 1x1 expand, pre-activated.
    x = BatchNormalization()(input)
    x = Activation('relu')(x)
    x = Conv2D(input_channels, (1, 1))(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    # FIX: pass the (stride, stride) tuple consistently; the original mixed
    # the raw int here with the tuple in the shortcut (equivalent in Keras,
    # but inconsistent).
    x = Conv2D(input_channels, kernel_size, padding='same', strides=strides)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(output_channels, (1, 1), padding='same')(x)
    # Projection shortcut whenever shape (channels or resolution) changes.
    # NOTE(review): this compares the bottleneck width (not the incoming
    # tensor's channels) against output_channels, so the projection fires
    # in almost every call — preserved as-is for checkpoint compatibility.
    if input_channels != output_channels or stride != 1:
        input = Conv2D(output_channels, (1, 1), padding='same', strides=strides)(input)
    x = Add()([x, input])
    return x
def attention_block(input, input_channels=None, output_channels=None, encoder_depth=1):
    """Residual Attention Network attention module.

    Builds a trunk branch (plain residual blocks) and a soft-mask branch
    (down/up-sampling hourglass ending in a sigmoid), then combines them
    as (1 + mask) * trunk. `encoder_depth` controls how many extra
    down/up-sampling stages (with skip connections) the mask branch gets.
    """
    # p/t/r: number of residual blocks before/inside-trunk/per-mask-stage.
    p = 1
    t = 2
    r = 1
    if input_channels is None:
        input_channels = input.get_shape()[-1].value
    if output_channels is None:
        output_channels = input_channels
    # First Residual Block
    for i in range(p):
        input = residual_block(input)
    # Trunc Branch
    output_trunk = input
    for i in range(t):
        output_trunk = residual_block(output_trunk)
    # Soft Mask Branch
    ## encoder
    ### first down sampling
    output_soft_mask = MaxPool2D(padding='same')(input)  # 32x32
    for i in range(r):
        output_soft_mask = residual_block(output_soft_mask)
    skip_connections = []
    for i in range(encoder_depth - 1):
        ## skip connections
        output_skip_connection = residual_block(output_soft_mask)
        skip_connections.append(output_skip_connection)
        # print ('skip shape:', output_skip_connection.get_shape())
        ## down sampling
        output_soft_mask = MaxPool2D(padding='same')(output_soft_mask)
        for _ in range(r):
            output_soft_mask = residual_block(output_soft_mask)
    ## decoder
    # Skips are consumed deepest-first, mirroring the encoder order.
    skip_connections = list(reversed(skip_connections))
    for i in range(encoder_depth - 1):
        ## upsampling
        for _ in range(r):
            output_soft_mask = residual_block(output_soft_mask)
        output_soft_mask = UpSampling2D()(output_soft_mask)
        ## skip connections
        output_soft_mask = Add()([output_soft_mask, skip_connections[i]])
    ### last upsampling
    for i in range(r):
        output_soft_mask = residual_block(output_soft_mask)
    output_soft_mask = UpSampling2D()(output_soft_mask)
    ## Output: two 1x1 convs then a sigmoid gate in [0, 1].
    output_soft_mask = Conv2D(input_channels, (1, 1))(output_soft_mask)
    output_soft_mask = Conv2D(input_channels, (1, 1))(output_soft_mask)
    output_soft_mask = Activation('sigmoid')(output_soft_mask)
    # Attention: (1 + output_soft_mask) * output_trunk
    output = Lambda(lambda x: x + 1)(output_soft_mask)
    output = Multiply()([output, output_trunk])  #
    # Last Residual Block
    for i in range(p):
        output = residual_block(output)
    return output
def ResAttentionNet56(shape=(256, 256, 3), n_channels=64, n_classes=17,
                      dropout=0):
    """Residual Attention Network-56 for multi-label classification.

    Stacks residual blocks interleaved with attention modules of
    decreasing encoder depth, global-average-pools the final 7x7 map,
    and ends in an `n_classes`-way sigmoid layer.

    Returns: an uncompiled keras functional `Model`.
    """
    image = Input(shape=shape)
    net = Conv2D(n_channels, (7, 7), strides=(2, 2), padding='same')(image)  # 112x112
    net = BatchNormalization()(net)
    net = Activation('relu')(net)
    net = MaxPool2D(pool_size=(3, 3), strides=(2, 2), padding='same')(net)  # 56x56
    net = residual_block(net, output_channels=n_channels * 4)               # 56x56
    net = attention_block(net, encoder_depth=3)                             # bottleneck 7x7
    net = residual_block(net, output_channels=n_channels * 8, stride=2)     # 28x28
    net = attention_block(net, encoder_depth=2)                             # bottleneck 7x7
    net = residual_block(net, output_channels=n_channels * 16, stride=2)    # 14x14
    net = attention_block(net, encoder_depth=1)                             # bottleneck 7x7
    net = residual_block(net, output_channels=n_channels * 32, stride=2)    # 7x7
    net = residual_block(net, output_channels=n_channels * 32)
    net = residual_block(net, output_channels=n_channels * 32)
    # Pool over the whole remaining spatial extent (old-TF `.value` access).
    pool_size = (net.get_shape()[1].value, net.get_shape()[2].value)
    net = AveragePooling2D(pool_size=pool_size, strides=(1, 1))(net)
    if dropout:
        net = Dropout(dropout)(net)
    predictions = Dense(n_classes, activation='sigmoid')(net)
    return Model(image, predictions)
def _gap_flatten(tensor):
    """Average-pool *tensor* over its entire spatial extent (old-TF
    `.value` shape access) and flatten it to a 1-D feature vector."""
    pool_size = (tensor.get_shape()[1].value, tensor.get_shape()[2].value)
    pooled = AveragePooling2D(pool_size=pool_size, strides=(1, 1))(tensor)
    return Flatten()(pooled)


def my_model(shape=(256, 256, 3)):
    """Custom VGG-style multi-scale network for 17-label classification.

    Each conv stage is concatenated with a max-pooled pyramid of the raw
    input (a1..a5), features from all five stages are globally pooled,
    flattened and concatenated, then passed through two 4096-unit dense
    layers to a 17-way sigmoid output.

    DECOMPOSITION: the repeated pool+flatten snippet (5 copies in the
    original) is factored into the `_gap_flatten` helper above.

    Returns: an uncompiled keras functional `Model`.
    """
    input_ = Input(shape=shape)
    # Stage 1 (64 channels) + pooled raw-input skip.
    a1 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(input_)
    x1 = Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu')(input_)
    x1 = Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu')(x1)
    x1 = BatchNormalization()(x1)
    x1 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x1)
    x1 = concatenate([a1, x1])
    # Stage 2 (128 channels).
    a2 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(a1)
    x2 = Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu')(x1)
    x2 = Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu')(x2)
    x2 = BatchNormalization()(x2)
    x2 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x2)
    x2 = concatenate([a2, x2])
    # Stage 3 (256 channels, three convs).
    a3 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(a2)
    x3 = Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu')(x2)
    x3 = Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu')(x3)
    x3 = Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu')(x3)
    x3 = BatchNormalization()(x3)
    x3 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x3)
    x3 = concatenate([a3, x3])
    # Stage 4 (512 channels, three convs).
    a4 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(a3)
    x4 = Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu')(x3)
    x4 = Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu')(x4)
    x4 = Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu')(x4)
    x4 = BatchNormalization()(x4)
    x4 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x4)
    x4 = concatenate([a4, x4])
    # Stage 5 (512 channels).
    # NOTE(review): the first conv here has no inline activation — a ReLU
    # is applied only after BatchNormalization; preserved as-is.
    a5 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(a4)
    x5 = Conv2D(512, (3, 3), strides=(1, 1), padding='same')(x4)
    x5 = Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu')(x5)
    x5 = Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu')(x5)
    x5 = BatchNormalization()(x5)
    x5 = Activation('relu')(x5)
    x5 = MaxPool2D(pool_size=(2, 2), strides=(2, 2), padding='same')(x5)
    x5 = concatenate([a5, x5])
    # Pool + flatten every stage (deepest first, matching original layer order).
    x5 = _gap_flatten(x5)
    x4 = _gap_flatten(x4)
    x3 = _gap_flatten(x3)
    x2 = _gap_flatten(x2)
    x1 = _gap_flatten(x1)
    x = concatenate([x1, x2, x3, x4, x5], axis=-1)
    x = Dense(4096, activation='relu')(x)
    x = Dense(4096, activation='relu')(x)
    output = Dense(17, activation='sigmoid')(x)
    return Model(input_, output)
| {"/multilabelMetrics/examplebasedranking.py": ["/multilabelMetrics/functions.py"], "/testUCM.py": ["/multilabelMetrics/examplebasedclassification.py", "/multilabelMetrics/examplebasedranking.py"], "/trainUCM.py": ["/MLFNet/MLFNet_GC.py"]} |
65,539 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /MLFNet/MLFNet_SE.py | import os
from keras import layers, optimizers, models
from keras.regularizers import l2
from models.resnet50 import ResNet50
# from keras.applications.resnet50 import ResNet50
from keras.layers import *
from keras.models import Model
import keras.backend as K
from keras.models import Model
from keras.layers import Input, BatchNormalization, Conv2D, MaxPooling2D, Dropout, concatenate, merge, UpSampling2D
from keras.optimizers import Adam
def SE(x):
    """Squeeze-and-Excitation channel attention (reduction ratio 4).

    x: 4-D feature tensor (batch, H, W, C); returns a tensor of the same
    shape with channels rescaled by a learned sigmoid gate.
    """
    channels = x.get_shape().as_list()[-1]
    # Squeeze: one descriptor per channel.
    descriptor = GlobalAveragePooling2D()(x)
    # Excite: bottleneck MLP ending in a per-channel sigmoid gate.
    gate = Dense(channels // 4)(descriptor)
    gate = Activation('relu')(gate)
    gate = Dense(channels)(gate)
    gate = Activation('sigmoid')(gate)
    gate = Reshape((1, 1, channels))(gate)
    return Multiply()([x, gate])
def SEMLFNet(pretrained_weights=None, input_size=(256, 256, 3), classNum=6):
    """Multi-Level Feature Network over a ResNet50 backbone with a
    Squeeze-and-Excitation block refining each fused pyramid level.

    pretrained_weights: optional weight file loaded into the backbone.
    input_size: (H, W, C) of the input images.
    classNum: number of sigmoid outputs.
    Returns: an uncompiled keras functional `Model`.
    """
    height, width, depth = input_size
    # BatchNormalization axis depends on the backend's channel ordering.
    bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
    backbone = ResNet50(height, width, depth)
    if pretrained_weights:
        backbone.load_weights(pretrained_weights)
    _, C2, C3, C4, C5 = backbone.output

    def _project(feature):
        # 1x1 projection of a backbone stage into a shared 256-channel space.
        t = Conv2D(256, (1, 1), padding='SAME', kernel_initializer='he_normal')(feature)
        t = BatchNormalization(axis=bn_axis)(t)
        return Activation('relu')(t)

    def _fuse(shallow, deep):
        # Add the (already downsampled) shallower level, smooth with a 3x3
        # conv, then apply SE channel attention.
        t = layers.add([shallow, deep])
        t = Conv2D(256, (3, 3), padding='SAME', kernel_initializer='he_normal')(t)
        t = BatchNormalization(axis=bn_axis)(t)
        t = Activation('relu')(t)
        return SE(t)

    # Projections created in C2..C5 order (matches original layer order).
    P2 = _project(C2)
    P3 = _project(C3)
    P4 = _project(C4)
    P5 = _project(C5)
    P2 = MaxPooling2D(pool_size=(2, 2))(P2)
    P3 = _fuse(P2, P3)
    P3 = MaxPooling2D(pool_size=(2, 2))(P3)
    P4 = _fuse(P3, P4)
    P4 = MaxPooling2D(pool_size=(2, 2))(P4)
    P5 = _fuse(P4, P5)
    pooled = [GlobalAveragePooling2D()(level) for level in (P2, P3, P4, P5)]
    out = concatenate(pooled, axis=-1)
    out = Dense(1024, activation='relu')(out)
    out = Dense(classNum, activation='sigmoid')(out)
    return Model(input=backbone.input, output=out)
65,540 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /MLFNet/MLFNet.py | import os
from keras import layers, optimizers, models
from keras.regularizers import l2
from models.resnet50 import ResNet50
# from keras.applications.resnet50 import ResNet50
from keras.layers import *
from keras.models import Model
import keras.backend as K
from keras.models import Model
from keras.layers import Input, BatchNormalization, Conv2D, MaxPooling2D, Dropout, concatenate, merge, UpSampling2D
from keras.optimizers import Adam
def FPN(pretrained_weights=None, input_size=(256, 256, 3), classNum=6):
    """Baseline multi-level head: project four ResNet50 stages to 256
    channels, global-average-pool each, concatenate, and classify with a
    `classNum`-way sigmoid layer.

    pretrained_weights: optional weight file loaded into the backbone.
    input_size: (H, W, C) of the input images.
    Returns: an uncompiled keras functional `Model`.
    """
    height, width, depth = input_size
    # BatchNormalization axis depends on the backend's channel ordering.
    bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
    backbone = ResNet50(height, width, depth)
    if pretrained_weights:
        backbone.load_weights(pretrained_weights)
    _, C2, C3, C4, C5 = backbone.output
    # 1x1 projections, created in C2..C5 order (matches original layers).
    projected = []
    for stage in (C2, C3, C4, C5):
        p = Conv2D(256, (1, 1), padding='SAME', kernel_initializer='he_normal')(stage)
        p = BatchNormalization(axis=bn_axis)(p)
        p = Activation('relu')(p)
        projected.append(p)
    pooled = [GlobalAveragePooling2D()(p) for p in projected]
    out = concatenate(pooled, axis=-1)
    out = Dense(classNum, activation='sigmoid')(out)
    return Model(input=backbone.input, output=out)
65,541 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /MLFNet/MLFNet_GC.py | import os
from keras import layers, optimizers, models
from keras.regularizers import l2
from models.resnet50 import ResNet50
# from keras.applications.resnet50 import ResNet50
from keras.layers import *
from keras.models import Model
import keras.backend as K
from keras.models import Model
from keras.layers import Input, BatchNormalization, Conv2D, MaxPooling2D, Dropout, concatenate, merge, UpSampling2D
from keras.optimizers import Adam
def GCM(x):
    """Global Context Module (simplified non-local block, GCNet-style).

    GCNet observes that a non-local block's attention map is nearly
    identical at every query position, so a single shared context vector
    is computed instead, cutting the cost by a factor of H*W.

    x: 4-D feature tensor (batch, H, W, C); returns same-shaped tensor
    with the global context added back residually.
    """
    _, height, width, channels = x.get_shape().as_list()
    # Value path: flatten spatial positions.            [bs, H*W, C]
    values = layers.Reshape((height * width, channels))(x)
    # Attention path: one scalar per position, softmax over positions.
    weights = layers.Conv2D(filters=1, kernel_size=(1, 1))(x)
    weights = layers.Reshape((height * width, 1))(weights)
    weights = layers.Softmax(axis=1)(weights)           # [bs, H*W, 1]
    # Weighted sum over positions -> one global context vector.
    context = layers.dot([values, weights], axes=1)
    context = layers.Reshape((1, 1, channels))(context)
    # Residual fusion: broadcast-add the context to every position.
    return layers.Add()([x, context])
def GCMLFNet(pretrained_weights=None, input_size=(256, 256, 3), classNum=6):
    """Multi-Level Feature Network over a ResNet50 backbone with a Global
    Context (GC) block refining each fused pyramid level.

    pretrained_weights: optional weight file loaded into the backbone.
    input_size: (H, W, C) of the input images.
    classNum: number of sigmoid outputs.
    Returns: an uncompiled keras functional `Model`.
    """
    height, width, depth = input_size
    # BatchNormalization axis depends on the backend's channel ordering.
    bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
    backbone = ResNet50(height, width, depth)
    if pretrained_weights:
        backbone.load_weights(pretrained_weights)
    _, C2, C3, C4, C5 = backbone.output

    def _project(feature):
        # 1x1 projection of a backbone stage into a shared 256-channel space.
        t = Conv2D(256, (1, 1), padding='SAME', kernel_initializer='he_normal')(feature)
        t = BatchNormalization(axis=bn_axis)(t)
        return Activation('relu')(t)

    def _fuse(shallow, deep):
        # Add the (already downsampled) shallower level, smooth with a 3x3
        # conv, then apply global-context attention.
        t = layers.add([shallow, deep])
        t = Conv2D(256, (3, 3), padding='SAME', kernel_initializer='he_normal')(t)
        t = BatchNormalization(axis=bn_axis)(t)
        t = Activation('relu')(t)
        return GCM(t)

    # Projections created in C2..C5 order (matches original layer order).
    P2 = _project(C2)
    P3 = _project(C3)
    P4 = _project(C4)
    P5 = _project(C5)
    P2 = MaxPooling2D(pool_size=(2, 2))(P2)
    P3 = _fuse(P2, P3)
    P3 = MaxPooling2D(pool_size=(2, 2))(P3)
    P4 = _fuse(P3, P4)
    P4 = MaxPooling2D(pool_size=(2, 2))(P4)
    P5 = _fuse(P4, P5)
    pooled = [GlobalAveragePooling2D()(level) for level in (P2, P3, P4, P5)]
    out = concatenate(pooled, axis=-1)
    out = Dense(1024, activation='relu')(out)
    out = Dense(classNum, activation='sigmoid')(out)
    return Model(input=backbone.input, output=out)
65,542 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /MLFNet/MLFNet_NL.py | import os
from keras import layers, optimizers, models
from keras.regularizers import l2
from models.resnet50 import ResNet50
# from keras.applications.resnet50 import ResNet50
from keras.layers import *
from keras.models import Model
import keras.backend as K
from keras.models import Model
from keras.layers import Input, BatchNormalization, Conv2D, MaxPooling2D, Dropout, concatenate, merge, UpSampling2D
from keras.optimizers import Adam
def NL(ip, intermediate_dim=None, compression=2,
       mode='embedded', add_residual=True):
    """
    Adds a Non-Local block for self attention to the input tensor.
    Input tensor can be or rank 3 (temporal), 4 (spatial) or 5 (spatio-temporal).
    Arguments:
    ip: input tensor
    intermediate_dim: The dimension of the intermediate representation. Can be
    `None` or a positive integer greater than 0. If `None`, computes the
    intermediate dimension as half of the input channel dimension.
    compression: None or positive integer. Compresses the intermediate
    representation during the dot products to reduce memory consumption.
    Default is set to 2, which states halve the time/space/spatio-time
    dimension for the intermediate step. Set to 1 to prevent computation
    compression. None or 1 causes no reduction.
    mode: Mode of operation. Can be one of `embedded`, `gaussian`, `dot` or
    `concatenate`.
    add_residual: Boolean value to decide if the residual connection should be
    added or not. Default is True for ResNets, and False for Self Attention.
    Returns:
    a tensor of same shape as input
    """
    channel_dim = 1 if K.image_data_format() == 'channels_first' else -1
    ip_shape = K.int_shape(ip)
    if mode not in ['gaussian', 'embedded', 'dot', 'concatenate']:
        raise ValueError('`mode` must be one of `gaussian`, `embedded`, `dot` or `concatenate`')
    if compression is None:
        compression = 1
    dim1, dim2, dim3 = None, None, None
    # check rank and calculate the input shape
    if len(ip_shape) == 3:  # temporal / time series data
        rank = 3
        batchsize, dim1, channels = ip_shape
    elif len(ip_shape) == 4:  # spatial / image data
        rank = 4
        if channel_dim == 1:
            batchsize, channels, dim1, dim2 = ip_shape
        else:
            batchsize, dim1, dim2, channels = ip_shape
    elif len(ip_shape) == 5:  # spatio-temporal / Video or Voxel data
        rank = 5
        if channel_dim == 1:
            batchsize, channels, dim1, dim2, dim3 = ip_shape
        else:
            batchsize, dim1, dim2, dim3, channels = ip_shape
    else:
        raise ValueError('Input dimension has to be either 3 (temporal), 4 (spatial) or 5 (spatio-temporal)')
    # verify correct intermediate dimension specified
    if intermediate_dim is None:
        intermediate_dim = channels // 2
        if intermediate_dim < 1:
            intermediate_dim = 1
    else:
        intermediate_dim = int(intermediate_dim)
        if intermediate_dim < 1:
            raise ValueError('`intermediate_dim` must be either `None` or positive integer greater than 1.')
    # f: the pairwise-affinity (attention) matrix between positions,
    # built differently per instantiation mode.
    if mode == 'gaussian':  # Gaussian instantiation
        x1 = Reshape((-1, channels))(ip)  # xi
        x2 = Reshape((-1, channels))(ip)  # xj
        f = dot([x1, x2], axes=2)
        f = Activation('softmax')(f)
    elif mode == 'dot':  # Dot instantiation
        # theta path
        theta = _convND(ip, rank, intermediate_dim)
        theta = Reshape((-1, intermediate_dim))(theta)
        # phi path
        phi = _convND(ip, rank, intermediate_dim)
        phi = Reshape((-1, intermediate_dim))(phi)
        f = dot([theta, phi], axes=2)
        size = K.int_shape(f)
        # scale the values to make it size invariant
        f = Lambda(lambda z: (1. / float(size[-1])) * z)(f)
    elif mode == 'concatenate':  # Concatenation instantiation
        raise NotImplementedError('Concatenate model has not been implemented yet')
    else:  # Embedded Gaussian instantiation
        # theta path
        theta = _convND(ip, rank, intermediate_dim)
        theta = Reshape((-1, intermediate_dim))(theta)
        # phi path
        phi = _convND(ip, rank, intermediate_dim)
        phi = Reshape((-1, intermediate_dim))(phi)
        if compression > 1:
            # shielded computation
            phi = MaxPool1D(compression)(phi)
        f = dot([theta, phi], axes=2)
        f = Activation('softmax')(f)
    # g path: the value projection attended over by f.
    g = _convND(ip, rank, intermediate_dim)
    g = Reshape((-1, intermediate_dim))(g)
    if compression > 1 and mode == 'embedded':
        # shielded computation
        g = MaxPool1D(compression)(g)
    # compute output path
    y = dot([f, g], axes=[2, 1])
    # reshape to input tensor format
    if rank == 3:
        y = Reshape((dim1, intermediate_dim))(y)
    elif rank == 4:
        if channel_dim == -1:
            y = Reshape((dim1, dim2, intermediate_dim))(y)
        else:
            y = Reshape((intermediate_dim, dim1, dim2))(y)
    else:
        if channel_dim == -1:
            y = Reshape((dim1, dim2, dim3, intermediate_dim))(y)
        else:
            y = Reshape((intermediate_dim, dim1, dim2, dim3))(y)
    # project filters back up to the input channel count
    y = _convND(y, rank, channels)
    # residual connection
    if add_residual:
        y = add([ip, y])
    return y
def _convND(ip, rank, channels):
    """Apply a size-1 convolution matching the rank of *ip* (3 -> Conv1D,
    4 -> Conv2D, 5 -> Conv3D), producing `channels` output filters."""
    assert rank in [3, 4, 5], "Rank of input must be 3, 4 or 5"
    if rank == 3:
        return Conv1D(channels, 1, padding='same', use_bias=False, kernel_initializer='he_normal')(ip)
    if rank == 4:
        return Conv2D(channels, (1, 1), padding='same', use_bias=False, kernel_initializer='he_normal')(ip)
    return Conv3D(channels, (1, 1, 1), padding='same', use_bias=False, kernel_initializer='he_normal')(ip)
def NLMLFNet(pretrained_weights=None, input_size=(256, 256, 3), classNum=6):
    """Multi-Level Feature Network over a ResNet50 backbone with a
    Non-Local (NL) self-attention block refining each fused pyramid level.

    pretrained_weights: optional weight file loaded into the backbone.
    input_size: (H, W, C) of the input images.
    classNum: number of sigmoid outputs.
    Returns: an uncompiled keras functional `Model`.
    """
    height, width, depth = input_size
    # BatchNormalization axis depends on the backend's channel ordering.
    bn_axis = 3 if K.image_data_format() == 'channels_last' else 1
    backbone = ResNet50(height, width, depth)
    if pretrained_weights:
        backbone.load_weights(pretrained_weights)
    _, C2, C3, C4, C5 = backbone.output

    def _project(feature):
        # 1x1 projection of a backbone stage into a shared 256-channel space.
        t = Conv2D(256, (1, 1), padding='SAME', kernel_initializer='he_normal')(feature)
        t = BatchNormalization(axis=bn_axis)(t)
        return Activation('relu')(t)

    def _fuse(shallow, deep):
        # Add the (already downsampled) shallower level, smooth with a 3x3
        # conv, then apply non-local self-attention.
        t = layers.add([shallow, deep])
        t = Conv2D(256, (3, 3), padding='SAME', kernel_initializer='he_normal')(t)
        t = BatchNormalization(axis=bn_axis)(t)
        t = Activation('relu')(t)
        return NL(t)

    # Projections created in C2..C5 order (matches original layer order).
    P2 = _project(C2)
    P3 = _project(C3)
    P4 = _project(C4)
    P5 = _project(C5)
    P2 = MaxPooling2D(pool_size=(2, 2))(P2)
    P3 = _fuse(P2, P3)
    P3 = MaxPooling2D(pool_size=(2, 2))(P3)
    P4 = _fuse(P3, P4)
    P4 = MaxPooling2D(pool_size=(2, 2))(P4)
    P5 = _fuse(P4, P5)
    pooled = [GlobalAveragePooling2D()(level) for level in (P2, P3, P4, P5)]
    out = concatenate(pooled, axis=-1)
    out = Dense(1024, activation='relu')(out)
    out = Dense(classNum, activation='sigmoid')(out)
    return Model(input=backbone.input, output=out)
def accuracyMacro(y_test, predictions):
    """
    AUC Macro of our model
    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Matrix of labels used in the test phase
    predictions: sparse or dense matrix (n_samples, n_labels)
        Matrix of predicted labels given by our model
    Returns
    =======
    aucMacro : float
        AUC Macro
    """
    # NOTE(review): unimplemented placeholder — ranking-based AUC needs
    # per-label scores, which the binary `predictions` matrix does not
    # carry. Always returns 0.0, exactly as the original stub did.
    return 0.0
def accuracyMicro(y_test, predictions):
    """
    AUC Micro of our model
    Params
    ======
    y_test : sparse or dense matrix (n_samples, n_labels)
        Matrix of labels used in the test phase
    predictions: sparse or dense matrix (n_samples, n_labels)
        Matrix of predicted labels given by our model
    Returns
    =======
    aucMicro : float
        AUC Micro
    """
    # NOTE(review): unimplemented placeholder — always returns 0.0,
    # exactly as the original stub did.
    return 0.0
65,544 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /multilabelMetrics/functions.py | #Auxiliary functions
import numpy as np
def relevantIndexes(matrix, row):
    """Return the column indexes of *row* whose label equals 1 (relevant).

    matrix: 2-D label matrix indexable as matrix[row, j] (e.g. a numpy array).
    row: index of the sample to inspect.
    Returns: list of ints, in ascending column order.
    """
    # Idiom fix: list comprehension instead of a manual append loop.
    return [int(j) for j in range(matrix.shape[1]) if matrix[row, j] == 1]
def irrelevantIndexes(matrix, row):
    """Return the column indexes of *row* whose label equals 0 (irrelevant).

    matrix: 2-D label matrix indexable as matrix[row, j] (e.g. a numpy array).
    row: index of the sample to inspect.
    Returns: list of ints, in ascending column order.
    """
    # Idiom fix: list comprehension instead of a manual append loop.
    return [int(j) for j in range(matrix.shape[1]) if matrix[row, j] == 0]
def multilabelConfussionMatrix(y_test, predictions):
    """Return per-label confusion counts (TP, FP, TN, FN), each a float
    array of length n_labels.

    y_test: (n_samples, n_labels) binary ground-truth matrix.
    predictions: (n_samples, n_labels) binary prediction matrix.

    FIX: the original loop counted actual-positive/predicted-negative
    samples as FP and actual-negative/predicted-positive samples as FN —
    i.e. false positives and false negatives were swapped relative to the
    standard definitions. Also vectorized over the sample axis.
    """
    y = np.asarray(y_test).astype(int)
    p = np.asarray(predictions).astype(int)
    TP = ((y == 1) & (p == 1)).sum(axis=0).astype(float)
    FP = ((y == 0) & (p == 1)).sum(axis=0).astype(float)  # predicted 1, actually 0
    TN = ((y == 0) & (p == 0)).sum(axis=0).astype(float)
    FN = ((y == 1) & (p == 0)).sum(axis=0).astype(float)  # predicted 0, actually 1
    return TP, FP, TN, FN
def multilabelMicroConfussionMatrix(TP, FP, TN, FN):
    """Collapse per-label confusion counts into micro-averaged totals.

    TP, FP, TN, FN: per-label count vectors (as returned by
        multilabelConfussionMatrix).
    Returns: (TPMicro, FPMicro, TNMicro, FNMicro) as floats.

    Idiom fix: the manual index loop (which indexed all four vectors by
    len(TP)) is replaced with four independent sums.
    """
    return float(sum(TP)), float(sum(FP)), float(sum(TN)), float(sum(FN))
def rankingMatrix(probabilities):
    """Return a matrix of 1-based rank positions per row.

    The highest probability in a row gets rank 1, the next rank 2, and so
    on; ties are broken toward the lower column index. Entries that are
    not strictly positive keep rank 0 (the original loop stopped once all
    positive entries were consumed, leaving zeros unranked).

    probabilities: (n_samples, n_labels) matrix of non-negative scores.

    FIX: the original repeatedly re-scanned the row for its maximum
    (O(k^2) per row) and never terminated on rows containing negative
    values; one stable descending argsort yields identical ranks for
    non-negative inputs and always terminates.
    """
    probs = np.asarray(probabilities)
    ranking = np.zeros(shape=[probs.shape[0], probs.shape[1]])
    for i in range(probs.shape[0]):
        # Stable sort keeps equal values in original (lower-index-first) order.
        order = np.argsort(-probs[i, :], kind='stable')
        position = 1
        for j in order:
            if probs[i, j] <= 0:
                break  # remaining entries are zero/negative -> rank 0
            ranking[i, j] = position
            position += 1
    return ranking
65,545 | WangXin81/GC-MLFNet-Submitted-to-IEEE-JSTARS | refs/heads/main | /trainUCM.py | #!/usr/bin/env python
# coding: utf-8
import keras
import numpy as np
import scipy.io as scio
import imageio
from keras import Model
from keras.layers import Dense
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import SGD, Adam, Nadam, RMSprop, Adagrad
from sklearn.metrics import hamming_loss
from keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.metrics import accuracy_score
from models.mobilenet_v2 import MobileNetV2
from MLFNet.MLFNet_GC import GCMLFNet
# --- Experiment configuration -------------------------------------------------
premodel_path = 'pretrained/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5'
input_size = (256, 256, 3)
classnum = 17  # number of multi-label classes in the UCM dataset
OBSERVATIONS_FILE = 'UcmImages.npy' # The file containing the data samples.
LABELS_FILE = 'UcmLabels.npy' # The file containing the labels.
TESTING_DATA_NUM = 420  # first 420 shuffled samples are held out for testing
images = np.load(OBSERVATIONS_FILE)
labels = np.load(LABELS_FILE)
# Shuffle images and labels with the same permutation (seeded for reproducibility).
random_indices = np.arange(images.shape[0])
np.random.seed(42)
np.random.shuffle(random_indices)
labels = labels[random_indices]
images = images[random_indices]
# Train/test split: the first TESTING_DATA_NUM shuffled samples form the test set.
test_set = images[:TESTING_DATA_NUM]
test_labels = labels[:TESTING_DATA_NUM]
train_set = images[TESTING_DATA_NUM:]
train_labels = labels[TESTING_DATA_NUM:]
# Parameters For Data Augmentation later
ROTATION_RANGE = 45
SHIFT_FRACTION = 0.2
SHEAR_RANGE = 0.0
ZOOM_RANGE = 0.0
HORIZONTAL_FLIP = True
VERTICAL_FILP = True  # NOTE(review): name is a typo for "FLIP" but is used consistently below
data_generator = ImageDataGenerator(
    rotation_range=ROTATION_RANGE, width_shift_range=SHIFT_FRACTION, height_shift_range=SHIFT_FRACTION,
    shear_range=SHEAR_RANGE, zoom_range=ZOOM_RANGE, horizontal_flip=HORIZONTAL_FLIP,
    vertical_flip=VERTICAL_FILP)
data_generator.fit(train_set)
import keras.backend as K
import tensorflow as tf
####################################
def cal_base(y_true, y_pred):
    """Count TP/TN/FP/FN over all entries of the truth/prediction tensors.

    Both tensors are binarised by clipping to [0, 1] and rounding; the four
    counts are returned as scalar tensors in the order (TP, TN, FP, FN).
    """
    pred_pos = K.round(K.clip(y_pred, 0, 1))
    pred_neg = 1 - pred_pos
    true_pos = K.round(K.clip(y_true, 0, 1))
    true_neg = 1 - true_pos
    TP = K.sum(true_pos * pred_pos)
    TN = K.sum(true_neg * pred_neg)
    FP = K.sum(true_neg * pred_pos)
    FN = K.sum(true_pos * pred_neg)
    return TP, TN, FP, FN
def acc(y_true, y_pred):
    """Element-wise binary accuracy: (TP + TN) / total, epsilon-guarded."""
    tp, tn, fp, fn = cal_base(y_true, y_pred)
    return (tp + tn) / (tp + fp + fn + tn + K.epsilon())
def sensitivity(y_true, y_pred):
    """Recall: TP / (TP + FN), epsilon-guarded against zero positives."""
    tp, _, _, fn = cal_base(y_true, y_pred)
    return tp / (tp + fn + K.epsilon())
def precision(y_true, y_pred):
    """Precision: TP / (TP + FP), epsilon-guarded against zero predictions."""
    tp, _, fp, _ = cal_base(y_true, y_pred)
    return tp / (tp + fp + K.epsilon())
def specificity(y_true, y_pred):
    """Specificity: TN / (TN + FP), epsilon-guarded."""
    _, tn, fp, _ = cal_base(y_true, y_pred)
    return tn / (tn + fp + K.epsilon())
def f1_socre(y_true, y_pred):
    """F1: harmonic mean of precision and recall (sensitivity), epsilon-guarded."""
    recall = sensitivity(y_true, y_pred)
    prec = precision(y_true, y_pred)
    return 2 * recall * prec / (recall + prec + K.epsilon())
# precision
def P(y_true, y_pred):
    """Multi-label precision at a 0.20 decision threshold."""
    # A label counts as a hit when truth*prediction clears the threshold.
    hits = K.sum(K.cast(K.greater(K.clip(y_true * y_pred, 0, 1), 0.20), 'float32'))
    predicted = K.sum(K.cast(K.greater(K.clip(y_pred, 0, 1), 0.20), 'float32'))
    return hits / (predicted + K.epsilon())
# recall
def R(y_true, y_pred):
    """Multi-label recall at a 0.20 decision threshold."""
    hits = K.sum(K.cast(K.greater(K.clip(y_true * y_pred, 0, 1), 0.20), 'float32'))
    actual = K.sum(K.cast(K.greater(K.clip(y_true, 0, 1), 0.20), 'float32'))
    return hits / (actual + K.epsilon())
# f1-score
def F(y_true, y_pred):
    """F1-score at the 0.20 threshold, combining P (precision) and R (recall).

    Fix: adds K.epsilon() to the denominator — consistent with f1_socre
    above — so the metric yields ~0 instead of NaN when both P and R are 0.
    A NaN here would break the 'val_F' max-mode checkpoint monitoring used
    by CALLBACKS().
    """
    p_val = P(y_true, y_pred)
    r_val = R(y_true, y_pred)
    f_val = 2 * p_val * r_val / (p_val + r_val + K.epsilon())
    return f_val
# Useful Callbacks:
def CALLBACKS():
    """Build the training callback list.

    Saves the model with the best validation F1 ('val_F', max mode) to
    generatedmodel/UCM.hdf5. ReduceLROnPlateau / EarlyStopping were
    disabled in the original experiments and are intentionally omitted.
    """
    checkpoint = ModelCheckpoint('generatedmodel/UCM.hdf5', monitor='val_F',
                                 mode='max', verbose=1, save_best_only=True)
    return [checkpoint]
# Build the GC-MLFNet model on top of the pretrained ResNet50 weights.
model = GCMLFNet(pretrained_weights = premodel_path,
                 input_size = input_size,
                 classNum = classnum)
# base_model = MobileNetV2(input_shape=input_size,
#                          include_top=False,
#                          weights='imagenet',
#                          input_tensor=None,
#                          pooling='avg',
#                          classes=17,
#                          backend=keras.backend,
#                          layers=keras.layers,
#                          models=keras.models,
#                          utils=keras.utils)
#
# x=base_model.output
# x=Dense(17,activation='sigmoid')(x)
# model=Model(inputs=base_model.input,outputs=x)
op = Adam(lr=3e-4)
# Multi-label training: sigmoid outputs with binary cross-entropy, tracked
# by the threshold-based (P/R/F) and confusion-based metrics defined above.
model.compile(loss='binary_crossentropy', optimizer=op, metrics=['accuracy',P,R,F,precision,f1_socre,sensitivity,specificity])
model.summary()
# FLOP counting via the TF profiler on a batch-size-1 forward pass.
from tensorflow.python.profiler.model_analyzer import profile
from tensorflow.python.profiler.option_builder import ProfileOptionBuilder
print('TensorFlow:', tf.__version__)
# model = tf.keras.applications.ResNet50()
forward_pass = tf.function(
    model.call,
    input_signature=[tf.TensorSpec(shape=(1,) + model.input_shape[1:])])
graph_info = profile(forward_pass.get_concrete_function().graph,
                     options=ProfileOptionBuilder.float_operation())
# The //2 is necessary since `profile` counts multiply and accumulate
# as two flops, here we report the total number of multiply accumulate ops
flops = graph_info.total_float_ops // 2
print('Flops: {:,}'.format(flops))
# Toggle: 1 trains on augmented batches from data_generator, 0 trains directly.
data_augmentation = 0
if data_augmentation == 1:
    model.fit_generator(data_generator.flow(train_set, train_labels, batch_size=8),
                        epochs=35,
                        steps_per_epoch=1680 // 8,
                        verbose=2,
                        validation_data=(test_set, test_labels),
                        callbacks=CALLBACKS())
else:
    model.fit(train_set, train_labels, batch_size=8, epochs=35,
              validation_data=(test_set, test_labels), callbacks=CALLBACKS())
| {"/multilabelMetrics/examplebasedranking.py": ["/multilabelMetrics/functions.py"], "/testUCM.py": ["/multilabelMetrics/examplebasedclassification.py", "/multilabelMetrics/examplebasedranking.py"], "/trainUCM.py": ["/MLFNet/MLFNet_GC.py"]} |
65,557 | wubozhi/itc-testing-tools | refs/heads/master | /python/splint.py | import sys
import sys
import os.path
import system
import dirutils
import tempfile
import shutil
from pathlib import Path

# CLI: splint.py <exec-log> <source-dir> <report-csv> <splint-exe> [extra-opts]
# Runs splint over every .c/.cpp file in a throwaway copy of <source-dir>
# and appends 'File, Line, Error' rows to <report-csv>.
temp_path = os.path.abspath(sys.argv[1])
directory = os.path.abspath(sys.argv[2])
csv = os.path.abspath(sys.argv[3])
exe = sys.argv[4]
opts = sys.argv[5] if len(sys.argv) > 5 else ""

# Analyze a disposable copy of the sources so the tool cannot pollute the tree.
tmpdir_path = os.path.join(str(Path.home()), "tmp", "splint-" + next(tempfile._get_candidate_names()))
shutil.copytree(directory, tmpdir_path)
print("======[SPLINT]=======")
print("[CWD]:", tmpdir_path)
print("[CSV]:", csv)
print("[EXE]:", exe)
print("[EXE OPTIONS]:", opts)

source_files = dirutils.list_files(tmpdir_path, '.c') + dirutils.list_files(tmpdir_path, '.cpp')
dirutils.file_line_error_header(csv)
dirutils.reset_file(temp_path)

for source_file in source_files:
    # main.c is the benchmark driver, not a test case.
    if source_file.endswith("main.c"):
        continue
    # invalid_extern_1.c is only meaningful when compiled with invalid_extern.c.
    if source_file.endswith("invalid_extern_1.c"):
        continue
    if source_file.endswith("invalid_extern.c"):
        source_file = source_file + " " + os.path.join(tmpdir_path, "invalid_extern_1.c")
    splint = exe + " -nestcomment +posixlib " + source_file + " " + opts
    (output, err, exit_code, elapsed) = system.system_call(splint, tmpdir_path)
    dirutils.tool_exec_log(temp_path, splint, output, err, exit_code)
    # splint emits 'file:line:col: message'; any extra ':' belongs to the message.
    # Fix: append via an explicit file handle instead of reassigning sys.stdout,
    # which leaked one file object per source file and was not exception-safe.
    with open(csv, "a") as report:
        for raw_line in output.splitlines():
            fields = raw_line.decode("utf-8").strip().split(":")
            if len(fields) >= 4:
                message = ":".join(fields[3:])
                print(os.path.basename(fields[0]), ",", fields[1], ",", message, file=report)

print("[CLEANUP]: removing ", tmpdir_path)
shutil.rmtree(tmpdir_path)
print("======[DONE WITH SPLINT]=======")
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
65,558 | wubozhi/itc-testing-tools | refs/heads/master | /python/dirutils.py | import os
import sys
from os import listdir
from os.path import isfile, join
def list_files(directory, extension, absolute_path=False):
    """Return the files directly inside *directory* whose name ends with *extension*.

    Args:
        directory: directory to scan (non-recursive; subdirectories are skipped).
        extension: required filename suffix, e.g. '.c'.
        absolute_path: when True, normalise each result with os.path.abspath.
            Fix: this parameter was previously accepted but ignored; the
            default (False) preserves the original behaviour exactly.

    Returns:
        List of directory-joined paths, in listdir() order.
    """
    files = []
    for entry in listdir(directory):
        path = join(directory, entry)
        if isfile(path) and path.endswith(extension):
            files.append(os.path.abspath(path) if absolute_path else path)
    return files
def append_in(file_path, text):
    """Append *text* plus a trailing newline to *file_path*.

    Fix: uses a context-managed file instead of reassigning sys.stdout,
    which leaked the handle and left stdout redirected if print() raised.
    """
    with open(file_path, "a") as out:
        print(text, file=out)
def reset_file(file_path):
    """Truncate *file_path* to empty, creating it if necessary.

    Fix: opening in "w" mode both creates and truncates, so the original
    remove-then-reopen dance (which also leaked the handle via the
    sys.stdout reassignment) is unnecessary; the resulting file is identical.
    """
    with open(file_path, "w"):
        pass
def file_line_error_header(file_path):
    """(Re)create *file_path* containing only the CSV header 'File, Line, Error'.

    Fix: writes through a context-managed handle instead of hijacking
    sys.stdout (handle leak, stdout left redirected on exception). Opening
    in "w" mode already truncates, so no explicit remove is needed.
    """
    with open(file_path, "w") as out:
        print("File, Line, Error", file=out)
def tool_exec_log(file_path, cmd, out, err, exit):
    """Append one tool-invocation record (command, stdout, stderr, exit code).

    *out* and *err* are the raw bytes captured from the subprocess and are
    decoded as UTF-8. Fix: uses a context-managed append instead of the
    original sys.stdout reassignment (handle leak, not exception-safe);
    the emitted text is byte-for-byte identical.
    """
    with open(file_path, "a") as log:
        print("[CMD]: " + cmd, file=log)
        print("[OUTPUT]:\n" + out.decode("utf-8"), file=log)
        print("[ERR]:\n" + err.decode("utf-8"), file=log)
        print("[EXIT]: " + str(exit) + "\n", file=log)
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
65,559 | wubozhi/itc-testing-tools | refs/heads/master | /python/clanalyze.py | import sys
import os.path
from itertools import takewhile
import re
import python.system
# directory = os.path.abspath(sys.argv[1])
# csv = os.path.abspath(sys.argv[2])
# exe = sys.argv[3]
# opts = sys.argv[4]
def clanalyze(directory, temp_path, csv, exe, opts):
    """Run the MSVC 'cl /analyze' checker over *directory* and emit a CSV report.

    The analyzer's combined stdout/stderr is dumped to *temp_path*, then each
    'path(line) : warning Cxxxx: message' diagnostic is rewritten as a
    'basename, line, message' row in *csv*.

    Args:
        directory: directory containing the .c/.cpp sources to analyze.
        temp_path: file receiving the raw analyzer output (bytes).
        csv: destination CSV report path.
        exe: analyzer executable.
        opts: extra options (currently unused, kept for CLI compatibility).

    Returns:
        Wall-clock time reported by python.system.system_call, or 0.0 when
        the analyzer could not be launched.

    Fixes: `output`/`err`/`time` were unbound (NameError) when system_call
    raised; the CSV was written by reassigning sys.stdout, leaking the
    handle and leaving stdout redirected on error.
    """
    print("======Running cl /analyze=======")
    print("Working dir:", directory)
    print("CSV file:", csv)
    print("Excutable:", exe)
    print("Executable options:", opts)
    # Defaults so a failed launch still produces an (empty) dump and report.
    output, err, time = b"", b"", 0.0
    try:
        command = exe + " \"" + directory + "/*.c*\" /I \"" + directory + "\""
        (output, err, exit, time) = python.system.system_call(command)
    except Exception:
        print("TROUBLE CALLING ANALYZER(0): warning XXX: ", sys.exc_info())
    with open(temp_path, "wb") as text_file:
        text_file.write(output)
        text_file.write(err)
    # Matches e.g. 'dir\file.c(123) : warning C6386: message'.
    regexp = re.compile(r"(\S+)\((\d+)\)\s?:\s+\S+\s+\S+:\s+(.+)")
    with open(csv, "w") as report, open(temp_path) as f:
        for line in f.readlines():
            m = regexp.match(line)
            if not (m is None):
                name = m.groups()[0]
                # Strip the directory part, whichever separator was used.
                idx = max(name.rfind("\\"), name.rfind("/"))
                print(name[idx+1:], ", ", m.groups()[1], ",", m.groups()[2], file=report)
    return time
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
65,560 | wubozhi/itc-testing-tools | refs/heads/master | /python/uno-parser.py | import sys
import os.path
from itertools import takewhile
# CLI: uno-parser.py <uno-report-file>
# Converts UNO's 'uno:file:line:message' report lines into the shared
# 'File, Line, Error' CSV rows on stdout; lines not starting with 'uno' are skipped.
report = sys.argv[1]
with open(report) as f:
    for line in f.readlines():
        a = line.strip().split(":")
        if (len(a) >= 4) and (a[0] == 'uno'):
            if len(a[2]) > 10: # hack to work around bug in printint wrong array indexing
                # The line field is garbled: keep only its leading digits and
                # echo the raw field as the message.
                print(os.path.basename(a[1]), ",", ''.join(takewhile(str.isdigit, a[2].strip())), ",", a[2])
            else:
                print(os.path.basename(a[1]), ",", a[2], ",", a[3])
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
65,561 | wubozhi/itc-testing-tools | refs/heads/master | /python/latex.py | import os
import sys
from math import sqrt
def lines(file_path):
    """Read *file_path* and return its content as a list of lines (newlines stripped)."""
    with open(file_path) as fh:
        content = fh.read()
    return content.splitlines()
def nice(toolname):
    """Map an internal tool identifier to its display name.

    Known identifiers get a hand-picked label; anything else falls back to
    simple capitalisation of the identifier.
    """
    display = {
        'clangcore': "Clang (core)",
        'clangalpha': "Clang (alpha)",
        'clangcorealpha': "Clang",
        'framac': "Frama-C",
        'clanalyze': "System",
        'flintpp': "Flint++",
    }
    return display.get(toolname, toolname.capitalize())
def total(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit the overall per-tool summary table as LaTeX.

    Reads <rep_directory>/<tool>/{c_total.csv, cpp_total.csv, timing.csv},
    sums the C and C++ counts (tp=col0, fp=col1, variants=col2, rdc=col6,
    unique=col8 of data row 1), and writes a tabular to
    <latex_dir>/<tex_file_name> with columns DR, FPR, PR=sqrt(DR*(100-FPR)),
    RDR, U and runtime, sorted by PR descending.

    NOTE(review): output is produced by redirecting sys.stdout to the .tex
    file; the handle is never closed and stdout stays redirected if a
    print raises — a context manager would be safer.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    l = []
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_total.csv')
        items = lines(c_total_path)[1].split(",");
        tp = int(items[0].strip())
        fp = int(items[1].strip())
        var = int(items[2].strip())
        rdc = int(items[6].strip())
        uni = int(items[8].strip())
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_total.csv')
        items = lines(cpp_total_path)[1].split(",");
        tp = tp + int(items[0].strip())
        fp = fp + int(items[1].strip())
        var = var + int(items[2].strip())
        rdc = rdc + int(items[6].strip())
        uni = uni + int(items[8].strip())
        dr = round((tp * 100.0) / var, 2)
        fpr = round((fp * 100.0) / var, 2)
        pr = round(sqrt(dr * (100 - fpr)), 2)
        rdr = round((rdc * 100.0) / var, 2)
        # timing.csv row 0: name, user time, system time.
        timing_path = os.path.join(rep_directory, tool, 'timing.csv')
        timing = lines(timing_path)[0].split(",")
        runtime = round(float(timing[1].strip()) + float(timing[2].strip()), 2)
        # put everything in a tuple
        l.append((tool, dr, fpr, pr, rdr, uni, runtime))
    # Sort tools by production rate (tuple index 3), best first.
    srt = sorted(l, key = lambda x : x[3])
    srt.reverse()
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|r|r|r|r|r|r|}")
    print("\\hline")
    print("\multicolumn{1}{|c|}{Tool} & \multicolumn{1}{|c|}{DR} & \multicolumn{1}{|c|}{FPR} & \multicolumn{1}{|c|}{PR} & \multicolumn{1}{|c|}{RDR} & \multicolumn{1}{|c|}{U} & \multicolumn{1}{|c|}{Time} \\\\ ")
    print("\\hline")
    for t in srt:
        # Floats are rendered with two decimals; ints/strings verbatim.
        t_as_list = list(map(lambda x : "{:4.2f}".format(x) if isinstance(x, float) else str(x), list(t)))
        t_as_list[0] = nice(t_as_list[0])
        print(' & '.join(t_as_list),"\\\\")
    print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# Detection rate by defects
def defects_dr(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit the per-defect-class detection-rate table (tools x D1..D9) as LaTeX.

    For each tool, sums true positives (col 1) and variant counts (col 3)
    per defect class from c_defects.csv and cpp_defects.csv, then prints
    DR = tp*100/var rounded to an integer into <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout; the handle is never
    closed and stdout stays redirected if a print raises.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    defects = set()
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_defects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_defects.csv')
        h, *t = lines(cpp_total_path)
        def_map = {}
        for line in tail + t:
            items = line.split(",")
            name = items[0]
            defects.add(name)
            if (not name in def_map.keys()):
                def_map[name] = (0, 0)
            tp = int(items[1].strip())
            var = int(items[3].strip())
            # Accumulate (tp, variants) across the C and C++ rows.
            def_map[name] = (def_map[name][0] + tp, def_map[name][1] + var)
        t_map[tool] = def_map
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|r|r|r|r|r|r|r|r|r|r|}")
    print("%\\hline")
    print("% Detection rate per defects \\\\ ")
    print("\\hline")
    print("Tool & D1 & D2 & D3 & D4 & D5 & D6 & D7 & D8 & D9", "\\\\")
    print("%% ", "Tool &", " & ".join(sorted(defects)), "\\\\")
    print("\\hline")
    for tool in sorted(t_map.keys()):
        print(nice(tool), end="")
        def_map = t_map[tool]
        for defect in sorted(defects):
            tp = def_map[defect][0]
            var = def_map[defect][1]
            dr = int(round((tp * 100) / var, 0))
            print(" & ", dr, end="")
        print("\\\\")
        print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# false positives rate
def defects_fpr(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit the per-defect-class false-positive-rate table (tools x D1..D9) as LaTeX.

    For each tool, sums false positives (col 2) and variant counts (col 3)
    per defect class from c_defects.csv and cpp_defects.csv, then prints
    FPR = fp*100/var rounded to an integer into <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout; the handle is never
    closed and stdout stays redirected if a print raises.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    defects = set()
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_defects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_defects.csv')
        h, *t = lines(cpp_total_path)
        def_map = {}
        for line in tail + t:
            items = line.split(",")
            name = items[0]
            defects.add(name)
            if (not name in def_map.keys()):
                def_map[name] = (0, 0)
            fp = int(items[2].strip())
            var = int(items[3].strip())
            # Accumulate (fp, variants) across the C and C++ rows.
            def_map[name] = (def_map[name][0] + fp, def_map[name][1] + var)
        t_map[tool] = def_map
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|r|r|r|r|r|r|r|r|r|r|}")
    print("%\\hline")
    print("% False positive rate per defects \\\\ ")
    print("\\hline")
    print("Tool & D1 & D2 & D3 & D4 & D5 & D6 & D7 & D8 & D9", "\\\\")
    print("%% ", "Tool &", " & ".join(sorted(defects)), "\\\\")
    print("\\hline")
    for tool in sorted(t_map.keys()):
        print(nice(tool), end="")
        def_map = t_map[tool]
        for defect in sorted(defects):
            fp = def_map[defect][0]
            var = def_map[defect][1]
            fpr = int(round((fp * 100) / var, 0))
            print(" & ", fpr, end="")
        print("\\\\")
        print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# production
def defects_pr(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit the per-defect-class production table (tools x D1..D9) as LaTeX.

    For each tool, sums tp (col 1), fp (col 2) and variants (col 3) per
    defect class from c_defects.csv and cpp_defects.csv, then prints
    PR = sqrt(DR * (100 - FPR)) rounded to an integer into
    <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout; the handle is never
    closed and stdout stays redirected if a print raises.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    defects = set()
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_defects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_defects.csv')
        h, *t = lines(cpp_total_path)
        def_map = {}
        for line in tail + t:
            items = line.split(",")
            name = items[0]
            defects.add(name)
            if (not name in def_map.keys()):
                def_map[name] = (0, 0, 0)
            tp = int(items[1].strip())
            fp = int(items[2].strip())
            var = int(items[3].strip())
            # Accumulate (tp, fp, variants) across the C and C++ rows.
            def_map[name] = (def_map[name][0] + tp, def_map[name][1] + fp, def_map[name][2] + var)
        t_map[tool] = def_map
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|r|r|r|r|r|r|r|r|r|r|}")
    print("%\\hline")
    print("% Production per defects \\\\ ")
    print("\\hline")
    print("Tool & D1 & D2 & D3 & D4 & D5 & D6 & D7 & D8 & D9", "\\\\")
    print("%% ", "Tool &", " & ".join(sorted(defects)), "\\\\")
    print("\\hline")
    for tool in sorted(t_map.keys()):
        print(nice(tool), end="")
        def_map = t_map[tool]
        for defect in sorted(defects):
            tp = def_map[defect][0]
            fp = def_map[defect][1]
            var = def_map[defect][2]
            dr = round((tp * 100) / var, 2)
            fpr = round((fp * 100) / var, 2)
            pr = int(round(sqrt(dr * (100 - fpr)), 0))
            print(" & ", pr, end="")
        print("\\\\")
        print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# Robust detection rate
def defects_rdr(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit the per-defect-class robust-detection-rate table (tools x D1..D9) as LaTeX.

    For each tool, sums robust detections (col 7) and variant counts
    (col 3) per defect class from c_defects.csv and cpp_defects.csv, then
    prints RDR = rdc*100/var rounded to an integer into
    <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout; the handle is never
    closed and stdout stays redirected if a print raises.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    defects = set()
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_defects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_defects.csv')
        h, *t = lines(cpp_total_path)
        def_map = {}
        for line in tail + t:
            items = line.split(",")
            name = items[0]
            defects.add(name)
            if (not name in def_map.keys()):
                def_map[name] = (0, 0)
            rdc = int(items[7].strip())
            var = int(items[3].strip())
            # Accumulate (rdc, variants) across the C and C++ rows.
            def_map[name] = (def_map[name][0] + rdc, def_map[name][1] + var)
        t_map[tool] = def_map
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|r|r|r|r|r|r|r|r|r|r|}")
    print("%\\hline")
    print("% Robust detection rate per defects \\\\ ")
    print("\\hline")
    print("Tool & D1 & D2 & D3 & D4 & D5 & D6 & D7 & D8 & D9", "\\\\")
    print("%% ", "Tool &", " & ".join(sorted(defects)), "\\\\")
    print("\\hline")
    for tool in sorted(t_map.keys()):
        print(nice(tool), end="")
        def_map = t_map[tool]
        for defect in sorted(defects):
            rdc = def_map[defect][0]
            var = def_map[defect][1]
            rdr = int(round((rdc * 100) / var, 0))
            print(" & ", rdr, end="")
        print("\\\\")
        print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
def defects_unique(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit the per-defect-class unique-findings table (tools x D1..D9) as LaTeX.

    For each tool, sums the unique-findings count (col 9) per defect class
    from c_defects.csv and cpp_defects.csv and prints the raw totals into
    <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout; the handle is never
    closed and stdout stays redirected if a print raises.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    defects = set()
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_defects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_defects.csv')
        h, *t = lines(cpp_total_path)
        def_map = {}
        for line in tail + t:
            items = line.split(",")
            name = items[0]
            defects.add(name)
            if (not name in def_map.keys()):
                def_map[name] = 0
            rdc = int(items[9].strip())
            def_map[name] = def_map[name] + rdc
        t_map[tool] = def_map
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|r|r|r|r|r|r|r|r|r|r|}")
    print("% \\hline")
    print("% Unique (robust) defects \\\\ ")
    print("\\hline")
    print("Tool & D1 & D2 & D3 & D4 & D5 & D6 & D7 & D8 & D9", "\\\\")
    print("%% ", "Tool &", " & ".join(sorted(defects)), "\\\\")
    print("\\hline")
    for tool in sorted(t_map.keys()):
        print(nice(tool), end="")
        def_map = t_map[tool]
        for defect in sorted(defects):
            unique = def_map[defect]
            print(" & ", unique, end="")
        print("\\\\")
        print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# Production by subdefects
def subdefects_pr(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit, per defect subtype, the best tool by production rate as LaTeX.

    Reads c_subdefects.csv / cpp_subdefects.csv (subtype name col 2,
    tp col 3, fp col 4, variants col 5), computes PR = sqrt(DR*(100-FPR))
    per (tool, subtype), keeps the best tool for each subtype and writes a
    three-column tabular into <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout (handle leaked, stdout
    not restored on error); the truncation below checks len <= 20 but
    slices to 27 chars — thresholds look inconsistent.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    subdefects = set()
    subdef_map = {} # subdef |-> [(tool, production)]
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_subdefects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_subdefects.csv')
        h, *t = lines(cpp_total_path)
        for line in tail + t:
            items = line.split(",")
            name = items[2]
            subdefects.add(name)
            if (not name in subdef_map.keys()):
                subdef_map[name] = []
            tp = int(items[3].strip())
            fp = int(items[4].strip())
            var = int(items[5].strip())
            dr = round((tp * 100) / var, 2)
            fpr = round((fp * 100) / var, 2)
            pr = round(sqrt(dr * (100 - fpr)), 2)
            subdef_map[name] = subdef_map[name] + [(tool, pr)]
    # Keep only the best-scoring (tool, pr) pair per subtype.
    for subdef in subdef_map.keys():
        srt = sorted(subdef_map[subdef], key = lambda x : x[1])
        srt.reverse()
        subdef_map[subdef] = srt[0]
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|c|r|}")
    print("%\\hline")
    print("% Production per subdefects \\\\ ")
    print("\\hline")
    print("\multicolumn{1}{|c|}{Defect subtype} & \multicolumn{1}{|c|}{Tool} & \multicolumn{1}{|c|}{PR}", "\\\\")
    print("\\hline")
    for subdefect in sorted(subdef_map.keys()):
        sub = subdefect if len(subdefect) <= 20 else subdefect[0:27]+'...'
        toool = nice(subdef_map[subdefect][0]) if subdef_map[subdefect][1] > 0 else "-"
        print(sub, " & ", toool, " & ", "{:4.2f}".format(subdef_map[subdefect][1]), "\\\\")
    print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# Robust detection rate by subdefects
def subdefects_rdr(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit, per defect subtype, the best tool by robust detection rate as LaTeX.

    Reads c_subdefects.csv / cpp_subdefects.csv (subtype name col 2,
    robust detections col 9, variants col 5), computes RDR = rdc*100/var
    per (tool, subtype), keeps the best tool for each subtype and writes a
    three-column tabular into <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout (handle leaked, stdout
    not restored on error); the truncation below checks len <= 20 but
    slices to 27 chars — thresholds look inconsistent.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    subdefects = set()
    subdef_map = {} # subdef |-> [(tool, rdr)]
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_subdefects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_subdefects.csv')
        h, *t = lines(cpp_total_path)
        for line in tail + t:
            items = line.split(",")
            name = items[2]
            subdefects.add(name)
            if (not name in subdef_map.keys()):
                subdef_map[name] = []
            rdc = int(items[9].strip())
            var = int(items[5].strip())
            rdr = round((rdc * 100) / var, 2)
            subdef_map[name] = subdef_map[name] + [(tool, rdr)]
    # Keep only the best-scoring (tool, rdr) pair per subtype.
    for subdef in subdef_map.keys():
        srt = sorted(subdef_map[subdef], key = lambda x : x[1])
        srt.reverse()
        subdef_map[subdef] = srt[0]
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|c|r|}")
    print("%\\hline")
    print("% Robust detection rate per subdefects \\\\ ")
    print("\\hline")
    print("\multicolumn{1}{|c|}{Defect subtype} & \multicolumn{1}{|c|}{Tool} & \multicolumn{1}{|c|}{RDR}", "\\\\")
    print("\\hline")
    for subdefect in sorted(subdef_map.keys()):
        sub = subdefect if len(subdefect) <= 20 else subdefect[0:27]+'...'
        toool = nice(subdef_map[subdefect][0]) if subdef_map[subdefect][1] > 0 else "-"
        print(sub, " & ", toool, " & ", "{:4.2f}".format(subdef_map[subdefect][1]), "\\\\")
    print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# Unique by subdefects
def subdefects_unique(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit, per defect subtype, the tool with the most unique findings as LaTeX.

    Reads c_subdefects.csv / cpp_subdefects.csv (subtype name col 2,
    unique count col 11), keeps the tool with the highest unique count for
    each subtype and writes a three-column tabular into
    <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout (handle leaked, stdout
    not restored on error); the truncation below checks len <= 20 but
    slices to 27 chars — thresholds look inconsistent.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    subdefects = set()
    subdef_map = {} # subdef |-> [(tool, unique)]
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_subdefects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_subdefects.csv')
        h, *t = lines(cpp_total_path)
        for line in tail + t:
            items = line.split(",")
            name = items[2]
            subdefects.add(name)
            if (not name in subdef_map.keys()):
                subdef_map[name] = []
            rdc = int(items[11].strip())
            subdef_map[name] = subdef_map[name] + [(tool, rdc)]
    # Keep only the best-scoring (tool, unique) pair per subtype.
    for subdef in subdef_map.keys():
        srt = sorted(subdef_map[subdef], key = lambda x : x[1])
        srt.reverse()
        subdef_map[subdef] = srt[0]
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|c|c|}")
    print("%\\hline")
    print("% Unique per subdefects \\\\ ")
    print("\\hline")
    print("\multicolumn{1}{|c|}{Defect subtype} & \multicolumn{1}{|c|}{Tool} & \multicolumn{1}{|c|}{Unique}", "\\\\")
    print("\\hline")
    for subdefect in sorted(subdef_map.keys()):
        sub = subdefect if len(subdefect) <= 20 else subdefect[0:27]+'...'
        toool = nice(subdef_map[subdefect][0]) if subdef_map[subdefect][1] > 0 else "-"
        print(sub, " & ", toool, " & ", subdef_map[subdefect][1], "\\\\")
    print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
# Detected by all by subdefects
def subdefects_all(tex_file_name, rep_directory, latex_dir, tool_list):
    """Emit, per defect subtype, all detecting tools and the involved files as LaTeX.

    Reads c_subdefects.csv / cpp_subdefects.csv (filename col 0, subtype
    name col 2, tp col 3); for each subtype it lists the tools with at
    least one true positive, plus the distinct source filenames (with '_'
    escaped for LaTeX), into <latex_dir>/<tex_file_name>.

    NOTE(review): writes by redirecting sys.stdout (handle leaked, stdout
    not restored on error); `rdc` below is computed but never used, and
    the else branch re-assigning subdef_files[name] to itself is a no-op.
    """
    tex_file_path = os.path.join(latex_dir, tex_file_name)
    t_map = {}
    subdefects = set()
    subdef_map = {} # subdef |-> [tools]
    subdef_files = {}
    for tool in tool_list:
        c_total_path = os.path.join(rep_directory, tool, 'c_subdefects.csv')
        head, *tail = lines(c_total_path)
        cpp_total_path = os.path.join(rep_directory, tool, 'cpp_subdefects.csv')
        h, *t = lines(cpp_total_path)
        for line in tail + t:
            items = line.split(",")
            name = items[2]
            subdefects.add(name)
            if (not name in subdef_map.keys()):
                subdef_map[name] = []
                subdef_files[name] = []
            rdc = int(items[11].strip())
            tp = int(items[3].strip())
            filename = items[0].strip()
            subdef_map[name] = subdef_map[name] + [(tool, tp)]
            if not (filename in subdef_files[name]):
                subdef_files[name] = subdef_files[name] + [filename]
            else:
                subdef_files[name] = subdef_files[name]
    # Drop tools that never detected the subtype (tp == 0).
    for subdef in subdef_map.keys():
        srt = list(filter(lambda x : x[1] != 0, subdef_map[subdef]))
        subdef_map[subdef] = srt;
    sys.stdout = open(tex_file_path, "w")
    print("\\begin{tabular}{|l|l|l|}")
    print("%\\hline")
    print("% Subdefects detected by \\\\ ")
    print("\\hline")
    print("{Defect subtype} & {Tools which detected this subtype} & {Filenames}", "\\\\")
    print("\\hline")
    for subdefect in sorted(subdef_map.keys()):
        sub = subdefect if len(subdefect) <= 20 else subdefect[0:27]+'...'
        toool = ",".join(list(map (lambda x : x[0], subdef_map[subdefect])))
        fnames = ",".join(list(map (lambda x : str(x.replace("_", "\\_")), subdef_files[subdefect])))
        print(sub, " & ", toool, " & ", fnames, "\\\\")
    print("\\hline")
    print("\\end{tabular}")
    sys.stdout = sys.__stdout__
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
65,562 | wubozhi/itc-testing-tools | refs/heads/master | /python/framac.py | import sys
import sys
import os
import os.path
import system
import dirutils
import tempfile
import shutil
from shutil import copyfile
from pathlib import Path

# CLI: framac.py <exec-log> <source-dir> <report-csv> <frama-c-exe> [extra-opts]
# Runs Frama-C value analysis over every .c/.cpp file in a throwaway copy of
# <source-dir> and appends 'File, Line, Error' rows to <report-csv>.
temp_path = os.path.abspath(sys.argv[1])
directory = os.path.abspath(sys.argv[2])
csv = os.path.abspath(sys.argv[3])
exe = sys.argv[4]
opts = sys.argv[5] if len(sys.argv) > 5 else ""

# Analyze a disposable copy of the sources so the tool cannot pollute the tree.
tmpdir_path = os.path.join(str(Path.home()), "tmp", "frama-c-" + next(tempfile._get_candidate_names()))
shutil.copytree(directory, tmpdir_path)
print("======[FRAMA-C]=======")
print("[CWD]:", tmpdir_path)
print("[CSV]:", csv)
print("[EXE]:", exe)
print("[EXE OPTIONS]:", opts)

# Frama-C needs stub headers; the benchmark ships them as .hx files.
pthread = os.path.join(tmpdir_path, "pthread.h")
unistd = os.path.join(tmpdir_path, "unistd.h")
copyfile(os.path.join(tmpdir_path, "pthread.hx"), pthread)
copyfile(os.path.join(tmpdir_path, "unistd.hx"), unistd)

source_files = dirutils.list_files(tmpdir_path, '.c') + dirutils.list_files(tmpdir_path, '.cpp')
dirutils.file_line_error_header(csv)
dirutils.reset_file(temp_path)

for source_file in source_files:
    # main.c is the benchmark driver, not a test case.
    if source_file.endswith("main.c"):
        continue
    # invalid_extern_1.c is only meaningful when compiled with invalid_extern.c.
    if source_file.endswith("invalid_extern_1.c"):
        continue
    if source_file.endswith("invalid_extern.c"):
        source_file = source_file + " " + os.path.join(tmpdir_path, "invalid_extern_1.c")
    framac = exe + " -val -quiet " + source_file + " main.c"
    (output, err, exit_code, elapsed) = system.system_call(framac, tmpdir_path)
    dirutils.tool_exec_log(temp_path, framac, output, err, exit_code)
    # Frama-C messages look like '[kernel] file.c:42: text', optionally
    # continued on the following output line.
    lines = output.splitlines()
    # Fix: append via an explicit file handle instead of reassigning
    # sys.stdout (handle leak per source file, not exception-safe).
    with open(csv, "a") as report:
        i = 0
        while i < len(lines):
            line = lines[i].decode("utf-8")
            # Fix: startswith() is safe on empty output lines, unlike
            # indexing line[0], which raised IndexError.
            if line.startswith('['):
                j = line.find("]")
                if j != -1:
                    parsed = line[j+1:].split(':')
                    if len(parsed) >= 3:
                        fname = parsed[0].strip()
                        line_no = parsed[1].strip()
                        message = parsed[2].strip()
                        if i + 1 < len(lines):
                            message = message + ":" + lines[i+1].decode("utf-8")
                        if fname != "main.c" and line_no.isdigit():
                            print(fname + "," + line_no + "," + message, file=report)
            i = i + 1

print("[CLEANUP]: removing ", tmpdir_path)
shutil.rmtree(tmpdir_path)
print("======[DONE WITH FRAMA-C]=======")
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
65,563 | wubozhi/itc-testing-tools | refs/heads/master | /python/flint++.py | import json
import json
import sys
import os.path
import system
import dirutils
import shutil
import tempfile
from pathlib import Path

# CLI: flint++.py <json-dump> <exec-log> <source-dir> <report-csv> <exe> <opts>
# Runs Flint++ (JSON output mode) over every .c/.cpp file in a throwaway
# copy of <source-dir> and appends 'File, Line, Error' rows to <report-csv>.
json_path = os.path.abspath(sys.argv[1])  # NOTE: accepted for CLI compatibility; unused below
temp_path = os.path.abspath(sys.argv[2])
directory = os.path.abspath(sys.argv[3])
csv = os.path.abspath(sys.argv[4])
exe = sys.argv[5]
opts = sys.argv[6]

# Analyze a disposable copy of the sources so the tool cannot pollute the tree.
tmpdir_path = os.path.join(str(Path.home()), "tmp", "flintpp-" + next(tempfile._get_candidate_names()))
shutil.copytree(directory, tmpdir_path)
print("======[FLINT++]=======")
print("[CWD]:", tmpdir_path)
print("[CSV]:", csv)
print("[EXE]:", exe)
print("[EXE OPTIONS]:", opts)

source_files = dirutils.list_files(tmpdir_path, '.c') + dirutils.list_files(tmpdir_path, '.cpp')
dirutils.file_line_error_header(csv)
dirutils.reset_file(temp_path)

for source_file in source_files:
    # main.c is the benchmark driver, not a test case.
    if source_file.endswith("main.c"):
        continue
    # invalid_extern_1.c is only meaningful when compiled with invalid_extern.c.
    if source_file.endswith("invalid_extern_1.c"):
        continue
    if source_file.endswith("invalid_extern.c"):
        source_file = source_file + " " + os.path.join(tmpdir_path, "invalid_extern_1.c")
    flintpp = exe + " " + opts + " " + source_file
    (output, err, exit_code, elapsed) = system.system_call(flintpp, tmpdir_path)
    dirutils.tool_exec_log(temp_path, flintpp, output, err, exit_code)
    # Flint++ reports JSON: one entry per file, each with a list of reports.
    data = json.loads(output.decode("utf-8"))
    # Fix: append via an explicit file handle instead of reassigning
    # sys.stdout (handle leak per source file, not exception-safe).
    with open(csv, "a") as report_csv:
        for f in data['files']:
            filename = f['path']
            for error in f['reports']:
                print(os.path.basename(filename), ",", error['line'], ",", error['title'], file=report_csv)

print("[CLEANUP]: removing ", tmpdir_path)
shutil.rmtree(tmpdir_path)
print("======[DONE WITH FLINT++]=======")
| {"/python/clanalyze.py": ["/python/system.py"], "/benchmark.py": ["/python/system.py", "/python/clanalyze.py", "/python/latex.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.