code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
from audioop import reverse
from time import sleep
import pytest
import requests
from utils import *
from conftest import file_dir as test_home
from conftest import ref_version
import json
import datetime
class TestsortMajor:
    """Smoke tests: the booking filter endpoint must accept every supported
    ``sort`` value combined with every other filter/embed query parameter.

    Each case issues a GET against the reference service and asserts only
    HTTP 200 — response payloads are not validated here.

    Replaces 81 copy-pasted 3-line test methods with a single parametrized
    test over the full 3 x 27 cross product (same coverage, one body).
    """

    # Every "sort" value the API supports, including the multi-key form.
    SORT_VALUES = [
        "title",
        "date",
        "title,date",
    ]

    # Every secondary query fragment the original tests combined with "sort",
    # in the original order.
    FILTER_QUERIES = [
        "reminderState=notReminded",
        "downloadState=notStarted",
        "downloadState=inProgress",
        "downloadState=suspended",
        "downloadState=notStarted,inProgress",
        "downloadState=notStarted,inProgress,suspended",
        "recordingState=notStarted",
        "recordingState=inProgress",
        "recordingState=notStarted,inProgress",
        "recordingContentState=partial",
        "recordingContentState=complete",
        "recordingContentState=partial,complete",
        "downloadContentState=partial",
        "downloadContentState=complete",
        "downloadContentState=partial,complete",
        "embed=eventBooking",
        "embed=seasonBooking",
        "embed=transcodeBooking",
        "embed=transcodeSeasonBooking",
        "embed=reminderBooking",
        "embed=eventBooking,seasonBooking",
        "embed=eventBooking,seasonBooking,transcodeBooking",
        "embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking",
        "embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking",
        "bookingType=manual",
        "bookingType=event",
        "bookingType=manual,event",
    ]

    @pytest.mark.parametrize("filter_query", FILTER_QUERIES)
    @pytest.mark.parametrize("sort_value", SORT_VALUES)
    def test_sort_with_filter(self, sort_value, filter_query):
        """GET with sort=<sort_value>&<filter_query> must return 200."""
        query = "sort=" + sort_value + "&" + filter_query
        filter_response = call_ref_url("get", make_booking_filter_url(query))
        assert filter_response.status_code == 200
class TestreminderStateMajor:
    """Smoke tests: ``reminderState=notReminded`` combined with every other
    filter/embed query parameter must be accepted by the booking filter
    endpoint (HTTP 200 only; payloads are not validated).

    Replaces 26 copy-pasted 3-line test methods with one parametrized test
    covering the identical set of query combinations.
    """

    # Every secondary query fragment the original tests combined with
    # "reminderState=notReminded", in the original order.
    FILTER_QUERIES = [
        "downloadState=notStarted",
        "downloadState=inProgress",
        "downloadState=suspended",
        "downloadState=notStarted,inProgress",
        "downloadState=notStarted,inProgress,suspended",
        "recordingState=notStarted",
        "recordingState=inProgress",
        "recordingState=notStarted,inProgress",
        "recordingContentState=partial",
        "recordingContentState=complete",
        "recordingContentState=partial,complete",
        "downloadContentState=partial",
        "downloadContentState=complete",
        "downloadContentState=partial,complete",
        "embed=eventBooking",
        "embed=seasonBooking",
        "embed=transcodeBooking",
        "embed=transcodeSeasonBooking",
        "embed=reminderBooking",
        "embed=eventBooking,seasonBooking",
        "embed=eventBooking,seasonBooking,transcodeBooking",
        "embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking",
        "embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking",
        "bookingType=manual",
        "bookingType=event",
        "bookingType=manual,event",
    ]

    @pytest.mark.parametrize("filter_query", FILTER_QUERIES)
    def test_reminderState_with_filter(self, filter_query):
        """GET with reminderState=notReminded&<filter_query> must return 200."""
        query = "reminderState=notReminded&" + filter_query
        filter_response = call_ref_url("get", make_booking_filter_url(query))
        assert filter_response.status_code == 200
class TestdownloadStateMajor:
def test_downloadState_with_recordingState_1(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&recordingState=notStarted"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_2(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&recordingState=inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_3(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&recordingState=notStarted,inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_4(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&recordingContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_5(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&recordingContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_6(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&recordingContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_7(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_8(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_9(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_10(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=eventBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_11(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_12(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_13(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_14(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_15(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_16(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_17(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_18(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_19(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&bookingType=manual"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_20(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&bookingType=event"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_21(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted&bookingType=manual,event"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_22(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&recordingState=notStarted"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_23(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&recordingState=inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_24(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&recordingState=notStarted,inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_25(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&recordingContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_26(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&recordingContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_27(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&recordingContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_28(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_29(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_30(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_31(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=eventBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_32(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_33(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_34(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_35(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_36(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_37(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_38(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_39(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_40(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&bookingType=manual"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_41(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&bookingType=event"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_42(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=inProgress&bookingType=manual,event"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_43(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&recordingState=notStarted"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_44(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&recordingState=inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_45(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&recordingState=notStarted,inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_46(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&recordingContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_47(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&recordingContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_48(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&recordingContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_49(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_50(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_51(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_52(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=eventBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_53(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_54(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_55(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_56(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_57(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_58(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_59(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_60(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_61(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&bookingType=manual"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_62(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&bookingType=event"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_63(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=suspended&bookingType=manual,event"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_64(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&recordingState=notStarted"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_65(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&recordingState=inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_66(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&recordingState=notStarted,inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_67(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&recordingContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_68(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&recordingContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_69(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&recordingContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_70(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_71(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_72(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_73(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=eventBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_74(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_75(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_76(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_77(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_78(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_79(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_80(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_81(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_82(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&bookingType=manual"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_83(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&bookingType=event"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_84(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress&bookingType=manual,event"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_85(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&recordingState=notStarted"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_86(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&recordingState=inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingState_87(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&recordingState=notStarted,inProgress"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_88(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&recordingContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_89(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&recordingContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_recordingContentState_90(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&recordingContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_91(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_92(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_downloadState_with_downloadContentState_93(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_94(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=eventBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_95(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_96(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_97(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_98(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_99(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_100(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_101(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_embed_102(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_103(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&bookingType=manual"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_104(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&bookingType=event"))
assert filter_response.status_code == 200
def test_downloadState_with_bookingType_105(self):
filter_response = call_ref_url("get", make_booking_filter_url("downloadState=notStarted,inProgress,suspended&bookingType=manual,event"))
assert filter_response.status_code == 200
class TestrecordingStateMajor:
    """Pairwise filter tests with recordingState as the primary parameter.

    Every test performs a GET on the booking filter endpoint combining a
    recordingState value with one secondary parameter and expects HTTP 200.
    """
    # recordingState=notStarted combined with recordingContentState variants
    def test_recordingState_with_recordingContentState_1(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&recordingContentState=partial"))
        assert resp.status_code == 200
    def test_recordingState_with_recordingContentState_2(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&recordingContentState=complete"))
        assert resp.status_code == 200
    def test_recordingState_with_recordingContentState_3(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&recordingContentState=partial,complete"))
        assert resp.status_code == 200
    # recordingState=notStarted combined with downloadContentState variants
    def test_recordingState_with_downloadContentState_4(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&downloadContentState=partial"))
        assert resp.status_code == 200
    def test_recordingState_with_downloadContentState_5(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&downloadContentState=complete"))
        assert resp.status_code == 200
    def test_recordingState_with_downloadContentState_6(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&downloadContentState=partial,complete"))
        assert resp.status_code == 200
    # recordingState=notStarted combined with embed variants
    def test_recordingState_with_embed_7(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=eventBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_8(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=seasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_9(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=transcodeBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_10(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=transcodeSeasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_11(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=reminderBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_12(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=eventBooking,seasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_13(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=eventBooking,seasonBooking,transcodeBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_14(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_15(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
        assert resp.status_code == 200
    # recordingState=notStarted combined with bookingType variants
    def test_recordingState_with_bookingType_16(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&bookingType=manual"))
        assert resp.status_code == 200
    def test_recordingState_with_bookingType_17(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&bookingType=event"))
        assert resp.status_code == 200
    def test_recordingState_with_bookingType_18(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted&bookingType=manual,event"))
        assert resp.status_code == 200
    # recordingState=inProgress combined with recordingContentState variants
    def test_recordingState_with_recordingContentState_19(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&recordingContentState=partial"))
        assert resp.status_code == 200
    def test_recordingState_with_recordingContentState_20(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&recordingContentState=complete"))
        assert resp.status_code == 200
    def test_recordingState_with_recordingContentState_21(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&recordingContentState=partial,complete"))
        assert resp.status_code == 200
    # recordingState=inProgress combined with downloadContentState variants
    def test_recordingState_with_downloadContentState_22(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&downloadContentState=partial"))
        assert resp.status_code == 200
    def test_recordingState_with_downloadContentState_23(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&downloadContentState=complete"))
        assert resp.status_code == 200
    def test_recordingState_with_downloadContentState_24(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&downloadContentState=partial,complete"))
        assert resp.status_code == 200
    # recordingState=inProgress combined with embed variants
    def test_recordingState_with_embed_25(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=eventBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_26(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=seasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_27(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=transcodeBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_28(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=transcodeSeasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_29(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=reminderBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_30(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=eventBooking,seasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_31(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=eventBooking,seasonBooking,transcodeBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_32(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_33(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
        assert resp.status_code == 200
    # recordingState=inProgress combined with bookingType variants
    def test_recordingState_with_bookingType_34(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&bookingType=manual"))
        assert resp.status_code == 200
    def test_recordingState_with_bookingType_35(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&bookingType=event"))
        assert resp.status_code == 200
    def test_recordingState_with_bookingType_36(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=inProgress&bookingType=manual,event"))
        assert resp.status_code == 200
    # recordingState=notStarted,inProgress combined with recordingContentState variants
    def test_recordingState_with_recordingContentState_37(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&recordingContentState=partial"))
        assert resp.status_code == 200
    def test_recordingState_with_recordingContentState_38(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&recordingContentState=complete"))
        assert resp.status_code == 200
    def test_recordingState_with_recordingContentState_39(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&recordingContentState=partial,complete"))
        assert resp.status_code == 200
    # recordingState=notStarted,inProgress combined with downloadContentState variants
    def test_recordingState_with_downloadContentState_40(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&downloadContentState=partial"))
        assert resp.status_code == 200
    def test_recordingState_with_downloadContentState_41(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&downloadContentState=complete"))
        assert resp.status_code == 200
    def test_recordingState_with_downloadContentState_42(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&downloadContentState=partial,complete"))
        assert resp.status_code == 200
    # recordingState=notStarted,inProgress combined with embed variants
    def test_recordingState_with_embed_43(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=eventBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_44(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=seasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_45(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=transcodeBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_46(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=transcodeSeasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_47(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=reminderBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_48(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=eventBooking,seasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_49(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=eventBooking,seasonBooking,transcodeBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_50(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
        assert resp.status_code == 200
    def test_recordingState_with_embed_51(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
        assert resp.status_code == 200
    # recordingState=notStarted,inProgress combined with bookingType variants
    def test_recordingState_with_bookingType_52(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&bookingType=manual"))
        assert resp.status_code == 200
    def test_recordingState_with_bookingType_53(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&bookingType=event"))
        assert resp.status_code == 200
    def test_recordingState_with_bookingType_54(self):
        resp = call_ref_url("get", make_booking_filter_url("recordingState=notStarted,inProgress&bookingType=manual,event"))
        assert resp.status_code == 200
class TestrecordingContentStateMajor:
def test_recordingContentState_with_downloadContentState_1(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_2(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_3(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_4(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=eventBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_5(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_6(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_7(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_8(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_9(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_10(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_11(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_12(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_13(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&bookingType=manual"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_14(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&bookingType=event"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_15(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial&bookingType=manual,event"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_16(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_17(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_18(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_19(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=eventBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_20(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_21(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_22(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_23(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_24(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_25(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_26(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_27(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_28(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&bookingType=manual"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_29(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&bookingType=event"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_30(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=complete&bookingType=manual,event"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_31(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&downloadContentState=partial"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_32(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&downloadContentState=complete"))
assert filter_response.status_code == 200
def test_recordingContentState_with_downloadContentState_33(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&downloadContentState=partial,complete"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_34(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=eventBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_35(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=seasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_36(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=transcodeBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_37(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_38(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=reminderBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_39(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=eventBooking,seasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_40(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=eventBooking,seasonBooking,transcodeBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_41(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_embed_42(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_43(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&bookingType=manual"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_44(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&bookingType=event"))
assert filter_response.status_code == 200
def test_recordingContentState_with_bookingType_45(self):
filter_response = call_ref_url("get", make_booking_filter_url("recordingContentState=partial,complete&bookingType=manual,event"))
assert filter_response.status_code == 200
class TestdownloadContentStateMajor:
    """Pairwise filter checks with downloadContentState as the major parameter.

    Each test combines one downloadContentState value set with one embed or
    bookingType value set and expects the booking filter endpoint to answer 200.
    """

    def _assert_filter_ok(self, query):
        # Shared helper (not collected by pytest): GET the filter URL, expect success.
        response = call_ref_url("get", make_booking_filter_url(query))
        assert response.status_code == 200

    def test_downloadContentState_with_embed_1(self):
        self._assert_filter_ok("downloadContentState=partial&embed=eventBooking")

    def test_downloadContentState_with_embed_2(self):
        self._assert_filter_ok("downloadContentState=partial&embed=seasonBooking")

    def test_downloadContentState_with_embed_3(self):
        self._assert_filter_ok("downloadContentState=partial&embed=transcodeBooking")

    def test_downloadContentState_with_embed_4(self):
        self._assert_filter_ok("downloadContentState=partial&embed=transcodeSeasonBooking")

    def test_downloadContentState_with_embed_5(self):
        self._assert_filter_ok("downloadContentState=partial&embed=reminderBooking")

    def test_downloadContentState_with_embed_6(self):
        self._assert_filter_ok("downloadContentState=partial&embed=eventBooking,seasonBooking")

    def test_downloadContentState_with_embed_7(self):
        self._assert_filter_ok("downloadContentState=partial&embed=eventBooking,seasonBooking,transcodeBooking")

    def test_downloadContentState_with_embed_8(self):
        self._assert_filter_ok("downloadContentState=partial&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking")

    def test_downloadContentState_with_embed_9(self):
        self._assert_filter_ok("downloadContentState=partial&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking")

    def test_downloadContentState_with_bookingType_10(self):
        self._assert_filter_ok("downloadContentState=partial&bookingType=manual")

    def test_downloadContentState_with_bookingType_11(self):
        self._assert_filter_ok("downloadContentState=partial&bookingType=event")

    def test_downloadContentState_with_bookingType_12(self):
        self._assert_filter_ok("downloadContentState=partial&bookingType=manual,event")

    def test_downloadContentState_with_embed_13(self):
        self._assert_filter_ok("downloadContentState=complete&embed=eventBooking")

    def test_downloadContentState_with_embed_14(self):
        self._assert_filter_ok("downloadContentState=complete&embed=seasonBooking")

    def test_downloadContentState_with_embed_15(self):
        self._assert_filter_ok("downloadContentState=complete&embed=transcodeBooking")

    def test_downloadContentState_with_embed_16(self):
        self._assert_filter_ok("downloadContentState=complete&embed=transcodeSeasonBooking")

    def test_downloadContentState_with_embed_17(self):
        self._assert_filter_ok("downloadContentState=complete&embed=reminderBooking")

    def test_downloadContentState_with_embed_18(self):
        self._assert_filter_ok("downloadContentState=complete&embed=eventBooking,seasonBooking")

    def test_downloadContentState_with_embed_19(self):
        self._assert_filter_ok("downloadContentState=complete&embed=eventBooking,seasonBooking,transcodeBooking")

    def test_downloadContentState_with_embed_20(self):
        self._assert_filter_ok("downloadContentState=complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking")

    def test_downloadContentState_with_embed_21(self):
        self._assert_filter_ok("downloadContentState=complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking")

    def test_downloadContentState_with_bookingType_22(self):
        self._assert_filter_ok("downloadContentState=complete&bookingType=manual")

    def test_downloadContentState_with_bookingType_23(self):
        self._assert_filter_ok("downloadContentState=complete&bookingType=event")

    def test_downloadContentState_with_bookingType_24(self):
        self._assert_filter_ok("downloadContentState=complete&bookingType=manual,event")

    def test_downloadContentState_with_embed_25(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=eventBooking")

    def test_downloadContentState_with_embed_26(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=seasonBooking")

    def test_downloadContentState_with_embed_27(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=transcodeBooking")

    def test_downloadContentState_with_embed_28(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=transcodeSeasonBooking")

    def test_downloadContentState_with_embed_29(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=reminderBooking")

    def test_downloadContentState_with_embed_30(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=eventBooking,seasonBooking")

    def test_downloadContentState_with_embed_31(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=eventBooking,seasonBooking,transcodeBooking")

    def test_downloadContentState_with_embed_32(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking")

    def test_downloadContentState_with_embed_33(self):
        self._assert_filter_ok("downloadContentState=partial,complete&embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking")

    def test_downloadContentState_with_bookingType_34(self):
        self._assert_filter_ok("downloadContentState=partial,complete&bookingType=manual")

    def test_downloadContentState_with_bookingType_35(self):
        self._assert_filter_ok("downloadContentState=partial,complete&bookingType=event")

    def test_downloadContentState_with_bookingType_36(self):
        self._assert_filter_ok("downloadContentState=partial,complete&bookingType=manual,event")
class TestembedMajor:
    """Pairwise filter checks with embed as the major parameter.

    Each test combines one embed value set with one bookingType value set and
    expects the booking filter endpoint to answer 200.
    """

    def _assert_filter_ok(self, query):
        # Shared helper (not collected by pytest): GET the filter URL, expect success.
        response = call_ref_url("get", make_booking_filter_url(query))
        assert response.status_code == 200

    def test_embed_with_bookingType_1(self):
        self._assert_filter_ok("embed=eventBooking&bookingType=manual")

    def test_embed_with_bookingType_2(self):
        self._assert_filter_ok("embed=eventBooking&bookingType=event")

    def test_embed_with_bookingType_3(self):
        self._assert_filter_ok("embed=eventBooking&bookingType=manual,event")

    def test_embed_with_bookingType_4(self):
        self._assert_filter_ok("embed=seasonBooking&bookingType=manual")

    def test_embed_with_bookingType_5(self):
        self._assert_filter_ok("embed=seasonBooking&bookingType=event")

    def test_embed_with_bookingType_6(self):
        self._assert_filter_ok("embed=seasonBooking&bookingType=manual,event")

    def test_embed_with_bookingType_7(self):
        self._assert_filter_ok("embed=transcodeBooking&bookingType=manual")

    def test_embed_with_bookingType_8(self):
        self._assert_filter_ok("embed=transcodeBooking&bookingType=event")

    def test_embed_with_bookingType_9(self):
        self._assert_filter_ok("embed=transcodeBooking&bookingType=manual,event")

    def test_embed_with_bookingType_10(self):
        self._assert_filter_ok("embed=transcodeSeasonBooking&bookingType=manual")

    def test_embed_with_bookingType_11(self):
        self._assert_filter_ok("embed=transcodeSeasonBooking&bookingType=event")

    def test_embed_with_bookingType_12(self):
        self._assert_filter_ok("embed=transcodeSeasonBooking&bookingType=manual,event")

    def test_embed_with_bookingType_13(self):
        self._assert_filter_ok("embed=reminderBooking&bookingType=manual")

    def test_embed_with_bookingType_14(self):
        self._assert_filter_ok("embed=reminderBooking&bookingType=event")

    def test_embed_with_bookingType_15(self):
        self._assert_filter_ok("embed=reminderBooking&bookingType=manual,event")

    def test_embed_with_bookingType_16(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking&bookingType=manual")

    def test_embed_with_bookingType_17(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking&bookingType=event")

    def test_embed_with_bookingType_18(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking&bookingType=manual,event")

    def test_embed_with_bookingType_19(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking&bookingType=manual")

    def test_embed_with_bookingType_20(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking&bookingType=event")

    def test_embed_with_bookingType_21(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking&bookingType=manual,event")

    def test_embed_with_bookingType_22(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking&bookingType=manual")

    def test_embed_with_bookingType_23(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking&bookingType=event")

    def test_embed_with_bookingType_24(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking&bookingType=manual,event")

    def test_embed_with_bookingType_25(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking&bookingType=manual")

    def test_embed_with_bookingType_26(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking&bookingType=event")

    def test_embed_with_bookingType_27(self):
        self._assert_filter_ok("embed=eventBooking,seasonBooking,transcodeBooking,transcodeSeasonBooking,reminderBooking&bookingType=manual,event")
|
soumyaslab/pythonlab
|
py2/excel_example/test_planner_extended_filters.py
|
Python
|
gpl-2.0
| 83,909
|
this_should_be_linted = "double quote string"
|
maxcountryman/flake8-single-quotes
|
tests/data/doubles.py
|
Python
|
mit
| 46
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta 4
# Copyright 2015 tvalacarta@gmail.com
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of pelisalacarta 4.
#
# pelisalacarta 4 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pelisalacarta 4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pelisalacarta 4. If not, see <http://www.gnu.org/licenses/>.
# ------------------------------------------------------------
# Common Library Tools
# ------------------------------------------------------------
import errno
import math
from core import config
from core import filetools
from core import logger
from core import scrapertools
from core import scraper
from core.item import Item
from platformcode import platformtools
# Library layout read from the user settings.
FOLDER_MOVIES = config.get_setting("folder_movies")
FOLDER_TVSHOWS = config.get_setting("folder_tvshows")
LIBRARY_PATH = config.get_library_path()
MOVIES_PATH = filetools.join(LIBRARY_PATH, FOLDER_MOVIES)
TVSHOWS_PATH = filetools.join(LIBRARY_PATH, FOLDER_TVSHOWS)

# (Re)create the library directories when any setting is empty or a path is missing.
# Same short-circuit order as checking each value individually.
if not (FOLDER_MOVIES and FOLDER_TVSHOWS and LIBRARY_PATH
        and filetools.exists(MOVIES_PATH) and filetools.exists(TVSHOWS_PATH)):
    config.verify_directories_created()

# Plugin URL prefix written into every .strm file.
addon_name = "plugin://plugin.video.pelisalacarta/"
def read_nfo(path_nfo, item=None):
    """Read a .nfo file and return its scraper header plus the stored Item.

    nfo layout: first line is the header (url_scraper | xml), the remaining
    lines are an Item serialized as json. [url_scraper] and [xml] are
    optional, but exactly one of them is always present.

    @param path_nfo: absolute path to the nfo file
    @type path_nfo: str
    @param item: when given, the returned Item is a clone of it with
        'infoLabels', 'library_playcounts' and 'path' taken from the nfo
    @type: Item
    @return: tuple made of the header (head_nfo = 'url_scraper'|'xml') and
        the 'item_json' object
    @rtype: tuple (str, Item)
    """
    head = ""
    result = None
    raw = filetools.read(path_nfo)
    if raw:
        lines = raw.splitlines()
        head = lines[0] + "\n"
        stored = Item().fromjson("\n".join(lines[1:]))
        if item:
            # Clone the caller's item and graft the persisted fields onto it.
            result = item.clone()
            result.infoLabels = stored.infoLabels
            if 'library_playcounts' in stored:
                result.library_playcounts = stored.library_playcounts
            if stored.path:
                result.path = stored.path
        else:
            result = stored
            # Promote a stored fanart infoLabel to the top-level attribute.
            if 'fanart' in result.infoLabels:
                result.fanart = result.infoLabels['fanart']
    return head, result
def save_library_movie(item):
    """
    Saves the element item, with the values it contains, into the movie library.
    @type item: item
    @param item: element to be saved.
    @rtype insertados: int
    @return: number of inserted elements
    @rtype sobreescritos: int
    @return: number of overwritten elements
    @rtype fallidos: int
    @return: number of failed elements, or -1 if everything failed
    """
    logger.info()
    # logger.debug(item.tostring('\n'))
    insertados = 0
    sobreescritos = 0
    fallidos = 0
    path = ""
    # Try to obtain the correct title:
    # 1. contentTitle: this should be the right place, since title usually contains "Add to library..."
    # 2. fulltitle
    # 3. title
    if not item.contentTitle:
        # Put the correct title where the scraper can locate it
        if item.fulltitle:
            item.contentTitle = item.fulltitle
        else:
            item.contentTitle = item.title
    # If at this point we still have no title, leave
    if not item.contentTitle or not item.channel:
        logger.debug("NO ENCONTRADO contentTitle")
        return 0, 0, -1  # Exit without saving
    scraper_return = scraper.find_and_set_infoLabels(item)
    # At this point we can have:
    # scraper_return = True: an item whose infoLabels carry the updated movie information
    # scraper_return = False: an item with no movie information (cancel was pressed in the dialog)
    # item.infoLabels['code'] == "" : the IMDB identifier required to continue was not found, so exit
    if not scraper_return or not item.infoLabels['code']:
        # TODO for now, when there is no result nothing is added,
        # although we could open a dialog to enter the identifier/name by hand
        logger.debug("NO ENCONTRADO EN SCRAPER O NO TIENE code")
        return 0, 0, -1
    _id = item.infoLabels['code'][0]
    # progress dialog
    p_dialog = platformtools.dialog_progress('pelisalacarta', 'Añadiendo película...')
    if config.get_setting("original_title_folder", "biblioteca") == 1 and item.infoLabels['originaltitle']:
        base_name = item.infoLabels['originaltitle']
    else:
        base_name = item.contentTitle
    # Normalise the folder name: filesystem-safe, lowercase, utf-8 (Python 2 byte string)
    base_name = unicode(filetools.validate_path(base_name.replace('/', '-')), "utf8").lower().encode("utf8")
    # Reuse an existing movie folder whose "[code]" suffix matches this movie
    for raiz, subcarpetas, ficheros in filetools.walk(MOVIES_PATH):
        for c in subcarpetas:
            code = scrapertools.find_single_match(c, '\[(.*?)\]')
            if code and code in item.infoLabels['code']:
                path = filetools.join(raiz, c)
                _id = code
                break
    if not path:
        # Create the folder
        path = filetools.join(MOVIES_PATH, ("%s [%s]" % (base_name, _id)).strip())
        logger.info("Creando directorio pelicula:" + path)
        if not filetools.mkdir(path):
            logger.debug("No se ha podido crear el directorio")
            return 0, 0, -1
    nfo_path = filetools.join(path, "%s [%s].nfo" % (base_name, _id))
    strm_path = filetools.join(path, "%s.strm" % base_name)
    json_path = filetools.join(path, ("%s [%s].json" % (base_name, item.channel.lower())))
    nfo_exists = filetools.exists(nfo_path)
    strm_exists = filetools.exists(strm_path)
    json_exists = filetools.exists(json_path)
    if not nfo_exists:
        # Create the .nfo if it does not exist
        logger.info("Creando .nfo: " + nfo_path)
        head_nfo = scraper.get_nfo(item)
        item_nfo = Item(title=item.contentTitle, channel="biblioteca", action='findvideos',
                        library_playcounts={"%s [%s]" % (base_name, _id): 0}, infoLabels=item.infoLabels,
                        library_urls={})
    else:
        # The .nfo exists, but we are adding a new channel, so open it
        head_nfo, item_nfo = read_nfo(nfo_path)
    if not strm_exists:
        # Create base_name.strm if it does not exist
        item_strm = Item(channel='biblioteca', action='play_from_library',
                         strm_path=strm_path.replace(MOVIES_PATH, ""), contentType='movie',
                         contentTitle = item.contentTitle)
        strm_exists = filetools.write(strm_path, '%s?%s' % (addon_name, item_strm.tourl()))
        item_nfo.strm_path = strm_path.replace(MOVIES_PATH, "")
    # Continue only if item_nfo and the .strm exist
    if item_nfo and strm_exists:
        if json_exists:
            logger.info("El fichero existe. Se sobreescribe")
            sobreescritos += 1
        else:
            insertados += 1
        if filetools.write(json_path, item.tojson()):
            p_dialog.update(100, 'Añadiendo película...', item.contentTitle)
            item_nfo.library_urls[item.channel] = item.url
            if filetools.write(nfo_path, head_nfo + item_nfo.tojson()):
                # refresh the Kodi library with the movie
                if config.is_xbmc():
                    from platformcode import xbmc_library
                    xbmc_library.update(FOLDER_MOVIES, filetools.basename(path) + "/")
                p_dialog.close()
                return insertados, sobreescritos, fallidos
    # If we reach this point it is because something failed
    logger.error("No se ha podido guardar %s en la biblioteca" % item.contentTitle)
    p_dialog.update(100, 'Fallo al añadir...', item.contentTitle)
    p_dialog.close()
    return 0, 0, -1
def save_library_tvshow(item, episodelist):
    """
    Saves into the TV-show library the series with all the episodes included in episodelist.
    @type item: item
    @param item: item representing the series to save
    @type episodelist: list
    @param episodelist: list of items representing the episodes to be saved.
    @rtype insertados: int
    @return: number of inserted episodes
    @rtype sobreescritos: int
    @return: number of overwritten episodes
    @rtype fallidos: int
    @return: number of failed episodes, or -1 if the whole series failed
    """
    logger.info()
    # logger.debug(item.tostring('\n'))
    path = ""
    # If at this point we have neither title nor code, leave
    if not (item.contentSerieName or item.infoLabels['code']) or not item.channel:
        logger.debug("NO ENCONTRADO contentSerieName NI code")
        return 0, 0, -1  # Exit without saving
    scraper_return = scraper.find_and_set_infoLabels(item)
    # At this point we can have:
    # scraper_return = True: an item whose infoLabels carry the updated series information
    # scraper_return = False: an item with no series information (cancel was pressed in the dialog)
    # item.infoLabels['code'] == "" : the IMDB identifier required to continue was not found, so exit
    if not scraper_return or not item.infoLabels['code']:
        # TODO for now, when there is no result nothing is added,
        # although we could open a dialog to enter the identifier/name by hand
        logger.debug("NO ENCONTRADO EN SCRAPER O NO TIENE code")
        return 0, 0, -1
    _id = item.infoLabels['code'][0]
    if config.get_setting("original_title_folder", "biblioteca") == 1 and item.infoLabels['originaltitle']:
        base_name = item.infoLabels['originaltitle']
    elif item.infoLabels['title']:
        base_name = item.infoLabels['title']
    else:
        base_name = item.contentSerieName
    # Normalise the folder name: filesystem-safe, lowercase, utf-8 (Python 2 byte string)
    base_name = unicode(filetools.validate_path(base_name.replace('/', '-')), "utf8").lower().encode("utf8")
    # Reuse an existing series folder whose "[code]" suffix matches this series
    for raiz, subcarpetas, ficheros in filetools.walk(TVSHOWS_PATH):
        for c in subcarpetas:
            code = scrapertools.find_single_match(c, '\[(.*?)\]')
            if code and code in item.infoLabels['code']:
                path = filetools.join(raiz, c)
                _id = code
                break
    if not path:
        path = filetools.join(TVSHOWS_PATH, ("%s [%s]" % (base_name, _id)).strip())
        logger.info("Creando directorio serie: " + path)
        try:
            filetools.mkdir(path)
        except OSError, exception:
            # an already-existing folder is fine; re-raise anything else
            if exception.errno != errno.EEXIST:
                raise
    tvshow_path = filetools.join(path, "tvshow.nfo")
    if not filetools.exists(tvshow_path):
        # Create tvshow.nfo, if it does not exist, with the head_nfo, series info and watched-episode marks
        logger.info("Creando tvshow.nfo: " + tvshow_path)
        head_nfo = scraper.get_nfo(item)
        item_tvshow = Item(title=item.contentTitle, channel="biblioteca", action="get_temporadas",
                           fanart=item.infoLabels['fanart'], thumbnail=item.infoLabels['thumbnail'],
                           infoLabels=item.infoLabels, path=path.replace(TVSHOWS_PATH, ""))
        item_tvshow.library_playcounts = {}
        item_tvshow.library_urls = {item.channel: item.url}
    else:
        # tvshow.nfo exists, but we are adding a new channel, so update the url list
        head_nfo, item_tvshow = read_nfo(tvshow_path)
        item_tvshow.channel = "biblioteca"
        item_tvshow.action = "get_temporadas"
        item_tvshow.library_urls[item.channel] = item.url
    # FILTERTOOLS
    # if the channel has a language filter, add the channel and the show
    if episodelist and "list_idiomas" in episodelist[0]:
        # a filtered channel was already added before: add or update the channel and show
        if "library_filter_show" in item_tvshow:
            item_tvshow.library_filter_show[item.channel] = item.show
        # there was no filtered channel yet, so create the mapping for the first time
        else:
            item_tvshow.library_filter_show = {item.channel: item.show}
    if item.channel != "descargas":
        item_tvshow.active = 1  # so it is refreshed daily when library_service is called
    filetools.write(tvshow_path, head_nfo + item_tvshow.tojson())
    if not episodelist:
        # The episode list is empty
        return 0, 0, 0
    # Save the episodes
    '''import time
    start_time = time.time()'''
    insertados, sobreescritos, fallidos = save_library_episodes(path, episodelist, item)
    '''msg = "Insertados: %d | Sobreescritos: %d | Fallidos: %d | Tiempo: %2.2f segundos" % \
        (insertados, sobreescritos, fallidos, time.time() - start_time)
    logger.debug(msg)'''
    return insertados, sobreescritos, fallidos
def save_library_episodes(path, episodelist, serie, silent=False, overwrite=True):
    """
    Saves to the given path every episode included in episodelist.
    @type path: str
    @param path: folder where the episodes are saved
    @type episodelist: list
    @param episodelist: list of items representing the episodes to save
    @type serie: item
    @param serie: series the episodes belong to
    @type silent: bool
    @param silent: whether the progress notification is shown
    @param overwrite: allows overwriting existing files
    @type overwrite: bool
    @rtype insertados: int
    @return: number of episodes inserted
    @rtype sobreescritos: int
    @return: number of episodes overwritten
    @rtype fallidos: int
    @return: number of episodes that failed
    """
    logger.info()

    # No episode list -> nothing to save
    if not len(episodelist):
        logger.info("No hay lista de episodios, salimos sin crear strm")
        return 0, 0, 0

    insertados = 0
    sobreescritos = 0
    fallidos = 0
    news_in_playcounts = {}

    # List every file of the series once, so we avoid testing for existence one by one
    # NOTE: .next() is Python 2 only
    raiz, carpetas_series, ficheros = filetools.walk(path).next()
    ficheros = [filetools.join(path, f) for f in ficheros]

    # silent hides the progress dialog (used by library_service)
    if not silent:
        # progress dialog
        p_dialog = platformtools.dialog_progress('pelisalacarta', 'Añadiendo episodios...')
        p_dialog.update(0, 'Añadiendo episodio...')

    new_episodelist =[]
    # Extract season and episode numbers, discarding entries that do not have them
    for e in episodelist:
        try:
            season_episode = scrapertools.get_season_and_episode(e.title)
            e.infoLabels = serie.infoLabels
            e.contentSeason, e.contentEpisodeNumber = season_episode.split("x")
            new_episodelist.append(e)
        except:
            continue

    # No valid episodes, nothing to save
    if not len(new_episodelist):
        logger.info("No hay lista de episodios, salimos sin crear strm")
        return 0, 0, 0

    # fix float because division is done wrong in python 2.x
    t = float(100) / len(new_episodelist)

    for i, e in enumerate(scraper.sort_episode_list(new_episodelist)):
        if not silent:
            p_dialog.update(int(math.ceil((i + 1) * t)), 'Añadiendo episodio...', e.title)

        season_episode = "%sx%s" % (e.contentSeason, str(e.contentEpisodeNumber).zfill(2))
        strm_path = filetools.join(path, "%s.strm" % season_episode)
        nfo_path = filetools.join(path, "%s.nfo" % season_episode)
        json_path = filetools.join(path, ("%s [%s].json" % (season_episode, e.channel)).lower())

        strm_exists = strm_path in ficheros
        nfo_exists = nfo_path in ficheros
        json_exists = json_path in ficheros

        if not strm_exists:
            # season_episode.strm does not exist yet: create it
            item_strm = Item(action='play_from_library', channel='biblioteca',
                             strm_path=strm_path.replace(TVSHOWS_PATH, ""), infoLabels={})
            item_strm.contentSeason = e.contentSeason
            item_strm.contentEpisodeNumber = e.contentEpisodeNumber
            item_strm.contentType = e.contentType
            item_strm.contentTitle = season_episode

            # FILTERTOOLS
            if item_strm.list_idiomas:
                # if tvshow.nfo carries a language filter, pass it to the generated item_strm
                if "library_filter_show" in serie:
                    item_strm.library_filter_show = serie.library_filter_show

                if item_strm.library_filter_show == "":
                    logger.error("Se ha producido un error al obtener el nombre de la serie a filtrar")

            # logger.debug("item_strm" + item_strm.tostring('\n'))
            # logger.debug("serie " + serie.tostring('\n'))
            strm_exists = filetools.write(strm_path, '%s?%s' % (addon_name, item_strm.tourl()))

        item_nfo = None
        if not nfo_exists and e.infoLabels["code"]:
            # season_episode.nfo does not exist yet: create it
            scraper.find_and_set_infoLabels(e)
            head_nfo = scraper.get_nfo(e)

            item_nfo = e.clone(channel="biblioteca", url="", action='findvideos',
                               strm_path=strm_path.replace(TVSHOWS_PATH, ""))

            nfo_exists = filetools.write(nfo_path, head_nfo + item_nfo.tojson())

        # Only continue if both season_episode.nfo and season_episode.strm exist
        if nfo_exists and strm_exists:
            if not json_exists or overwrite:
                # Get the episode infoLabels from the nfo if we did not just build them
                if not item_nfo:
                    head_nfo, item_nfo = read_nfo(nfo_path)

                e.infoLabels = item_nfo.infoLabels

                if filetools.write(json_path, e.tojson()):
                    if not json_exists:
                        logger.info("Insertado: %s" % json_path)
                        insertados += 1
                        # Mark the episode as unwatched
                        news_in_playcounts[season_episode] = 0
                        # Mark the season as unwatched
                        news_in_playcounts["season %s" % e.contentSeason] = 0
                        # Mark the series as unwatched
                        # logger.debug("serie " + serie.tostring('\n'))
                        news_in_playcounts[serie.contentTitle] = 0
                    else:
                        logger.info("Sobreescrito: %s" % json_path)
                        sobreescritos += 1
                else:
                    logger.info("Fallido: %s" % json_path)
                    fallidos += 1
        else:
            logger.info("Fallido: %s" % json_path)
            fallidos += 1

        if not silent and p_dialog.iscanceled():
            break

    if not silent:
        p_dialog.close()

    if news_in_playcounts:
        # If there are new episodes, mark them as unwatched in tvshow.nfo ...
        tvshow_path = filetools.join(path, "tvshow.nfo")
        try:
            import datetime
            head_nfo, tvshow_item = read_nfo(tvshow_path)
            tvshow_item.library_playcounts.update(news_in_playcounts)

            # active == 30 presumably means "paused updates"; re-enable daily updates — TODO confirm
            if tvshow_item.active == 30:
                tvshow_item.active = 1
            update_last = datetime.date.today()
            tvshow_item.update_last = update_last.strftime('%Y-%m-%d')
            update_next = datetime.date.today() + datetime.timedelta(days=int(tvshow_item.active))
            tvshow_item.update_next = update_next.strftime('%Y-%m-%d')

            filetools.write(tvshow_path, head_nfo + tvshow_item.tojson())
        except:
            logger.error("Error al actualizar tvshow.nfo")
            fallidos = -1
        else:
            # ... and if that went well, refresh the Kodi library
            if config.is_xbmc() and not silent:
                from platformcode import xbmc_library
                xbmc_library.update(FOLDER_TVSHOWS, filetools.basename(path))

    # -1 signals to the caller that every episode failed
    if fallidos == len(episodelist):
        fallidos = -1

    logger.debug("%s [%s]: insertados= %s, sobreescritos= %s, fallidos= %s" %
                 (serie.contentSerieName, serie.channel, insertados, sobreescritos, fallidos))

    return insertados, sobreescritos, fallidos
def add_pelicula_to_library(item):
    """
    Stores a movie in the movie library. The movie can be a link inside a
    channel or a previously downloaded video.

    To add locally downloaded episodes the item must carry exclusively:
      - contentTitle: movie title
      - title: title shown next to the link list -findvideos- ("Reproducir video local HD")
      - infoLabels["tmdb_id"] or infoLabels["imdb_id"]
      - contentType == "movie"
      - channel = "descargas"
      - url: local path of the video

    @type item: item
    @param item: element to be saved.
    """
    logger.info()

    if config.is_xbmc():
        from platformcode import xbmc_library
        xbmc_library.ask_set_content()

    movie_item = item.clone(action="findvideos")
    _, _, fallidos = save_library_movie(movie_item)

    header = config.get_localized_string(30131)
    if fallidos:
        # some episode json/nfo could not be written (or save returned -1)
        platformtools.dialog_ok(header,
                                "ERROR, la pelicula NO se ha añadido a la biblioteca")
    else:
        # 'se ha añadido a la biblioteca'
        platformtools.dialog_ok(header, movie_item.contentTitle,
                                config.get_localized_string(30135))
def add_serie_to_library(item, channel=None):
    """
    Stores content in the series library. This content can be one of two things:
        - The series with every episode included in the episode list.
        - A single episode previously downloaded locally.

    To add locally downloaded episodes the item must carry exclusively:
        - contentSerieName (or show): series title
        - contentTitle: episode title used to extract season_and_episode ("1x01 Piloto")
        - title: title shown next to the link list -findvideos- ("Reproducir video local")
        - infoLabels["tmdb_id"] or infoLabels["imdb_id"]
        - contentType != "movie"
        - channel = "descargas"
        - url: local path of the video

    @type item: item
    @param item: item representing the series to save
    @type channel: module
    @param channel: channel module the series is saved from.
                    By default item.from_channel or item.channel is imported.
    """
    logger.info("show=#" + item.show + "#")

    if config.is_xbmc():
        from platformcode import xbmc_library
        xbmc_library.ask_set_content()

    if item.channel == "descargas":
        itemlist = [item.clone()]

    else:
        # This marker means the item carries something extra in the "extra" attribute
        item.action = item.extra
        if isinstance(item.extra, str) and "###" in item.extra:
            item.action = item.extra.split("###")[0]
            item.extra = item.extra.split("###")[1]

        # restore the original action/channel saved under from_action/from_channel
        if item.from_action:
            item.__dict__["action"] = item.__dict__.pop("from_action")
        if item.from_channel:
            item.__dict__["channel"] = item.__dict__.pop("from_channel")

        if not channel:
            try:
                channel = __import__('channels.%s' % item.channel, fromlist=["channels.%s" % item.channel])
            except ImportError:
                # Python 2 exec statement used as a fallback import mechanism
                exec "import channels." + item.channel + " as channel"

        # Get the episode list from the channel
        itemlist = getattr(channel, item.action)(item)

    insertados, sobreescritos, fallidos = save_library_tvshow(item, itemlist)

    if not insertados and not sobreescritos and not fallidos:
        platformtools.dialog_ok("Biblioteca", "ERROR, la serie NO se ha añadido a la biblioteca",
                                "No se ha podido obtener ningun episodio")
        logger.error("La serie %s no se ha podido añadir a la biblioteca. No se ha podido obtener ningun episodio"
                     % item.show)

    elif fallidos == -1:
        # -1 means every episode failed (or tvshow.nfo could not be updated)
        platformtools.dialog_ok("Biblioteca", "ERROR, la serie NO se ha añadido a la biblioteca")
        logger.error("La serie %s no se ha podido añadir a la biblioteca" % item.show)

    elif fallidos > 0:
        platformtools.dialog_ok("Biblioteca", "ERROR, la serie NO se ha añadido completa a la biblioteca")
        logger.error("No se han podido añadir %s episodios de la serie %s a la biblioteca" % (fallidos, item.show))

    else:
        platformtools.dialog_ok("Biblioteca", "La serie se ha añadido a la biblioteca")
        logger.info("[launcher.py] Se han añadido %s episodios de la serie %s a la biblioteca" %
                    (insertados, item.show))

        if config.is_xbmc():
            if config.get_setting("sync_trakt_new_tvshow", "biblioteca"):
                import xbmc
                from platformcode import xbmc_library
                if config.get_setting("sync_trakt_new_tvshow_wait", "biblioteca"):
                    # Make sure Kodi is not scanning the library before syncing
                    while xbmc.getCondVisibility('Library.IsScanningVideo()'):
                        xbmc.sleep(1000)
                # Launch the trakt synchronisation
                xbmc_library.sync_trakt()
|
r0balo/pelisalacarta
|
python/main-classic/core/library.py
|
Python
|
gpl-3.0
| 26,504
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 OpenERP SA (<http://openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import string
import datetime
import re
_logger = logging.getLogger(__name__)
try:
import vatnumber
except ImportError:
_logger.warning("VAT validation partially unavailable because the `vatnumber` Python library cannot be found. "
"Install it to support more countries, for example with `easy_install vatnumber`.")
vatnumber = None
from openerp.osv import fields, osv
from openerp.tools.misc import ustr
from openerp.tools.translate import _
# Sample valid VAT number per country code; shown to the user in the
# constraint error message as the expected format for that country.
_ref_vat = {
    'at': 'ATU12345675',
    'be': 'BE0477472701',
    'bg': 'BG1234567892',
    'ch': 'CHE-123.456.788 TVA or CH TVA 123456', #Swiss by Yannick Vaucher @ Camptocamp
    'cy': 'CY12345678F',
    'cz': 'CZ12345679',
    'de': 'DE123456788',
    'dk': 'DK12345674',
    'ee': 'EE123456780',
    'el': 'EL12345670',
    'es': 'ESA12345674',
    'fi': 'FI12345671',
    'fr': 'FR32123456789',
    'gb': 'GB123456782',
    'gr': 'GR12345670',
    'hu': 'HU12345676',
    'hr': 'HR01234567896', # Croatia, contributed by Milan Tribuson
    'ie': 'IE1234567T',
    'it': 'IT12345670017',
    'lt': 'LT123456715',
    'lu': 'LU12345613',
    'lv': 'LV41234567891',
    'mt': 'MT12345634',
    'mx': 'MXABC123456T1B',
    'nl': 'NL123456782B90',
    'no': 'NO123456785',
    'pl': 'PL1234567883',
    'pt': 'PT123456789',
    'ro': 'RO1234567897',
    'se': 'SE123456789701',
    'si': 'SI12345679',
    'sk': 'SK0012345675',
}
class res_partner(osv.osv):
    """Extends res.partner with VAT number validation."""
    _inherit = 'res.partner'

    def _split_vat(self, vat):
        # first two characters are the ISO country code, the rest is the
        # number itself with spaces removed
        vat_country, vat_number = vat[:2].lower(), vat[2:].replace(' ', '')
        return vat_country, vat_number

    def simple_vat_check(self, cr, uid, country_code, vat_number, context=None):
        '''
        Check the VAT number depending of the country.
        http://sima-pc.com/nif.php
        '''
        if not ustr(country_code).encode('utf-8').isalpha():
            return False
        # prefer a locally-defined check_vat_XX method, then the vatnumber lib
        check_func_name = 'check_vat_' + country_code
        check_func = getattr(self, check_func_name, None) or \
                     getattr(vatnumber, check_func_name, None)
        if not check_func:
            # No VAT validation available, default to check that the country code exists
            res_country = self.pool.get('res.country')
            return bool(res_country.search(cr, uid, [('code', '=ilike', country_code)], context=context))
        return check_func(vat_number)

    def vies_vat_check(self, cr, uid, country_code, vat_number, context=None):
        try:
            # Validate against VAT Information Exchange System (VIES)
            # see also http://ec.europa.eu/taxation_customs/vies/
            return vatnumber.check_vies(country_code.upper()+vat_number)
        except Exception:
            # see http://ec.europa.eu/taxation_customs/vies/checkVatService.wsdl
            # Fault code may contain INVALID_INPUT, SERVICE_UNAVAILABLE, MS_UNAVAILABLE,
            # TIMEOUT or SERVER_BUSY. There is no way we can validate the input
            # with VIES if any of these arise, including the first one (it means invalid
            # country code or empty VAT number), so we fall back to the simple check.
            return self.simple_vat_check(cr, uid, country_code, vat_number, context=context)

    def button_check_vat(self, cr, uid, ids, context=None):
        # manual "check VAT" button: raise a user-visible error when invalid
        if not self.check_vat(cr, uid, ids, context=context):
            msg = self._construct_constraint_msg(cr, uid, ids, context=context)
            raise osv.except_osv(_('Error!'), msg)
        return True

    def check_vat(self, cr, uid, ids, context=None):
        user_company = self.pool.get('res.users').browse(cr, uid, uid).company_id
        if user_company.vat_check_vies:
            # force full VIES online check
            check_func = self.vies_vat_check
        else:
            # quick and partial off-line checksum validation
            check_func = self.simple_vat_check
        for partner in self.browse(cr, uid, ids, context=context):
            if not partner.vat:
                continue
            vat_country, vat_number = self._split_vat(partner.vat)
            if not check_func(cr, uid, vat_country, vat_number, context=context):
                #return False
                # NOTE(review): modified from upstream — returning a truthy dict
                # means the _constraints check never fails here and the bad VAT
                # is only reported as an onchange-style warning; presumably
                # intentional in this fork, confirm with the maintainers.
                return {'value':True,'warning':{'title':'warning','message':'Vat is bad'}}
        return True

    def vat_change(self, cr, uid, ids, value, context=None):
        # onchange helper: a non-empty VAT implies the partner is VAT subjected
        return {'value': {'vat_subjected': bool(value)}}

    _columns = {
        'vat_subjected': fields.boolean('VAT Legal Statement', help="Check this box if the partner is subjected to the VAT. It will be used for the VAT legal statement.")
    }

    def _commercial_fields(self, cr, uid, context=None):
        return super(res_partner, self)._commercial_fields(cr, uid, context=context) + ['vat_subjected']

    def _construct_constraint_msg(self, cr, uid, ids, context=None):
        # Build the error message shown when the VAT constraint fails,
        # including a country-specific sample number when available.
        def default_vat_check(cn, vn):
            # by default, a VAT number is valid if:
            #  it starts with 2 letters
            #  has more than 3 characters
            return cn[0] in string.ascii_lowercase and cn[1] in string.ascii_lowercase
        vat_country, vat_number = self._split_vat(self.browse(cr, uid, ids)[0].vat)
        vat_no = "'CC##' (CC=Country Code, ##=VAT Number)"
        if default_vat_check(vat_country, vat_number):
            vat_no = _ref_vat[vat_country] if vat_country in _ref_vat else vat_no
        return '\n' + _('This VAT number does not seem to be valid.\nNote: the expected format is %s') % vat_no

    _constraints = [(check_vat, _construct_constraint_msg, ["vat"])]

    # old format: country prefix + keyword + 6 digits
    __check_vat_ch_re1 = re.compile(r'(MWST|TVA|IVA)[0-9]{6}$')
    # new format: CHE-###.###.### followed by the tax keyword
    __check_vat_ch_re2 = re.compile(r'E([0-9]{9}|-[0-9]{3}\.[0-9]{3}\.[0-9]{3})(MWST|TVA|IVA)$')

    def check_vat_ch(self, vat):
        '''
        Check Switzerland VAT number.
        '''
        # VAT number in Switzerland will change between 2011 and 2013
        # http://www.estv.admin.ch/mwst/themen/00154/00589/01107/index.html?lang=fr
        # Old format is "TVA 123456" we will admit the user has to enter ch before the number
        # Format will becomes such as "CHE-999.999.99C TVA"
        # Both old and new format will be accepted till end of 2013
        # Accepted format are: (spaces are ignored)
        #  CH TVA ######
        #  CH IVA ######
        #  CH MWST #######
        #
        #  CHE#########MWST
        #  CHE#########TVA
        #  CHE#########IVA
        #  CHE-###.###.### MWST
        #  CHE-###.###.### TVA
        #  CHE-###.###.### IVA
        #
        if self.__check_vat_ch_re1.match(vat):
            return True
        match = self.__check_vat_ch_re2.match(vat)
        if match:
            # For new TVA numbers, do a mod11 check
            num = filter(lambda s: s.isdigit(), match.group(1))        # get the digits only
            factor = (5,4,3,2,7,6,5,4)
            csum = sum([int(num[i]) * factor[i] for i in range(8)])
            check = (11 - (csum % 11)) % 11
            return check == int(num[8])
        return False

    # Mexican VAT verification, contributed by <moylop260@hotmail.com>
    # and Panos Christeas <p_christ@hol.gr>
    __check_vat_mx_re = re.compile(r"(?P<primeras>[A-Za-z\xd1\xf1&]{3,4})" \
                                   r"[ \-_]?" \
                                   r"(?P<ano>[0-9]{2})(?P<mes>[01][0-9])(?P<dia>[0-3][0-9])" \
                                   r"[ \-_]?" \
                                   r"(?P<code>[A-Za-z0-9&\xd1\xf1]{3})$")

    def check_vat_mx(self, vat):
        ''' Mexican VAT verification

        Verificar RFC México
        '''
        # we convert to 8-bit encoding, to help the regex parse only bytes
        vat = ustr(vat).encode('iso8859-1')
        m = self.__check_vat_mx_re.match(vat)
        if not m:
            #No valid format
            return False
        try:
            # the embedded date (2-digit year, month, day) must be a real date
            ano = int(m.group('ano'))
            if ano > 30:
                ano = 1900 + ano
            else:
                ano = 2000 + ano
            datetime.date(ano, int(m.group('mes')), int(m.group('dia')))
        except ValueError:
            return False

        #Valid format and valid date
        return True

    # Norway VAT validation, contributed by Rolv Råen (adEgo) <rora@adego.no>
    def check_vat_no(self, vat):
        '''
        Check Norway VAT number.See http://www.brreg.no/english/coordination/number.html
        '''
        if len(vat) != 9:
            return False
        try:
            int(vat)
        except ValueError:
            return False

        # weighted mod-11 checksum over the first eight digits
        sum = (3 * int(vat[0])) + (2 * int(vat[1])) + \
            (7 * int(vat[2])) + (6 * int(vat[3])) + \
            (5 * int(vat[4])) + (4 * int(vat[5])) + \
            (3 * int(vat[6])) + (2 * int(vat[7]))

        check = 11 -(sum % 11)
        if check == 11:
            check = 0
        if check == 10:
            # 10 is not a valid check digit for an organization number
            return False
        return check == int(vat[8])
# Instantiating registers the model with the ORM (old-style OpenERP API).
res_partner()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jmesteve/openerpseda
|
openerp/addons/base_vat/base_vat.py
|
Python
|
agpl-3.0
| 10,127
|
# -*- coding: utf-8 -*
__kupfer_name__ = _("XFCE Session Management")
__kupfer_sources__ = ("XfceItemsSource", )
__description__ = _("Special items and actions for XFCE environment")
__version__ = "2012-09-17"
__author__ = "Karol Będkowski <karol.bedkowski@gmail.com>"
from kupfer.plugin import session_support as support
# sequences of argument lists
# (each constant is a tuple of argv lists to try, in order)
LOGOUT_CMD = (["xfce4-session-logout", "--logout"],)
SHUTDOWN_CMD = (["xfce4-session-logout"],)
LOCKSCREEN_CMD = (["xdg-screensaver", "lock"], )
class XfceItemsSource (support.CommonSource):
	"""Source providing the XFCE session items: logout, lock screen, shutdown."""
	def __init__(self):
		support.CommonSource.__init__(self, _("XFCE Session Management"))
	def get_items(self):
		# return the session actions, each wrapping its command sequence
		return (
			support.LogoutBrowse(LOGOUT_CMD),
			support.LockScreen(LOCKSCREEN_CMD),
			support.ShutdownBrowse(SHUTDOWN_CMD),
		)
|
KarolBedkowski/kupfer-adds
|
kupfer/plugin/session_xfce.py
|
Python
|
gpl-3.0
| 793
|
from flask import Flask, render_template, session, request, redirect
import random
app = Flask(__name__)
# NOTE(review): hard-coded secret key is fine for a demo app, but should come
# from configuration in anything deployed.
app.secret_key = 'my_secret_key'
@app.route('/')
def index():
    """Render the main page, initialising session state on the first visit."""
    # Only write the keys when they are missing, so existing progress survives.
    for key, initial in (('gold', 0), ('activities', [])):
        if key not in session:
            session[key] = initial
    return render_template('index.html')
@app.route('/process', methods = ['POST'])
def process():
    """Handle a building visit: roll a payout, update gold, log the activity."""
    # A fresh payout is rolled for every building on each request.
    buildings = {
        'farm': random.randint(5,10),
        'casino': random.randint(-50,50),
        'cave': random.randint(0,30),
        'house': random.randint(0,5),
    }
    choice = request.form['building']
    if choice in buildings:
        result = buildings[choice]
        session['gold'] = session['gold'] + result
        gained = result > 0
        session['activities'].append({
            'class': 'green' if gained else 'red',
            'activity': "You went to the {} and {} {} gold!".format(
                choice, 'gained' if gained else 'lost', result),
        })
    return redirect('/')
if __name__ == '__main__':
    # Run the development server with the debugger/reloader enabled.
    app.run(debug = True)
"""
Explain line 24 - 31. Will it work? How, where what? why!?
#25 tells you how many golds you have based
on which building is visited. The activity is passed to an empty array
that was created for activities A dictonary is created of concatenated 'activity'
string describing the actions.
"""
|
authman/Python201609
|
Jessie Smith/assignments/Flask Olympics/olympics8/server.py
|
Python
|
mit
| 1,465
|
"""Naming conventions for abd."""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# abdt_naming
#
# Public Classes:
# Error
# TrackerBranch
# .branch
# .status
# .description
# .base
# .id
# .remote
# .remote_base
# .remote_branch
# .review_name
# .update_status
# ReviewBranch
# .branch
# .description
# .base
# .remote
# .remote_base
# .remote_branch
# .make_tracker
#
# Public Functions:
# isStatusBad
# isStatusBadPreReview
# isStatusBadLand
# get_branch_pairs
#
# Public Assignments:
# WB_STATUS_OK
# WB_STATUS_PREFIX_BAD
# WB_STATUS_BAD_NAME
# WB_STATUS_BAD_PREREVIEW
# WB_STATUS_BAD_INREVIEW
# WB_STATUS_BAD_LAND
# WB_STATUS_BAD_ABANDONED
# WB_DICT_STATUS_DESC
# EXAMPLE_REVIEW_BRANCH_BASE
# EXAMPLE_REVIEW_BRANCH_DESCRIPTION
# ARCYD_BRANCH_NAMESPACE
# TRACKING_BRANCH_PREFIX
# RESERVED_BRANCH_NAME
# BranchPair
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
import collections
import phlgitu_ref
WB_STATUS_OK = "ok"
WB_STATUS_PREFIX_BAD = "bad_"
WB_STATUS_BAD_NAME = WB_STATUS_PREFIX_BAD + "name"
WB_STATUS_BAD_PREREVIEW = WB_STATUS_PREFIX_BAD + "prerev"
WB_STATUS_BAD_INREVIEW = WB_STATUS_PREFIX_BAD + "inrev"
WB_STATUS_BAD_LAND = WB_STATUS_PREFIX_BAD + "land"
WB_STATUS_BAD_ABANDONED = WB_STATUS_PREFIX_BAD + "abandoned"
WB_DICT_STATUS_DESC = {
WB_STATUS_OK: "ok",
WB_STATUS_BAD_NAME: "branch name is invalid",
WB_STATUS_BAD_PREREVIEW: "didn't manage to create a review",
WB_STATUS_BAD_INREVIEW: "bad update during review",
WB_STATUS_BAD_LAND: "didn't manage to land the change",
WB_STATUS_BAD_ABANDONED: "the review is abandoned",
}
EXAMPLE_REVIEW_BRANCH_BASE = "master"
EXAMPLE_REVIEW_BRANCH_DESCRIPTION = "mywork"
ARCYD_BRANCH_NAMESPACE = 'dev/arcyd/'
TRACKING_BRANCH_PREFIX = ARCYD_BRANCH_NAMESPACE + 'trackers/'
RESERVED_BRANCH_NAME = ARCYD_BRANCH_NAMESPACE + 'reserve'
class Error(Exception):
    """Raised by branch factories when a name is not a valid working branch."""
    pass
def isStatusBad(working_branch):
    """Return True if the status of 'working_branch' is bad.

    :working_branch: a WorkingBranch
    :returns: True if the branch is bad

    """
    bad_prefix = WB_STATUS_PREFIX_BAD
    return working_branch.status[:len(bad_prefix)] == bad_prefix
def isStatusBadPreReview(working_branch):
    """Return True if 'working_branch' status is WB_STATUS_BAD_PREREVIEW.

    :working_branch: a WorkingBranch
    :returns: True if the branch is in WB_STATUS_BAD_PREREVIEW

    """
    status = working_branch.status
    return status == WB_STATUS_BAD_PREREVIEW
def isStatusBadLand(working_branch):
    """Return True if 'working_branch' status is WB_STATUS_BAD_LAND.

    (Docstring previously said WB_STATUS_BAD_PREREVIEW — copy-paste error;
    the code checks WB_STATUS_BAD_LAND.)

    :working_branch: a WorkingBranch
    :returns: True if the branch is in WB_STATUS_BAD_LAND

    """
    return working_branch.status == WB_STATUS_BAD_LAND
class TrackerBranch(object):
    """A branch used to track the review of a review branch.

    The name components (status, description, base, id) are combined into the
    branch name by the supplied 'naming' scheme; remote refs are recomputed
    whenever the branch name changes.
    """

    def __init__(
            self,
            naming,
            branch,
            review_branch,
            status,
            description,
            base,
            rev_id,
            remote):
        super(TrackerBranch, self).__init__()
        self._naming = naming
        self._branch = branch
        self._status = status
        self._description = description
        self._base = base
        self._id = rev_id
        self._remote = remote
        # remote refs are derived from the other fields in _update_remotes()
        self._remote_base = None
        self._remote_branch = None
        self._review_name = review_branch
        self._update_remotes()

    @property
    def branch(self):
        return self._branch

    @property
    def status(self):
        return self._status

    @property
    def description(self):
        return self._description

    @property
    def base(self):
        return self._base

    @property
    def id(self):
        return self._id

    @property
    def remote(self):
        return self._remote

    @property
    def remote_base(self):
        return self._remote_base

    @property
    def remote_branch(self):
        return self._remote_branch

    @property
    def review_name(self):
        # name of the review branch this tracker follows
        return self._review_name

    def update_status(self, status):
        # a new status changes the branch name, so rebuild it and the remotes
        self._status = status
        self._branch = self._naming.make_tracker_branch_name(
            self._status, self._description, self._base, self._id)
        self._update_remotes()

    def _update_remotes(self):
        self._remote_base = phlgitu_ref.make_remote(
            self._base, self._remote)
        self._remote_branch = phlgitu_ref.make_remote(
            self._branch, self._remote)

    def __str__(self):
        return 'abdt_naming.TrackerBranch("{}")'.format(self.branch)

    __repr__ = __str__
class ReviewBranch(object):
    """A branch created by a user to request a review of some commits."""

    def __init__(
            self,
            naming,
            branch,
            description,
            base,
            remote):
        super(ReviewBranch, self).__init__()
        self._naming = naming
        self._branch = branch
        self._description = description
        self._base = base
        self._remote = remote
        # remote refs are derived from the other fields in _update_remotes()
        self._remote_base = None
        self._remote_branch = None
        self._update_remotes()

    @property
    def branch(self):
        return self._branch

    @property
    def description(self):
        return self._description

    @property
    def base(self):
        return self._base

    @property
    def remote(self):
        return self._remote

    @property
    def remote_base(self):
        return self._remote_base

    @property
    def remote_branch(self):
        return self._remote_branch

    def make_tracker(self, status, rev_id):
        """Return a TrackerBranch based on this branch and supplied params.

        :status: the status string for the new branch
        :rev_id: the revision id string for the new branch
        :returns: a TrackerBranch

        """
        # a missing revision id is encoded as the literal string "none"
        if rev_id is None:
            rev_id = "none"
        else:
            rev_id = str(rev_id)
        tracking_branch_name = self._naming.make_tracker_branch_name(
            status, self.description, self.base, rev_id)
        tracking_branch = self._naming.make_tracker_branch_from_name(
            tracking_branch_name)
        return tracking_branch

    def _update_remotes(self):
        self._remote_base = phlgitu_ref.make_remote(
            self._base, self._remote)
        self._remote_branch = phlgitu_ref.make_remote(
            self._branch, self._remote)

    def __str__(self):
        return 'abdt_naming.ReviewBranch("{}")'.format(self.branch)

    __repr__ = __str__
# Pairing of a review branch with its tracker branch; either member may be
# None when the counterpart is missing (see get_branch_pairs).
BranchPair = collections.namedtuple(
    "abdt_naming__BranchPair", [
        "review",
        "tracker"])
def _get_branches(branch_list, func):
"""Return a list of branches made by func() from strings in 'branch_list'.
Strings that aren't valid working branch names are ignored, 'func' is
expected to raise Error in this case.
:branch_list: list of branch name strings
:func: the branch factory funtion to use
:returns: list of WorkingBranch
"""
converted_branch_list = []
for branch in branch_list:
try:
converted_branch_list.append(
func(branch))
except Error:
pass # ignore naming errors, we only want the valid branches
return converted_branch_list
def get_branch_pairs(branch_list, naming):
    """Return a list of BranchPair where items in 'branch_list' are suitable.

    Note that if a review_branch or tracker_branch does not have a pair then
    the other member of the tuple is set to 'None'.

    :branch_list: a list of branch name strings to generate the pairs from
    :naming: the naming scheme used to recognise branches
    :returns: a list of BranchPair where items in 'branch_list' are suitable

    """
    trackers = _get_branches(
        branch_list, naming.make_tracker_branch_from_name)
    reviews = _get_branches(
        branch_list, naming.make_review_branch_from_name)

    # XXX: pychecker and pyflakes don't understand dictcomps yet so build the
    #      lookup tables with dict() instead
    tracker_by_review_name = dict([(b.review_name, b) for b in trackers])
    review_by_name = dict([(b.branch, b) for b in reviews])

    tracked_names = set(tracker_by_review_name.keys())
    review_names = set(review_by_name.keys())

    # matched pairs first, then trackers whose review vanished, then reviews
    # that have no tracker yet
    pairs = [
        BranchPair(review_by_name[name], tracker_by_review_name[name])
        for name in review_names & tracked_names]
    pairs += [
        BranchPair(None, tracker_by_review_name[name])
        for name in tracked_names - review_names]
    pairs += [
        BranchPair(review_by_name[name], None)
        for name in review_names - tracked_names]
    return pairs
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
valhallasw/phabricator-tools
|
py/abd/abdt_naming.py
|
Python
|
apache-2.0
| 9,640
|
# -*- coding: utf-8 -*-
"""Tests for notify plugins package."""
|
infothrill/python-dyndnsc
|
dyndnsc/tests/plugins/notify/__init__.py
|
Python
|
mit
| 65
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.test_dimensions import create_exec_option_dimension
from tests.common.test_dimensions import create_uncompressed_text_dimension
from tests.common.test_vector import ImpalaTestDimension
from tests.util.test_file_parser import QueryTestSectionReader
class TestExprs(ImpalaTestSuite):
  """End-to-end tests of expression evaluation (QueryTest/exprs)."""

  @classmethod
  def get_workload(cls):
    return 'functional-query'

  @classmethod
  def add_test_dimensions(cls):
    super(TestExprs, cls).add_test_dimensions()
    # Test with and without expr rewrites to cover regular expr evaluations
    # as well as constant folding, in particular, timestamp literals.
    cls.ImpalaTestMatrix.add_dimension(
        ImpalaTestDimension('enable_expr_rewrites', *[0,1]))
    if cls.exploration_strategy() == 'core':
      # Test with file format that supports codegen
      cls.ImpalaTestMatrix.add_constraint(lambda v:\
          v.get_value('table_format').file_format == 'text' and\
          v.get_value('table_format').compression_codec == 'none')

  def test_exprs(self, vector):
    # propagate the matrix dimension into the actual query option
    vector.get_value('exec_option')['enable_expr_rewrites'] = \
        vector.get_value('enable_expr_rewrites')
    # TODO: Enable some of these tests for Avro if possible
    # Don't attempt to evaluate timestamp expressions with Avro tables (which don't
    # support a timestamp type)"
    table_format = vector.get_value('table_format')
    if table_format.file_format == 'avro':
      pytest.skip()
    if table_format.file_format == 'hbase':
      pytest.xfail("A lot of queries check for NULLs, which hbase does not recognize")
    if table_format.file_format == 'kudu':
      # Can't load LikeTbl without KUDU-1570.
      pytest.xfail("Need support for Kudu tables with nullable PKs (KUDU-1570)")
    self.run_test_case('QueryTest/exprs', vector)

    # This will change the current database to matching table format and then execute
    # select current_database(). An error will be thrown if multiple values are returned.
    current_db = self.execute_scalar('select current_database()', vector=vector)
    assert current_db == QueryTestSectionReader.get_db_name(table_format)
# Tests very deep expression trees and expressions with many children. Impala defines
# a 'safe' upper bound on the expr depth and the number of expr children in the
# FE Expr.java and any changes to those limits should be reflected in this test.
# The expr limits primarily guard against stack overflows or similar problems
# causing crashes. Therefore, this tests succeeds if no Impalads crash.
class TestExprLimits(ImpalaTestSuite):
    """
    Exercises Impala's guard rails on expression-tree size: very deep
    expression trees and expressions with very many children. The limits are
    defined in the FE's Expr.java; they primarily guard against stack
    overflows, so these tests pass as long as no impalad crashes.
    """
    # Keep these in sync with Expr.java
    EXPR_CHILDREN_LIMIT = 10000
    EXPR_DEPTH_LIMIT = 1000

    @classmethod
    def get_workload(self):
        return 'functional-query'

    @classmethod
    def add_test_dimensions(cls):
        super(TestExprLimits, cls).add_test_dimensions()
        if cls.exploration_strategy() != 'exhaustive':
            # Ensure the test runs with codegen enabled and disabled, even when
            # the exploration strategy is not exhaustive.
            cls.ImpalaTestMatrix.clear_dimension('exec_option')
            cls.ImpalaTestMatrix.add_dimension(create_exec_option_dimension(
                cluster_sizes=[0], disable_codegen_options=[False, True],
                batch_sizes=[0]))
        # There is no reason to run these tests using all dimensions.
        cls.ImpalaTestMatrix.add_dimension(
            create_uncompressed_text_dimension(cls.get_workload()))

    def test_expr_child_limit(self, vector):
        # IN predicate with the maximum number of children.
        in_items = ",".join(str(n) for n in xrange(self.EXPR_CHILDREN_LIMIT - 1))
        self.__exec_query("select 1 IN(" + in_items + ")")
        # CASE expr with the maximum number of WHEN branches.
        case_branches = " when true then 1" * (self.EXPR_CHILDREN_LIMIT // 2)
        self.__exec_query("select case " + case_branches + " end")

    def test_expr_depth_limit(self, vector):
        deep_exprs = (
            self.__gen_deep_infix_expr("true", " and false"),     # compound AND
            self.__gen_deep_infix_expr("true", " or false"),      # compound OR
            self.__gen_deep_infix_expr("1", " + 1"),              # arithmetic
            self.__gen_deep_func_expr("lower(", "'abc'", ")"),    # nested calls
            self.__gen_deep_func_expr("cast(", "1", " as int)"),  # nested casts
        )
        for expr in deep_exprs:
            self.__exec_query("select " + expr)

    def __gen_deep_infix_expr(self, prefix, repeat_suffix):
        # e.g. "true" followed by (EXPR_DEPTH_LIMIT - 1) copies of " and false".
        return prefix + repeat_suffix * (self.EXPR_DEPTH_LIMIT - 1)

    def __gen_deep_func_expr(self, open_func, base_arg, close_func):
        # e.g. "lower(" * depth + "'abc'" + ")" * depth.
        depth = self.EXPR_DEPTH_LIMIT - 1
        return open_func * depth + base_arg + close_func * depth

    def __exec_query(self, sql_str):
        """Run sql_str; any exception or unsuccessful result fails the test."""
        try:
            result = self.execute_query(sql_str)
            assert result.success, "Failed to execute query %s" % (sql_str)
        except:  # consider any exception a failure
            assert False, "Failed to execute query %s" % (sql_str)
|
michaelhkw/incubator-impala
|
tests/query_test/test_exprs.py
|
Python
|
apache-2.0
| 6,107
|
# -*- coding: utf-8 -*-
from __future__ import division
from os import walk
from os.path import abspath, join, getsize
from hashlib import md5 as hashmd5
from sys import version_info
from zlib import crc32
class AnalysingFiles:
    """
    Walks directory trees, groups files by size, then hashes same-sized files
    to find duplicates.
    """

    def __init__(self, md5=False):
        """
        Initialize this instance before proceeding to the analyzes.
        :param md5: Hashing using the MD5 hash digest (`True`) or by using the CRC32.
        :type md5 bool:
        :return:
        """
        # Counters updated while browsing: directory/file counts and total bytes.
        self.stats = dict(directories=0, files=0, size=0)
        # Pick the hashing strategy once so later code does not need to branch.
        if md5:
            self.get_hash_file = self.get_md5_hash_file
        else:
            self.get_hash_file = self.get_crc32_hash_file
        # Maps file size in bytes -> list of absolute paths having that size.
        self.files = dict()

    def browse(self, path):
        """
        Browses the given directory, updating ``self.stats`` and grouping every
        file path under its size in ``self.files``.

        Yields ``(True, {'path': <file path>, 'size': <bytes>})`` for each file
        analysed, or ``(False, <OSError>)`` when a file cannot be stat'ed.

        :param path: Root directory to walk.
        :type path str:
        """
        for root, dirs, files in walk(path):
            self.stats['directories'] += len(dirs)
            self.stats['files'] += len(files)
            for name in files:
                try:
                    file_path = abspath(join(root, name))
                    file_size = getsize(file_path)
                    # Group this path with every other file of the same size.
                    self.files.setdefault(file_size, []).append(file_path)
                    # BUG FIX: report the file just analysed; previously this
                    # yielded the walk-root argument `path` instead of the
                    # actual `file_path`.
                    yield True, {'path': file_path, 'size': file_size}
                    self.stats['size'] += file_size
                except OSError as e:
                    yield False, e  # report the failure and keep walking

    def analysing(self, progress_bar_obj):
        """
        Hashes every file whose size is shared with at least one other file and
        returns a dict mapping each hash to the list of duplicate paths.

        :param progress_bar_obj: An instance of the package `progressbar`'s
            object `ProgressBar`; its ``update(percent)`` method is called as
            hashing progresses.
        :return: dict of hash -> list of paths (only hashes with >= 2 paths).
        """
        possible_duplicates = list()
        hashes = dict()
        files_appended = 0
        # Keep only sizes shared by two or more files: a file with a unique
        # size cannot possibly have a duplicate.
        for size, paths in self.files.items():
            length = len(paths)
            if length > 1:
                possible_duplicates += paths
                files_appended += length
        del self.files  # free the size index before the hashing pass
        files_number = 0
        for path in possible_duplicates:
            hash_ = self.get_hash_file(path)
            if hash_:  # hashing succeeded (the helpers return False on error)
                hashes.setdefault(hash_, []).append(path)
            files_number += 1
            percentage = ((files_number / files_appended) * 100)
            progress_bar_obj.update(percentage if percentage <= 100 else 100)
        del possible_duplicates
        # Drop hashes with a single path: those files are unique after all.
        # Python 2 allowed deleting from a dict while iterating keys() (a
        # list); Python 3 requires working on a copy.
        if version_info.major > 2:
            hashes_ = hashes.copy()
            for h in hashes.keys():
                if len(hashes[h]) < 2:
                    del hashes_[h]
            # BUG FIX: return the filtered copy; the original returned the
            # unfiltered `hashes` dict on Python 3, so non-duplicates leaked
            # into the result.
            return hashes_
        else:
            for h in hashes.keys():
                if len(hashes[h]) < 2:
                    del hashes[h]
            return hashes

    @staticmethod
    def get_md5_hash_file(path):
        """
        Returns the MD5 hash digest of a file, or ``False`` on I/O error.
        :param path:
        :type path str:
        :return:
        """
        md5 = hashmd5()
        try:
            with open(path, 'rb') as fd:
                while 1:
                    data = fd.read(2**9)
                    if not data:
                        break
                    md5.update(data)
        except IOError:
            return False
        return md5.hexdigest()

    @staticmethod
    def get_crc32_hash_file(path):
        """
        Returns the CRC32 hash digest of a file as 8 (space-padded) uppercase
        hex characters, or ``False`` on I/O error.
        :param path:
        :type path str:
        :return:
        """
        try:
            with open(path, 'rb') as fd:
                r = 0  # crc32 running value, threaded through each chunk
                while 1:
                    data = fd.read(2**9)
                    r = crc32(data, r)
                    if not data:
                        return "%8X" % (r & 0xFFFFFFFF)
        except IOError:
            return False
|
NyanKiyoshi/Py-Search-Duplicates
|
AnalysingFiles.py
|
Python
|
mit
| 5,059
|
# -*- test-case-name: twisted.web.test.test_xml -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Micro Document Object Model: a partial DOM implementation with SUX.
This is an implementation of what we consider to be the useful subset of the
DOM. The chief advantage of this library is that, not being burdened with
standards compliance, it can remain very stable between versions. We can also
implement utility 'pythonic' ways to access and mutate the XML tree.
Since this has not been subjected to a serious trial by fire, it is not recommended
to use this outside of Twisted applications. However, it seems to work just
fine for the documentation generator, which parses a fairly representative
sample of XML.
Microdom mainly focuses on working with HTML and XHTML.
"""
# System Imports
import re
from cStringIO import StringIO
from types import StringTypes, UnicodeType
# Twisted Imports
from twisted.web.sux import XMLParser, ParseError
from twisted.python.util import InsensitiveDict
def getElementsByTagName(iNode, name):
    """
    Return a list of all child elements of C{iNode} with a name matching
    C{name}, in depth-first pre-order. Unlike DOM Level 1 Core, the result
    may include C{iNode} itself when its name matches.

    @param iNode: An element at which to begin searching.
    @param name: A C{str} giving the name of the elements to return.
    @return: A C{list} of direct or indirect child elements of C{iNode} with
        the name C{name}. This may include C{iNode}.
    """
    found = []
    pending = [iNode]
    while pending:
        node = pending.pop(0)
        if node.nodeName == name:
            found.append(node)
        # Prepend children so traversal stays depth-first, pre-order.
        pending[:0] = node.childNodes
    return found
def getElementsByTagNameNoCase(iNode, name):
    """Case-insensitive variant of L{getElementsByTagName}."""
    wanted = name.lower()
    found = []
    pending = [iNode]
    while pending:
        node = pending.pop(0)
        if node.nodeName.lower() == wanted:
            found.append(node)
        pending[:0] = node.childNodes
    return found
# order is important: '&' must be escaped first so entities produced by the
# later replacements are not themselves re-escaped.
# BUG FIX: the entity strings had been corrupted (each character mapped to
# itself); restored the standard HTML/XML entities.
HTML_ESCAPE_CHARS = (('&', '&amp;'),  # don't add any entities before this one
                     ('<', '&lt;'),
                     ('>', '&gt;'),
                     ('"', '&quot;'))
REV_HTML_ESCAPE_CHARS = list(HTML_ESCAPE_CHARS)
REV_HTML_ESCAPE_CHARS.reverse()
XML_ESCAPE_CHARS = HTML_ESCAPE_CHARS + (("'", '&apos;'),)
REV_XML_ESCAPE_CHARS = list(XML_ESCAPE_CHARS)
REV_XML_ESCAPE_CHARS.reverse()

def unescape(text, chars=REV_HTML_ESCAPE_CHARS):
    """Perform the exact opposite of 'escape': entities back to characters."""
    for s, h in chars:
        text = text.replace(h, s)
    return text

def escape(text, chars=HTML_ESCAPE_CHARS):
    """Escape a few XML special chars with XML entities."""
    for s, h in chars:
        text = text.replace(s, h)
    return text
class MismatchedTags(Exception):
    """
    Raised when a closing tag does not match the currently open tag; records
    the file name plus the begin/end line and column positions involved.
    """

    def __init__(self, filename, expect, got, endLine, endCol, begLine, begCol):
        self.filename = filename
        self.expect = expect
        self.got = got
        self.begLine = begLine
        self.begCol = begCol
        self.endLine = endLine
        self.endCol = endCol

    def __str__(self):
        return ("expected </%s>, got </%s> line: %s col: %s, began line: %s col: %s"
                % (self.expect, self.got, self.endLine, self.endCol,
                   self.begLine, self.begCol))
class Node(object):
    """
    Base class for every microdom node: tracks a parent reference and an
    ordered list of children, and defines the tree-mutation API.
    """

    nodeName = "Node"

    def __init__(self, parentNode=None):
        self.parentNode = parentNode
        self.childNodes = []

    def isEqualToNode(self, other):
        """
        True when both nodes have the same number of children and each pair of
        corresponding children compares equal.

        @type other: L{Node}
        @rtype: C{bool}
        """
        if len(self.childNodes) != len(other.childNodes):
            return False
        return all(mine.isEqualToNode(theirs)
                   for mine, theirs in zip(self.childNodes, other.childNodes))

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        # Serialization is defined by concrete subclasses.
        raise NotImplementedError()

    def toxml(self, indent='', addindent='', newl='', strip=0, nsprefixes={},
              namespace=''):
        # Serialize into an in-memory buffer and hand back the string.
        buffer = StringIO()
        self.writexml(buffer, indent, addindent, newl, strip, nsprefixes,
                      namespace)
        return buffer.getvalue()

    def writeprettyxml(self, stream, indent='', addindent=' ', newl='\n', strip=0):
        return self.writexml(stream, indent, addindent, newl, strip)

    def toprettyxml(self, indent='', addindent=' ', newl='\n', strip=0):
        return self.toxml(indent, addindent, newl, strip)

    def cloneNode(self, deep=0, parent=None):
        raise NotImplementedError()

    def hasChildNodes(self):
        # Historical API: returns 1/0 rather than True/False.
        return 1 if self.childNodes else 0

    def appendChild(self, child):
        """
        Append C{child} as the last child of this node.

        @raise TypeError: If C{child} is not a C{Node} instance.
        """
        if not isinstance(child, Node):
            raise TypeError("expected Node instance")
        self.childNodes.append(child)
        child.parentNode = self

    def insertBefore(self, new, ref):
        """
        Insert C{new} immediately before the existing child C{ref}.

        @raise TypeError: If C{new} or C{ref} is not a C{Node} instance.
        @return: C{new}
        """
        if not isinstance(new, Node) or not isinstance(ref, Node):
            raise TypeError("expected Node instance")
        position = self.childNodes.index(ref)
        new.parentNode = self
        self.childNodes.insert(position, new)
        return new

    def removeChild(self, child):
        """
        Detach C{child} from this node (silently ignored when absent).

        @raise TypeError: If C{child} is not a C{Node} instance.
        @return: C{child}
        """
        if not isinstance(child, Node):
            raise TypeError("expected Node instance")
        if child in self.childNodes:
            self.childNodes.remove(child)
            child.parentNode = None
        return child

    def replaceChild(self, newChild, oldChild):
        """
        Put C{newChild} into C{oldChild}'s position among the children.

        @raise TypeError: If C{newChild} or C{oldChild} is not a C{Node}
            instance.
        @raise ValueError: If C{oldChild} is not a child of this C{Node}.
        """
        if not isinstance(newChild, Node) or not isinstance(oldChild, Node):
            raise TypeError("expected Node instance")
        if oldChild.parentNode is not self:
            raise ValueError("oldChild is not a child of this node")
        self.childNodes[self.childNodes.index(oldChild)] = newChild
        oldChild.parentNode = None
        newChild.parentNode = self

    def lastChild(self):
        return self.childNodes[-1]

    def firstChild(self):
        return self.childNodes[0] if self.childNodes else None

    # A commented-out get_ownerDocument helper (walk parentNode links up to
    # the root) used to live here; see domhelpers.getParents(node) for the
    # equivalent functionality.
class Document(Node):
    """
    Root of a microdom tree; holds at most one document element plus an
    optional doctype string.
    """

    def __init__(self, documentElement=None):
        Node.__init__(self)
        if documentElement:
            self.appendChild(documentElement)

    def cloneNode(self, deep=0, parent=None):
        # Copies the doctype; clones the element tree only when deep is true,
        # otherwise the new Document shares the same element object.
        d = Document()
        d.doctype = self.doctype
        if deep:
            newEl = self.documentElement.cloneNode(1, self)
        else:
            newEl = self.documentElement
        d.appendChild(newEl)
        return d

    # Doctype string captured by the parser (None when absent).
    doctype = None

    def isEqualToDocument(self, n):
        # Documents compare equal when doctypes match and children match.
        return (self.doctype == n.doctype) and Node.isEqualToNode(self, n)
    isEqualToNode = isEqualToDocument

    def get_documentElement(self):
        # The single root element (see appendChild's one-element invariant).
        return self.childNodes[0]
    documentElement=property(get_documentElement)

    def appendChild(self, child):
        """
        Make the given L{Node} the I{document element} of this L{Document}.

        @param child: The L{Node} to make into this L{Document}'s document
            element.

        @raise ValueError: If this document already has a document element.
        """
        if self.childNodes:
            raise ValueError("Only one element per document.")
        Node.appendChild(self, child)

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        # Emit the XML declaration and optional doctype before the tree.
        stream.write('<?xml version="1.0"?>' + newl)
        if self.doctype:
            stream.write("<!DOCTYPE "+self.doctype+">" + newl)
        self.documentElement.writexml(stream, indent, addindent, newl, strip,
                                      nsprefixes, namespace)

    # of dubious utility (?)
    def createElement(self, name, **kw):
        return Element(name, **kw)

    def createTextNode(self, text):
        return Text(text)

    def createComment(self, text):
        return Comment(text)

    def getElementsByTagName(self, name):
        # Dispatch on the document element's case sensitivity.
        if self.documentElement.caseInsensitive:
            return getElementsByTagNameNoCase(self, name)
        return getElementsByTagName(self, name)

    def getElementById(self, id):
        # Scan the whole tree; returns the first node whose "id" attribute
        # matches, or None (implicitly) when not found.
        childNodes = self.childNodes[:]
        while childNodes:
            node = childNodes.pop(0)
            if node.childNodes:
                childNodes.extend(node.childNodes)
            if hasattr(node, 'getAttribute') and node.getAttribute("id") == id:
                return node
class EntityReference(Node):
    """Leaf node representing an entity reference such as C{&amp;foo;}."""

    def __init__(self, eref, parentNode=None):
        Node.__init__(self, parentNode)
        self.eref = eref
        # data/nodeValue hold the literal serialized form.
        self.nodeValue = self.data = "&%s;" % (eref,)

    def isEqualToEntityReference(self, n):
        if not isinstance(n, EntityReference):
            return 0
        return (self.eref == n.eref) and (self.nodeValue == n.nodeValue)
    isEqualToNode = isEqualToEntityReference

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        # Entity references serialize verbatim, never escaped.
        stream.write(self.nodeValue)

    def cloneNode(self, deep=0, parent=None):
        return EntityReference(self.eref, parent)
class CharacterData(Node):
    """Base for nodes wrapping a string payload (text, comments, CDATA)."""

    def __init__(self, data, parentNode=None):
        Node.__init__(self, parentNode)
        # Keep the three aliases (value/data/nodeValue) other code expects.
        self.value = self.data = self.nodeValue = data

    def isEqualToCharacterData(self, n):
        # Equality considers only the payload, not position in the tree.
        return self.value == n.value
    isEqualToNode = isEqualToCharacterData
class Comment(CharacterData):
    """A comment node; serialized inside C{<!-- -->} markers, unescaped."""

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        payload = self.data
        if isinstance(payload, UnicodeType):
            payload = payload.encode('utf8')
        stream.write("<!--%s-->" % payload)

    def cloneNode(self, deep=0, parent=None):
        return Comment(self.nodeValue, parent)
class Text(CharacterData):
    """Ordinary character data; escaped on output unless created C{raw}."""

    def __init__(self, data, parentNode=None, raw=0):
        CharacterData.__init__(self, data, parentNode)
        self.raw = raw

    def isEqualToNode(self, other):
        """Equal iff CharacterData-equal and the C{raw} flags match."""
        return (CharacterData.isEqualToNode(self, other) and
                self.raw == other.raw)

    def cloneNode(self, deep=0, parent=None):
        return Text(self.nodeValue, parent, self.raw)

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        out = self.nodeValue
        if not isinstance(out, StringTypes):
            out = str(out)
        if not self.raw:
            # Non-raw text gets whitespace-collapsed (when stripping) and
            # entity-escaped; raw text is emitted untouched.
            if strip:
                out = ' '.join(out.split())
            out = escape(out)
        if isinstance(out, UnicodeType):
            out = out.encode('utf8')
        stream.write(out)

    def __repr__(self):
        return "Text(%s)" % repr(self.nodeValue)
class CDATASection(CharacterData):
    """Character data emitted verbatim inside a C{<![CDATA[ ]]>} wrapper."""

    def cloneNode(self, deep=0, parent=None):
        return CDATASection(self.nodeValue, parent)

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        # CDATA content is never escaped or stripped.
        stream.write("<![CDATA[")
        stream.write(self.nodeValue)
        stream.write("]]>")
def _genprefix():
    """Yield an endless sequence of distinct namespace prefixes: p0, p1, ..."""
    counter = 0
    while True:
        yield 'p' + str(counter)
        counter = counter + 1
# Bound method of a shared generator: each call returns the next fresh prefix.
genprefix = _genprefix().next
class _Attr(CharacterData):
    """Support class for getAttributeNode: wraps an attribute value in a
    CharacterData node. Not a full DOM Attr implementation."""
class Element(Node):
    """
    An XML/HTML element: tag name, attributes, namespace and children, with
    serialization via C{writexml}.
    """
    # Class-level defaults; overridden per instance in __init__.
    preserveCase = 0
    caseInsensitive = 1
    nsprefixes = None

    def __init__(self, tagName, attributes=None, parentNode=None,
                 filename=None, markpos=None,
                 caseInsensitive=1, preserveCase=0,
                 namespace=None):
        Node.__init__(self, parentNode)
        self.preserveCase = preserveCase or not caseInsensitive
        self.caseInsensitive = caseInsensitive
        if not preserveCase:
            tagName = tagName.lower()
        if attributes is None:
            self.attributes = {}
        else:
            self.attributes = attributes
            # Attribute values arrive escaped from the parser; store them raw.
            for k, v in self.attributes.items():
                self.attributes[k] = unescape(v)
        if caseInsensitive:
            self.attributes = InsensitiveDict(self.attributes,
                                              preserve=preserveCase)
        self.endTagName = self.nodeName = self.tagName = tagName
        self._filename = filename   # source file, for error reporting
        self._markpos = markpos     # (line, col) position in the source
        self.namespace = namespace

    def addPrefixes(self, pfxs):
        # Merge namespace-prefix bindings declared on this element.
        if self.nsprefixes is None:
            self.nsprefixes = pfxs
        else:
            self.nsprefixes.update(pfxs)

    def endTag(self, endTagName):
        # Remember the close tag's exact spelling for serialization.
        if not self.preserveCase:
            endTagName = endTagName.lower()
        self.endTagName = endTagName

    def isEqualToElement(self, n):
        # Compare attributes plus tag name (case-folded when insensitive).
        if self.caseInsensitive:
            return ((self.attributes == n.attributes)
                    and (self.nodeName.lower() == n.nodeName.lower()))
        return (self.attributes == n.attributes) and (self.nodeName == n.nodeName)

    def isEqualToNode(self, other):
        """
        Compare this element to C{other}. If the C{nodeName}, C{namespace},
        C{attributes}, and C{childNodes} are all the same, return C{True},
        otherwise return C{False}.
        """
        return (
            self.nodeName.lower() == other.nodeName.lower() and
            self.namespace == other.namespace and
            self.attributes == other.attributes and
            Node.isEqualToNode(self, other))

    def cloneNode(self, deep=0, parent=None):
        # Copy tag/namespace/case flags and attributes; children only if deep.
        clone = Element(
            self.tagName, parentNode=parent, namespace=self.namespace,
            preserveCase=self.preserveCase, caseInsensitive=self.caseInsensitive)
        clone.attributes.update(self.attributes)
        if deep:
            clone.childNodes = [child.cloneNode(1, clone) for child in self.childNodes]
        else:
            clone.childNodes = []
        return clone

    def getElementsByTagName(self, name):
        if self.caseInsensitive:
            return getElementsByTagNameNoCase(self, name)
        return getElementsByTagName(self, name)

    def hasAttributes(self):
        return 1

    def getAttribute(self, name, default=None):
        return self.attributes.get(name, default)

    def getAttributeNS(self, ns, name, default=None):
        # Try the (namespace, name) tuple key first, then the bare name when
        # the requested namespace is this element's own.
        nsk = (ns, name)
        if self.attributes.has_key(nsk):
            return self.attributes[nsk]
        if ns == self.namespace:
            return self.attributes.get(name, default)
        return default

    def getAttributeNode(self, name):
        return _Attr(self.getAttribute(name), self)

    def setAttribute(self, name, attr):
        self.attributes[name] = attr

    def removeAttribute(self, name):
        if name in self.attributes:
            del self.attributes[name]

    def hasAttribute(self, name):
        return name in self.attributes

    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
                 nsprefixes={}, namespace=''):
        """
        Serialize this L{Element} to the given stream.

        @param stream: A file-like object to which this L{Element} will be
            written.

        @param nsprefixes: A C{dict} mapping namespace URIs as C{str} to
            prefixes as C{str}. This defines the prefixes which are already in
            scope in the document at the point at which this L{Element} exists.
            This is essentially an implementation detail for namespace support.
            Applications should not try to use it.

        @param namespace: The namespace URI as a C{str} which is the default at
            the point in the document at which this L{Element} exists. This is
            essentially an implementation detail for namespace support.
            Applications should not try to use it.
        """
        # write beginning
        # HTML tags that may be written as a self-closing singleton.
        ALLOWSINGLETON = ('img', 'br', 'hr', 'base', 'meta', 'link', 'param',
                          'area', 'input', 'col', 'basefont', 'isindex',
                          'frame')
        # Tags that get newline/indent treatment around them.
        BLOCKELEMENTS = ('html', 'head', 'body', 'noscript', 'ins', 'del',
                         'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'script',
                         'ul', 'ol', 'dl', 'pre', 'hr', 'blockquote',
                         'address', 'p', 'div', 'fieldset', 'table', 'tr',
                         'form', 'object', 'fieldset', 'applet', 'map')
        # Block elements whose children are each placed on their own line.
        FORMATNICELY = ('tr', 'ul', 'ol', 'head')

        # this should never be necessary unless people start
        # changing .tagName on the fly(?)
        if not self.preserveCase:
            self.endTagName = self.tagName
        w = stream.write
        # Compute prefixes introduced by this element (not already in scope).
        if self.nsprefixes:
            newprefixes = self.nsprefixes.copy()
            for ns in nsprefixes.keys():
                if ns in newprefixes:
                    del newprefixes[ns]
        else:
            newprefixes = {}

        # The open tag is accumulated in `begin` and flushed in one write.
        begin = ['<']
        if self.tagName in BLOCKELEMENTS:
            begin = [newl, indent] + begin
        bext = begin.extend
        writeattr = lambda _atr, _val: bext((' ', _atr, '="', escape(_val), '"'))

        # Make a local for tracking what end tag will be used. If namespace
        # prefixes are involved, this will be changed to account for that
        # before it's actually used.
        endTagName = self.endTagName

        if namespace != self.namespace and self.namespace is not None:
            # If the current default namespace is not the namespace of this tag
            # (and this tag has a namespace at all) then we'll write out
            # something related to namespaces.
            if self.namespace in nsprefixes:
                # This tag's namespace already has a prefix bound to it. Use
                # that prefix.
                prefix = nsprefixes[self.namespace]
                bext(prefix + ':' + self.tagName)
                # Also make sure we use it for the end tag.
                endTagName = prefix + ':' + self.endTagName
            else:
                # This tag's namespace has no prefix bound to it. Change the
                # default namespace to this tag's namespace so we don't need
                # prefixes. Alternatively, we could add a new prefix binding.
                # I'm not sure why the code was written one way rather than the
                # other. -exarkun
                bext(self.tagName)
                writeattr("xmlns", self.namespace)
                # The default namespace just changed. Make sure any children
                # know about this.
                namespace = self.namespace
        else:
            # This tag has no namespace or its namespace is already the default
            # namespace. Nothing extra to do here.
            bext(self.tagName)

        j = ''.join
        # Write out the attributes; tuple keys are (namespaceURI, localName).
        for attr, val in self.attributes.iteritems():
            if isinstance(attr, tuple):
                ns, key = attr
                if nsprefixes.has_key(ns):
                    prefix = nsprefixes[ns]
                else:
                    # Invent a fresh prefix for this namespace.
                    prefix = genprefix()
                    newprefixes[ns] = prefix
                assert val is not None
                writeattr(prefix+':'+key,val)
            else:
                assert val is not None
                writeattr(attr, val)
        if newprefixes:
            # Declare every newly introduced prefix on this element.
            for ns, prefix in newprefixes.iteritems():
                if prefix:
                    writeattr('xmlns:'+prefix, ns)
            newprefixes.update(nsprefixes)
            downprefixes = newprefixes
        else:
            downprefixes = nsprefixes
        w(j(begin))
        if self.childNodes:
            w(">")
            newindent = indent + addindent
            for child in self.childNodes:
                if self.tagName in BLOCKELEMENTS and \
                   self.tagName in FORMATNICELY:
                    w(j((newl, newindent)))
                child.writexml(stream, newindent, addindent, newl, strip,
                               downprefixes, namespace)
            if self.tagName in BLOCKELEMENTS:
                w(j((newl, indent)))
            w(j(('</', endTagName, '>')))
        elif self.tagName.lower() not in ALLOWSINGLETON:
            # Empty non-singleton element still needs an explicit close tag.
            w(j(('></', endTagName, '>')))
        else:
            w(" />")

    def __repr__(self):
        rep = "Element(%s" % repr(self.nodeName)
        if self.attributes:
            rep += ", attributes=%r" % (self.attributes,)
        if self._filename:
            rep += ", filename=%r" % (self._filename,)
        if self._markpos:
            rep += ", markpos=%r" % (self._markpos,)
        return rep + ')'

    def __str__(self):
        rep = "<" + self.nodeName
        if self._filename or self._markpos:
            rep += " ("
        if self._filename:
            rep += repr(self._filename)
        if self._markpos:
            rep += " line %s column %s" % self._markpos
        if self._filename or self._markpos:
            rep += ")"
        for item in self.attributes.items():
            rep += " %s=%r" % item
        if self.hasChildNodes():
            rep += " >...</%s>" % self.nodeName
        else:
            rep += " />"
        return rep
def _unescapeDict(d):
    """Return a copy of C{d} with every value entity-unescaped."""
    return dict((k, unescape(v)) for k, v in d.items())
def _reverseDict(d):
dd = {}
for k, v in d.items():
dd[v]=k
return dd
class MicroDOMParser(XMLParser):
    """
    SUX-driven parser that builds microdom trees; optionally extremely
    lenient about real-world HTML tag soup.
    """
    # <dash> glyph: a quick scan thru the DTD says BODY, AREA, LINK, IMG, HR,
    # P, DT, DD, LI, INPUT, OPTION, THEAD, TFOOT, TBODY, COLGROUP, COL, TR, TH,
    # TD, HEAD, BASE, META, HTML all have optional closing tags

    # Tags closed immediately after they open (void elements).
    soonClosers = 'area link br img hr input base meta'.split()
    # Maps a tag to the sibling tags whose opening implicitly closes it.
    laterClosers = {'p': ['p', 'dt'],
                    'dt': ['dt','dd'],
                    'dd': ['dt', 'dd'],
                    'li': ['li'],
                    'tbody': ['thead', 'tfoot', 'tbody'],
                    'thead': ['thead', 'tfoot', 'tbody'],
                    'tfoot': ['thead', 'tfoot', 'tbody'],
                    'colgroup': ['colgroup'],
                    'col': ['col'],
                    'tr': ['tr'],
                    'td': ['td'],
                    'th': ['th'],
                    'head': ['body'],
                    'title': ['head', 'body'], # this looks wrong...
                    'option': ['option'],
                    }

    def __init__(self, beExtremelyLenient=0, caseInsensitive=1, preserveCase=0,
                 soonClosers=soonClosers, laterClosers=laterClosers):
        self.elementstack = []
        # Namespace stack entries: (prefix->uri dict, owning element,
        # uri->prefix dict).
        d = {'xmlns': 'xmlns', '': None}
        dr = _reverseDict(d)
        self.nsstack = [(d,None,dr)]
        self.documents = []
        self._mddoctype = None
        self.beExtremelyLenient = beExtremelyLenient
        self.caseInsensitive = caseInsensitive
        self.preserveCase = preserveCase or not caseInsensitive
        self.soonClosers = soonClosers
        self.laterClosers = laterClosers
        # self.indentlevel = 0

    def shouldPreserveSpace(self):
        # True when inside <pre> or an element with xml:space="preserve".
        # NOTE(review): -edx is -0 == 0 on the first iteration, so this
        # inspects elementstack[0] rather than the innermost element; looks
        # off — confirm against upstream before relying on it.
        for edx in xrange(len(self.elementstack)):
            el = self.elementstack[-edx]
            if el.tagName == 'pre' or el.getAttribute("xml:space", '') == 'preserve':
                return 1
        return 0

    def _getparent(self):
        # The innermost open element, or None at top level.
        if self.elementstack:
            return self.elementstack[-1]
        else:
            return None

    # Matches the leading '//' or '/*' of a JavaScript comment.
    COMMENT = re.compile(r"\s*/[/*]\s*")

    def _fixScriptElement(self, el):
        # this deals with case where there is comment or CDATA inside
        # <script> tag and we want to do the right thing with it
        if not self.beExtremelyLenient or not len(el.childNodes) == 1:
            return
        c = el.firstChild()
        if isinstance(c, Text):
            # deal with nasty people who do stuff like:
            # <script> // <!--
            #    x = 1;
            # // --></script>
            # tidy does this, for example.
            prefix = ""
            oldvalue = c.value
            match = self.COMMENT.match(oldvalue)
            if match:
                prefix = match.group()
                oldvalue = oldvalue[len(prefix):]

            # now see if contents are actual node and comment or CDATA
            try:
                e = parseString("<a>%s</a>" % oldvalue).childNodes[0]
            except (ParseError, MismatchedTags):
                return
            if len(e.childNodes) != 1:
                return
            e = e.firstChild()
            if isinstance(e, (CDATASection, Comment)):
                # Replace the Text child with the parsed comment/CDATA node.
                el.childNodes = []
                if prefix:
                    el.childNodes.append(Text(prefix))
                el.childNodes.append(e)

    def gotDoctype(self, doctype):
        self._mddoctype = doctype

    def gotTagStart(self, name, attributes):
        # print ' '*self.indentlevel, 'start tag',name
        # self.indentlevel += 1
        parent = self._getparent()
        if (self.beExtremelyLenient and isinstance(parent, Element)):
            # Implicitly close tags such as <p> or <li> when an appropriate
            # sibling opens (see laterClosers).
            parentName = parent.tagName
            myName = name
            if self.caseInsensitive:
                parentName = parentName.lower()
                myName = myName.lower()
            if myName in self.laterClosers.get(parentName, []):
                self.gotTagEnd(parent.tagName)
                parent = self._getparent()
        attributes = _unescapeDict(attributes)
        namespaces = self.nsstack[-1][0]
        newspaces = {}
        # Pull xmlns declarations out of the attributes into prefix->uri
        # bindings.
        for k, v in attributes.items():
            if k.startswith('xmlns'):
                spacenames = k.split(':',1)
                if len(spacenames) == 2:
                    newspaces[spacenames[1]] = v
                else:
                    newspaces[''] = v
                del attributes[k]
        if newspaces:
            namespaces = namespaces.copy()
            namespaces.update(newspaces)
        # Rewrite prefixed attributes as (namespaceURI, localName) keys.
        for k, v in attributes.items():
            ksplit = k.split(':', 1)
            if len(ksplit) == 2:
                pfx, tv = ksplit
                if pfx != 'xml' and pfx in namespaces:
                    attributes[namespaces[pfx], tv] = v
                    del attributes[k]
        el = Element(name, attributes, parent,
                     self.filename, self.saveMark(),
                     caseInsensitive=self.caseInsensitive,
                     preserveCase=self.preserveCase,
                     namespace=namespaces.get(''))
        revspaces = _reverseDict(newspaces)
        el.addPrefixes(revspaces)

        if newspaces:
            # This element introduced bindings; push a new namespace frame
            # that will be popped when the element closes.
            rscopy = self.nsstack[-1][2].copy()
            rscopy.update(revspaces)
            self.nsstack.append((namespaces, el, rscopy))
        self.elementstack.append(el)
        if parent:
            parent.appendChild(el)
        if (self.beExtremelyLenient and el.tagName in self.soonClosers):
            self.gotTagEnd(name)

    def _gotStandalone(self, factory, data):
        # Attach a leaf node to the current parent, or record it at top level
        # when lenient and outside any element.
        parent = self._getparent()
        te = factory(data, parent)
        if parent:
            parent.appendChild(te)
        elif self.beExtremelyLenient:
            self.documents.append(te)

    def gotText(self, data):
        # Whitespace-only text is dropped unless inside a space-preserving tag.
        if data.strip() or self.shouldPreserveSpace():
            self._gotStandalone(Text, data)

    def gotComment(self, data):
        self._gotStandalone(Comment, data)

    def gotEntityReference(self, entityRef):
        self._gotStandalone(EntityReference, entityRef)

    def gotCData(self, cdata):
        self._gotStandalone(CDATASection, cdata)

    def gotTagEnd(self, name):
        # print ' '*self.indentlevel, 'end tag',name
        # self.indentlevel -= 1
        if not self.elementstack:
            # Close tag with nothing open: ignore when lenient, else error.
            if self.beExtremelyLenient:
                return
            raise MismatchedTags(*((self.filename, "NOTHING", name)
                                   +self.saveMark()+(0,0)))
        el = self.elementstack.pop()
        pfxdix = self.nsstack[-1][2]
        if self.nsstack[-1][1] is el:
            # This element owned a namespace frame; pop it (kept around in
            # case the close turns out to be garbage and we must re-push).
            nstuple = self.nsstack.pop()
        else:
            nstuple = None
        if self.caseInsensitive:
            tn = el.tagName.lower()
            cname = name.lower()
        else:
            tn = el.tagName
            cname = name

        nsplit = name.split(':',1)
        if len(nsplit) == 2:
            # Prefixed close tag: its namespace must match the open tag's.
            pfx, newname = nsplit
            ns = pfxdix.get(pfx,None)
            if ns is not None:
                if el.namespace != ns:
                    if not self.beExtremelyLenient:
                        raise MismatchedTags(*((self.filename, el.tagName, name)
                                               +self.saveMark()+el._markpos))
        if not (tn == cname):
            # Mismatched close tag.
            if self.beExtremelyLenient:
                if self.elementstack:
                    lastEl = self.elementstack[0]
                    # Search upward for a matching open tag; everything above
                    # it gets implicitly closed.
                    for idx in xrange(len(self.elementstack)):
                        if self.elementstack[-(idx+1)].tagName == cname:
                            self.elementstack[-(idx+1)].endTag(name)
                            break
                    else:
                        # this was a garbage close tag; wait for a real one
                        self.elementstack.append(el)
                        if nstuple is not None:
                            self.nsstack.append(nstuple)
                        return
                    del self.elementstack[-(idx+1):]
                    if not self.elementstack:
                        self.documents.append(lastEl)
                        return
            else:
                raise MismatchedTags(*((self.filename, el.tagName, name)
                                       +self.saveMark()+el._markpos))
        el.endTag(name)
        if not self.elementstack:
            # Closed the root element: record a complete document.
            self.documents.append(el)
        if self.beExtremelyLenient and el.tagName == "script":
            self._fixScriptElement(el)

    def connectionLost(self, reason):
        XMLParser.connectionLost(self, reason) # This can cause more events!
        if self.elementstack:
            # Elements still open at EOF: salvage the root when lenient,
            # otherwise report the innermost unclosed tag.
            if self.beExtremelyLenient:
                self.documents.append(self.elementstack[0])
            else:
                raise MismatchedTags(*((self.filename, self.elementstack[-1],
                                        "END_OF_FILE")
                                       +self.saveMark()
                                       +self.elementstack[-1]._markpos))
def parse(readable, *args, **kwargs):
    """
    Parse HTML or XML readable. Accepts a file-like object or a path; extra
    arguments are forwarded to L{MicroDOMParser}. Returns a L{Document}.
    """
    if not hasattr(readable, "read"):
        # NOTE(review): when a path is given the file opened here is never
        # closed; consider closing it after parsing.
        readable = open(readable, "rb")
    mdp = MicroDOMParser(*args, **kwargs)
    mdp.filename = getattr(readable, "name", "<xmlfile />")
    mdp.makeConnection(None)
    # StringIO-likes can be fed in one shot; real files are chunked.
    if hasattr(readable,"getvalue"):
        mdp.dataReceived(readable.getvalue())
    else:
        r = readable.read(1024)
        while r:
            mdp.dataReceived(r)
            r = readable.read(1024)
    mdp.connectionLost(None)

    if not mdp.documents:
        raise ParseError(mdp.filename, 0, 0, "No top-level Nodes in document")

    if mdp.beExtremelyLenient:
        # Lenient mode may produce several top-level nodes; wrap them (or a
        # lone non-Element) in a synthetic <html> root.
        if len(mdp.documents) == 1:
            d = mdp.documents[0]
            if not isinstance(d, Element):
                el = Element("html")
                el.appendChild(d)
                d = el
        else:
            d = Element("html")
            for child in mdp.documents:
                d.appendChild(child)
    else:
        d = mdp.documents[0]
    doc = Document(d)
    doc.doctype = mdp._mddoctype
    return doc
def parseString(st, *args, **kw):
    """Parse HTML or XML held in the string *st*; extra args go to parse()."""
    if not isinstance(st, UnicodeType):
        return parse(StringIO(st), *args, **kw)
    # Unicode input: not particularly ideal, but encoding to UTF-16 works.
    return parse(StringIO(st.encode('UTF-16')), *args, **kw)
def parseXML(readable):
    """Parse an XML readable object (case-sensitive, case-preserving)."""
    return parse(readable, caseInsensitive=0, preserveCase=1)
def parseXMLString(st):
    """Parse an XML string (case-sensitive, case-preserving)."""
    return parseString(st, caseInsensitive=0, preserveCase=1)
# Utility
class lmx:
    """Easy creation of XML."""
    def __init__(self, node='div'):
        # A bare tag name is wrapped in a fresh Element.
        self.node = Element(node) if isinstance(node, StringTypes) else node
    def __getattr__(self, name):
        # lmx().div(align='left') creates and returns a child <div>.
        if name[0] == '_':
            raise AttributeError("no private attrs")
        def maker(**kw):
            return self.add(name, **kw)
        return maker
    def __setitem__(self, key, val):
        self.node.setAttribute(key, val)
    def __getitem__(self, key):
        return self.node.getAttribute(key)
    def text(self, txt, raw=0):
        # Append a text node; return self so calls can be chained.
        self.node.appendChild(Text(txt, raw=raw))
        return self
    def add(self, tagName, **kw):
        child = Element(tagName, caseInsensitive=0, preserveCase=0)
        self.node.appendChild(child)
        wrapped = lmx(child)
        for key, value in kw.items():
            # A leading underscore lets callers spell reserved words (_class).
            if key[0] == '_':
                key = key[1:]
            wrapped[key] = value
        return wrapped
|
bdh1011/wau
|
venv/lib/python2.7/site-packages/twisted/web/microdom.py
|
Python
|
mit
| 35,469
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter Post.replays_to into a nullable ForeignKey to discussions.Post."""
    dependencies = [
        ('discussions', '0005_auto_20150430_1645'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='replays_to',
            field=models.ForeignKey(to='discussions.Post', null=True),
        ),
    ]
|
ZackYovel/studybuddy
|
server/studybuddy/discussions/migrations/0006_auto_20150430_1648.py
|
Python
|
mit
| 428
|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
from netforce.access import get_active_user
import json
class FieldDefault(Model):
    """Per-user stored default values for model fields, keyed by
    (user, model, field)."""
    _name = "field.default"
    _string = "Field Default"
    _fields = {
        "user_id": fields.Many2One("base.user", "User", required=True, search=True),
        "model": fields.Char("Model", required=True, search=True),
        "field": fields.Char("Field", required=True, search=True),
        "value": fields.Text("Value"),
    }
    def set_default(self, model, field, value, context={}):
        """Record *value* as the active user's default for model.field,
        first removing any existing default for that key."""
        self.clear_default(model, field)
        user_id = get_active_user()
        vals = {
            "user_id": user_id,
            "model": model,
            "field": field,
            "value": value,
        }
        self.create(vals)
    def clear_default(self, model, field, context={}):
        """Delete the active user's stored default for model.field, if any."""
        user_id = get_active_user()
        res = self.search([["user_id", "=", user_id], ["model", "=", model], ["field", "=", field]])
        if res:
            self.delete(res)
    def get_default(self, model, field, context={}):
        """Return the active user's stored default value for model.field,
        or None when no default has been saved."""
        user_id = get_active_user()
        res = self.search([["user_id", "=", user_id], ["model", "=", model], ["field", "=", field]])
        if not res:
            return None
        obj_id = res[0]
        obj = self.browse(obj_id)
        return obj.value
FieldDefault.register()
|
sidzan/netforce
|
netforce_general/netforce_general/models/field_default.py
|
Python
|
mit
| 2,457
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A simple tokenizer used for the Full Text Search API stub."""
import re
from google.appengine.datastore import document_pb
from google.appengine.api.search.stub import tokens
_WORD_SEPARATORS = [
r'!', r'\"', r'%', r'\(', r'\)', r'\*', r',', r'\.', r'/', r'\:', r'=',
r'>', r'\?', r'@', r'\[', r'\\', r'\]', r'\^', r'\`', r'\{', r'\|', r'\}',
r'~', r'\t', r'\n', r'\f', r'\r', r' ', r'&', r'#', r'$', r';']
_WORD_SEPARATOR_RE = re.compile('|'.join(_WORD_SEPARATORS))
def _StripSeparators(value):
"""Remove special characters and collapse spaces."""
return re.sub(r' [ ]*', ' ', re.sub(_WORD_SEPARATOR_RE, ' ', value))
def NormalizeString(value):
    """Lower-case *value*, replace punctuation with spaces, and trim and
    collapse whitespace."""
    without_punctuation = _StripSeparators(value)
    return without_punctuation.lower().strip()
class SimpleTokenizer(object):
  """A tokenizer which converts text to a normalized stream of tokens.
  Text normalization lowers case, removes punctuation and splits on whitespace.
  """
  def __init__(self, split_restricts=True, preserve_case=False):
    # split_restricts: also split tokens on ':' (field-restrict syntax).
    # preserve_case: when True, SetCase leaves the input unchanged.
    self._split_restricts = split_restricts
    self._preserve_case = preserve_case
    self._html_pattern = re.compile(r'<[^>]*>')
  def SetCase(self, value):
    """Return value lower-cased, unless case preservation was requested."""
    # hasattr guard: presumably tolerates instances created before the
    # _preserve_case attribute existed -- TODO confirm.
    if hasattr(self, '_preserve_case') and self._preserve_case:
      return value
    else:
      return value.lower()
  def TokenizeText(self, text, token_position=0):
    """Tokenizes the text into a sequence of Tokens."""
    return self._TokenizeForType(field_type=document_pb.FieldValue.TEXT,
                                 value=text, token_position=token_position)
  def TokenizeValue(self, field_value, token_position=0):
    """Tokenizes a document_pb.FieldValue into a sequence of Tokens."""
    if field_value.type() is document_pb.FieldValue.GEO:
      # Geo fields carry a point rather than a string value.
      return self._TokenizeForType(field_type=field_value.type(),
                                   value=field_value.geo(),
                                   token_position=token_position)
    return self._TokenizeForType(field_type=field_value.type(),
                                 value=field_value.string_value(),
                                 token_position=token_position)
  def _TokenizeString(self, value, field_type):
    """Split value into token strings; ATOM fields stay a single token."""
    value = self.SetCase(value)
    if field_type is not document_pb.FieldValue.ATOM:
      if field_type is document_pb.FieldValue.HTML:
        value = self._StripHtmlTags(value)
      value = _StripSeparators(value)
      return value.split()
    else:
      return [value]
  def _StripHtmlTags(self, value):
    """Replace HTML tags with spaces."""
    return self._html_pattern.sub(' ', value)
  def _TokenizeForType(self, field_type, value, token_position=0):
    """Tokenizes value into a sequence of Tokens."""
    if field_type is document_pb.FieldValue.NUMBER:
      # Numbers are indexed verbatim as a single token.
      return [tokens.Token(chars=value, position=token_position)]
    if field_type is document_pb.FieldValue.GEO:
      return [tokens.GeoPoint(latitude=value.lat(), longitude=value.lng(),
                              position=token_position)]
    tokens_found = []
    token_strings = []
    if not self._split_restricts:
      token_strings = self.SetCase(value).split()
    else:
      token_strings = self._TokenizeString(value, field_type)
    for token in token_strings:
      if ':' in token and self._split_restricts:
        # 'field:value' restricts become separate tokens sharing a position.
        for subtoken in token.split(':'):
          tokens_found.append(
              tokens.Token(chars=subtoken, position=token_position))
        token_position += 1
      elif '"' in token:
        # Quote characters are emitted as their own Quote tokens.
        for subtoken in token.split('"'):
          if not subtoken:
            tokens_found.append(
                tokens.Quote(chars='"', position=token_position))
          else:
            tokens_found.append(
                tokens.Token(chars=subtoken, position=token_position))
        token_position += 1
      else:
        tokens_found.append(tokens.Token(chars=token, position=token_position))
        token_position += 1
    return tokens_found
|
GdZ/scriptfile
|
software/googleAppEngine/google/appengine/api/search/stub/simple_tokenizer.py
|
Python
|
mit
| 4,586
|
"""
Django settings for amcatdashboard project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import hashlib
import sys
from datetime import timedelta
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
if "DJANGO_DEBUG" in os.environ:
DEBUG = os.environ.get("DJANGO_DEBUG") == "1"
else:
DEBUG = "runserver" in sys.argv
if "DJANGO_SECRET_KEY" not in os.environ and not DEBUG:
raise ValueError("You must supply DJANGO_SECRET_KEY as environment variable.")
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "default")
CRON_SECRET = hashlib.sha256((SECRET_KEY + "6132600e-47a5-49e6-a3ff-4af22b02cd71").encode()).hexdigest()[:20]
ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS", "").split(",")
if DEBUG:
ALLOWED_HOSTS += ["*"]
# Disable the limit on the number of request fields, to allow for large queries
DATA_UPLOAD_MAX_NUMBER_FIELDS = None
STATIC_ROOT = os.path.join(BASE_DIR, "srv")
MEDIA_ROOT = os.path.join(BASE_DIR, "srv/media")
# Application definition
AUTH_USER_MODEL = 'dashboard.User'
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
LOGIN_EXEMPT_URLS = [
'^account/.+',
'^login/$',
'^dashboard/token_setup$',
'^dashboard/amcat',
'^dashboard/cron-trigger/\w+$'
]
MIGRATION_MODULES = {
'account': 'amcatdashboard.account_migrations'
}
if DEBUG:
SITE_ID = 1
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
else:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = os.environ.get("DASHBOARD_EMAIL_HOST", 'localhost')
EMAIL_PORT = os.environ.get("DASHBOARD_EMAIL_PORT", 587)
EMAIL_HOST_USER = os.environ.get("DASHBOARD_EMAIL_USER", '')
EMAIL_HOST_PASSWORD = os.environ.get("DASHBOARD_EMAIL_PASSWORD", '')
EMAIL_USE_TLS = os.environ.get("DASHBOARD_EMAIL_TLS", 'Y') in ("1", "Y", "ON")
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django_extensions',
'bootstrapform',
'account',
'amcatdashboard',
'dashboard',
'pinax_theme_bootstrap',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'dashboard.middleware.LoginRequiredMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'account.middleware.LocaleMiddleware',
'account.middleware.TimezoneMiddleware',
'dashboard.middleware.APITokenNeededMiddleware',
'dashboard.middleware.MethodOverrideMiddleware'
)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
ROOT_URLCONF = 'amcatdashboard.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ["templates"],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'account.context_processors.account',
'dashboard.context_preprocessors.dashboard_settings'
],
'builtins': [
'django.templatetags.i18n'
]
},
},
]
WSGI_APPLICATION = 'amcatdashboard.wsgi.application'
if "DASHBOARD_MEDIA_LIST" in os.environ:
MEDIA_LIST = os.environ.get("DASHBOARD_MEDIA_LIST")
else:
MEDIA_LIST = os.path.join(BASE_DIR, "media_list.csv")
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ.get("DASHBOARD_DB_NAME", "dashboard"),
'USER': os.environ.get("DASHBOARD_DB_USER", ""),
'PASSWORD': os.environ.get("DASHBOARD_DB_PASSWORD", ""),
'HOST': os.environ.get("DASHBOARD_DB_HOST", ""),
'PORT': os.environ.get("DASHBOARD_DB_PORT", "")
}
}
CACHES = {
'default': {
# LocMemCache doesn't share its contents with other processes.
# Only use default for small things that can be replicated in memory safely.
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'dashboard-cache',
},
    # FileBasedCache isn't particularly fast, and it is recommended to configure a different caching backend instead.
'query': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': 'cache/dashboard/queries',
'OPTIONS': {
'eviction_policy': 'least-recently-used'
}
},
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en'
gettext_noop = lambda s: s
LANGUAGES = (('en', gettext_noop('English')), ('nl', gettext_noop('Dutch')))
LOCALE_PATHS = (
'locale',
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-user-accounts
ACCOUNT_EMAIL_UNIQUE = True
ACCOUNT_EMAIL_CONFIRMATION_REQUIRED = True
ACCOUNT_USER_DISPLAY = lambda user: user.email
SESSION_COOKIE_NAME = 'dashboard__sessionid'
CSRF_COOKIE_NAME = 'dashboard__csrftoken'
SESSION_ID = os.environ.get("DJANGO_SESSION_ID")
# Comment this line to fall back to the default theme.
if os.environ.get("DASHBOARD_THEME"):
GLOBAL_THEME = os.environ['DASHBOARD_THEME']
DASHBOARD_ALLOW_MULTIPLE_SYSTEMS = True
if DEBUG:
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
else:
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/var/log/amcat/dashboard_bzk.log',
},
},
'loggers': {
'': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
amcat/amcat-dashboard
|
amcatdashboard/settings.py
|
Python
|
agpl-3.0
| 7,503
|
from django.contrib import admin
from data.models import *
class EntryAdmin(admin.ModelAdmin):
    """Stock admin interface for Entry."""
    pass
class GroupAdmin(admin.ModelAdmin):
    """Stock admin interface for Group."""
    pass
class FieldAdmin(admin.ModelAdmin):
    """Stock admin interface for Field."""
    pass
class RecordAdmin(admin.ModelAdmin):
    """Stock admin interface for Record."""
    pass
# Expose all four data models in the Django admin site.
admin.site.register(Entry, EntryAdmin)
admin.site.register(Group, GroupAdmin)
admin.site.register(Field, FieldAdmin)
admin.site.register(Record, RecordAdmin)
|
jochenklar/quantify
|
data/admin.py
|
Python
|
apache-2.0
| 403
|
# This file is part of the dionaea honeypot
#
# SPDX-FileCopyrightText: 2015 Tan Kean Siong
# SPDX-FileCopyrightText: 2016 PhiBo (DinoTools)
#
# SPDX-License-Identifier: GPL-2.0-or-later
from dionaea import ServiceLoader
from .mqtt import mqttd
class MQTTService(ServiceLoader):
    """Service loader for the MQTT honeypot daemon."""
    name = "mqtt"
    @classmethod
    def start(cls, addr, iface=None, config=None):
        """Create an mqttd daemon bound to *addr* on port 1883 (optionally on
        interface *iface*), apply *config*, start listening and return it."""
        daemon = mqttd()
        daemon.bind(addr, 1883, iface=iface)
        daemon.apply_config(config)
        daemon.listen()
        return daemon
|
dionaea-honeypot/dionaea
|
modules/python/dionaea/mqtt/__init__.py
|
Python
|
gpl-2.0
| 522
|
#
# gPrime - A web-based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ...const import LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gprime modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
# "People marked private"
#-------------------------------------------------------------------------
class IsPrivate(Rule):
    """Objects marked private."""
    name = 'Objects marked private'
    description = "Matches objects that are indicated as private"
    category = _('General filters')
    def apply(self, db, obj):
        # True when the object's privacy flag is set; db is unused here.
        return obj.get_privacy()
|
sam-m888/gprime
|
gprime/filters/rules/_isprivate.py
|
Python
|
gpl-2.0
| 1,613
|
import datetime
import math
import os
from collections import namedtuple
from urllib.parse import urlparse
from django.conf import settings
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.db.models import Count, Max, Q
from django.template import loader
from django.utils.functional import cached_property
from django.urls import reverse
from olympia import amo
from olympia.addons.models import Addon, AddonCategory
from olympia.amo.reverse import get_url_prefix, override_url_prefix
from olympia.amo.templatetags.jinja_helpers import absolutify
from olympia.constants.categories import CATEGORIES
from olympia.constants.promoted import RECOMMENDED
from olympia.bandwagon.models import Collection
from olympia.files.utils import id_to_path
from olympia.promoted.models import PromotedAddon
from olympia.tags.models import AddonTag, Tag
from olympia.users.models import UserProfile
# These constants are from:
# https://github.com/mozilla/addons-frontend/blob/master/src/amo/reducers/addonsByAuthors.js
EXTENSIONS_BY_AUTHORS_PAGE_SIZE = 10
THEMES_BY_AUTHORS_PAGE_SIZE = 12
# top 10 locales by visitor from GA (as of May 2021)
FRONTEND_LANGUAGES = [
'de',
'en-GB',
'en-US',
'es',
'fr',
'ja',
'pl',
'pt-BR',
'ru',
'zh-CN',
]
class LazyTupleList:
    """Lazily emulates a generated list like:
    [
        (item_a, item_b)
        for item_b in list_b
        for item_a in list_a
    ]
    """

    def __init__(self, list_a, list_b):
        self.list_a = list_a
        self.list_b = list_b

    def __len__(self):
        return len(self.list_a) * len(self.list_b)

    def __getitem__(self, key):
        width = len(self.list_a)

        def pair_at(index):
            # list_a cycles fastest; list_b advances once per full cycle.
            return (self.list_a[index % width], self.list_b[index // width])

        if isinstance(key, slice):
            return [pair_at(i) for i in range(key.start, key.stop, key.step or 1)]
        return pair_at(key)
class Sitemap(DjangoSitemap):
limit = 2000
i18n = True
languages = FRONTEND_LANGUAGES
alternates = True
# x_default = False # TODO: enable this when we can validate it works well
_cached_items = []
protocol = urlparse(settings.EXTERNAL_SITE_URL).scheme
def _location(self, item, force_lang_code=None):
# modified from Django implementation - we don't rely on locale for urls
if self.i18n:
obj, lang_code = item
# Doing .replace is hacky, but `override_url_prefix` is slow at scale
return self.location(obj).replace(
settings.LANGUAGE_CODE, force_lang_code or lang_code, 1
)
return self.location(item)
def _items(self):
items = self.items()
if self.i18n:
# Create (item, lang_code) tuples for all items and languages.
# This is necessary to paginate with all languages already considered.
return LazyTupleList(items, self._languages())
return items
def items(self):
return self._cached_items
def get_domain(self, site):
if not site:
if not hasattr(self, 'domain'):
self.domain = urlparse(settings.EXTERNAL_SITE_URL).netloc
return self.domain
return super().get_domain(site=site)
def get_urls(self, page=1, site=None, protocol=None, *, app_name=None):
with override_url_prefix(app_name=app_name):
return super().get_urls(page=page, site=site, protocol=protocol)
@cached_property
def template(self):
return loader.get_template('sitemap.xml')
def render(self, app_name, page):
context = {'urlset': self.get_urls(page=page, app_name=app_name)}
return self.template.render(context)
@property
def _current_app(self):
return amo.APPS[get_url_prefix().app]
def get_android_promoted_addons():
    """Return PromotedAddons in the RECOMMENDED group that are approved for
    Android and configured either for Android or for all applications."""
    return PromotedAddon.objects.filter(
        # application_id null means the promotion applies to every app.
        Q(application_id=amo.ANDROID.id) | Q(application_id__isnull=True),
        group_id=RECOMMENDED.id,
        addon___current_version__promoted_approvals__application_id=(amo.ANDROID.id),
        addon___current_version__promoted_approvals__group_id=RECOMMENDED.id,
    )
class AddonSitemap(Sitemap):
item_tuple = namedtuple('Item', ['last_updated', 'url', 'page'], defaults=(1,))
@cached_property
def _cached_items(self):
current_app = self._current_app
addons_qs = Addon.objects.public().filter(
_current_version__apps__application=current_app.id
)
# android is currently limited to a small number of recommended addons, so get
# the list of those and filter further
if current_app == amo.ANDROID:
promoted_addon_ids = get_android_promoted_addons().values_list(
'addon_id', flat=True
)
addons_qs = addons_qs.filter(id__in=promoted_addon_ids)
addons = list(
addons_qs.order_by('-last_updated')
.values_list(
'last_updated',
'slug',
'text_ratings_count',
named=True,
)
.iterator()
)
items = [
self.item_tuple(
addon.last_updated,
reverse('addons.detail', args=[addon.slug]),
)
for addon in addons
]
# add pages for ratings - and extra pages when needed to paginate
page_size = settings.REST_FRAMEWORK['PAGE_SIZE']
for addon in addons:
pages_needed = math.ceil((addon.text_ratings_count or 1) / page_size)
items.extend(
self.item_tuple(
addon.last_updated,
reverse('addons.ratings.list', args=[addon.slug]),
page,
)
for page in range(1, pages_needed + 1)
)
return items
def lastmod(self, item):
return item.last_updated
def location(self, item):
return item.url + (f'?page={item.page}' if item.page > 1 else '')
class AMOSitemap(Sitemap):
    """Sitemap for the fixed list of frontend and server pages below."""
    lastmod = datetime.datetime.now()
    _cached_items = [
        # frontend pages
        ('home', amo.FIREFOX),
        ('home', amo.ANDROID),
        ('pages.about', None),
        ('pages.review_guide', None),
        ('browse.extensions', amo.FIREFOX),
        ('browse.themes', amo.FIREFOX),
        ('browse.language-tools', amo.FIREFOX),
        # server pages
        ('devhub.index', None),
        ('apps.appversions', amo.FIREFOX),
        ('apps.appversions', amo.ANDROID),
    ]
    def location(self, item):
        # Reverse the url name inside the app's url prefix when one is given.
        urlname, app = item
        if app:
            with override_url_prefix(app_name=app.short):
                return reverse(urlname)
        else:
            return reverse(urlname)
class CategoriesSitemap(Sitemap):
lastmod = datetime.datetime.now()
@cached_property
def _cached_items(self):
page_size = settings.REST_FRAMEWORK['PAGE_SIZE']
page_count_max = settings.ES_MAX_RESULT_WINDOW // page_size
def additems(type):
items = []
for category in CATEGORIES[current_app.id][type].values():
items.append((category, 1))
pages_needed = min(
math.ceil(addon_counts.get(category.id, 1) / page_size),
page_count_max,
)
for page in range(2, pages_needed + 1):
items.append((category, page))
return items
current_app = self._current_app
counts_qs = (
AddonCategory.objects.filter(
addon___current_version__isnull=False,
addon___current_version__apps__application=current_app.id,
addon__disabled_by_user=False,
addon__status__in=amo.REVIEWED_STATUSES,
)
.values('category_id')
.annotate(count=Count('addon_id'))
)
addon_counts = {cat['category_id']: cat['count'] for cat in counts_qs}
items = additems(amo.ADDON_EXTENSION)
if current_app == amo.FIREFOX:
items.extend(additems(amo.ADDON_STATICTHEME))
return items
def location(self, item):
(category, page) = item
return category.get_url_path() + (f'?page={page}' if page > 1 else '')
class CollectionSitemap(Sitemap):
    """Sitemap for collections authored by the configured task user."""
    @cached_property
    def _cached_items(self):
        return list(
            Collection.objects.filter(author_id=settings.TASK_USER_ID)
            .order_by('-modified')
            .values_list('modified', 'slug', 'author_id', named=True)
            .iterator()
        )
    def lastmod(self, item):
        return item.modified
    def location(self, item):
        return Collection.get_url_path(item)
class AccountSitemap(Sitemap):
item_tuple = namedtuple(
'AccountItem',
['addons_updated', 'url', 'extension_page', 'theme_page'],
defaults=(1, 1),
)
@cached_property
def _cached_items(self):
current_app = self._current_app
addon_q = Q(
addons___current_version__isnull=False,
addons___current_version__apps__application=current_app.id,
addons__disabled_by_user=False,
addons__status__in=amo.REVIEWED_STATUSES,
addonuser__listed=True,
addonuser__role__in=(amo.AUTHOR_ROLE_DEV, amo.AUTHOR_ROLE_OWNER),
)
# android is currently limited to a small number of recommended addons, so get
# the list of those and filter further
if current_app == amo.ANDROID:
promoted_addon_ids = get_android_promoted_addons().values_list(
'addon_id', flat=True
)
addon_q = addon_q & Q(addons__id__in=promoted_addon_ids)
users = (
UserProfile.objects.filter(is_public=True, deleted=False)
.annotate(
theme_count=Count(
'addons', filter=Q(addon_q, addons__type=amo.ADDON_STATICTHEME)
)
)
.annotate(
extension_count=Count(
'addons', filter=Q(addon_q, addons__type=amo.ADDON_EXTENSION)
)
)
.annotate(addons_updated=Max('addons__last_updated', filter=addon_q))
.order_by('-addons_updated', '-modified')
.values_list(
'addons_updated', 'id', 'extension_count', 'theme_count', named=True
)
.iterator()
)
items = []
for user in users:
if not user.extension_count and not user.theme_count:
# some users have an empty page for various reasons, no need to include
continue
extension_pages_needed = math.ceil(
(user.extension_count or 1) / EXTENSIONS_BY_AUTHORS_PAGE_SIZE
)
theme_pages_needed = math.ceil(
(user.theme_count or 1) / THEMES_BY_AUTHORS_PAGE_SIZE
)
items.extend(
self.item_tuple(
user.addons_updated,
reverse('users.profile', args=[user.id]),
ext_page,
1,
)
for ext_page in range(1, extension_pages_needed + 1)
)
# start themes at 2 because we don't want (1, 1) twice
items.extend(
self.item_tuple(
user.addons_updated,
reverse('users.profile', args=[user.id]),
1,
theme_page,
)
for theme_page in range(2, theme_pages_needed + 1)
)
return items
def lastmod(self, item):
return item.addons_updated
def location(self, item):
urlargs = '&'.join(
([f'page_e={item.extension_page}'] if item.extension_page > 1 else [])
+ ([f'page_t={item.theme_page}'] if item.theme_page > 1 else [])
)
return item.url + (f'?{urlargs}' if urlargs else '')
class TagPagesSitemap(Sitemap):
lastmod = datetime.datetime.now()
@cached_property
def _cached_items(self):
page_size = settings.REST_FRAMEWORK['PAGE_SIZE']
page_count_max = settings.ES_MAX_RESULT_WINDOW // page_size
current_app = self._current_app
counts_qs = (
AddonTag.objects.filter(
addon___current_version__isnull=False,
addon___current_version__apps__application=current_app.id,
addon__disabled_by_user=False,
addon__status__in=amo.REVIEWED_STATUSES,
)
.values('tag_id')
.annotate(count=Count('addon_id'))
)
addon_counts = {tag['tag_id']: tag['count'] for tag in counts_qs}
items = []
for tag in Tag.objects.all():
items.append((tag, 1))
pages_needed = min(
math.ceil(addon_counts.get(tag.id, 1) / page_size),
page_count_max,
)
for page in range(2, pages_needed + 1):
items.append((tag, page))
return items
def location(self, item):
(tag, page) = item
return tag.get_url_path() + (f'?page={page}' if page > 1 else '')
def get_sitemaps():
    """Return the mapping of (section, app) pairs to Sitemap instances."""
    return {
        # because some urls are app-less, we specify per item, so don't specify an app
        ('amo', None): AMOSitemap(),
        ('addons', amo.FIREFOX): AddonSitemap(),
        ('addons', amo.ANDROID): AddonSitemap(),
        # category pages aren't supported on android, so firefox only
        ('categories', amo.FIREFOX): CategoriesSitemap(),
        # we don't expose collections on android, so firefox only
        ('collections', amo.FIREFOX): CollectionSitemap(),
        ('users', amo.FIREFOX): AccountSitemap(),
        ('users', amo.ANDROID): AccountSitemap(),
        ('tags', amo.FIREFOX): TagPagesSitemap(),
        ('tags', amo.ANDROID): TagPagesSitemap(),
    }
# Sitemaps generated outside this module, appended to the index as-is.
OTHER_SITEMAPS = [
    '/blog/sitemap.xml',
]
def get_sitemap_section_pages(sitemaps):
    """Return (section, app_name, page) tuples covering every paginator page
    of every sitemap in *sitemaps*; app_name is None for app-less sections."""
    pages = []
    for (section, app), site in sitemaps.items():
        if not app:
            pages.extend((section, None, page) for page in site.paginator.page_range)
            continue
        with override_url_prefix(app_name=app.short):
            # Add all pages of the sitemap section.
            pages.extend(
                (section, app.short, page) for page in site.paginator.page_range
            )
    return pages
def render_index_xml(sitemaps):
    """Render the sitemap index XML: one absolute url per section page,
    plus the externally generated sitemaps in OTHER_SITEMAPS."""
    sitemap_url = reverse('amo.sitemap')
    server_urls = (
        # Query params are omitted when they hold their default value.
        f'{sitemap_url}?section={section}'
        + (f'&app_name={app_name}' if app_name else '')
        + (f'&p={page}' if page != 1 else '')
        for section, app_name, page in get_sitemap_section_pages(sitemaps)
    )
    urls = list(server_urls) + OTHER_SITEMAPS
    return loader.render_to_string(
        'sitemap_index.xml',
        {'sitemaps': (absolutify(url) for url in urls)},
    )
def get_sitemap_path(section, app, page=1):
    """Return the filesystem path under SITEMAP_STORAGE_PATH where the given
    sitemap page is written."""
    if section is None or app is None:
        # If we don't have a section or app, we don't need a complex directory
        # structure and we can call the first page 'sitemap' for convenience
        # (it's likely going to be the only page).
        endpath = str(page) if page != 1 else 'sitemap'
    else:
        endpath = id_to_path(page)
    return os.path.join(
        settings.SITEMAP_STORAGE_PATH,
        section or '',
        app or '',
        f'{endpath}.xml',
    )
|
mozilla/addons-server
|
src/olympia/amo/sitemap.py
|
Python
|
bsd-3-clause
| 15,675
|
__author__ = 'D. Garlisi'
"""
EU project WISHFUL
"""
import abc
"""
The WISHFUL interface definitions - UPIs (UPI_M) for install/update/active/deactive software modules.
"""
"""
The UPI_M - UPI for managing protocol software modules at any layer.
"""
class UPI_M(object):
    __metaclass__ = abc.ABCMeta
    """ Generic functions for configuration
    """
    @abc.abstractmethod
    def installExecutionEngine(self, param_key):
        """Install an execution engine described by *param_key*.
        Abstract -- concrete UPI_M providers implement the actual
        installation and define the return value.
        """
        return
    @abc.abstractmethod
    def initTest(self, param_key):
        """Initialize/activate a test described by *param_key*.
        Abstract -- concrete UPI_M providers implement the behaviour
        and define the return value.
        """
        return
|
pruckebusch/WiSHFUL_UPI_SHOWCASES
|
upis/upi_m.py
|
Python
|
gpl-3.0
| 715
|
# -*- coding: utf-8 -*-
# KodiAddon
#
from lib.scraper import myAddon
import re
import sys
# Start of Module
# Derive the addon name from the plugin:// URL Kodi invoked us with
# (sys.argv[0]), then hand control over to the scraper's event loop.
addonName = re.search('plugin\://plugin.video.(.+?)/',str(sys.argv[0])).group(1)
ma = myAddon(addonName)
ma.processAddonEvent()
|
odicraig/kodi2odi
|
addons/plugin.video.metv/default.py
|
Python
|
gpl-3.0
| 240
|
class ParsingObject:
    """A node of the parse tree; created at input position *pos*."""

    def __init__(self, pos):
        self.oid = 0
        self.tag = '#empty'
        self.value = None
        # Both ends of the span start at the creation position.
        self.start_pos = pos
        self.end_pos = pos
        # Tree links; filled in later by the parsing context.
        self.parent = None
        self.child = None
class ParsingLog:
    """A reusable linked-list cell used by ParsingContext's log stack."""

    def __init__(self):
        self.index = 0
        self.childNode = None
        self.next = None
class ParsingContext:
    """Mutable parsing state over *inputs*.

    Maintains a stack of ParsingLog cells that record lazily linked child
    nodes; commitLog/abortLog consume the stack back down to a saved mark.
    Recycled cells are kept on the *unusedLog* free-list.
    """
    def __init__(self, inputs):
        self.inputs = inputs
        self.pos = 0
        self.left = None
        self.logStackSize = 0
        self.logStack = None
        # Free-list of ParsingLog cells, to avoid reallocating them.
        self.unusedLog = None

    def newLog(self):
        """Return a fresh or recycled ParsingLog cell."""
        if self.unusedLog is None:
            l = ParsingLog()
            l.next = None
            l.childNode = None
            return l
        l = self.unusedLog
        self.unusedLog = l.next
        l.next = None
        return l

    def unuseLog(self, log):
        """Return *log* to the free-list."""
        log.childNode = None
        log.next = self.unusedLog
        self.unusedLog = log

    def Parsing_markLogStack(self):
        """Return a mark identifying the current log-stack depth."""
        return self.logStackSize

    def lazyLink(self, parent, index, child):
        """Record that *child* should become child #*index* of *parent*."""
        l = self.newLog()
        l.childNode = child
        child.parent = parent
        l.index = index
        l.next = self.logStack
        self.logStack = l
        self.logStackSize += 1

    def lazyJoin(self, left):
        """Record a join marker (index -9) for *left* on the log stack."""
        l = self.newLog()
        l.childNode = left
        l.index = -9
        l.next = self.logStack
        self.logStack = l
        self.logStackSize += 1

    def commitLog(self, mark, newnode):
        """Attach to *newnode* every child recorded since *mark*."""
        first = None
        objectSize = 0
        while mark < self.logStackSize:
            cur = self.logStack
            self.logStack = self.logStack.next
            # BUG FIX: was `self.logStackSize--`, which is not valid Python.
            self.logStackSize -= 1
            if cur.index == -9:  # lazyJoin marker: commit the joined node
                self.commitLog(mark, cur.childNode)
                self.unuseLog(cur)
                break
            if cur.childNode.parent == newnode:
                # Collect entries belonging to newnode, reversing back into
                # recording order.
                cur.next = first
                first = cur
                objectSize += 1
            else:
                self.unuseLog(cur)
        if objectSize > 0:
            newnode.child = [None] * objectSize
            newnode.child_size = objectSize
            i = 0
            while i < objectSize:
                cur = first
                first = first.next
                if cur.index == -1:
                    # -1 means "next sequential slot".
                    cur.index = i
                newnode.child[cur.index] = cur.childNode
                self.unuseLog(cur)
                i += 1
            # Fill any holes left by explicit indices with empty nodes.
            i = 0
            while i < objectSize:
                if newnode.child[i] is None:
                    newnode.child[i] = ParsingObject(0)
                i += 1

    def abortLog(self, mark):
        """Discard every log entry recorded since *mark*."""
        while mark < self.logStackSize:
            l = self.logStack
            self.logStack = self.logStack.next
            # BUG FIX: was `self.logStackSize--` (invalid syntax).
            self.logStackSize -= 1
            # BUG FIX: was `self.unusedLog(l)`, which calls the free-list
            # attribute instead of the unuseLog() method.
            self.unuseLog(l)
|
Kouhei-Moriya/peg4d-java
|
libnez/libnez.py
|
Python
|
bsd-2-clause
| 2,876
|
#! /usr/bin/env python
###############################################################################
#
# simulavr - A simulator for the Atmel AVR family of microcontrollers.
# Copyright (C) 2001, 2002 Theodore A. Roth
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
#
# $Id: test_LD_X_incr.py,v 1.1 2004/07/31 00:59:11 rivetwa Exp $
#
"""Test the LD_X_incr opcode.
"""
import base_test
from registers import Reg, SREG
# Failure exception for LD_X_incr tests; one subclass per generated case below.
class LD_X_incr_TestFail(base_test.TestFail): pass
class base_LD_X_incr(base_test.opcode_test):
    """Generic test case for testing LD_X_incr opcode.

    LD_X_incr - Load Indirect from data space to Register using index X and
    post increment X.

    Operation: Rd <- (X) then X <- X + 1

    opcode is '1001 000d dddd 1101' where 0 <= d <= 31 and d != {26,27}

    Only registers PC, R26, R27 and Rd should be changed.
    """
    def setup(self):
        # Set the register values
        self.setup_regs[self.Rd] = 0
        # The X pointer lives in R26 (low byte) and R27 (high byte).
        self.setup_regs[Reg.R26] = (self.X & 0xff)
        self.setup_regs[Reg.R27] = ((self.X >> 8) & 0xff)
        # set up the val in memory (memory is read before X is incremented,
        # thus we need to write to memory _at_ X)
        self.mem_byte_write( self.X, self.Vd )
        # Return the raw opcode: '1001 000d dddd 1101' with d = Rd
        return 0x900D | (self.Rd << 4)

    def analyze_results(self):
        self.reg_changed.extend( [self.Rd, Reg.R26, Reg.R27] )
        # check that result is correct
        expect = self.Vd
        got = self.anal_regs[self.Rd]
        if expect != got:
            self.fail('LD_X_incr: expect=%02x, got=%02x' % (expect, got))
        # check that X was incremented (reassemble the 16-bit pointer)
        expect = self.X + 1
        got = (self.anal_regs[Reg.R26] & 0xff) | ((self.anal_regs[Reg.R27] << 8) & 0xff00)
        if expect != got:
            self.fail('LD_X_incr X not incr: expect=%04x, got=%04x' % (expect, got))
#
# Template code for test case.
# The fail method will raise a test specific exception.
#
# NOTE: this module is Python 2 only (statement-form `exec`, the
# `raise Exc, arg` syntax inside the template, and range() concatenation
# in the loop below).
#
template = """
class LD_X_incr_r%02d_X%04x_v%02x_TestFail(LD_X_incr_TestFail): pass
class test_LD_X_incr_r%02d_X%04x_v%02x(base_LD_X_incr):
    Rd = %d
    X = 0x%x
    Vd = 0x%x
    def fail(self,s):
        raise LD_X_incr_r%02d_X%04x_v%02x_TestFail, s
"""
#
# automagically generate the test_LD_X_incr_rNN_vXX class definitions.
#
# Operation is undefined for d = 26 and d = 27 (they hold X itself), so
# those registers are skipped.
#
code = ''
for d in range(0,26)+range(28,32):
    for x in (0x20f, 0x2ff):
        for v in (0xaa, 0x55):
            # The template consumes (d, x, v) four times: the two generated
            # class names, the three class attributes, and the exception
            # name referenced in fail().
            args = (d,x,v)*4
            code += template % args
exec code
|
chrta/simulavr
|
regress/test_opcodes/test_LD_X_incr.py
|
Python
|
gpl-2.0
| 3,078
|
import sys
import logging
sys.path.append('./entities')
from CommonDAO import CommonDAO
from datetime import datetime, date, time
class SourceHasFileDAO(CommonDAO):
    """DAO for the source_has_file join table linking sources to files."""

    jointable = "source_has_file"

    def __init__(self):
        CommonDAO.__init__(self)

    def insertOrUpdate(self, sourceId, fileId, availability):
        """Upsert the (source, file) row, updating availability when it exists.

        Errors are reported on stderr (not raised), matching the original
        best-effort behaviour; returns None in that case.
        """
        try:
            rs = self.findBySourceFile(sourceId, fileId)
            if rs is not None:
                # Bugfix: values were interpolated into the SQL string with %,
                # which is vulnerable to SQL injection and breaks on quoting.
                # Parameterized now, consistent with delete().
                queryUpdate = "UPDATE " + self.jointable + " SET availability = %s WHERE source_id = %s AND file_id = %s"
                logging.debug(queryUpdate)
                self.cursor.execute(queryUpdate, (availability, sourceId, fileId))
            else:
                firstSeen = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                queryInsert = "INSERT INTO " + self.jointable + "(source_id, file_id, first_seen, availability) VALUES(%s, %s, %s, %s)"
                logging.debug(queryInsert)
                self.cursor.execute(queryInsert, (sourceId, fileId, firstSeen, availability))
        except Exception as err:
            sys.stderr.write('ERROR: %s\n' % str(err))
            return None

    def delete(self, sourceId, fileId):
        """Remove the (source, file) association."""
        self.cursor.execute("""DELETE FROM """+self.jointable+""" WHERE source_id = %s AND file_id = %s""", (sourceId, fileId))

    def findBySourceFile(self, sourceId, fileId):
        """Return all rows for (sourceId, fileId), or None when absent."""
        # Parameterized for consistency with delete() and to avoid injection.
        query = "SELECT * FROM " + self.jointable + " WHERE source_id = %s AND file_id = %s"
        logging.debug(query)
        self.cursor.execute(query, (sourceId, fileId))
        rs = self.cursor.fetchall()
        if not rs:
            return None
        return rs
|
tassia/DonkeySurvey
|
src/database/SourceHasFileDAO.py
|
Python
|
gpl-3.0
| 2,024
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from setuptools import setup
# All distribution metadata gathered in one mapping, then handed to setup().
PACKAGE_META = dict(
    name='fondasms',
    version='0.8.1',
    description='Django app to add support for FondaSMS requests.',
    long_description=("Allow any django app to handle SMS/Call requests "
                      "using the FondaSMS Android App."),
    author='yeleman',
    author_email='rgaudin@gmail.com',
    url='http://github.com/yeleman/django-fondasms',
    packages=['fondasms'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
    ],
)
setup(**PACKAGE_META)
|
yeleman/django-fondasms
|
setup.py
|
Python
|
unlicense
| 826
|
#!/usr/bin/env python
# Print a numbered greeting for each of the first ten integers.
a = range(10)
for count in a:
    print("Hello {}".format(count))
|
FryguyPA/pynet
|
hello10x.py
|
Python
|
apache-2.0
| 84
|
import logging
from flock.hsm import Hsm, HsmState
from flock.protocol import FlockProtocol
from flock.controller.enocean.packet import Packet
class EnoceanPacketHsm(Hsm):
    """State machine that reassembles EnOcean packets from a byte stream:
    sync byte (0x55), 4-byte header, data, optional data and CRC bytes.

    The CRC bytes are consumed but not verified here.
    """

    class BaseState(HsmState):
        # Default behaviour: consume the byte and stay in the current state.
        def on_data(self, hsm, data):
            return self

    class SyncState(BaseState):
        # Hunt for the 0x55 sync byte; on match, reset the packet fields
        # and start collecting the header.
        def on_data(self, hsm, data):
            if ord(data) == 0x55:
                hsm.data = ''
                hsm.optional_data = ''
                hsm.packet_type = None
                return hsm.state_header
            return self

    class HeaderState(BaseState):
        # Collect the 4 header bytes: data length (big-endian 16 bit),
        # optional-data length and packet type.
        def on_entry(self, hsm, old_state):
            self.size_left = 4
            self.data = ''

        def on_data(self, hsm, data):
            self.data += data
            self.size_left = self.size_left -1
            if self.size_left == 0:
                hsm.data_length = (ord(self.data[0]) << 8) + ord(self.data[1])
                hsm.optional_data_length = ord(self.data[2])
                hsm.packet_type = ord(self.data[3])
                return hsm.state_crc_header
            return self

    class CrcHeaderState(BaseState):
        # Consume the header CRC byte (unchecked), then route to the first
        # non-empty section.
        def on_data(self, hsm, data):
            if hsm.data_length != 0:
                return hsm.state_data
            elif hsm.optional_data_length != 0:
                return hsm.state_optional_data
            return hsm.state_crc_data

    class DataState(BaseState):
        # Accumulate data_length payload bytes.
        def on_entry(self, hsm, old_state):
            self.size_left = hsm.data_length

        def on_data(self, hsm, data):
            hsm.data += data
            self.size_left = self.size_left -1
            if self.size_left == 0:
                if hsm.optional_data_length != 0:
                    return hsm.state_optional_data
                else:
                    return hsm.state_crc_data
            return self

    class OptionalDataState(BaseState):
        # Accumulate optional_data_length bytes.
        def on_entry(self, hsm, old_state):
            self.size_left = hsm.optional_data_length

        def on_data(self, hsm, data):
            hsm.optional_data += data
            self.size_left = self.size_left -1
            if self.size_left == 0:
                return hsm.state_crc_data
            return self

    class CrcDataState(BaseState):
        # Consume the data CRC byte (unchecked), publish the completed
        # packet dict and return to hunting for the next sync byte.
        def on_data(self, hsm, data):
            hsm.packet = { 'data':hsm.data,
                           'optional_data':hsm.optional_data,
                           'type': hsm.packet_type}
            return hsm.state_sync

    def __init__(self):
        super(EnoceanPacketHsm, self).__init__()
        # Completed packet (dict) waiting to be popped by get_packet().
        self.packet = None
        self.packet_type = None
        self.data = None
        self.optional_data = None
        self.data_length = 0
        self.optional_data_length = 0
        # One shared instance of each state for the machine's lifetime.
        self.state_sync = EnoceanPacketHsm.SyncState()
        self.state_header = EnoceanPacketHsm.HeaderState()
        self.state_crc_header = EnoceanPacketHsm.CrcHeaderState()
        self.state_data = EnoceanPacketHsm.DataState()
        self.state_optional_data = EnoceanPacketHsm.OptionalDataState()
        self.state_crc_data = EnoceanPacketHsm.CrcDataState()
        self.transition(self.state_sync)

    def get_packet(self):
        """ Pops the current complete packet if available, None otherwise.
        """
        packet = self.packet
        self.packet = None
        return packet

    def on_data(self, data):
        """ New data is received data is a single byte provided as a character
        """
        return self.dispatch(self.current_state.on_data, data)
class EnoceanReceiver(FlockProtocol):
    """FlockProtocol adapter that feeds received bytes into an
    EnoceanPacketHsm and forwards completed packets to packet_received()."""

    def __init__(self):
        FlockProtocol.__init__(self)
        self.__hsm = None   # created on connection

    def connectionMade(self):
        """ Resets the controller.
        """
        # Simply install a fresh HSM; the original's explicit "set to None
        # first" dance was redundant since we reassign unconditionally.
        self.__hsm = EnoceanPacketHsm()
        logging.debug("Connected")

    def byte_received(self, data):
        """Feed one byte to the HSM; return a message when a packet completes."""
        if self.__hsm is None:   # not connected yet (idiom fix: was `== None`)
            return
        self.__hsm.on_data(data)
        packet = self.__hsm.get_packet()
        if packet is not None:
            message = self.packet_received(packet['type'], packet['data'], packet['optional_data'])
            if message is not None:
                return message
        return None

    def packet_received(self, type, data, optional_data):
        """ Processes a packet received from the controller.
        This method must be overriden by inherited classes to process the
        packet.
        """
        return
class EnoceanProtocol(EnoceanReceiver):
    """Concrete receiver: parses completed frames into Packet objects and
    publishes the valid ones."""

    def __init__(self):
        EnoceanReceiver.__init__(self)

    def packet_received(self, type, data, optional_data):
        packet = Packet()
        packet.load(type, data, optional_data)
        logging.info(packet)
        if packet.is_valid:   # idiom fix: was `== True`
            self.publish_packet(packet)
        else:
            # Bugfix: the original message ran the fields together
            # (e.g. "... type: 3data: ..."); add separators for readability.
            logging.warning('received invalid packet. type: ' + str(type) +
                            ' data: ' + data + ' optional_data: ' + optional_data)
|
MainRo/python-flock
|
flock/controller/enocean/protocol.py
|
Python
|
mit
| 4,961
|
from enigma import eTimer
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.Button import Button
from Components.Label import Label
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.config import getConfigListEntry, config
from SIFTeam.Extra.SAPCL import SAPCL
from SIFTeam.Extra.ExtraActionBox import ExtraActionBox
import time
class AccountConfiguration(Screen, ConfigListScreen):
    """Settings screen for SIF Team cloud credentials; pressing OK validates
    the account against the SAPCL API before saving."""

    def __init__(self, session):
        Screen.__init__(self, session)
        # Editable entries backed by the persistent config tree.
        self.list = [
            getConfigListEntry(_("Username:"), config.sifteam.cloud.username),
            getConfigListEntry(_("Password:"), config.sifteam.cloud.password)
        ]
        ConfigListScreen.__init__(self, self.list, session = session)
        self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
        {
            "ok": self.ok,
            "cancel": self.keyCancel,
        }, -2)
        self["status"] = Label("")
        self["info"] = Label("Register a new account: http://forum.sifteam.eu/register.php\nLost password: http://forum.sifteam.eu/login.php?do=lostpw")
        # Colour buttons exist in the skin but are unused on this screen.
        self["key_green"] = Button("")
        self["key_red"] = Button("")
        self["key_blue"] = Button("")
        self["key_yellow"] = Button("")

    def executeRequest(self):
        # Executed via ExtraActionBox; queries the account from the server.
        api = SAPCL()
        return api.getAccount()

    def executeRequestCallback(self, result):
        # Save and close on success, otherwise show the server's message.
        if result["result"]:
            self.keySave()
            self.close()
        else:
            self["status"].setText(result["message"])

    def ok(self):
        self.session.openWithCallback(self.executeRequestCallback, ExtraActionBox, _("Validating on sifteam server..."), "Account Configuration", self.executeRequest)
|
SIFTeam/enigma2
|
lib/python/SIFTeam/Cloud/AccountConfiguration.py
|
Python
|
gpl-2.0
| 1,634
|
#!/usr/bin/env python
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import roslib
import rospy
from geometry_msgs.msg import Twist
import sys
import select
import termios
import tty
# roslib.load_manifest('turtlebot_teleop')
roslib.load_manifest('tut_ibx0020')   # pull in this package's ROS dependencies

# Help/banner text printed at start-up and reprinted periodically.
msg = """
Control Your Turtlebot!
---------------------------
Moving around:
u i o
j k l
m , .
q/z : increase/decrease max speeds by 10%
w/x : increase/decrease only linear speed by 10%
e/c : increase/decrease only angular speed by 10%
space key, k : force stop
anything else : stop smoothly
CTRL-C to quit
"""

# key -> (linear direction, angular direction)
moveBindings = {
    'i': (1, 0),
    'o': (1, -1),
    'j': (0, 1),
    'l': (0, -1),
    'u': (1, 1),
    ',': (-1, 0),
    '.': (-1, 1),
    'm': (-1, -1),
}

# key -> (linear multiplier, angular multiplier) applied to current speeds
speedBindings = {
    'q': (1.1, 1.1),
    'z': (.9, .9),
    'w': (1.1, 1),
    'x': (.9, 1),
    'e': (1, 1.1),
    'c': (1, .9),
}
def getKey():
    """Read one keypress from stdin in raw mode, waiting at most 0.1 s.

    Returns the character read, or '' on timeout.  Terminal attributes are
    restored from the module-level `settings` before returning.
    """
    tty.setraw(sys.stdin.fileno())
    ready, _, _ = select.select([sys.stdin], [], [], 0.1)
    key = sys.stdin.read(1) if ready else ''
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
    return key
# Initial teleoperation parameters: linear speed and angular rate scale.
speed = .5
turn = 1

def vels(speed, turn):
    """Return a one-line summary of the current speed/turn settings."""
    summary = "currently:\tspeed %s\tturn %s " % (speed, turn)
    return summary
if __name__ == "__main__":
    settings = termios.tcgetattr(sys.stdin)
    rospy.init_node('turtlebot_teleop')
    pub = rospy.Publisher('cmd_vel', Twist)

    # Commanded direction (x: linear, th: angular) and smoothing state.
    x = 0
    th = 0
    status = 0          # counts speed changes; help text reprinted every 15
    count = 0           # idle-read counter used as a key-release timeout
    acc = 0.1
    target_speed = 0
    target_turn = 0
    control_speed = 0   # ramped (smoothed) values actually published
    control_turn = 0
    try:
        print(msg)
        print(vels(speed, turn))
        while(1):
            key = getKey()
            if key in moveBindings.keys():
                x = moveBindings[key][0]
                th = moveBindings[key][1]
                count = 0
            elif key in speedBindings.keys():
                speed = speed * speedBindings[key][0]
                turn = turn * speedBindings[key][1]
                count = 0
                print(vels(speed, turn))
                if (status == 14):
                    print(msg)
                status = (status + 1) % 15
            elif key == ' ' or key == 'k':
                # Hard stop: zero the targets and the ramped outputs.
                x = 0
                th = 0
                control_speed = 0
                control_turn = 0
            else:
                # Unrecognised key (or timeout): after a few idle reads,
                # drop the command so the robot coasts to a stop.
                count = count + 1
                if count > 4:
                    x = 0
                    th = 0
                if (key == '\x03'):   # Ctrl-C
                    break
            target_speed = speed * x
            target_turn = turn * th
            # Ramp the controls towards the targets for smooth motion.
            if target_speed > control_speed:
                control_speed = min(target_speed, control_speed + 0.02)
            elif target_speed < control_speed:
                control_speed = max(target_speed, control_speed - 0.02)
            else:
                control_speed = target_speed
            if target_turn > control_turn:
                control_turn = min(target_turn, control_turn + 0.1)
            elif target_turn < control_turn:
                control_turn = max(target_turn, control_turn - 0.1)
            else:
                control_turn = target_turn
            twist = Twist()
            twist.linear.x = control_speed
            twist.linear.y = 0
            twist.linear.z = 0
            twist.angular.x = 0
            twist.angular.y = 0
            twist.angular.z = control_turn
            pub.publish(twist)
    except Exception as e:
        # Bugfix: the original used a bare `except:` and then printed an
        # undefined name `e`, raising a NameError that hid the real error.
        print(e)
    finally:
        # Always command a full stop and restore the terminal on exit.
        twist = Twist()
        twist.linear.x = 0
        twist.linear.y = 0
        twist.linear.z = 0
        twist.angular.x = 0
        twist.angular.y = 0
        twist.angular.z = 0
        pub.publish(twist)
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
|
tut-yury/tut_ibx0020
|
scripts/teleop_key.py
|
Python
|
gpl-3.0
| 5,586
|
# 1st program - a try at five basic applications running in the Python shell :)
# (User-facing strings are deliberately kept in Turkish.)
# Main menu text shown on every loop iteration.
metin = """
(1) Toplama yap
(2) Faktoriyel bul
(3) Öğrenci listesi işlemleri
(4) İngilizce sözlük işlemleri
(5) Asker oluştur :)
-------------------------
Lütfen Seçiminizi yapınız
Çıkmak için (q) ya basınız
"""
# "Press any key to continue" prompts.
d ="Devam etmek için bir tuşa basınız"
go_on_champion = "Devam etmek için bir tuşa basınız!"
# Seed data for the student-list and dictionary submenus.
ogrenci_liste = ["Ali Taş", "Mehmet İnce", "Ayşe Uzun"]
sozluk = {"book": "kitap", "window": "pencere", "cat": "kedi"}
class Asker(object):
    """A soldier with a name and a power rating.

    Creating an instance prints a confirmation and immediately computes and
    shows its attack power (power * 10), then pauses for a keypress.
    """
    def __init__(self, name, power):
        self.name = name
        self.power = power
        print("{} isimli asker başarıyla oluşturuldu.".format(self.name))
        self.attack()

    def attack(self):
        # Attack power is simply ten times the base power.
        self.attack_power = self.power * 10
        print("Askerin normal gücü: ", self.power)
        print("Askerin saldırı gücü: ", self.attack_power)
        input(go_on_champion)   # pause until the user presses Enter
def topla(a, b):
    """Add the two operands and return the total."""
    total = a + b
    return total
def factorial(a):
    """Compute a! recursively; any value below 2 yields 1."""
    if a < 2:
        return 1
    return a * factorial(a - 1)
while True:  # Main menu loop: runs until the user chooses Q/q.
    print(metin)
    choice = input("Yapmak istediğiniz işlemi seçiniz: ")
    if choice == "1":
        # Addition of two integers.
        number1 = int(input("1. sayıyı giriniz: "))
        number2 = int(input("2. sayıyı giriniz: "))
        print("Sonuç: ", topla(number1, number2))
        input(go_on_champion)
    elif choice == "2":
        # Factorial of an integer.
        factorial_number = int(input("Faktoriyelini bulmak istediğiniz sayıyı giriniz: "))
        print("Sonuç: ", factorial(factorial_number))
        input(go_on_champion)
    elif choice == "3":  # Student list submenu (would be nicer object-oriented).
        liste_metin = """
(1) Öğrenci listesini görüntüle
(2) Yeni öğrenci ekle
(3) Öğrenci ismi düzenle
(4) Listeden öğrenci sil
(5) Çıkış.
"""
        while True:
            print(liste_metin)
            liste_secim = input("Yapmak istediğiniz işlemi seçiniz: ")
            if liste_secim == "1":
                print(ogrenci_liste)
            elif liste_secim == "2":
                ogrenci_isim = input("Eklemek istediğiniz öğrenciyi giriniz: ")
                ogrenci_liste.append(ogrenci_isim)
                print("{} isimli öğrenci başarıyla eklendi.".format(ogrenci_isim))
                input(go_on_champion)
            elif liste_secim == "3":
                # Rename a student selected by list index.
                ogrenci_sirasi = int(input("Düzenlemek istediğiniz öğrenci sıra numarası giriniz: "))
                print(ogrenci_liste[ogrenci_sirasi], "isimli öğrenciyi düzenliyorsunuz")
                ogrenci_duzenleme = input("Yeni ismi giriniz: ")
                ogrenci_liste[ogrenci_sirasi] = ogrenci_duzenleme
            elif liste_secim == "4":
                # Delete a student by index, with E/H (yes/no) confirmation.
                ogrenci_sirasi = int(input("Silmek istediğiniz öğrenci sıra numarası giriniz: "))
                print(ogrenci_liste[ogrenci_sirasi], "isimli öğrenciyi sileceksiniz")
                ogrenci_sil = input("Silmek için 'E' iptal için 'H' ")
                if ogrenci_sil == "E":
                    print(ogrenci_liste[ogrenci_sirasi],"isimli öğrenci silindi")
                    del ogrenci_liste[ogrenci_sirasi]
                elif ogrenci_sil == "H":
                    print("İşlem iptal edildi")
                else:
                    print("Yanlış giriş yaptınız silme işlemi iptal edildi!")
                input(go_on_champion)
            elif liste_secim == "5":
                print("Liste menüsünden çıkıldı.")
                break
            else:
                print("Yanlış giriş.")
    elif choice == "4":
        # English-Turkish dictionary submenu.
        sozluk_metin = """
(1) Sözlüğü görüntüle
(2) Yeni kelime ekle
(3) Kelime ara
(4) Çıkış.
"""
        while True:
            print(sozluk_metin)
            sozluk_secim = input("Yapmak istediğiniz işlemi seçiniz: ")
            if sozluk_secim == "1":
                print(sozluk)
            elif sozluk_secim == "2":
                sozluk_key = input("Eklemek istediğiniz kelimeyi giriniz: ")
                sozluk_value = input("Kelimenin anlamını giriniz: ")
                sozluk[sozluk_key] = sozluk_value
                print("{} kelimesi başarıyla eklendi.".format(sozluk[sozluk_key]))
                input(go_on_champion)
            elif sozluk_secim == "3":
                kelime = input("Aradığınız kelimeyi giriniz.")
                if kelime in sozluk:
                    print(kelime + ":" + sozluk[kelime])
                else:
                    print("Aradığınız kelime malesef büyük sözlüğümüzde yok!")
                input(go_on_champion)
            elif sozluk_secim == "4":
                print("Sözlükten çıkıldı.")
                break
            else:
                print("Yanlış giriş.")
    elif choice == "5":
        # Create a soldier; the Asker constructor prints its stats itself.
        name = input("Askerinizin ismini giriniz: ")
        power = int(input("Askerinizin gücünü giriniz 0-100: "))
        soldier = Asker(name, power)
    elif choice == "Q" or choice == "q":
        print("Çıkılıyor....")
        break
    else:
        print("Yanlış giriş.")
        print("Aşağıdaki seçeneklerden birini giriniz:")
|
ozgurturkiye/istihzapython
|
000.JustForFunPrograms/mixed_will_be_fixed_uygulama.py
|
Python
|
gpl-3.0
| 5,497
|
import socket
from pyrf.vrt import Stream
from pyrf.config import SweepEntry, TriggerSettings, TriggerSettingsError
class WSA4000(object):
"""
Interface for WSA4000
:meth:`.connect` must be called before other methods are used.
"""
ADC_DYNAMIC_RANGE = 72.5
def __init__(self):
pass
def connect(self, host):
"""
connect to a wsa
:param host: the hostname or IP to connect to
"""
self._sock_scpi = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock_scpi.connect((host, 37001))
self._sock_scpi.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
self._sock_vrt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock_vrt.connect((host, 37000))
self._vrt = Stream(self._sock_vrt)
def disconnect(self):
"""
close a connection to a wsa
"""
self._sock_scpi.shutdown(socket.SHUT_RDWR)
self._sock_scpi.close()
self._sock_vrt.shutdown(socket.SHUT_RDWR)
self._sock_vrt.close()
def scpiset(self, cmd):
"""
Send a SCPI command.
This is the lowest-level interface provided.
Please see the Programmer's Guide for information about
the commands available.
:param cmd: the command to send
:type cmd: str
"""
self._sock_scpi.send("%s\n" % cmd)
def scpiget(self, cmd):
"""
Send a SCPI command and wait for the response.
This is the lowest-level interface provided.
Please see the Programmer's Guide for information about
the commands available.
:param cmd: the command to send
:type cmd: str
:returns: the response back from the box if any
"""
self._sock_scpi.send("%s\n" % cmd)
buf = self._sock_scpi.recv(1024)
return buf
def id(self):
"""
Returns the WSA4000's identification information string.
:returns: "<Manufacturer>,<Model>,<Serial number>,<Firmware version>"
"""
return self.scpiget(":*idn?")
def freq(self, freq=None):
"""
This command sets or queries the tuned center frequency of the WSA.
:param freq: the new center frequency in Hz (0 - 10 GHz); None to query
:type freq: int
:returns: the frequency in Hz
"""
if freq is None:
buf = self.scpiget("FREQ:CENTER?")
freq = int(buf)
else:
self.scpiset(":FREQ:CENTER %d\n" % freq)
return freq
def fshift(self, shift=None):
"""
This command sets or queries the frequency shift value.
:param freq: the new frequency shift in Hz (0 - 125 MHz); None to query
:type freq: int
:returns: the amount of frequency shift
"""
if shift is None:
buf = self.scpiget("FREQ:SHIFT?")
shift = float(buf)
else:
self.scpiset(":FREQ:SHIFT %d\n" % shift)
return shift
def decimation(self, value=None):
"""
This command sets or queries the rate of decimation of samples in
a trace capture. This decimation method consists of cascaded
integrator-comb (CIC) filters and at every
*value* number of samples, one sample is captured. The supported
rate is 4 - 1023. When the rate is set to 1, no decimation is
performed on the trace capture.
:param value: new decimation value (1 or 4 - 1023); None to query
:type value: int
:returns: the decimation value
"""
if value is None:
buf = self.scpiget("SENSE:DECIMATION?")
value = int(buf)
else:
self.scpiset(":SENSE:DECIMATION %d\n" % value)
if value == 1:
# verify decimation was disabled correctly
if int(self.scpiget("SENSE:DECIMATION?")) != 1:
# firmware < 2.5.3
self.scpiset(":SENSE:DECIMATION %d\n" % 0)
# firmware < 2.5.3 returned 0 instead of 1
if value == 0:
value = 1
return value
def gain(self, gain=None):
"""
This command sets or queries RFE quantized gain configuration.
The RF front end (RFE) of the WSA4000 consists of multiple quantized
gain stages. The gain corresponding to each user-selectable setting
has been pre-calculated for either optimal sensitivity or linearity.
The parameter defines the total quantized gain of the RFE.
:param gain: 'high', 'medium', 'low' or 'vlow' to set; None to query
:returns: the RF gain value
"""
if gain is None:
gain = self.scpiget("INPUT:GAIN:RF?")
else:
self.scpiset(":INPUT:GAIN:RF %s\n" % gain)
return gain.lower()
def ifgain(self, gain=None):
"""
This command sets or queries variable IF gain stages of the RFE.
The gain has a range of -10 to 34 dB. This stage of the gain is
additive with the primary gain stages of the LNA
that are described in :meth:`gain`.
:param gain: float between -10 and 34 to set; None to query
:returns: the ifgain in dB
"""
if gain is None:
gain = self.scpiget(":INPUT:GAIN:IF?")
gain = gain.partition(" ")
gain = int(gain[0])
else:
self.scpiset(":INPUT:GAIN:IF %d\n" % gain)
return gain
def preselect_filter(self, enable=None):
"""
This command sets or queries the RFE preselect filter selection.
:param enable: True or False to set; None to query
:returns: the RFE preselect filter selection state
"""
if enable is None:
enable = self.scpiget(":INPUT:FILTER:PRESELECT?")
enable = bool(int(enable))
else:
self.scpiset(":INPUT:FILTER:PRESELECT %d" % int(enable))
return enable
def antenna(self, number=None):
"""
This command selects and queries the active antenna port.
:param number: 1 or 2 to set; None to query
:returns: active antenna port
"""
if number is None:
number = self.scpiget(":INPUT:ANTENNA?")
number = int(number)
else:
self.scpiset(":INPUT:ANTENNA %d" % number)
return number
def reset(self):
"""
Resets the WSA4000 to its default settings. It does not affect
the registers or queues associated with the IEEE mandated commands.
"""
self.scpiset(":*rst")
def flush(self):
"""
Flush capture memory of sweep captures.
"""
self.scpiset(":sweep:flush\n")
def trigger(self, settings=None):
"""
This command sets or queries the type of trigger event.
Setting the trigger type to "NONE" is equivalent to disabling
the trigger execution; setting to any other type will
enable the trigger engine.
:param settings: the new trigger settings; None to query
:type settings: pyrf.config.TriggerSettings
:returns: the trigger settings
"""
if settings is None:
# find out what kind of trigger is set
trigstr = self.scpiget(":TRIGGER:TYPE?")
if trigstr == "NONE":
settings = TriggerSettings("NONE")
elif trigstr == "LEVEL":
# build our return object
settings = TriggerSettings("LEVEL")
# read the settings from the box
trigstr = self.scpiget(":TRIGGER:LEVEL?")
settings.fstart, settings.fstop, settings.amplitude = trigstr.split(",")
# convert to integers
settings.fstart = int(settings.fstart)
settings.fstop = int(settings.fstop)
settings.amplitude = float(settings.amplitude)
else:
raise TriggerSettingsError("unsupported trigger type set: %s" % trigstr)
else:
if settings.trigtype == "NONE":
self.scpiset(":TRIGGER:TYPE NONE")
elif settings.trigtype == "LEVEL":
self.scpiset(":TRIGGER:LEVEL %d, %d, %d" % (settings.fstart, settings.fstop, settings.amplitude))
self.scpiset(":TRIGGER:TYPE LEVEL")
return settings
def capture(self, spp, ppb):
"""
This command will start the single block capture and the return of
*ppb* packets of *spp* samples each. The data
within a single block capture trace is continuous from one packet
to the other, but not necessary between successive block capture
commands issued.
:param spp: the number of samples in a packet
:param ppb: the number of packets in a capture
"""
self.scpiset(":TRACE:SPP %s\n" % (spp))
self.scpiset(":TRACE:BLOCK:PACKETS %s\n" % (ppb))
self.scpiset(":TRACE:BLOCK:DATA?\n")
def request_read_perm(self):
"""
Aquire exclusive permission to read data from the WSA.
:returns: True if allowed to read, False if not
"""
lockstr = self.scpiget(":SYSTEM:LOCK:REQUEST? ACQ\n")
return lockstr == "1"
def have_read_perm(self):
"""
Check if we have permission to read data.
:returns: True if allowed to read, False if not
"""
lockstr = self.scpiget(":SYSTEM:LOCK:HAVE? ACQ\n")
return lockstr == "1"
def eof(self):
"""
Check if the VRT stream has closed.
:returns: True if no more data, False if more data
"""
return self._vrt.eof
def has_data(self):
"""
Check if there is VRT data to read.
:returns: True if there is a packet to read, False if not
"""
return self._vrt.has_data()
def locked(self, modulestr):
"""
This command queries the lock status of the RF VCO (Voltage Control
Oscillator) in the Radio Front End (RFE) or the lock status of the
PLL reference clock in the digital card.
:param modulestr: 'vco' for rf lock status, 'clkref' for mobo lock status
:returns: True if locked
"""
if modulestr.upper() == 'VCO':
buf = self.scpiget("SENSE:LOCK:RF?")
return bool(int(buf))
elif modulestr.upper() == 'CLKREF':
buf = self.scpiget("SENSE:LOCK:REFERENCE?")
return bool(int(buf))
else:
return -1
def read(self):
"""
Read a single VRT packet from the WSA.
See :meth:`pyrf.vrt.Stream.read_packet`.
"""
return self._vrt.read_packet()
def raw_read(self, num):
"""
Raw read of VRT socket data from the WSA.
:param num: the number of bytes to read
:returns: bytes
"""
return self._sock_vrt.recv(num)
def sweep_add(self, entry):
"""
Add an entry to the sweep list
:param entry: the sweep entry to add
:type entry: pyrf.config.SweepEntry
"""
self.scpiset(":sweep:entry:new")
self.scpiset(":sweep:entry:freq:center %d, %d" % (entry.fstart, entry.fstop))
self.scpiset(":sweep:entry:freq:step %d" % (entry.fstep))
self.scpiset(":sweep:entry:freq:shift %d" % (entry.fshift))
self.scpiset(":sweep:entry:decimation %d" % (entry.decimation))
self.scpiset(":sweep:entry:antenna %d" % (entry.antenna))
self.scpiset(":sweep:entry:gain:rf %s" % (entry.gain))
self.scpiset(":sweep:entry:gain:if %d" % (entry.ifgain))
self.scpiset(":sweep:entry:spp %d" % (entry.spp))
self.scpiset(":sweep:entry:ppb %d" % (entry.ppb))
self.scpiset(":sweep:entry:trigger:type %s" % (entry.trigtype))
self.scpiset(":sweep:entry:trigger:level %d, %d, %d" % (entry.level_fstart, entry.level_fstop, entry.level_amplitude))
self.scpiset(":sweep:entry:save")
def sweep_read(self, index):
"""
Read an entry from the sweep list.
:param index: the index of the entry to read
:returns: sweep entry
:rtype: pyrf.config.SweepEntry
"""
ent = SweepEntry()
entrystr = self.scpiget(":sweep:entry:read? %d" % index)
(value, sep, entrystr) = entrystr.partition(',')
ent.fstart = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.fstop = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.fstep = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.fshift = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.decimation = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.antenna = int(value)
(ent.gain, sep, entrystr) = entrystr.partition(',')
(value, sep, entrystr) = entrystr.partition(',')
ent.ifgain = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.spp = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.ppb = int(value)
(value, sep, entrystr) = entrystr.partition(',')
ent.dwell_s = int(value)
(value, sep, trigstr) = entrystr.partition(',')
ent.dwell_us = int(value)
if trigstr == "NONE":
ent.trigtype = "NONE"
else:
(ent.trigtype, trigstr) = trigstr.split(',')
if ent.trigtype == "LEVEL":
(value, sep, trigstr) = trigstr.partition(',')
ent.level_fstart = int(value)
(value, sep, trigstr) = trigstr.partition(',')
ent.level_fstop = int(value)
(value, sep, trigstr) = trigstr.partition(',')
ent.level_amplitude = int(value)
return ent
def sweep_clear(self):
"""
Remove all entries from the sweep list.
"""
self.scpiset(":sweep:entry:del all")
def sweep_start(self, start_id = None):
"""
Start the sweep engine.
"""
if start_id:
self.scpiset(":sweep:list:start %d" % start_id);
else:
self.scpiset(":sweep:list:start");
    def sweep_stop(self):
        """
        Stop the sweep engine.
        """
        # Counterpart of sweep_start(); leaves the sweep list intact.
        self.scpiset(":sweep:list:stop")
    def flush_captures(self):
        """
        Flush capture memory of sweep captures.
        """
        # Discards buffered sweep data on the device side.
        self.scpiset(":sweep:flush")
|
wardi/python-thinkrf
|
pyrf/devices/thinkrf.py
|
Python
|
bsd-3-clause
| 14,687
|
#!/usr/bin/python
# Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Tests for database module."""
import logging.config
import os.path
import shutil
import tempfile
import unittest
import database
import idlnode
import idlparser
class DatabaseTestCase(unittest.TestCase):
    """Unit tests for the IDL interface database (database.Database)."""

    def _ParseInterface(self, content):
        """Parse `content` and return the first IDL interface it declares."""
        ast = self._idl_parser.parse(content)
        return idlnode.IDLFile(ast).interfaces[0]

    def _ListInterfaces(self, db):
        """Return the ids of all interfaces stored in `db`."""
        res = []
        for interface in db.GetInterfaces():
            name = interface.id
            res.append(name)
        return res

    def setUp(self):
        self._idl_parser = idlparser.IDLParser(idlparser.FREMONTCUT_SYNTAX)
        # BUGFIX: keep the temp root so tearDown can remove it completely;
        # removing only the 'database' subdirectory leaked the temp dir.
        self._working_dir = tempfile.mkdtemp()
        self._database_dir = os.path.join(self._working_dir, 'database')
        self.assertFalse(os.path.exists(self._database_dir))
        # Create database and add one interface.
        db = database.Database(self._database_dir)
        interface = self._ParseInterface('interface I1 {};')
        db.AddInterface(interface)
        db.Save()
        self.assertTrue(
            os.path.exists(os.path.join(self._database_dir, 'I1.idl')))

    def tearDown(self):
        shutil.rmtree(self._working_dir)

    def testCreate(self):
        self.assertTrue(os.path.exists(self._database_dir))

    def testListInterfaces(self):
        db = database.Database(self._database_dir)
        db.Load()
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(self._ListInterfaces(db), ['I1'])

    def testHasInterface(self):
        db = database.Database(self._database_dir)
        db.Load()
        self.assertTrue(db.HasInterface('I1'))
        self.assertFalse(db.HasInterface('I2'))

    def testAddInterface(self):
        db = database.Database(self._database_dir)
        db.Load()
        interface = self._ParseInterface('interface I2 {};')
        db.AddInterface(interface)
        db.Save()
        self.assertTrue(
            os.path.exists(os.path.join(self._database_dir, 'I2.idl')))
        self.assertEqual(self._ListInterfaces(db), ['I1', 'I2'])

    def testDeleteInterface(self):
        db = database.Database(self._database_dir)
        db.Load()
        db.DeleteInterface('I1')
        db.Save()
        self.assertFalse(
            os.path.exists(os.path.join(self._database_dir, 'I1.idl')))
        self.assertEqual(self._ListInterfaces(db), [])

    def testGetInterface(self):
        db = database.Database(self._database_dir)
        db.Load()
        interface = db.GetInterface('I1')
        self.assertEqual(interface.id, 'I1')
if __name__ == '__main__':
    # BUGFIX: the file had two consecutive `if __name__ == '__main__':`
    # guards; merged into one so logging is configured right before the
    # tests run.
    logging.config.fileConfig('logging.conf')
    unittest.main()
|
dartino/dart-sdk
|
tools/dom/scripts/database_test.py
|
Python
|
bsd-3-clause
| 2,860
|
# Zadání:
#########
#
# Napište funkci multiVecMat(v,m), která vypočte součin vektoru v a matice m.
#
# Pokud nesouhlasí rozměry matice a vektoru, pak funkce vrací None.
#
# Otestujte Váš program na těchto datech:
#
# m=[[0,0,1],[0,1,0],[1,0,0]]
# v=[2, 4, 6]
#
###############################################################################
def multiVecMat( vector, matrix ):
    """
    Multiply the matrix by the vector from the right (matrix @ vector).

    Parameters
    ----------
    vector: list
        The vector.
    matrix: list
        Matrix whose row length must match the vector's dimension.

    Returns
    -------
    list or None
        A list with one entry per matrix row, or None when the dimensions
        of the matrix and the vector do not match.
    """
    # BUGFIX: the result has one entry per matrix *row*, so it must be sized
    # len(matrix), not len(matrix[0]); the old code produced trailing zeros
    # (or IndexError) for non-square matrices.
    result = [0] * len( matrix )
    # Walk the matrix row by row
    for r, row in enumerate( matrix ):
        # Dimension mismatch -> no product is defined
        if len(row) != len(vector):
            return None
        # Dot product of this row with the vector
        for i, elem in enumerate( row ):
            result[r] += elem * vector[i]
    return result
# Demo run on the data from the assignment (note: also runs on import).
print( multiVecMat( [2, 4, 6], [[0,0,1],[0,1,0],[1,0,0]] ) )
|
malja/cvut-python
|
cviceni05/04_nasobeni_vektoru_matice.py
|
Python
|
mit
| 1,362
|
from ucca import convert
import pickle
import os
import sys
def main():
    """Attach POS tags from pre-tagged text files to UCCA passage terminals.

    Usage: script <pickled-passages-path> <tagged-text-dir>

    Loads a pickled list of passages, and for each passage reads the
    matching '<ID>.tagged' file (whitespace-separated 'word_TAG' tokens),
    aligns the tokens with the passage's layer-0 terminals, and stores the
    tags in each terminal's extra['postag'].  The updated passages are
    pickled to '<dbpath>.tags'.
    """
    dbpath = sys.argv[1]
    textdir = sys.argv[2]
    # NOTE(review): pickle.load executes code from the file -- only use
    # with trusted inputs.
    with open(dbpath, 'rb') as f:
        passages = pickle.load(f)
    for p in passages:
        with open(os.path.join(textdir, p.ID + '.tagged')) as f:
            tokens = []
            for line in f:
                tokens.extend(line.split())
            for terminal in p.layer('0').all:
                # A terminal may span several tokens; consume that many
                # from the front of the token stream.
                num_tokens = len(terminal.text.split())
                curr, tokens = tokens[:num_tokens], tokens[num_tokens:]
                # Tokens look like 'word_TAG'; keep only the tag part.
                tags = [x.split('_')[1] for x in curr]
                terminal.extra['postag'] = " ".join(tags)
    with open(dbpath + '.tags', 'wb') as f:
        pickle.dump(passages, f)


if __name__ == '__main__':
    main()
|
ffancellu/ucca
|
scripts/postag_passages.py
|
Python
|
gpl-3.0
| 780
|
"""Refinement steps for refining the 'crude' fits """
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import numpy as np
from numpy.testing import assert_allclose
from scipy.ndimage.interpolation import map_coordinates
import scipy.ndimage
try:
from skimage.filters import threshold_otsu
except ImportError:
from skimage.filter import threshold_otsu # skimage <= 0.10
from .algebraic import (ellipse_grid, ellipsoid_grid, fit_ellipse,
fit_ellipsoid, max_linregress, max_edge)
from .masks import slice_image, get_mask
def unwrap_ellipse(image, params, rad_range, num_points=None, spline_order=3):
    """ Unwraps a circular or ellipse-shaped feature into elliptic coordinates.

    Transforms an image in (y, x) space to (theta, r) space, using elliptic
    coordinates. The theta coordinate is tangential to the ellipse, the r
    coordinate is normal to the ellipse. r=0 at the ellipse: inside the ellipse,
    r < 0.

    Parameters
    ----------
    image : ndarray, 2d
    params : (yr, xr, yc, xc)
    rad_range : tuple
        A tuple defining the range of r to interpolate.
    num_points : number, optional
        The number of ``theta`` values. By default, this equals the
        ellipse circumference: approx. every pixel there is an interpolation.
    spline_order : number, optional
        The order of the spline interpolation. Default 3.

    Returns
    -------
    intensity : the interpolated image in (theta, r) space
    pos : the (y, x) positions of the ellipse grid
    normal : the (y, x) unit vectors normal to the ellipse grid
    """
    yr, xr, yc, xc = params
    # compute the r coordinates
    steps = np.arange(rad_range[0], rad_range[1] + 1, 1)
    # compute the (y, x) positions and unit normals of the ellipse
    pos, normal = ellipse_grid((yr, xr), (yc, xc), n=num_points, spacing=1)
    # calculate all the (y, x) coordinates on which the image is interpolated.
    # this is a 3D array of shape [n_theta, n_r, 2], with 2 being y and x.
    coords = normal[:, :, np.newaxis] * steps[np.newaxis, np.newaxis, :] + \
             pos[:, :, np.newaxis]
    # interpolate the image on computed coordinates
    # BUGFIX: `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
    # it was an alias for the builtin float, which is used instead.
    intensity = map_coordinates(image, coords, order=spline_order,
                                output=float)
    return intensity, pos, normal
def to_cartesian(r_dev, pos, normal):
    """ Transform radial deviations from an ellipsoidal grid to Cartesian

    Parameters
    ----------
    r_dev : ndarray, shape (N, )
        Array containing the N radial deviations from the ellipse. r < 0 means
        inside the ellipse.
    pos : ndarray, shape (2, N)
        The N (y, x) positions of the ellipse (as given by ``ellipse_grid``)
    normal : ndarray, shape (2, N)
        The N (y, x) unit normals of the ellipse (as given by ``ellipse_grid``)
    """
    # Shift each grid point along its unit normal by the radial deviation.
    shifted = pos + normal * r_dev
    # Keep only columns where both the y and x coordinate are finite
    # (NaN deviations drop the corresponding point).
    finite_columns = np.isfinite(shifted).all(axis=0)
    return shifted[:, finite_columns]
def refine_ellipse(image, params, mode='ellipse_aligned', n=None,
                   rad_range=None, maxfit_size=2, spline_order=3,
                   threshold=0.1):
    """ Interpolates the image along lines perpendicular to the ellipse.
    The maximum along each line is found using linear regression of the
    descrete derivative.

    Parameters
    ----------
    image : 2d numpy array of numbers
        Image indices are interpreted as (y, x)
    params : yr, xr, yc, xc
    mode : {'ellipse', 'ellipse_aligned', 'circle'}
    n: integer
        number of points on the ellipse that are used for refine
    rad_range: tuple of floats
        length of the line (distance inwards, distance outwards)
    maxfit_size: integer
        pixels around maximum pixel that will be used in linear regression
    spline_order: integer
        interpolation order for edge crossections
    threshold: float
        a threshold is calculated based on the global maximum
        fitregions are rejected if their average value is lower than this

    Returns
    -------
    yr, xr, yc, xc
    """
    if not np.all([x > 0 for x in params]):
        raise ValueError("All yc, xc, yr, xr params should be positive")
    assert image.ndim == 2
    yr, xr, yc, xc = params
    if rad_range is None:
        # default: probe half the smaller radius inwards and outwards
        rad_range = (-min(yr, xr) / 2, min(yr, xr) / 2)
    # interpolate the image on calculated coordinates
    intensity, pos, normal = unwrap_ellipse(image, params, rad_range, n)
    # identify the regions around the max value
    r_dev = max_linregress(intensity, maxfit_size, threshold) + rad_range[0]
    # transform the radial deviations back to (y, x) edge coordinates
    coord_new = to_cartesian(r_dev, pos, normal)
    # fit ellipse
    radius, center, _ = fit_ellipse(coord_new, mode=mode)
    return tuple(radius) + tuple(center), coord_new.T
def refine_ellipsoid_fast(image3d, p, n_xy=None, n_xz=None, rad_range=None,
                          maxfit_size=2, spline_order=3, threshold=0.1,
                          radius_rtol=0.5, radius_atol=30., center_atol=30.):
    """ Refines coordinates of a 3D ellipsoid, starting from given parameters.

    For fast analysis, it only analyzes YX and ZX middle slices.

    Parameters
    ----------
    image3d : 3D numpy array
    p: tuple of floats
        (zr, yr, xr, zc, yr, xr) coordinates of ellipsoid center
    n_xy: integer
        number of points on the ellipse that are used for refine in xy plane
    n_xz: integer
        number of points on the ellipse that are used for refine in xz plane
    rad_range: tuple of floats
        length of the line (distance inwards, distance outwards)
    maxfit_size: integer
        pixels around maximum pixel that will be used in linear regression
    spline_order: integer
        interpolation order for edge crossections
    threshold: float
        a threshold is calculated based on the global maximum
        fitregions are rejected if their average value is lower than this
    radius_rtol : float, optional
        the maximum relative tolerance for the difference between initial
        and refined radii, Default 0.5
    radius_atol : float, optional
        the maximum absolute tolerance for the difference between initial
        and refined radii, Default 30.
    center_atol : float, optional
        the maximum absolute tolerance for the difference between initial
        and refined radii, Default 30.

    Returns
    -------
    (zr, yr, xr, zc, yc, xc), contour
    """
    assert image3d.ndim == 3
    zr0, yr0, xr0, zc0, yc0, xc0 = p
    # refine X, Y radius and center on XY middle
    # (linear interpolation between the two z-slices around the center)
    middle_slice = image3d[int(zc0)] * (1 - zc0 % 1) + \
                   image3d[int(zc0) + 1] * (zc0 % 1)
    (yr, xr, yc, xc), r = refine_ellipse(middle_slice, (yr0, xr0, yc0, xc0),
                                         'ellipse_aligned', n_xy, rad_range,
                                         maxfit_size, spline_order, threshold)
    # refine Z radius and center on ZX middle (not ZY, is blurred by resonant)
    middle_slice = image3d[:, int(yc0)] * (1 - yc0 % 1) + \
                   image3d[:, int(yc0) + 1] * (yc0 % 1)
    (zr, _, zc, _), _ = refine_ellipse(middle_slice, (zr0, xr0, zc0, xc0),
                                       'ellipse_aligned', n_xz, rad_range,
                                       maxfit_size, spline_order, threshold)
    # Sanity checks: reject refinements that drifted too far from the
    # initial guess (raises AssertionError via assert_allclose).
    assert_allclose([xr, yr, zr],
                    [xr0, yr0, zr0], radius_rtol, radius_atol,
                    err_msg='Refined value differs extremely from initial value.')
    assert_allclose([xc, yc, zc],
                    [xc0, yc0, zc0], rtol=0, atol=center_atol,
                    err_msg='Refined value differs extremely from initial value.')
    return (zr, yr, xr, zc, yc, xc), r
def refine_ellipsoid(image3d, params, spacing=1, rad_range=None, maxfit_size=2,
                     spline_order=3, threshold=0.1):
    """ Refines coordinates of a 3D ellipsoid, starting from given parameters.

    Interpolates the image along lines perpendicular to the ellipsoid.
    The maximum along each line is found using linear regression of the
    descrete derivative.

    Parameters
    ----------
    image3d : 3d numpy array of numbers
        Image indices are interpreted as (z, y, x)
    params : tuple
        zr, yr, xr, zc, yc, xc
    spacing: number
        spacing along radial direction
    rad_range: tuple of floats
        length of the line (distance inwards, distance outwards)
    maxfit_size: integer
        pixels around maximum pixel that will be used in linear regression
    spline_order: integer
        interpolation order for edge crossections
    threshold: float
        a threshold is calculated based on the global maximum
        fitregions are rejected if their average value is lower than this

    Returns
    -------
    - zr, yr, xr, zc, yc, xc, skew_y, skew_x
    - contour coordinates at z = 0
    """
    if not np.all([x > 0 for x in params]):
        raise ValueError("All zc, yc, xc, zr, yr, xr params should be positive")
    assert image3d.ndim == 3
    zr, yr, xr, zc, yc, xc = params
    if rad_range is None:
        # default: probe half the smallest radius inwards and outwards
        rad_range = (-min(zr, yr, xr) / 2, min(zr, yr, xr) / 2)
    steps = np.arange(rad_range[0], rad_range[1] + 1, 1)
    pos, normal = ellipsoid_grid((zr, yr, xr), (zc, yc, xc), spacing=spacing)
    # all (z, y, x) coordinates sampled along each surface normal
    coords = normal[:, :, np.newaxis] * steps[np.newaxis, np.newaxis, :] + \
             pos[:, :, np.newaxis]
    # interpolate the image on calculated coordinates
    intensity = map_coordinates(image3d, coords, order=spline_order)
    # identify the regions around the max value
    r_dev = max_linregress(intensity, maxfit_size, threshold)
    # calculate new coords
    coord_new = pos + (r_dev + rad_range[0])*normal
    # drop points whose edge could not be located (NaN deviations)
    coord_new = coord_new[:, np.isfinite(coord_new).all(0)]
    # fit ellipsoid
    radius, center, skew = fit_ellipsoid(coord_new, mode='xy',
                                         return_mode='skew')
    return tuple(radius) + tuple(center) + tuple(skew), coord_new.T
def refine_disks(image, blobs, rad_range=None, threshold=0.5, max_dev=1,
                 min_points=10, min_contrast=0):
    """ Refine the position and size of multiple bright disks in an image.

    Parameters
    ----------
    image : 2d ndarray
    blobs : DataFrame with 'r', 'y', 'x' columns of initial guesses
        (an 'accum' column, if present, is dropped from the result)
    rad_range, threshold, max_dev, min_points, min_contrast :
        passed through to the per-disk refinement (see ``_refine_disks``)

    Returns
    -------
    DataFrame like `blobs` with refined 'r', 'y', 'x' values and added
    'mass' and 'signal' columns; rows that failed to refine get NaN.
    """
    result = blobs.copy()
    if 'accum' in result:
        del result['accum']
    result['mass'] = np.nan
    result['signal'] = np.nan
    for i in result.index:
        fit, _ = _refine_disks(image, blobs.loc[i], rad_range, threshold,
                               max_dev, min_points, min_contrast)
        if fit is None:
            # BUGFIX: this assignment was duplicated on two consecutive
            # lines; assigning once is sufficient.
            result.loc[i, ['r', 'y', 'x']] = np.nan
            continue
        r, _, yc, xc = fit
        result.loc[i, ['r', 'y', 'x']] = r, yc, xc
        # integrate the intensity inside the refined disk
        coords = np.array([(yc, xc)])
        square, origin = slice_image(coords, image, r+1)
        if origin is None:  # outside of image
            continue
        mask = get_mask(coords - origin, square.shape, r)
        result.loc[i, 'mass'] = square[mask].sum()
        result.loc[i, 'signal'] = result.loc[i, 'mass'] / mask.sum()
    return result
def _refine_disks(image, params, rad_range=None, threshold=0.5, max_dev=1,
                  min_points=10., min_contrast=0):
    # Refine a single disk; returns ((r, r, yc, xc), contour) or
    # (None, None) on failure.  `params` supplies the initial guess via
    # attributes r, y, x (e.g. a DataFrame row from ``refine_disks``).
    if rad_range is None:
        rad_range = (-params.r / 2, params.r / 2)
    # Get intensity in spline representation
    coords = (params.r, params.r, params.y, params.x)
    intensity, pos, normal = unwrap_ellipse(image, coords, rad_range)
    # Check whether the intensity interpolation is bright on left, dark on right
    if np.mean(intensity[:, 0]) - np.mean(intensity[:, -1]) < min_contrast:
        return None, None
    # Find the coordinates of the edge
    r_dev = max_edge(intensity, threshold) + rad_range[0]
    # too few located edge points -> unreliable fit, reject
    if np.sum(~np.isnan(r_dev)) < min_points:
        return None, None
    # Set outliers to mean of rest of x coords
    # r_dev = remove_outliers(r_dev)
    # Convert to cartesian
    coord_new = to_cartesian(r_dev, pos, normal)
    # Fit the circle
    try:
        (radius, _), (yc, xc), _ = fit_ellipse(coord_new, mode='xy')
    except np.linalg.LinAlgError:
        return None, None
    if np.any(np.isnan([radius, yc, xc])):
        return None, None
    # reject fits whose radius moved outside the probed annulus
    if not rad_range[0] < radius - params.r < rad_range[1]:
        return None, None
    # calculate deviations from circle
    y, x = coord_new
    deviations2 = (np.sqrt((xc - x)**2 + (yc - y)**2) - radius)**2
    mask = deviations2 < max_dev**2
    if np.sum(mask) < min_points:
        return None, None
    if np.any(~mask):
        # refit using only the inlier points
        try:
            (radius, _), (yc, xc), _ = fit_ellipse(coord_new[:, mask],
                                                   mode='xy')
        except np.linalg.LinAlgError:
            return None, None
        if np.any(np.isnan([radius, yc, xc])):
            return None, None
    return (radius, radius, yc, xc), coord_new.T
|
caspervdw/circletracking
|
circletracking/refine.py
|
Python
|
bsd-3-clause
| 12,910
|
"""
Fixture to configure XQueue response.
"""
import json
import requests
from common.test.acceptance.fixtures import XQUEUE_STUB_URL
class XQueueResponseFixtureError(Exception):
    """Raised when configuring the stub XQueue fails."""
class XQueueResponseFixture(object):
    """
    Configure the XQueue stub's response to submissions.
    """

    def __init__(self, pattern, response_dict):
        """
        Configure XQueue stub to POST `response_dict` (a dictionary)
        back to the LMS when it receives a submission that contains the string
        `pattern`.

        Remember that there is one XQueue stub shared by all the tests;
        if possible, you should have tests use unique queue names
        to avoid conflict between tests running in parallel.
        """
        self._pattern = pattern
        self._response_dict = response_dict

    def install(self):
        """
        Configure the stub via HTTP.

        Raises XQueueResponseFixtureError when the stub rejects the request.
        """
        url = XQUEUE_STUB_URL + "/set_config"

        # Configure the stub to respond to submissions to our queue
        payload = {self._pattern: json.dumps(self._response_dict)}
        response = requests.put(url, data=payload)

        if not response.ok:
            # BUGFIX: the format string used indices {1}/{2} with only two
            # arguments, raising IndexError instead of the intended error,
            # and never actually reported the HTTP status code.
            raise XQueueResponseFixtureError(
                u"Could not configure XQueue stub for queue '{0}'. "
                u"Status code: {1}".format(self._pattern, response.status_code))
|
edx-solutions/edx-platform
|
common/test/acceptance/fixtures/xqueue.py
|
Python
|
agpl-3.0
| 1,436
|
#!/usr/bin/env python3
"""
微信群回复方法
:author Wang Weiwei <email>weiwei02@vip.qq.com / weiwei.wang@100credit.com</email>
:sine 2017/8/14
:version 1.0
"""
from .MessageEvent import *
# Groups for which replies should be generated (empty list = all groups)
REPLY_GROUPS_NAMES = []
# When True, only react to messages that @-mention this account
IS_AT = False
@itchat.msg_register([TEXT, MAP, CARD, NOTE, SHARING, PICTURE, RECORDING, ATTACHMENT, VIDEO], isGroupChat=True)
def text_reply(msg):
    """Handle an incoming WeChat group message and enqueue it for replying.

    Text-like messages are forwarded as-is; media messages are first
    downloaded via the callback stored in msg[TEXT] and replaced by an
    itchat-style '@img@...' / '@vid@...' / '@fil@...' placeholder.
    """
    queMsg = {"ToUserName": msg['ActualNickName'], TEXT: "", "FromUserName": msg["FromUserName"]}
    # Only reply inside the configured groups (empty list means all groups).
    if len(REPLY_GROUPS_NAMES) != 0 and msg['ToUserName'] not in REPLY_GROUPS_NAMES:
        return
    # Optionally require that the bot is @-mentioned in the message.
    if IS_AT and not msg['isAt']:
        return
    if msg['Type'] in [TEXT, MAP, CARD, NOTE, SHARING]:
        queMsg[TEXT] = msg['Content']
        queMsg["Type"] = TEXT
    else:
        # Invoke the message's download callback to save the media locally.
        msg[TEXT](msg['FileName'])
        queMsg[TEXT] = '@%s@%s' % ({'Picture': 'img', 'Video': 'vid'}.get(msg['Type'], 'fil'), msg['FileName'])
    msgQueue.put(queMsg)
|
weiwei02/Technical--Documentation
|
python/src/weichat/xiaobing/Groups.py
|
Python
|
apache-2.0
| 1,008
|
# -*- coding: utf-8 -*-
"""
Class for reading data from from Tucker Davis TTank format.
Terminology:
TDT hold data with tanks (actually a directory). And tanks hold sub block
(sub directories).
Tanks correspond to neo.Block and tdt block correspond to neo.Segment.
Note the name Block is ambiguous because it does not refer to same thing in TDT
terminology and neo.
Depend on:
Supported : Read
Author: sgarcia
"""
import os
import struct
import sys
import numpy as np
import quantities as pq
import itertools
from neo.io.baseio import BaseIO
from neo.core import Block, Segment, AnalogSignal, SpikeTrain, Event
from neo.io.tools import iteritems
# True when running under Python 3 (used when decoding event codes to str)
PY3K = (sys.version_info[0] == 3)
def get_chunks(sizes, offsets, big_array):
    """Gather per-event data chunks from the raw TEV byte array.

    :param sizes: per-event 'size' fields from the TSQ index
    :param offsets: per-event byte offsets into ``big_array``
    :param big_array: raw file contents as a 1-d uint8 array
    :returns: concatenation of all chunks as one array
    """
    # offsets are octect count
    # sizes are not!! The 'size' field counts 10 header longs plus the
    # payload in 4-byte words, hence this conversion to a byte count.
    sizes = (sizes - 10) * 4
    # BUGFIX: itertools.izip does not exist on Python 3 (this module already
    # special-cases PY3K elsewhere); the builtin zip works on both.
    # Also avoid shadowing the builtin `all`.
    chunks = [big_array[o:o + s] for s, o in zip(sizes, offsets)]
    return np.concatenate(chunks)
class TdtIO(BaseIO):
    """
    Class for reading data from from Tucker Davis TTank format.

    Usage:
        >>> from neo import io
        >>> r = io.TdtIO(dirname='aep_05')
        >>> bl = r.read_block(lazy=False, cascade=True)
        >>> print bl.segments
        [<neo.core.segment.Segment object at 0x1060a4d10>]
        >>> print bl.segments[0].analogsignals
        [<AnalogSignal(array([ 2.18811035,  2.19726562,  2.21252441, ...,
        1.33056641,  1.3458252 ,  1.3671875 ], dtype=float32) * pA,
        [0.0 s, 191.2832 s], sampling rate: 10000.0 Hz)>]
        >>> print bl.segments[0].events
        []
    """
    is_readable = True
    is_writable = False

    supported_objects = [Block, Segment, AnalogSignal, Event]
    readable_objects = [Block, Segment]
    writeable_objects = []

    has_header = False
    is_streameable = False

    read_params = {
        Block: [],
        Segment: []
        }
    write_params = None

    name = 'TDT'
    extensions = []

    mode = 'dir'

    def __init__(self, dirname=None):
        """
        **Arguments**
        Arguments:
            dirname: path of the TDT tank (a directory)
        """
        BaseIO.__init__(self)
        self.dirname = dirname
        if self.dirname.endswith('/'):
            self.dirname = self.dirname[:-1]

    def read_segment(self, blockname=None, lazy=False, cascade=True, sortname=''):
        """
        Read a single segment from the tank. Note that TDT blocks are Neo
        segments, and TDT tanks are Neo blocks, so here the 'blockname'
        argument refers to the TDT block's name, which will be the Neo
        segment name.

        sortname is used to specify the external sortcode generated by offline
        spike sorting. If sortname=='PLX', there should be a
        ./sort/PLX/*.SortResult file in the tdt block, which stores the
        sortcode for every spike; default to '', which uses the original
        online sort.
        """
        if not blockname:
            blockname = os.listdir(self.dirname)[0]

        if blockname == 'TempBlk': return None

        if not self.is_tdtblock(blockname): return None  # if not a tdt block

        subdir = os.path.join(self.dirname, blockname)
        if not os.path.isdir(subdir): return None

        seg = Segment(name=blockname)

        tankname = os.path.basename(self.dirname)

        # TSQ is the global index
        tsq_filename = os.path.join(subdir, tankname+'_'+blockname+'.tsq')
        dt = [('size', 'int32'),
              ('evtype', 'int32'),
              ('code', 'S4'),
              ('channel', 'uint16'),
              ('sortcode', 'uint16'),
              ('timestamp', 'float64'),
              ('eventoffset', 'int64'),
              ('dataformat', 'int32'),
              ('frequency', 'float32'),
              ]
        tsq = np.fromfile(tsq_filename, dtype=dt)

        # 0x8801: 'EVTYPE_MARK' give the global_start
        global_t_start = tsq[tsq['evtype'] == 0x8801]['timestamp'][0]

        # TEV is the old data file
        try:
            tev_filename = os.path.join(subdir, tankname+'_'+blockname+'.tev')
            # tev_array = np.memmap(tev_filename, mode='r', dtype='uint8')  # if memory problem use this instead
            tev_array = np.fromfile(tev_filename, dtype='uint8')
        except IOError:
            tev_filename = None

        # if there is an external sortcode in ./sort/[sortname]/*.SortResult
        # (generated after offline sorting)
        sortresult_filename = None
        # BUGFIX: was `sortname is not ''` -- identity comparison against a
        # string literal is implementation-dependent (and a SyntaxWarning on
        # modern Pythons); equality is the correct test.
        if sortname != '':
            try:
                for file in os.listdir(os.path.join(subdir, 'sort', sortname)):
                    if file.endswith(".SortResult"):
                        sortresult_filename = os.path.join(subdir, 'sort', sortname, file)
                        # get new sortcode
                        newsorcode = np.fromfile(sortresult_filename, 'int8')[1024:]  # the first 1024 byte is file header
                        # update the sort code with the info from this file
                        tsq['sortcode'][1:-1] = newsorcode
                        # print('sortcode updated')
                        break
            except OSError:
                sortresult_filename = None
            except IOError:
                sortresult_filename = None

        for type_code, type_label in tdt_event_type:
            mask1 = tsq['evtype'] == type_code
            codes = np.unique(tsq[mask1]['code'])
            for code in codes:
                mask2 = mask1 & (tsq['code'] == code)
                channels = np.unique(tsq[mask2]['channel'])
                for channel in channels:
                    mask3 = mask2 & (tsq['channel'] == channel)

                    if type_label in ['EVTYPE_STRON', 'EVTYPE_STROFF']:
                        if lazy:
                            times = [ ]*pq.s
                            labels = np.array([ ], dtype=str)
                        else:
                            times = (tsq[mask3]['timestamp'] - global_t_start) * pq.s
                            labels = tsq[mask3]['eventoffset'].view('float64').astype('S')
                        ea = Event(times=times,
                                   name=code,
                                   channel_index=int(channel),
                                   labels=labels)
                        if lazy:
                            ea.lazy_shape = np.sum(mask3)
                        seg.events.append(ea)

                    elif type_label == 'EVTYPE_SNIP':
                        sortcodes = np.unique(tsq[mask3]['sortcode'])
                        for sortcode in sortcodes:
                            mask4 = mask3 & (tsq['sortcode'] == sortcode)
                            nb_spike = np.sum(mask4)
                            sr = tsq[mask4]['frequency'][0]
                            waveformsize = tsq[mask4]['size'][0]-10
                            if lazy:
                                times = [ ]*pq.s
                                waveforms = None
                            else:
                                times = (tsq[mask4]['timestamp'] - global_t_start) * pq.s
                                dt = np.dtype(data_formats[ tsq[mask3]['dataformat'][0]])
                                waveforms = get_chunks(tsq[mask4]['size'], tsq[mask4]['eventoffset'], tev_array).view(dt)
                                waveforms = waveforms.reshape(nb_spike, -1, waveformsize)
                                waveforms = waveforms * pq.mV
                            if nb_spike > 0:
                                # t_start = (tsq['timestamp'][0] - global_t_start) * pq.s  # this hould work but not
                                t_start = 0 * pq.s
                                t_stop = (tsq['timestamp'][-1] - global_t_start) * pq.s
                            else:
                                t_start = 0 * pq.s
                                t_stop = 0 * pq.s
                            st = SpikeTrain(times=times,
                                            name='Chan{0} Code{1}'.format(channel, sortcode),
                                            t_start=t_start,
                                            t_stop=t_stop,
                                            waveforms=waveforms,
                                            left_sweep=waveformsize/2./sr * pq.s,
                                            sampling_rate=sr * pq.Hz,
                                            )
                            st.annotate(channel_index=channel)
                            if lazy:
                                st.lazy_shape = nb_spike
                            seg.spiketrains.append(st)

                    elif type_label == 'EVTYPE_STREAM':
                        dt = np.dtype(data_formats[ tsq[mask3]['dataformat'][0]])
                        shape = np.sum(tsq[mask3]['size']-10)
                        sr = tsq[mask3]['frequency'][0]
                        if lazy:
                            signal = [ ]
                        else:
                            if PY3K:
                                signame = code.decode('ascii')
                            else:
                                signame = code
                            sev_filename = os.path.join(subdir, tankname+'_'+blockname+'_'+signame+'_ch'+str(channel)+'.sev')
                            try:
                                # sig_array = np.memmap(sev_filename, mode='r', dtype='uint8')  # if memory problem use this instead
                                sig_array = np.fromfile(sev_filename, dtype='uint8')
                            except IOError:
                                sig_array = tev_array
                            signal = get_chunks(tsq[mask3]['size'], tsq[mask3]['eventoffset'], sig_array).view(dt)

                        anasig = AnalogSignal(signal=signal * pq.V,
                                              name='{0} {1}'.format(code, channel),
                                              sampling_rate=sr * pq.Hz,
                                              t_start=(tsq[mask3]['timestamp'][0] - global_t_start) * pq.s,
                                              channel_index=int(channel)
                                              )
                        if lazy:
                            anasig.lazy_shape = shape
                        seg.analogsignals.append(anasig)
        return seg

    def read_block(self, lazy=False, cascade=True, sortname=''):
        """Read the whole tank: one Neo Segment per TDT block directory."""
        bl = Block()
        tankname = os.path.basename(self.dirname)
        bl.file_origin = tankname
        if not cascade: return bl
        for blockname in os.listdir(self.dirname):
            if self.is_tdtblock(blockname):  # if the folder is a tdt block
                seg = self.read_segment(blockname, lazy, cascade, sortname)
                bl.segments.append(seg)
        bl.create_many_to_one_relationship()
        return bl

    # to determine if this folder is a TDT block, based on the extension of
    # the files inside it, to deal with unexpected files in the tank,
    # e.g. .DS_Store on Mac machines
    def is_tdtblock(self, blockname):
        file_ext = list()
        blockpath = os.path.join(self.dirname, blockname)  # get block path
        if os.path.isdir(blockpath):
            for file in os.listdir(blockpath):  # for every file, get extension, convert to lowercase and append
                file_ext.append(os.path.splitext(file)[1].lower())

        file_ext = set(file_ext)
        tdt_ext = set(['.tbk', '.tdx', '.tev', '.tsq'])
        if file_ext >= tdt_ext:  # if containing all the necessary files
            return True
        else:
            return False
# Event type codes found in the TSQ index, mapped to their TDT labels.
# Commented-out entries are known type codes that this reader does not
# handle (EVTYPE_MARK is used separately to find the global start time).
tdt_event_type = [
    #(0x0,'EVTYPE_UNKNOWN'),
    (0x101, 'EVTYPE_STRON'),
    (0x102,'EVTYPE_STROFF'),
    #(0x201,'EVTYPE_SCALER'),
    (0x8101, 'EVTYPE_STREAM'),
    (0x8201, 'EVTYPE_SNIP'),
    #(0x8801, 'EVTYPE_MARK'),
    ]
# Mapping of the TSQ 'dataformat' field to the numpy dtype of the payload.
data_formats = {
        0 : np.float32,
        1 : np.int32,
        2 : np.int16,
        3 : np.int8,
        4 : np.float64,
        }
|
SummitKwan/python-neo
|
neo/io/tdtio.py
|
Python
|
bsd-3-clause
| 12,325
|
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from murano.dsl import murano_method
from murano.dsl import results_serializer
from murano.tests.unit import base
class TestActionsSerializer(base.MuranoTestCase):
    """Unit tests for results_serializer action merging and serialization."""

    def setUp(self):
        super(TestActionsSerializer, self).setUp()

    def test_old_actions_deletion(self):
        # Actions missing from the new dict must be dropped from the result.
        previous = {
            'action1': {'name': 'name1', 'enabled': True},
            'action2': {'name': 'name2', 'enabled': True},
            'action3': {'name': 'name3', 'enabled': True},
        }
        current = {
            'action2': {'name': 'name2', 'enabled': False},
            'action3': {'name': 'name3', 'enabled': True},
        }
        merged = results_serializer._merge_actions(previous, current)
        self.assertEqual(2, len(merged))
        self.assertNotIn('action1', merged)

    def test_actions_state_update(self):
        # The 'enabled' flag must be taken from the new dict.
        previous = {
            'action1': {'name': 'name1', 'enabled': True},
            'action2': {'name': 'name2', 'enabled': True},
        }
        current = {
            'action1': {'name': 'name2', 'enabled': False},
            'action2': {'name': 'name3', 'enabled': True},
        }
        merged = results_serializer._merge_actions(previous, current)
        self.assertFalse(merged['action1']['enabled'])

    def _get_mocked_obj(self):
        # Build a mock object with two own methods (one action, one runtime)
        # and a parent type that contributes a third (action) method.
        own_action = mock.Mock()
        own_action.usage = murano_method.MethodUsages.Action
        runtime_method = mock.Mock()
        runtime_method.usage = murano_method.MethodUsages.Runtime
        parent_action = mock.Mock()
        parent_action.usage = murano_method.MethodUsages.Action

        parent_type = mock.Mock()
        parent_type.parents = []
        parent_type.methods = {'method3': parent_action}

        obj = mock.Mock()
        obj.object_id = 'id1'
        obj.type.parents = [parent_type]
        obj.type.methods = {'method1': own_action, 'method2': runtime_method}
        return obj

    def test_object_actions_serialization(self):
        obj = self._get_mocked_obj()
        actions = results_serializer._serialize_available_action(obj)
        self.assertIn('id1_method1', actions)
        self.assertEqual({'name': 'method1', 'enabled': True},
                         actions['id1_method1'])

    def test_that_only_actions_are_serialized(self):
        obj = self._get_mocked_obj()
        actions = results_serializer._serialize_available_action(obj)
        self.assertNotIn('id1_method2', actions)

    def test_parent_actions_are_serialized(self):
        obj = self._get_mocked_obj()
        actions = results_serializer._serialize_available_action(obj)
        self.assertIn('id1_method3', actions)
        self.assertEqual({'name': 'method3', 'enabled': True},
                         actions['id1_method3'])
|
sergmelikyan/murano
|
murano/tests/unit/test_actions.py
|
Python
|
apache-2.0
| 3,357
|
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.DeclAST import DeclAST
from slicc.symbols import StateMachine, Type
class MachineAST(DeclAST):
    """AST node for a 'machine(...)' declaration in a SLICC source file."""

    def __init__(self, slicc, ident, pairs_ast, config_parameters, decls):
        super(MachineAST, self).__init__(slicc, pairs_ast)

        self.ident = ident
        self.pairs_ast = pairs_ast
        self.config_parameters = config_parameters
        self.decls = decls

    def __repr__(self):
        return "[Machine: %r]" % self.ident

    def files(self, parent=None):
        """Return the set of file names generated for this controller."""
        # Fixed per-controller outputs, plus whatever the internal
        # declarations contribute.
        suffixes = ('Controller.cc', 'Controller.hh', 'Controller.py',
                    'Profiler.cc', 'Profiler.hh', 'Transitions.cc',
                    'Wakeup.cc')
        generated = set('%s_%s' % (self.ident, suffix)
                        for suffix in suffixes)
        return generated | self.decls.files(self.ident)

    def generate(self):
        """Build the StateMachine symbol from this declaration."""
        # New symbol-table scope for the machine's internal declarations
        self.symtab.pushFrame()

        machine = StateMachine(self.symtab, self.ident, self.location,
                               self.pairs, self.config_parameters)
        self.symtab.newCurrentMachine(machine)

        # Generate code for everything declared inside the machine, then
        # assemble the transition table from those declarations.
        self.decls.generate()
        machine.buildTable()

        self.symtab.popFrame()

    def findMachines(self):
        """Register this machine in the MachineType enumeration."""
        machine_type = self.symtab.find("MachineType", Type)
        if not machine_type.enumAdd(self.ident, self.pairs_ast.pairs):
            self.error("Duplicate machine name: %s:%s" % (machine_type,
                                                          self.ident))

        # Recurse into the internal declarations as well
        self.decls.findMachines()
|
liangwang/m5
|
src/mem/slicc/ast/MachineAST.py
|
Python
|
bsd-3-clause
| 3,435
|
from django.db import models
from django.db.models import Q
from django.db.models.loading import get_model
from cyder.base.eav.constants import ATTRIBUTE_INVENTORY
from cyder.base.eav.fields import EAVAttributeField
from cyder.base.eav.models import Attribute, EAVBase
from cyder.base.mixins import ObjectUrlMixin
from cyder.base.models import BaseModel
from cyder.base.utils import transaction_atomic
from cyder.core.system.validators import validate_no_spaces
class System(BaseModel, ObjectUrlMixin):
    """An inventory system (host) owned by a container."""

    name = models.CharField(
        max_length=255, unique=False, null=False, blank=False,
        validators=[validate_no_spaces])
    ctnr = models.ForeignKey("cyder.Ctnr", null=False,
                             verbose_name="Container")

    search_fields = ('name',)
    sort_fields = ('name',)

    class Meta:
        app_label = 'cyder'
        db_table = 'system'

    def __unicode__(self):
        return self.name

    @staticmethod
    def filter_by_ctnr(ctnr, objects=None):
        # Default to the full queryset when none is supplied.
        base = System.objects if objects is None else objects
        return base.filter(ctnr=ctnr)

    def check_in_ctnr(self, ctnr):
        return self.ctnr == ctnr

    def details(self):
        """For tables."""
        data = super(System, self).details()
        data['data'] = [
            ('Name', 'name', self),
            ('Container', 'container', self.ctnr),
        ]
        return data

    @staticmethod
    def eg_metadata():
        """EditableGrid metadata."""
        return {'metadata': [
            {'name': 'name', 'datatype': 'string', 'editable': True},
        ]}

    @transaction_atomic
    def delete(self, *args, **kwargs):
        # Remove dependent interfaces first, without recursing back into
        # this system and without committing per-interface.
        DynamicInterface = get_model('cyder', 'dynamicinterface')
        for iface in DynamicInterface.objects.filter(system=self):
            iface.delete(delete_system=False, commit=False)
        StaticInterface = get_model('cyder', 'staticinterface')
        for iface in StaticInterface.objects.filter(system=self):
            iface.delete(delete_system=False, commit=False)
        super(System, self).delete(*args, **kwargs)

    @transaction_atomic
    def save(self, *args, **kwargs):
        self.full_clean()
        super(System, self).save(*args, **kwargs)

    def clean(self, *args, **kwargs):
        # Keep every attached interface in the same container as the system.
        interfaces = (list(self.staticinterface_set.all()) +
                      list(self.dynamicinterface_set.all()))
        for iface in interfaces:
            if self.ctnr != iface.ctnr:
                iface.ctnr = self.ctnr
                iface.save()
        super(System, self).clean(*args, **kwargs)
class SystemAV(EAVBase):
    # Entity-attribute-value rows attached to a System; only attributes of
    # the inventory type may be used.
    class Meta(EAVBase.Meta):
        app_label = 'cyder'
        db_table = 'system_av'

    entity = models.ForeignKey(System)
    attribute = EAVAttributeField(Attribute,
                                  type_choices=(ATTRIBUTE_INVENTORY,))
|
murrown/cyder
|
cyder/core/system/models.py
|
Python
|
bsd-3-clause
| 2,803
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South migration for the locations app.

    Creates Country, Region and City tables, each a (id, name) pair with
    the FK chain City -> Region -> Country.
    """

    def forwards(self, orm):
        # Adding model 'Country'
        db.create_table('locations_country', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal('locations', ['Country'])
        # Adding model 'Region'
        db.create_table('locations_region', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Country'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal('locations', ['Region'])
        # Adding model 'City'
        db.create_table('locations_city', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('region', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Region'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal('locations', ['City'])

    def backwards(self, orm):
        # Deleting model 'Country'
        db.delete_table('locations_country')
        # Deleting model 'Region'
        db.delete_table('locations_region')
        # Deleting model 'City'
        db.delete_table('locations_city')

    # Frozen ORM snapshot used by South at migration time.
    models = {
        'locations.city': {
            'Meta': {'object_name': 'City'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Region']"})
        },
        'locations.country': {
            'Meta': {'object_name': 'Country'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'locations.region': {
            'Meta': {'object_name': 'Region'},
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Country']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['locations']
|
MauHernandez/cyclope
|
cyclope/apps/locations/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 2,676
|
from django.conf.urls.defaults import *
from lingcod.spacing.views import *
# URL routes for the spacing app: an index page plus KML feeds for land,
# fish distance, spacing points and the spacing network.
urlpatterns = patterns('',
    # Example:
    (r'^$', Index),
    url(r'^land/kml/', LandKML, name='land_kml'),
    url(r'^fish_distance/kml', FishDistanceKML, name='fish_distance_kml'),
    url(r'^spacing_points/kml/', SpacingPointKML, name='spacing_point_kml'),
    url(r'^spacing_network/kml/', SpacingNetworkKML, name='spacing_network_kml'),
)
|
Alwnikrotikz/marinemap
|
lingcod/spacing/urls.py
|
Python
|
bsd-3-clause
| 424
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The Project X-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
import sys
from prjxray.segmaker import Segmaker
# Correlate each BEL's 64-bit LUT INIT value with the bitstream bits.
run = sys.argv[1]
segmk = Segmaker("design_%s.bits" % run)

print("Loading tags from design_%s.txt." % run)
with open("design_%s.txt" % run, "r") as f:
    # Each line: <site> <bel> <init-literal>, where the literal's first
    # four characters (e.g. "64'h") are stripped before hex parsing.
    for raw in f:
        site, bel, init_field = raw.split()[:3]
        init = int(init_field[4:], 16)
        for bit in range(64):
            tag = ("%s.INIT[%02d]" % (bel, bit)).replace("6LUT", "LUT")
            segmk.add_site_tag(site, tag, ((init >> bit) & 1) != 0)

segmk.compile()
segmk.write(run)
|
SymbiFlow/prjxray
|
fuzzers/010-clb-lutinit/generate.py
|
Python
|
isc
| 864
|
#!/usr/bin/env python
"""
Unit tests for DVE authorizations
"""
import sys
import unittest
import dx_authorization
from lib.GetSession import GetSession
class DxAuthorizationTests(unittest.TestCase):
    """
    Creates, activates, lists destroys Delphix Authorizations
    Requirements: VDB named dx_vdb, group named Untitled, and user named jsuser.
    Change target_vdb, group and user to reflect values in your environment.
    """

    @classmethod
    def _build_fixture(cls):
        # Shared by setUpClass/tearDownClass: a fresh engine session plus
        # the names and roles the tests operate on.
        cls.server_obj = GetSession()
        cls.server_obj.serversess(
            "172.16.169.146", "delphix_admin", "delphix", "DOMAIN"
        )
        cls.user = "jsuser"
        cls.target_vdb = "dx_vdb"
        cls.group = "Untitled"
        cls.target_type_db = "database"
        cls.target_type_group = "group"
        cls.role_data = "Data"
        cls.role_read = "Read"
        cls.role_owner = "OWNER"

    @classmethod
    def setUpClass(cls):
        super(DxAuthorizationTests, cls).setUpClass()
        cls._build_fixture()
        cls.server_obj.dlpx_engines["engine_name"] = "test_engine"

    @classmethod
    def tearDownClass(cls):
        super(DxAuthorizationTests, cls).tearDownClass()
        cls._build_fixture()
        # Remove the authorizations created by the tests above.
        dx_authorization.delete_authorization(
            cls.server_obj, cls.role_data, cls.target_type_db, cls.target_vdb, cls.user
        )
        dx_authorization.delete_authorization(
            cls.server_obj, cls.role_data, cls.target_type_group, cls.group, cls.user
        )

    def test_create_authorization_group(self):
        dx_authorization.create_authorization(
            self.server_obj,
            self.role_data,
            self.target_type_group,
            self.group,
            self.user,
        )
        expected = "created for {}".format(self.user)
        self.assertIn(expected, sys.stdout.getvalue().strip())

    def test_create_authorization_database(self):
        dx_authorization.create_authorization(
            self.server_obj,
            self.role_data,
            self.target_type_db,
            self.target_vdb,
            self.user,
        )
        expected = "created for {}".format(self.user)
        self.assertIn(expected, sys.stdout.getvalue().strip())

    def test_lists_dx_authorizations(self):
        dx_authorization.list_authorization(self.server_obj)
        self.assertIn("sysadmin", sys.stdout.getvalue().strip())
# Run the test case
if __name__ == "__main__":
    # buffer=True captures stdout so the assertIn checks above can inspect
    # what the exercised functions print.
    unittest.main(buffer=True)
|
CloudSurgeon/delphixpy-examples
|
tests/test_dx_authorization.py
|
Python
|
apache-2.0
| 2,758
|
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Button import Button
from Tools.LoadPixmap import LoadPixmap
import xml.etree.cElementTree
from twisted.internet import reactor, defer
from twisted.web import client
import urllib
from Components.Pixmap import Pixmap
from enigma import ePicLoad
import string
import os
import time
from enigma import getDesktop
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.AVSwitch import AVSwitch
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.config import ConfigSubsection, ConfigSubList, ConfigText, ConfigInteger, config
from setup import initConfigfore, WeatherPluginEntriesListConfigScreenfore
from string import upper
from Screens.ChoiceBox import ChoiceBox
from enigma import eListboxPythonMultiContent, gFont, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_WRAP, RT_VALIGN_CENTER
from Components.MenuList import MenuList
from Components.MultiContent import MultiContentEntryText
from os import environ
from Tools.Directories import resolveFilename, SCOPE_PLUGINS, SCOPE_LANGUAGE, fileExists
# Persistent plugin configuration: three independent location lists, each
# with its own entry counter — google API ("Entries"), accuweather
# ("acuEntries") and foreca ("foreEntries").
config.plugins.AccuWeatherPlugin = ConfigSubsection()
config.plugins.AccuWeatherPlugin.entriescount = ConfigInteger(0)
config.plugins.AccuWeatherPlugin.default = ConfigInteger(1)  # default source index
config.plugins.AccuWeatherPlugin.Entries = ConfigSubList()
config.plugins.AccuWeatherPlugin.acuentriescount = ConfigInteger(0)
config.plugins.AccuWeatherPlugin.acuEntries = ConfigSubList()
config.plugins.AccuWeatherPlugin.foreentriescount = ConfigInteger(0)
config.plugins.AccuWeatherPlugin.foreEntries = ConfigSubList()
initConfigfore()
# Desktop-browser User-Agent sent with every weather HTTP request.
UserAgent = 'Mozilla/5.0 (X11; U; Linux x86_64; de; rv:1.9.0.15) Gecko/2009102815 Ubuntu/9.04 (jaunty) Firefox/3.'
from enigma import eSize, ePoint
from Tools.Directories import fileExists, resolveFilename, SCOPE_SKIN
from Components.config import config
def weatherScrollBar(objectoself, nombrelista = 'lista', barra = 'barrascroll', altoitem = 25, imagen = None):
    """Size, position and optionally skin the up/down scroll pixmaps
    ('<barra>_arr' / '<barra>_abj') alongside the widget *nombrelista*.

    Hides both pixmaps and returns when the list fits on one page.
    All exceptions are swallowed: the scrollbar is purely cosmetic.
    """
    nombrebarraArr = barra + '_arr'
    nombrebarraAbj = barra + '_abj'
    numele = 999
    if nombrelista == barra:
        # Caller passed the bar name as the list name: measure against the
        # arrow widget itself.
        nombrelista = nombrebarraArr
    elif not barra == 'servicelist':
        try:
            numele = len(objectoself[nombrelista].list)
        except:
            pass
    try:
        alto = objectoself[nombrelista].instance.size().height()
        elepag = int(alto / altoitem)  # items that fit on one page
        if numele > elepag:
            pass
        else:
            # Everything fits: no scrollbar needed.
            objectoself[nombrebarraArr].hide()
            objectoself[nombrebarraAbj].hide()
            return
        ancho = objectoself[nombrelista].instance.size().width()
        if ancho > 20:
            if imagen:
                # Load scroll pixmaps from the active skin's directory;
                # bail out silently if either image is missing.
                nomskin = str(config.skin.primary_skin.value).split('/')[0]
                rutaSkin = resolveFilename(SCOPE_SKIN) + nomskin + '/'
                if fileExists(rutaSkin + 'scroll.png'):
                    laimagen = rutaSkin + 'scroll.png'
                    objectoself[nombrebarraArr].instance.setPixmapFromFile(laimagen)
                else:
                    return
                if fileExists(rutaSkin + 'scrollb.png'):
                    laimagen = laimagen = rutaSkin + '/scrollb.png'
                    objectoself[nombrebarraAbj].instance.setPixmapFromFile(laimagen)
                else:
                    return
            # Resize/move the two pixmaps only when their geometry is off,
            # then pin them to the list's right edge (20px wide column).
            posx = objectoself[nombrelista].instance.position().x()
            posy = objectoself[nombrelista].instance.position().y()
            wsize = (20, alto - 30)
            asizex = objectoself[nombrebarraArr].instance.size().width()
            asizey = objectoself[nombrebarraArr].instance.size().height()
            if not asizex == 20 or not asizey == alto - 30:
                objectoself[nombrebarraArr].instance.resize(eSize(*wsize))
            wsize = (20, 30)
            asizex = objectoself[nombrebarraAbj].instance.size().width()
            asizey = objectoself[nombrebarraAbj].instance.size().height()
            if not asizex == 20 or not asizey == 30:
                objectoself[nombrebarraAbj].instance.resize(eSize(*wsize))
            ax = objectoself[nombrebarraArr].instance.position().x()
            ay = objectoself[nombrebarraArr].instance.position().y()
            if not ax == posx + ancho - 20 or not ay == posy:
                objectoself[nombrebarraArr].instance.move(ePoint(posx + ancho - 20, posy))
            ax = objectoself[nombrebarraAbj].instance.position().x()
            ay = objectoself[nombrebarraAbj].instance.position().y()
            if not ax == posx + ancho - 20 or not ay == posy + alto - 30:
                objectoself[nombrebarraAbj].instance.move(ePoint(posx + ancho - 20, posy + alto - 30))
            objectoself[nombrebarraArr].show()
            objectoself[nombrebarraAbj].show()
    except:
        # Cosmetic-only feature: never let a scrollbar failure propagate.
        pass
def devimagentemperatura(temperatura):
    """Map a temperature (degrees C) onto one of six thermometer icons."""
    # Inclusive upper bound of each band, coldest first.
    bandas = ((3, 't0-fs8.png'), (10, 't1-fs8.png'), (19, 't2-fs8.png'),
              (28, 't3-fs8.png'), (36, 't4-fs8.png'))
    for limite, icono in bandas:
        if temperatura <= limite:
            return icono
    return 't5-fs8.png'
def equivnoche(imagen):
    """Return the night-time icon code for a daytime code.

    Codes without a night variant are returned unchanged.
    """
    nocturnos = {
        '1': '33', '2': '34', '3': '35', '4': '36', '5': '37',
        '6': '38', '13': '40', '14': '39', '16': '42', '17': '41',
        '20': '44', '21': '43', '23': '44',
    }
    return nocturnos.get(imagen, imagen)
def devgificono(imagen, actual = False):
    """Translate a Google-weather gif name into a local icon filename.

    Unknown names fall back to the showers icon. When *actual* is True
    and the local time is night (21:00-06:59), the daytime code is
    swapped for its night-time equivalent via equivnoche().
    """
    codigos = {
        'chance_of_rain.gif': '14', 'chance_of_snow.gif': '21',
        'chance_of_storm.gif': '17', 'cloudy.gif': '7',
        'dusty.gif': '11', 'flurries.gif': '13',
        'fog.gif': '5', 'foggy.gif': '5', 'hazy.gif': '5',
        'icy.gif': '31', 'mist.gif': '18',
        'mostly_cloudy.gif': '6', 'mostly_sunny.gif': '4',
        'partly_cloudy.gif': '3', 'rain.gif': '12',
        'rain_snow.gif': '26', 'showers.gif': '14',
        'sleet.gif': '29', 'smoke.gif': '8', 'snow.gif': '22',
        'storm.gif': '15', 'sunny.gif': '1',
        'thunderstorm.gif': '99',
    }
    if imagen not in codigos:
        return '14_int-fs8.png'
    valor = codigos[imagen]
    if actual:
        chora = int(time.strftime('%H', time.localtime()))
        valornoche = equivnoche(valor)
        if valornoche and (chora >= 21 or chora < 7):
            valor = valornoche
    return valor + '_int-fs8.png'
def devImagen(cadena, etiqueta):
    """Return the src attribute of the first image found after *etiqueta*."""
    resto = devStrTm(cadena, etiqueta, '')
    return devStrTm(resto, 'src="', '"')
def devHtml(cadena, etiqueta):
    """Return the inner text of the first tag after *etiqueta*,
    normalising the degree entity to a UTF-8 degree sign."""
    resto = devStrTm(cadena, etiqueta, '')
    texto = devStrTm(resto, '>', '<')
    return texto.replace('°', '\xc2\xba')
def devStrTm(cadena, inicio, fin):
    """Return the substring of *cadena* between markers *inicio* and *fin*.

    If *fin* is the empty string, everything after *inicio* is returned.
    Returns '' when *inicio* is absent or any parsing error occurs.
    """
    try:
        if inicio not in cadena:
            return ''
        # Renamed from `str`, which shadowed the builtin of the same name.
        trozo = cadena.split(inicio)[1]
        if not fin == '':
            trozo = trozo.split(fin)[0]
        return trozo
    except:
        # Best-effort HTML scraping: any failure yields an empty string.
        return ''
class WeatherIconItem():
    """State for one icon download: source URL, target file, list index,
    error flag and the download handle itself."""

    def __init__(self, url = '', filename = '', descarga = None, index = -1, error = False):
        self.descarga = descarga  # in-flight download handle, if any
        self.url = url
        self.filename = filename
        self.index = index
        self.error = error
        # Set externally to abort the transfer.
        self.cancel = False
def getXML(url):
    # Fetch *url* asynchronously via twisted; returns a Deferred that fires
    # with the response body. Uses the module-level browser User-Agent.
    return client.getPage(url, agent=UserAgent)
def download(item):
    """Download item.url into item.filename; returns a twisted Deferred."""
    # open() instead of the Python-2-only file() builtin (same behavior,
    # forward-compatible).
    return client.downloadPage(item.url, open(item.filename, 'wb'), agent=UserAgent)
def main(session, **kwargs):
    # Plugin entry point: force foreca (index 1) as the default source,
    # persist it, and open the foreca weather screen.
    config.plugins.AccuWeatherPlugin.default.value = 1
    config.plugins.AccuWeatherPlugin.default.save()
    config.plugins.AccuWeatherPlugin.save()
    session.open(ForecaWeatherPlugin)
def Plugins(**kwargs):
    """Enigma2 plugin registration hook: expose the plugin-menu entry."""
    # Local renamed from `list`, which shadowed the builtin.
    descriptors = [PluginDescriptor(name=_('Weather Info'), description=_('Weather info Plugin'), where=[PluginDescriptor.WHERE_PLUGINMENU], fnc=main)]
    return descriptors
class IniciaSelDetalle(MenuList):
    """MenuList for the detailed-forecast rows: 66px items, five fonts."""

    def __init__(self, list, enableWrapAround = False):
        MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
        self.l.setItemHeight(66)
        # Font slots 0-4, sized for the columns built by IniciaSelDetalleEntry.
        for slot, size in enumerate((28, 21, 25, 18, 19)):
            self.l.setFont(slot, gFont('Regular', size))
def IniciaSelDetalleEntry(texto, hora = None, iconotiempo = None, temperatura = None, iconoviento = None, viento = None, descripcion = None):
    """Build a multi-content row for the detailed-forecast list.

    With only *texto* given, renders a full-width section header (the day
    title); otherwise renders columns: hour, weather icon, temperature
    (plus a small colour strip), wind icon + speed, and description.
    """
    res = [texto]
    if hora == None:
        # Section header row.
        res.append(MultiContentEntryText(pos=(0, 0), size=(970, 66), flags=RT_HALIGN_LEFT | RT_VALIGN_CENTER, font=0, text=texto))
    else:
        res.append(MultiContentEntryText(pos=(0, 0), size=(74, 66), flags=RT_HALIGN_RIGHT | RT_VALIGN_CENTER, font=1, text=hora))
        png = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/acuwheathericons/' + iconotiempo
        if fileExists(png):
            fpng = LoadPixmap(png)
            res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND,
             85,
             -4,
             74,
             66,
             fpng))
        tempimagen = None
        try:
            # Colour strip keyed by the numeric value before the degree sign.
            tempimagen = devimagentemperatura(int(temperatura.strip().split('\xc2\xba')[0]))
        except:
            pass
        if not tempimagen == None:
            png = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/acuwheathericons/p' + tempimagen
            if fileExists(png):
                fpng = LoadPixmap(png)
                res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND,
                 157,
                 18,
                 18,
                 27,
                 fpng))
        res.append(MultiContentEntryText(pos=(173, 0), size=(140, 66), flags=RT_HALIGN_LEFT | RT_VALIGN_CENTER, font=2, text=temperatura))
        png = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/acuwheathericons/' + iconoviento
        if fileExists(png):
            fpng = LoadPixmap(png)
            res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND,
             276,
             13,
             29,
             40,
             fpng))
        res.append(MultiContentEntryText(pos=(310, 0), size=(140, 66), flags=RT_HALIGN_LEFT | RT_VALIGN_CENTER, font=3, text=viento))
        res.append(MultiContentEntryText(pos=(422, 0), size=(530, 66), flags=RT_HALIGN_LEFT | RT_WRAP | RT_VALIGN_CENTER, font=4, text=descripcion))
    return res
class IniciaSelList2(MenuList):
    """MenuList for the source-selection screen: 70px rows, two fonts."""

    def __init__(self, list, enableWrapAround = False):
        MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
        self.l.setItemHeight(70)
        for slot, size in enumerate((21, 17)):
            self.l.setFont(slot, gFont('Regular', size))
def IniciaSelListEntry2(numero):
    """Build a row for the source-selection list.

    0 = google API, 1 = foreca.com, anything else = accuweather.com.
    The currently configured default source gets a check-mark icon.
    """
    res = [numero]
    if numero == 0:
        texto = _('google.com API Wheater')
    elif numero == 1:
        texto = _('foreca.com Wheater Information')
    else:
        texto = _('accuWheater.com Information')
    res.append(MultiContentEntryText(pos=(150, 0), size=(620, 70), flags=RT_HALIGN_LEFT | RT_WRAP | RT_VALIGN_CENTER, font=0, text=texto))
    # Source logo (icono<N>-fs8.png).
    png = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/icono' + str(numero) + '-fs8.png'
    if fileExists(png):
        fpng = LoadPixmap(png)
        res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND,
         30,
         3,
         107,
         64,
         fpng))
    if numero == config.plugins.AccuWeatherPlugin.default.value:
        # Mark the currently selected default source.
        png = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/checkok.png'
        fpng = LoadPixmap(png)
        res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND,
         1,
         18,
         24,
         24,
         fpng))
    return res
class MainWheaterPlugin(Screen):
    """Source-selection screen: choose between the google and foreca
    weather sources, set the default, or open per-source setup."""

    skin = '''<screen name="MainWheaterPlugin" position="center,center" size="660,258" title="%s %s">
<widget name="list" position="9,29" size="642,140" scrollbarMode="showOnDemand" />
<widget name="key_green" position="345,209" size="140,40" zPosition="5" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" />
<ePixmap name="green" position="346,209" zPosition="4" size="140,40" pixmap="skin_default/buttons/green.png" transparent="1" alphatest="on" />
<widget name="key_yellow" position="510,209" size="140,40" zPosition="5" valign="center" halign="center" backgroundColor="yellow" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" />
<ePixmap name="chekgreen" position="323,218" zPosition="4" size="24,24" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/checkok.png" transparent="1" alphatest="on" />
<ePixmap name="yellow" position="511,209" zPosition="4" size="140,40" pixmap="skin_default/buttons/yellow.png" transparent="1" alphatest="on" />
</screen>''' % (_('AccuWeatherWeatherPlugin: Select Source'), ' ')

    def __init__(self, session):
        Screen.__init__(self, session)
        self.session = session
        self['key_green'] = Button(_('Default'))
        self['key_yellow'] = Button(_('Setup'))
        self['list'] = IniciaSelList2([])
        self['actions'] = ActionMap(['WizardActions', 'MenuActions', 'ShortcutActions'], {'ok': self.keyOK,
         'back': self.keyClose,
         'green': self.keyGreen,
         'yellow': self.keyYellow}, -1)
        self.onLayoutFinish.append(self.buildList)

    def buildList(self):
        # Only sources 0 (google) and 1 (foreca) are offered here.
        nlista = []
        nlista.append(IniciaSelListEntry2(0))
        nlista.append(IniciaSelListEntry2(1))
        self['list'].setList(nlista)
        self['list'].moveToIndex(config.plugins.AccuWeatherPlugin.default.value)

    def updateList(self):
        self.buildList()

    def keyClose(self):
        self.close()

    def keyGreen(self):
        # Persist the highlighted source as default and drop cached XML so
        # the next open refetches data for the new source.
        indice = self['list'].getSelectionIndex()
        config.plugins.AccuWeatherPlugin.default.value = indice
        config.plugins.AccuWeatherPlugin.default.save()
        os.system('rm -f /tmp/wf_acuspz.xml')
        os.system('rm -f /tmp/wf_forespz.xml')
        os.system('rm -f /tmp/wf_spz.xml')
        self.updateList()

    def keyOK(self):
        # Open the weather screen matching the highlighted source.
        indice = self['list'].getSelectionIndex()
        if indice == 0:
            self.session.open(AccuWeatherPlugin)
        elif indice == 1:
            self.session.open(ForecaWeatherPlugin)

    def keyYellow(self):
        # Open the setup screen matching the highlighted source.
        indice = self['list'].getSelectionIndex()
        if indice == 0:
            self.session.openWithCallback(self.setupFinished, WeatherPluginEntriesListConfigScreen)
        elif indice == 1:
            self.session.openWithCallback(self.setupFinished, WeatherPluginEntriesListConfigScreenfore)

    def setupFinished(self, asnw, ans2):
        pass
class ForecaWeatherPlugin(Screen):
skin = '''<screen name="ForecaWeatherPlugin" position="center,65" size="970,625" title="AccuWeather - foreca.com">
<widget name="lugar" position="36,3" zPosition="2" size="512,28" font="Regular;23" transparent="1" valign="center" backgroundColor="#000000" />
<widget name="info_entradas" position="753,3" zPosition="3" size="166,22" font="Regular; 16" transparent="1" halign="right" backgroundColor="#000000" />
<widget name="current_icon" position="7,46" zPosition="1" size="74,74" alphatest="blend" />\n
<widget name="temperatura" position="98,79" zPosition="3" size="98,30" font="Regular; 26" transparent="1" noWrap="1" halign="left" backgroundColor="#000000" />
<widget name="condicion" position="201,48" zPosition="1" size="270,225" font="Regular; 18" transparent="1" backgroundColor="#000000" valign="top" />
<widget name="viento" position="7,147" zPosition="1" size="191,65" font="Regular;17" transparent="1" backgroundColor="#000000" valign="top" />
<eLabel name="lin0" position="473,25" size="1,250" zPosition="1" backgroundColor="#05303030" />
<widget name="dia1" position="561,29" zPosition="1" size="400,25" halign="left" valign="center" font="Regular; 22" transparent="1" backgroundColor="#000000" />
<widget name="icono_dia1" position="479,29" zPosition="1" size="79,79" alphatest="blend" />
<widget name="info_dia1" position="561,53" zPosition="1" size="412,55" halign="left" valign="top" font="Regular; 17" transparent="1" backgroundColor="#000000" />
<widget name="dia2" position="561,112" zPosition="1" size="400,25" halign="left" valign="center" font="Regular; 22" transparent="1" backgroundColor="#000000" />
<widget name="icono_dia2" position="479,112" zPosition="1" size="79,79" alphatest="blend" />
<widget name="info_dia2" position="561,136" zPosition="1" size="412,55" halign="left" valign="top" font="Regular; 17" transparent="1" backgroundColor="#000000" />
<widget name="dia3" position="561,194" zPosition="1" size="400,25" halign="left" valign="center" font="Regular; 22" transparent="1" backgroundColor="#000000" />
<widget name="icono_dia3" position="479,194" zPosition="1" size="79,79" alphatest="blend" />
<widget name="info_dia3" position="561,218" zPosition="1" size="412,55" halign="left" valign="top" font="Regular; 17" transparent="1" backgroundColor="#000000" />
<widget name="statustext" position="0,23" zPosition="2" size="970,605" font="Regular;20" halign="center" valign="center" transparent="1" backgroundColor="#10bfbfbf" />
<widget name="ico_left" position="15,3" size="16,26" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/nm_left-fs8.png" alphatest="blend" zPosition="1" />
<widget name="ico_right" position="930,3" size="16,26" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/nm_right-fs8.png" alphatest="blend" zPosition="1" />
<widget name="ico_menu" position="750,355" size="35,25" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/menu.png" alphatest="blend" zPosition="1" />
<widget name="key_menu" position="786,356" size="182,25" transparent="1" font="Regular; 16" zPosition="1" noWrap="1" />
<widget name="ico_l" position="750,413" size="35,25" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/izq-fs8.png" alphatest="blend" zPosition="1" />
<widget name="key_yellow" position="786,414" size="182,25" transparent="1" font="Regular; 16" zPosition="1" noWrap="1" />
<widget name="ico_r" position="750,444" size="35,25" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/dch-fs8.png" alphatest="blend" zPosition="1" />
<widget name="key_blue" position="786,445" size="181,25" transparent="1" font="Regular; 16" zPosition="1" noWrap="1" />
<eLabel name="lin1" position="10,278" size="951,1" zPosition="1" backgroundColor="#05303030" />
<eLabel name="lin2" position="726,288" size="1,330" zPosition="1" backgroundColor="#05303030" />
<widget name="ico_blue" position="750,544" size="35,25" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/green.png" alphatest="blend" zPosition="1" />
<widget name="key_green" position="786,545" size="182,25" transparent="1" font="Regular; 16" zPosition="1" noWrap="1" />
<widget name="satelite" position="132,59" size="720,480" alphatest="blend" zPosition="10" transparent="1" />
<widget name="statustext2" position="0,3" zPosition="8" size="970,627" font="Regular;20" halign="center" valign="center" transparent="0" />
<widget name="barra" position="11,288" zPosition="2" size="702,330" alphatest="blend" />
<widget name="info_barra" position="62,308" zPosition="1" size="600,290" font="Regular;20" halign="center" valign="center" transparent="0" />
<ePixmap name="logofuente" position="10,213" size="107,63" zPosition="1" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/icono1-fs8.png" transparent="1" alphatest="blend" />
<widget name="le_barra" position="132,542" zPosition="10" size="600,35" alphatest="blend" />
<widget name="ico_temp" position="59,46" zPosition="2" size="74,74" alphatest="blend" />
<widget name="fondo_detalle" position="0,31" size="948,594" alphatest="blend" zPosition="10" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/fondolista-fs8.png"/>
<widget name="detallado" position="0,31" zPosition="8" size="970,594" scrollbarMode="showOnDemand" />
<widget name="ico_red" position="750,504" size="35,25" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/red.png" alphatest="blend" zPosition="1" />
<widget name="key_red" position="786,505" size="182,25" transparent="1" font="Regular; 16" zPosition="1" noWrap="1" />
<widget name="ico_vie" position="5,132" zPosition="2" size="29,40" alphatest="blend" />
<widget name="barrapix_arr" position="0,31" zPosition="9" size="970,594" alphatest="blend" transparent="1" />
<widget name="barrapix_abj" position="0,0" zPosition="9" size="20,20" alphatest="blend" transparent="1" />
</screen>'''
    def __init__(self, session):
        """Build the foreca.com weather screen: key bindings, all display
        widgets, the icon cache directory and the first configured
        location (if any)."""
        self.session = session
        Screen.__init__(self, session)
        self['actions'] = ActionMap(['WizardActions',
         'DirectionActions',
         'ColorActions',
         'EPGSelectActions',
         'MenuActions'], {'back': self.exit,
         'input_date_time': self.config,
         'menu': self.config,
         'right': self.nextItem,
         'blue': self.nextItem,
         'left': self.previousItem,
         'yellow': self.previousItem,
         'up': self.kup,
         'down': self.kdown,
         'green': self.selimagen,
         'red': self.pronostico}, -1)
        self.sisat = False  # satellite-image view active?
        self.listatitulos = []  # day titles for the detail list
        self['statustext'] = Label()
        self['statustext2'] = Label()
        self['detallado'] = IniciaSelDetalle([])
        self['fondo_detalle'] = Pixmap()
        self.detalle = False  # detailed-forecast view active?
        self['barrapix_arr'] = Pixmap()
        self['barrapix_abj'] = Pixmap()
        self['infosat'] = Pixmap()
        self['barra'] = WeatherIcon()
        self['ico_temp'] = Pixmap()
        self['ico_vie'] = Pixmap()
        self['le_barra'] = Pixmap()
        self['ico_menu'] = Pixmap()
        self['ico_left'] = Pixmap()
        self['ico_right'] = Pixmap()
        self['ico_r'] = Pixmap()
        self['ico_l'] = Pixmap()
        self['key_menu'] = Label(_('List'))
        self['key_yellow'] = Label(_('Previous'))
        self['key_blue'] = Label(_('Next'))
        self['current_icon'] = WeatherIcon()
        self['satelite'] = WeatherIcon()
        self['lugar'] = Label()
        self['info_barra'] = Label(_('Downloading Detailed image 5 day forecast'))
        self['info_entradas'] = Label()
        self['temperatura'] = Label()
        self['condicion'] = Label()
        self['viento'] = Label()
        self.descargaactiva = None  # icon download currently in flight
        self['key_green'] = Label(_('Images satellite') + '...')
        self['key_red'] = Label(_('Detailed forecast'))
        self['ico_blue'] = Pixmap()
        self['ico_red'] = Pixmap()
        # Three forecast-day slots shown on the right-hand side.
        for i in range(1, 4):
            self['dia%s' % i] = Label()
            self['icono_dia%s' % i] = WeatherIcon()
            self['info_dia%s' % i] = Label()
        # Local cache directory for downloaded weather icons.
        self.appdir = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/acuwheathericons/'
        if not os.path.exists(self.appdir):
            os.mkdir(self.appdir)
        self.maxdetalle = 3
        self.inidetalle = 3
        self.weatherPluginEntryIndex = -1
        self.weatherPluginEntryCount = config.plugins.AccuWeatherPlugin.foreentriescount.value
        if self.weatherPluginEntryCount >= 1:
            # Start with the first configured foreca location.
            self.weatherPluginEntry = config.plugins.AccuWeatherPlugin.foreEntries[0]
            self.weatherPluginEntryIndex = 1
        else:
            self.weatherPluginEntry = None
        self.descargando = False  # a page/image download is in progress
        self.diadetalle = 0
        self.onLayoutFinish.append(self.startRun)
        self.chequeado = False  # chequeaVacio already ran once
        self.onShow.append(self.chequeaVacio)
    def chequeaVacio(self):
        """On show: if no foreca locations are configured, open setup once;
        if still empty on a subsequent show, close the plugin."""
        if not self.chequeado and config.plugins.AccuWeatherPlugin.foreentriescount.value == 0:
            self.chequeado = True
            self.config()
            return
        # Re-read the count: it may have changed since the setup dialog.
        if config.plugins.AccuWeatherPlugin.foreentriescount.value == 0:
            self.exit()
def actualizaScrolls(self, forzar = False):
if self.detalle and not forzar:
weatherScrollBar(objectoself=self, nombrelista='barrapix', barra='barrapix', altoitem=25, imagen=True)
self['barrapix_arr'].show()
self['barrapix_abj'].show()
else:
self['barrapix_arr'].hide()
self['barrapix_abj'].hide()
def xmlCallbackPro(self, xmlstring, num):
"""Parse one day's detailed-forecast HTML page and append its rows to the list.

num -- 0-based day offset; day 0 resets the list, later days append.
After each page the next day is requested via devPro until maxdetalle.
"""
# NOTE(review): 'setalle' looks like a typo for 'detalle' (which is set
# further down) — it is never read anywhere; confirm before changing.
self.setalle = True
# Strip markup/whitespace noise so the field extractors below see flat text.
xmlstring = devStrTm(xmlstring, '<h4>', '<div class="datecopy">').replace('<strong>', '').replace('</strong>', '').replace('\t', '').replace(' ', '').replace(' ', '').replace('<br /><br />', '<br />').replace('<br /> <br />', '<br />').replace('\n', '').replace('°', '\xc2\xbaC')
if num == 0:
self.listatitulos = []
lista = []
else:
lista = self['detallado'].list
lafecha = devStrTm(xmlstring, '<h6>', '</h6>').replace('<span>', '').replace('</span>', '').strip()
if num == 0:
lafecha = _('Today') + ', ' + lafecha
elif num == 1:
lafecha = _('Tomorrow') + ', ' + lafecha
# Day separator row; the long dash tail is deliberate filler for the renderer.
lista.append(IniciaSelDetalleEntry('--- ' + lafecha + ' ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------'))
self.listatitulos.append(lafecha)
# One '<div class="row' fragment per forecast hour.
arrlista = xmlstring.split('<div class="row')
for ele in arrlista:
hora = devHtml(ele, 'class="c0"').replace('\t', '').replace('\n', '').replace('<span>', '').replace('</span>', '').strip()
if len(hora) > 3:
texto = hora
iconotiempo = devStrTm(ele, '<div class="c1">', '</div>').replace('\t', '').replace('\n', '').replace('<span>', '').replace('</span>', '').strip()
iconotiempo = self.gesImagenLista(iconotiempo)
temperatura = devStrTm(ele, '<div class="c4">', '</div>')
if '<span' in temperatura:
temperatura = devHtml(temperatura, 'span')
temperatura = temperatura.replace('\t', '').replace('<br />', '').replace('\n', '').replace('<span>', '').replace('</span>', '').strip()
viento = devHtml(ele, 'class="c2"').replace('\t', '').replace('\n', '').replace('<span>', '').replace('</span>', '').strip()
iconoviento = devStrTm(ele, '<img src="/img/symb-wind/', '.gif') + '-fs8.png'
viento = devHtml(ele, 'img src').replace('\t', '').replace('\n', '').replace('<span class="warm">', '').replace('<span>', '').replace('</span>', '').strip()
descripcion = devStrTm(ele, '<div class="c3">', '</div>').replace('\t', '').replace('\n', '').replace('<span>', '').replace('</span>', '').replace('<br />', 'x$x').replace('%x$x', '%, ').replace('x$x', '\n').strip()[:-1]
lista.append(IniciaSelDetalleEntry(texto, hora, iconotiempo, temperatura, iconoviento, viento, descripcion))
self.listatitulos.append(lafecha)
self['detallado'].setList(lista)
if num == 0:
try:
self.setTitle(self['lugar'].getText() + ' - ' + lafecha)
except:
pass
self['detallado'].moveToIndex(0)
# Last requested page reached: reveal the detail overlay.
if num >= self.maxdetalle - 1:
self['statustext2'].hide()
self.detalle = True
self['detallado'].show()
self['fondo_detalle'].show()
self.actualizaScrolls()
# Extra days were appended after the initial set: keep the cursor moving.
if num < self.maxdetalle and self.maxdetalle != self.inidetalle:
try:
self['detallado'].moveToIndex(self['detallado'].getSelectionIndex() + 1)
except:
pass
try:
self.setTitle(self['lugar'].getText() + ' - ' + self.listatitulos[self['detallado'].getSelectionIndex()])
except:
pass
# Chain the download of the next day's page until maxdetalle is reached.
if num >= self.maxdetalle:
pass
else:
self.devPro(num + 1)
def devPro(self, num = 0):
    """Request the detailed-forecast page for today + num days.

    num -- 0-based day offset from today; the page URL carries the target
    date as '?details=YYYYMMDD'. The response is chained to xmlCallbackPro.
    """
    from datetime import date, timedelta  # local import: file header is outside this view
    ciudad = self.weatherPluginEntry.city.value
    dominio = self.weatherPluginEntry.dominio.value
    pais = self.weatherPluginEntry.pais.value
    # Bug fix: the original added num to the current day-of-month and
    # concatenated, producing invalid dates across month boundaries
    # (e.g. '20240132'). Real date arithmetic rolls months/years over.
    cfecha = (date.today() + timedelta(days=num)).strftime('%Y%m%d')
    asp = '?details=' + cfecha
    url = 'http://www.foreca.%s/%s/%s%s' % (dominio,
     pais,
     ciudad,
     asp)
    txt = '...'
    # Progress hint includes how many pages are still pending.
    self['statustext2'].setText(_('Downloading') + ' ' + _('Detailed forecast') + '\n' + _('Wait') + txt + '[' + str(self.maxdetalle - num) + ']')
    try:
        self.setTitle(self['lugar'].getText() + ' - ' + _('Detailed forecast'))
    except:
        pass
    self.actualizaScrolls(True)
    getXML(url).addCallback(self.xmlCallbackPro, num).addErrback(self.error)
def pronostico(self):
"""Red-button handler: close any overlay, or start the detailed-forecast download."""
if self.detalle:
self.desdet()
return
if self.sisat:
self.dessat()
return
if self.diadetalle > 0:
self['statustext2'].setText(_('Downloading') + ' ' + _('Detailed forecast') + '\n' + _('Wait') + '')
self['statustext2'].show()
self.detalle = True
if self.weatherPluginEntry is not None:
# Reset the page budget and fetch day 0 first.
self.maxdetalle = self.inidetalle
self.devPro()
def selimagen(self):
"""Green-button handler: let the user pick which satellite image to show."""
if self.detalle:
return
if self.sisat:
return
contextFileList = [(_('Satellite'), 'sat'),
(_('Temperature'), 'temp'),
(_('Cloudiness'), 'cloud'),
(_('Amount of Precipitacion'), 'rain')]
# NOTE(review): 'dei' is never used — looks like leftover debugging.
dei = self.session.openWithCallback(self.SysExecution, ChoiceBox, title=_('Select satellite image') + ':', list=contextFileList)
def SysExecution(self, answer):
    """ChoiceBox callback: open the satellite view for the chosen map type.

    answer -- None when the box was cancelled, otherwise the (label, key)
    tuple; only the key is used. 'temp' and 'rain'/'pressure' select a
    legend-bar image number, other map types get none.
    """
    choice = answer[1] if answer else None
    if not choice:
        return
    legend = None
    if choice == 'temp':
        legend = '1'
    elif choice in ('rain', 'pressure'):
        legend = '2'
    self.muestrasat('?map=' + choice, legend)
def desdet(self):
"""Close the detailed-forecast overlay and restore the summary title."""
self['detallado'].hide()
self['fondo_detalle'].hide()
self.detalle = False
self['statustext2'].hide()
self.actualizaScrolls(True)
self.setTitle(_('AccuWeatherWeatherPlugin') + ' (foreca.com)')
def exit(self):
"""Exit key: close overlays first; only close the screen when none is open."""
if self.detalle:
self.desdet()
return
if self.sisat:
self.dessat()
return
self.close()
def kup(self):
"""Up key: page the detail list up (no-op outside the detail view)."""
if self.detalle:
self['detallado'].pageUp()
self.actualizapos()
return
def kdown(self):
"""Down key: page the detail list down (no-op outside the detail view)."""
if self.detalle:
self['detallado'].pageDown()
self.actualizapos()
return
def muestrasat(self, asp, numero):
"""Download and display a satellite/temperature/precipitation map.

asp    -- query string selecting the map type ('?map=...').
numero -- legend-bar image number to show alongside, or None for no bar.
"""
if self.sisat:
self.dessat()
return
appimagedir = '/usr/lib/enigma2/python/Plugins/Extensions/AccuWeather/image/'
if numero:
self['le_barra'].instance.setPixmapFromFile(appimagedir + 'barrafore' + numero + '.png')
self['le_barra'].show()
else:
self['le_barra'].hide()
self['statustext2'].setText(_('Downloading') + ' ' + _('Images satellite') + '\n' + _('Wait') + '...')
self['statustext2'].show()
self.sisat = True
if self.weatherPluginEntry is not None:
ciudad = self.weatherPluginEntry.city.value
dominio = self.weatherPluginEntry.dominio.value
pais = self.weatherPluginEntry.pais.value
url = 'http://www.foreca.%s/%s/%s%s' % (dominio,
pais,
ciudad,
asp)
getXML(url).addCallback(self.xmlCallbackSat).addErrback(self.error)
def startRun(self):
"""Layout-finish hook: reset the screen and start downloading the current entry.

With no configured location, shows a hint pointing to the Menu key instead.
"""
self['detallado'].hide()
self['fondo_detalle'].hide()
self.descargaactiva = None
self.diadetalle = 0
self.detalle = False
self['infosat'].hide()
self['satelite'].hide()
self['barra'].hide()
self['le_barra'].hide()
self['info_barra'].hide()
self['ico_temp'].hide()
self['ico_vie'].hide()
if self.weatherPluginEntry is not None:
ciudad = self.weatherPluginEntry.city.value
dominio = self.weatherPluginEntry.dominio.value
pais = self.weatherPluginEntry.pais.value
# Hide every button hint while downloading; xmlCallback re-shows them.
self['ico_menu'].hide()
self['key_menu'].hide()
self['ico_left'].hide()
self['ico_right'].hide()
self['ico_l'].hide()
self['ico_r'].hide()
self['key_yellow'].hide()
self['key_blue'].hide()
self['key_green'].hide()
self['ico_blue'].hide()
self['ico_red'].hide()
self['key_red'].hide()
self['statustext2'].hide()
self['statustext'].setText(_('Getting weather information...') + '\n[' + ciudad + ', ' + ' - ' + pais + ']')
cana = ''
if self.weatherPluginEntryIndex == 1:
cana = '(' + _('Default') + ') '
# "x of y" indicator for the stored locations.
self['info_entradas'].setText(cana + str(self.weatherPluginEntryIndex) + ' ' + _('of') + ' ' + str(self.weatherPluginEntryCount))
url = 'http://www.foreca.%s/%s/%s' % (dominio, pais, ciudad)
self.descargando = True
getXML(url).addCallback(self.xmlCallback).addErrback(self.error)
else:
self['statustext'].setText(_("No locations defined...\nPress 'Menu' to do that."))
self['ico_menu'].show()
self['key_menu'].show()
self['ico_left'].hide()
self['ico_right'].hide()
self['ico_l'].hide()
self['ico_r'].hide()
self['key_yellow'].hide()
self['key_blue'].hide()
self['key_green'].hide()
self['ico_red'].hide()
self['key_red'].hide()
self['ico_blue'].hide()
self['statustext'].show()
def dessat(self):
"""Close the satellite view and delete its temporary images."""
os.system('rm /tmp/foreca/*')
self['satelite'].hide()
self['statustext2'].hide()
self['infosat'].hide()
self['le_barra'].hide()
self.sisat = False
def actualizapos(self):
"""After a cursor move in the detail list: refresh the title, handle the end.

Reaching the last row either offers to download four more days (first
pass only) or wraps the cursor back to the top.
"""
numero = len(self['detallado'].list)
if numero <= 0:
return
indice = self['detallado'].getSelectionIndex()
try:
self.setTitle(self['lugar'].getText() + ' - ' + self.listatitulos[indice])
except:
pass
if numero == indice + 1:
if self.maxdetalle == self.inidetalle:
self.session.openWithCallback(self.cerrarcb, MessageBox, _('Want to download forecast of the next 4 days?'), default=True)
else:
self['detallado'].moveToIndex(0)
try:
self.setTitle(self['lugar'].getText() + ' - ' + self.listatitulos[0])
except:
pass
def cerrarcb(self, respuesta):
"""MessageBox callback: extend the forecast by four more days, or wrap to the top."""
if respuesta:
self.maxdetalle = self.maxdetalle + 4
self['detallado'].hide()
self['fondo_detalle'].hide()
self['statustext2'].show()
# Continue downloading from the first day not yet fetched.
self.devPro(self.inidetalle + 1)
else:
self['detallado'].moveToIndex(0)
try:
self.setTitle(self['lugar'].getText() + ' - ' + self.listatitulos[0])
except:
pass
def nextItem(self):
"""Blue key: switch to the next stored location (or page the detail list down)."""
if self.detalle:
self['detallado'].pageDown()
self.actualizapos()
return
if self.descargando:
return
if self.sisat:
self.dessat()
return
if self.weatherPluginEntryCount != 0:
# 1-based index, wrapping past the last entry.
if self.weatherPluginEntryIndex < self.weatherPluginEntryCount:
self.weatherPluginEntryIndex = self.weatherPluginEntryIndex + 1
else:
self.weatherPluginEntryIndex = 1
self.setItem()
def previousItem(self):
"""Yellow key: switch to the previous stored location (or page the detail list up)."""
if self.detalle:
self['detallado'].pageUp()
self.actualizapos()
return
if self.descargando:
return
if self.sisat:
self.dessat()
return
if self.weatherPluginEntryCount != 0:
# 1-based index, wrapping below the first entry.
if self.weatherPluginEntryIndex >= 2:
self.weatherPluginEntryIndex = self.weatherPluginEntryIndex - 1
else:
self.weatherPluginEntryIndex = self.weatherPluginEntryCount
self.setItem()
def setItem(self):
"""Load the entry at the 1-based weatherPluginEntryIndex and restart the download."""
self.weatherPluginEntry = config.plugins.AccuWeatherPlugin.foreEntries[self.weatherPluginEntryIndex - 1]
self.clearFields()
self.startRun()
def clearFields(self):
"""Blank all labels and hide all icons before loading a new location."""
self['le_barra'].hide()
self['lugar'].setText('')
self['info_entradas'].setText('')
self['temperatura'].setText('')
self['condicion'].setText('')
self['viento'].setText('')
self['info_barra'].hide()
self['barra'].hide()
self['key_green'].hide()
self['ico_blue'].hide()
self['ico_temp'].hide()
self['ico_vie'].hide()
self['ico_red'].hide()
self['key_red'].hide()
for i in range(1, 4):
self['dia%s' % i].setText('')
self['icono_dia%s' % i].hide()
self['info_dia%s' % i].setText('')
self['current_icon'].hide()
self['ico_menu'].hide()
self['key_menu'].hide()
self['ico_left'].hide()
self['ico_right'].hide()
self['ico_l'].hide()
self['ico_r'].hide()
self['key_yellow'].hide()
self['key_blue'].hide()
def errorIconDownload(self, error = None, item = None):
    """Errback for a single icon download: mark the item as failed.

    error -- the failure object (only its presence matters here).
    item  -- the WeatherIconItem being downloaded; flagged so that
             finishedIconDownload skips displaying it.
    """
    # Guard added: the original dereferenced item unconditionally, which
    # raised AttributeError when called without an item (the default).
    if item is not None:
        item.error = True
def finishedIconDownload(self, result, item):
    """Callback for a finished icon download: display it if still wanted.

    The icon is shown only when the download did not error and the item
    either carries no download tag (item.descarga is None) or its tag
    still matches the currently active download id, so stale downloads
    from a previously selected location are dropped silently.
    """
    if item.error:
        return
    # 'is None' instead of '== None' (PEP 8); short-circuit preserves the
    # original behaviour of not reading self.descargaactiva for untagged items.
    if item.descarga is None or item.descarga == self.descargaactiva:
        self.showIcon(item.index, item.filename)
def showIcon(self, nombre, filename):
"""Load 'filename' into the named WeatherIcon widget and make it visible."""
self[nombre].updateIcon(filename)
self[nombre].show()
self[nombre].cancel = False
def xmlCallbackSat(self, xmlstring):
    """Parse the satellite page and download the map image into /tmp/foreca.

    Extracts the image URL from the 'symb_clipcontainer' block (falling
    back to the 'animap' element), makes it absolute, and queues the
    download through a DeferredSemaphore; finishedIconDownload then shows
    it in the 'satelite' widget.
    """
    self['statustext2'].show()
    laimagen = devImagen(xmlstring, '<div id="symb_clipcontainer"')
    if len(laimagen) <= 2:
        laimagen = devImagen(xmlstring, 'id="animap"')
    url = laimagen
    nombre = 'satelite'
    if 'http://' not in url:
        url = 'http://www.foreca.' + self.weatherPluginEntry.dominio.value + url
    # str.split instead of the long-deprecated string.split module function.
    parts = url.split('/')
    os.system('mkdir /tmp/foreca')
    # Strip any query string from the basename before using it as a filename.
    filename = '/tmp/foreca/' + devStrTm('*' + parts[-1], '*', '?')
    IconDownloadList = []
    IconDownloadList.append(WeatherIconItem(url=url, filename=filename, index=nombre))
    if len(IconDownloadList) != 0:
        ds = defer.DeferredSemaphore(tokens=len(IconDownloadList))
        downloads = [ ds.run(download, item).addErrback(self.errorIconDownload, item).addCallback(self.finishedIconDownload, item) for item in IconDownloadList ]
        finished = defer.DeferredList(downloads).addErrback(self.error)
def xmlCallback(self, xmlstring):
"""Parse the main city page and populate the summary screen.

Fills location, current temperature/condition/wind, the three-day
outlook, and queues the meteogram bar image download.
"""
self.descargando = False
self['statustext'].hide()
# Download finished: re-enable the button hints.
self['ico_menu'].show()
self['key_menu'].show()
self['key_green'].show()
self['ico_blue'].show()
self['info_barra'].show()
self['ico_red'].show()
self['key_red'].show()
# Prev/next hints only make sense with more than one stored location.
if self.weatherPluginEntryCount > 1:
self['ico_left'].show()
self['ico_right'].show()
self['ico_l'].show()
self['ico_r'].show()
self['key_yellow'].show()
self['key_blue'].show()
self['lugar'].show()
self['temperatura'].show()
self['condicion'].show()
self['viento'].show()
for i in range(1, 4):
self['dia%s' % i].show()
self['info_dia%s' % i].show()
# Normalise the HTML before field extraction.
xmlstring = devStrTm(xmlstring, '<h1 class="entry-title"', '<div class="datecopy">').replace('<strong>', '').replace('</strong>', '').replace('\t', '').replace(' ', '').replace(' ', '').replace('<br /><br />', '<br />').replace('<br /> <br />', '<br />').replace('\n', '').replace('°', '\xc2\xbaC')
self['lugar'].setText(devStrTm(xmlstring, '>', '<').strip())
tempact = devHtml(xmlstring, 'warm txt-xxlarge').replace('\t', '').replace('\n', '').strip()
self['temperatura'].setText(tempact)
lacondicion = devStrTm(xmlstring, '<div class="right txt-tight">', '</div>').replace('<br />', '\n').replace('\n ', '\n')
self['condicion'].setText(lacondicion)
simirar = False
itemp = None
# Without a current-conditions symbol, fall back to a temperature icon.
if 'cc_symb">' not in xmlstring:
simirar = True
try:
tempact = tempact.split(' ')[0]
itemp = int(tempact)
except:
pass
if not itemp == None and not simirar:
self['ico_temp'].show()
filenamepng = self.appdir + devimagentemperatura(itemp)
self['ico_temp'].instance.setPixmapFromFile(filenamepng)
if not simirar:
itemp = None
temp = self.gesImagen('current_icon', xmlstring, simirar, itemp)
# Wind: direction abbreviation plus speed, with a matching arrow icon.
tempviento = devStrTm(xmlstring, '/img/symb-wind', '<br />')
dirviento = devStrTm(tempviento, 'alt="', '"')
dirviento = dirviento.replace('N', _('North')).replace('S', _('South')).replace('E', _('East')).replace('W', _('West')).replace('NorteOeste', 'NorOeste').replace('NorteEste', 'NorEste')
iviento = devStrTm(tempviento, '/>', '').replace('<br />', '')
if len(dirviento + ' ' + iviento) > 2:
self['viento'].setText(' ' + _('Wind') + ':\n' + dirviento + ' ' + iviento)
ivien = devStrTm(xmlstring, '/img/symb-wind/', '.gif') + '-fs8.png'
filenamepng = self.appdir + ivien
self['ico_vie'].instance.setPixmapFromFile(filenamepng)
self['ico_vie'].show()
else:
self['viento'].setText(' ')
# Three-day outlook columns (at most 3 entries are displayed).
tempdias = devStrTm(xmlstring, '<div class="c2">', '').replace('<br />', '')
arrdias = tempdias.split('<div class="c2_a">')
self.diadetalle = []
for i in range(1, len(arrdias)):
if i >= 4:
break
eltexto = arrdias[i]
texto = devStrTm(eltexto, 'title="', '"')
temp1 = devStrTm(eltexto, '<span>', '</span>')
temp2 = devStrTm(eltexto, '</span>', '</span>')
temp2 = devStrTm(temp2, '<span>', '</span>')
texto = texto + '\n' + temp1 + ' / ' + temp2
nombre = devHtml(eltexto, 'a href="')
self['dia%s' % i].setText(nombre)
temp = self.gesImagen('icono_dia%s' % i, eltexto)
self['info_dia%s' % i].setText(texto)
self['info_barra'].show()
self['barra'].hide()
# Meteogram bar image: downloaded asynchronously, tagged with the
# location id so stale downloads are ignored (see finishedIconDownload).
laimagen = devImagen(xmlstring, '"meteogram"')
nombre = 'barra'
url = 'http://www.foreca.' + self.weatherPluginEntry.dominio.value + laimagen
os.system('mkdir /tmp/foreca')
os.system('rm /tmp/foreca/barra*.png')
erid = devStrTm(url, 'loc_id=', '&')
self.descargaactiva = erid
t2 = time.localtime()
chora = str(time.strftime('%H%M', t2))
filename = '/tmp/foreca/barra_' + erid + '_' + chora + '.png'
IconDownloadList = []
# NOTE(review): replace('&', '&') is a no-op as written — it looks like an
# HTML-entity unescape ('&amp;' -> '&') mangled somewhere; confirm upstream.
IconDownloadList.append(WeatherIconItem(url=url.replace('&', '&'), filename=filename, index=nombre, descarga=erid))
if len(IconDownloadList) != 0:
ds = defer.DeferredSemaphore(tokens=len(IconDownloadList))
downloads = [ ds.run(download, item).addErrback(self.errNada, item).addCallback(self.finishedIconDownload, item) for item in IconDownloadList ]
finished = defer.DeferredList(downloads).addErrback(self.errNada)
# Write the first location's page to /tmp — presumably for consumption by
# another component; verify before removing. sourceEncoding/targetEncoding
# are set but never used.
if self.weatherPluginEntryIndex == 1:
try:
ertexto = xmlstring
sourceEncoding = 'utf-8'
targetEncoding = 'iso-8859-1'
f = open('/tmp/wf_forespz.xml', 'w')
f.write(ertexto)
f.close()
except:
pass
def errNada(self, error = None, item = None):
"""Silent errback for the meteogram download: just hide its info bar."""
self['info_barra'].hide()
def getImagen(self, codigo, mirarnoche = False):
    """Translate a Foreca symbol code (e.g. 'd210') into an icon filename.

    The leading character flags day ('d') or night ('n'); the remaining
    digits select the base icon. Unknown codes fall back to icon '5'.
    mirarnoche=True forces the night-time icon variant.
    """
    code_to_icon = {
        '111': '5', '000': '1', '100': '2', '200': '3', '210': '14',
        '211': '21', '212': '21', '220': '13', '221': '20', '222': '20',
        '240': '17', '300': '6', '310': '13', '311': '23', '312': '23',
        '320': '13', '321': '20', '322': '20', '340': '16', '400': '7',
        '410': '8', '411': '24', '412': '24', '420': '18', '421': '26',
        '422': '19', '430': '12', '431': '29', '432': '22', '440': '15',
    }
    valor = code_to_icon.get(codigo[1:], '5')
    if codigo[0] == 'n' or mirarnoche:
        # Map the day icon onto its night-time counterpart.
        valor = equivnoche(valor)
    return valor + '_int-fs8.png'
def gesImagenLista(self, xmlstring, mirarnoche = False, temperatura = None):
"""Resolve an icon filename for a detail-list row from an HTML fragment.

Unlike gesImagen, this returns the bare filename (no appdir prefix) —
presumably the list renderer joins the path itself; confirm before unifying.
"""
if temperatura == None:
tempxml = devStrTm(xmlstring, '<div class="symbol_', '"')
tempxml = devStrTm(tempxml, 'symbol_', '_')
# NOTE(review): mirarnoche is only re-evaluated when already truthy, so it
# acts as "allow night variant during night hours", not "force night".
if mirarnoche:
t2 = time.localtime()
chora = int(time.strftime('%H', t2))
if chora >= 21 or chora < 7:
mirarnoche = True
else:
mirarnoche = False
if len(tempxml) <= 2:
# No symbol code found: fall back to the generic 'd111' icon.
laimagen = self.getImagen('d111', mirarnoche)
else:
laimagen = self.getImagen(tempxml, mirarnoche)
else:
laimagen = devimagentemperatura(temperatura)
filename = laimagen
# Icons are pre-converted PNGs; normalise a possible .jpg name.
filenamepng = filename.replace('.jpg', '-fs8.png')
return filenamepng
def gesImagen(self, nombre, xmlstring, mirarnoche = False, temperatura = None):
"""Resolve an icon from an HTML fragment (or a temperature) and show it in widget 'nombre'.

temperatura, when given, selects a temperature-scale icon instead of a
weather symbol. The widget is hidden when the icon file does not exist.
"""
if temperatura == None:
tempxml = devStrTm(xmlstring, '<div class="symbol_', '</div>')
tempxml = devStrTm(tempxml, 'symbol_', '_')
# NOTE(review): mirarnoche is only re-evaluated when already truthy, so it
# acts as "allow night variant during night hours", not "force night".
if mirarnoche:
t2 = time.localtime()
chora = int(time.strftime('%H', t2))
if chora >= 21 or chora < 7:
mirarnoche = True
else:
mirarnoche = False
if len(tempxml) <= 2:
# No symbol code found: fall back to the generic 'd111' icon.
laimagen = self.getImagen('d111', mirarnoche)
else:
laimagen = self.getImagen(tempxml, mirarnoche)
else:
laimagen = devimagentemperatura(temperatura)
filename = self.appdir + laimagen
# Icons are pre-converted PNGs; normalise a possible .jpg name.
filenamepng = filename.replace('.jpg', '-fs8.png')
if os.path.exists(filenamepng):
self[nombre].instance.setPixmapFromFile(filenamepng)
self[nombre].show()
return
else:
self[nombre].hide()
return
def devsem(self, dia):
    """Expand a Spanish weekday abbreviation ('lun'..'dom') to its full name.

    For non-Spanish entries, or abbreviations not in the table, the input
    is upper-cased instead.
    """
    weekday_names = {
        'lun': 'Lunes',
        'mar': 'Martes',
        'mi\xc3\xa9': 'Mi\xc3\xa9rcoles',
        'jue': 'Jueves',
        'vie': 'Viernes',
        's\xc3\xa1b': 'S\xc3\xa1bado',
        'dom': 'Domingo',
    }
    if self.weatherPluginEntry.language.value == 'es':
        nombre = weekday_names.get(dia)
        if nombre is not None:
            return nombre
    return upper(dia)
def config(self):
"""Menu key: open the locations list/editor (closing any overlay first)."""
if self.detalle:
self.desdet()
return
if self.sisat:
self.dessat()
return
self.session.openWithCallback(self.setupFinished, WeatherPluginEntriesListConfigScreenfore)
def setupFinished(self, index, entry = None):
"""Config-screen callback: adopt the chosen entry and reload the screen.

index -- 0-based index of the selected entry (stored 1-based here).
entry -- the selected entry, or None to fall back to the first stored one.
"""
self.weatherPluginEntryCount = config.plugins.AccuWeatherPlugin.foreentriescount.value
if self.weatherPluginEntryCount >= 1:
if entry is not None:
self.weatherPluginEntry = entry
self.weatherPluginEntryIndex = index + 1
if self.weatherPluginEntry is None:
self.weatherPluginEntry = config.plugins.AccuWeatherPlugin.foreEntries[0]
self.weatherPluginEntryIndex = 1
else:
# All locations were deleted: -1 marks "no selection".
self.weatherPluginEntry = None
self.weatherPluginEntryIndex = -1
self.clearFields()
self.startRun()
def error(self, error = None):
"""Twisted errback: display the failure message in the status label."""
self.descargando = False
if error is not None:
self.clearFields()
self['statustext'].setText(str(error.getErrorMessage()))
self['statustext'].show()
self['statustext2'].hide()
class WeatherIcon(Pixmap):
"""Pixmap widget that loads and scales an icon file asynchronously via ePicLoad."""
def __init__(self):
Pixmap.__init__(self)
# Path of the icon currently decoded/displayed ('' = none yet).
self.IconFileName = ''
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.paintIconPixmapCB)
def reinicia(self):
# Recreate the decoder from scratch (drops any pending decode).
self.picload = None
self.IconFileName = ''
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.paintIconPixmapCB)
def onShow(self):
Pixmap.onShow(self)
try:
# Configure decode size/aspect from the widget geometry; may fail before
# the native instance exists, hence the blanket except.
sc = AVSwitch().getFramebufferScale()
self.picload.setPara((self.instance.size().width(),self.instance.size().height(),sc[0],sc[0],0,0,'#00000000'))
except:
pass
def paintIconPixmapCB(self, picInfo = None):
# Decoder callback: push the finished pixmap into the widget.
ptr = self.picload.getData()
if ptr != None:
self.instance.setPixmap(ptr.__deref__())
def updateIcon(self, filename):
# Start a decode only when the requested file actually changed.
new_IconFileName = filename
if self.IconFileName != new_IconFileName:
self.IconFileName = new_IconFileName
self.picload.startDecode(self.IconFileName)
|
postla/OpenNFR-E2
|
lib/python/Plugins/Extensions/AccuWeather/plugin.py
|
Python
|
gpl-2.0
| 51,351
|
#!flask/bin/python
# One-shot maintenance script (Python 2): rebuild the default article set
# from the Altmetric API. Deletes ALL existing Article rows first.
from app import db, models
from config import ALTMETRIC_KEY
import urllib2
import json
from app.scripts import fetch_articles
# Search term used to seed the default sample set.
query_term = 'genome'
try:
# Citations from the last day matching the query; needs a valid API key.
response = urllib2.urlopen(
'http://api.altmetric.com/v1/citations/1d?q={0}&key={1}'.format(query_term, ALTMETRIC_KEY))
except urllib2.HTTPError:
# Abort without touching the database when the query fails.
print "HTTPError: Query term not found. Database not modified. Aborting..."
exit()
# Wipe the previous default set before inserting the fresh one.
old_defaults = models.Article.query.all()
for a in old_defaults:
db.session.delete(a)
db.session.commit()
data = json.load(response)
articles = fetch_articles(data)
for article in articles:
article.default_set = True
# Skip duplicates already present in the database.
if article.is_unique():
db.session.add(article)
db.session.commit()
|
alexherns/sciral-ocf-dev
|
create_sample_set.py
|
Python
|
mit
| 730
|
def validate_user_id(api, params, user):
    """Return True iff the request params carry a whitelisted user_id.

    api and user are accepted for the validator call signature but unused.
    """
    allowed_ids = {'001', '002', '003'}
    return params.get('user_id') in allowed_ids
|
devashishsharma2302/testing-heroku-deployment
|
example/exampleapp/custom_validators.py
|
Python
|
mit
| 133
|
from sys import argv
# Advent of Code 2015, day 8 part 1: for each quoted string literal in the
# input file, add (characters of code) - (characters in memory).
total = 0
# 'with' guarantees the handle is closed; the original leaked it and also
# shadowed the builtins 'file' and 'sum'.
with open(argv[1]) as source:
    for line in source:
        literal = line.strip()
        # eval() turns the quoted/escaped literal into its in-memory string.
        # Acceptable only because puzzle input is trusted; never eval
        # untrusted data.
        total += len(literal) - len(eval(literal))
print(total)
|
burakozhan/adventofcode2015
|
8/8p1.py
|
Python
|
gpl-3.0
| 178
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest as ut
import logging
from ....datastructures.auxiliary import FileFrom
class TestFileFrom(ut.TestCase):
"""Unit tests for FileFrom, a file-like wrapper around an iterator."""
def setUp(self):
# Three fixtures: a valid string source, an invalid int source, and a
# non-iterator (a plain float).
str_generator = (str(i) for i in range(11))
int_generator = (i for i in range(2))
self.str_stream = FileFrom(str_generator)
self.int_stream = FileFrom(int_generator)
self.no_stream = FileFrom(12.3)
def test_read_like_file_str_works(self):
# Iterating the wrapper must yield the generator's strings in order.
should_be = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
actual = []
with self.str_stream as file:
for line in file:
actual.append(line)
self.assertListEqual(should_be, actual)
def test_read_like_file_int_fails(self):
# Non-string items must raise TypeError and log an explanatory error.
log_msg = ['ERROR:root:Line read from file-like object is not a'
' string. Was it created from a string iterator?']
err_msg = 'Line read from file-like object is not a string!'
with self.assertLogs(level=logging.ERROR) as log:
with self.assertRaises(TypeError, msg=err_msg) as err:
with self.int_stream as file:
for line in file:
_ = line
self.assertListEqual(log.output, log_msg)
self.assertEqual(err.msg, err_msg)
def test_readline_like_file_fails(self):
# A non-iterator source must raise TypeError on the first read attempt.
log_msg = ['ERROR:root:Failed to read line from file-like object.'
' Was it created from an iterator?']
err_msg = 'Object was not created from an iterator!'
with self.assertLogs(level=logging.ERROR) as log:
with self.assertRaises(TypeError, msg=err_msg) as err:
with self.no_stream as file:
for line in file:
_ = line
self.assertListEqual(log.output, log_msg)
self.assertEqual(err.msg, err_msg)
if __name__ == '__main__':
ut.main()
|
yedivanseven/bestPy
|
tests/datastructures/auxiliary/test_filefrom.py
|
Python
|
gpl-3.0
| 1,933
|
#
# QAPI visitor generator
#
# Copyright IBM, Corp. 2011
#
# Authors:
# Anthony Liguori <aliguori@us.ibm.com>
# Michael Roth <mdroth@linux.vnet.ibm.com>
#
# This work is licensed under the terms of the GNU GPLv2.
# See the COPYING.LIB file in the top-level directory.
from ordereddict import OrderedDict
from qapi import *
import sys
import os
import getopt
import errno
def generate_visit_struct_body(field_prefix, name, members):
"""Emit the C statements that visit one struct's members (recursing into nested structs)."""
ret = mcgen('''
if (!error_is_set(errp)) {
''')
push_indent()
# A non-empty field_prefix means this is a nested, inline struct member.
if len(field_prefix):
field_prefix = field_prefix + "."
ret += mcgen('''
Error **errp = &err; /* from outer scope */
Error *err = NULL;
visit_start_struct(m, NULL, "", "%(name)s", 0, &err);
''',
name=name)
else:
ret += mcgen('''
Error *err = NULL;
visit_start_struct(m, (void **)obj, "%(name)s", name, sizeof(%(name)s), &err);
''',
name=name)
ret += mcgen('''
if (!err) {
if (!obj || *obj) {
''')
push_indent()
push_indent()
# Optional members get a visit_start_optional/visit_end_optional bracket.
for argname, argentry, optional, structured in parse_args(members):
if optional:
ret += mcgen('''
visit_start_optional(m, obj ? &(*obj)->%(c_prefix)shas_%(c_name)s : NULL, "%(name)s", &err);
if (obj && (*obj)->%(prefix)shas_%(c_name)s) {
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
c_name=c_var(argname), name=argname)
push_indent()
if structured:
ret += generate_visit_struct_body(field_prefix + argname, argname, argentry)
else:
ret += mcgen('''
visit_type_%(type)s(m, obj ? &(*obj)->%(c_prefix)s%(c_name)s : NULL, "%(name)s", &err);
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
type=type_name(argentry), c_name=c_var(argname),
name=argname)
if optional:
pop_indent()
ret += mcgen('''
}
visit_end_optional(m, &err);
''')
pop_indent()
ret += mcgen('''
error_propagate(errp, err);
err = NULL;
}
''')
pop_indent()
pop_indent()
ret += mcgen('''
/* Always call end_struct if start_struct succeeded. */
visit_end_struct(m, &err);
}
error_propagate(errp, err);
}
''')
return ret
def generate_visit_struct(name, members):
"""Emit the complete visit_type_<name>() C function for a struct."""
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp)
{
''',
name=name)
push_indent()
ret += generate_visit_struct_body("", name, members)
pop_indent()
ret += mcgen('''
}
''')
return ret
def generate_visit_list(name, members):
"""Emit the visit_type_<name>List() C function that walks a GenericList of <name>."""
return mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList ** obj, const char *name, Error **errp)
{
GenericList *i, **prev = (GenericList **)obj;
Error *err = NULL;
if (!error_is_set(errp)) {
visit_start_list(m, name, &err);
if (!err) {
for (; (i = visit_next_list(m, prev, &err)) != NULL; prev = &i) {
%(name)sList *native_i = (%(name)sList *)i;
visit_type_%(name)s(m, &native_i->value, NULL, &err);
}
error_propagate(errp, err);
err = NULL;
/* Always call end_list if start_list succeeded. */
visit_end_list(m, &err);
}
error_propagate(errp, err);
}
}
''',
name=name)
def generate_visit_enum(name, members):
"""Emit the visit_type_<name>() C function for an enum (delegates to visit_type_enum)."""
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s * obj, const char *name, Error **errp)
{
visit_type_enum(m, (int *)obj, %(name)s_lookup, "%(name)s", name, errp);
}
''',
name=name)
def generate_visit_union(name, members):
"""Emit the visitors for a union: its discriminator enum plus the switch over variants."""
# The discriminator enum <name>Kind is emitted first.
ret = generate_visit_enum('%sKind' % name, members.keys())
ret += mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp)
{
Error *err = NULL;
if (!error_is_set(errp)) {
visit_start_struct(m, (void **)obj, "%(name)s", name, sizeof(%(name)s), &err);
if (!err) {
if (!obj || *obj) {
visit_type_%(name)sKind(m, &(*obj)->kind, "type", &err);
if (!err) {
switch ((*obj)->kind) {
''',
name=name)
push_indent()
push_indent()
# One case label per union variant.
for key in members:
ret += mcgen('''
case %(abbrev)s_KIND_%(enum)s:
visit_type_%(c_type)s(m, &(*obj)->%(c_name)s, "data", &err);
break;
''',
abbrev = de_camel_case(name).upper(),
enum = c_fun(de_camel_case(key),False).upper().lstrip('_'),
c_type=members[key],
c_name=c_fun(key))
ret += mcgen('''
default:
abort();
}
}
error_propagate(errp, err);
err = NULL;
}
''')
pop_indent()
ret += mcgen('''
/* Always call end_struct if start_struct succeeded. */
visit_end_struct(m, &err);
}
error_propagate(errp, err);
}
''')
pop_indent();
ret += mcgen('''
}
''')
return ret
def generate_declaration(name, members, genlist=True):
"""Emit header prototypes for a type's visitor (and, by default, its list visitor)."""
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp);
''',
name=name)
if genlist:
ret += mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList ** obj, const char *name, Error **errp);
''',
name=name)
return ret
def generate_decl_enum(name, members, genlist=True):
"""Emit the header prototype for an enum visitor (genlist is accepted but unused)."""
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s * obj, const char *name, Error **errp);
''',
name=name)
# Command-line driver (Python 2): parse options, read the schema from stdin,
# and emit qapi-visit.c / qapi-visit.h.
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "chp:o:",
["source", "header", "prefix=", "output-dir="])
except getopt.GetoptError, err:
print str(err)
sys.exit(1)
output_dir = ""
prefix = ""
c_file = 'qapi-visit.c'
h_file = 'qapi-visit.h'
do_c = False
do_h = False
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
elif o in ("-c", "--source"):
do_c = True
elif o in ("-h", "--header"):
do_h = True
# Default when neither -c nor -h is given: generate both files.
if not do_c and not do_h:
do_c = True
do_h = True
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
try:
os.makedirs(output_dir)
except os.error, e:
# Pre-existing output directory is fine; anything else is fatal.
if e.errno != errno.EEXIST:
raise
def maybe_open(really, name, opt):
"""Open the real file when 'really', else return a discardable StringIO."""
if really:
return open(name, opt)
else:
import StringIO
return StringIO.StringIO()
fdef = maybe_open(do_c, c_file, 'w')
fdecl = maybe_open(do_h, h_file, 'w')
# Fixed file preambles (license header, includes, header guard).
fdef.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor functions
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <aliguori@us.ibm.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "%(header)s"
''',
header=basename(h_file)))
fdecl.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor function
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <aliguori@us.ibm.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "qapi/qapi-visit-core.h"
#include "%(prefix)sqapi-types.h"
''',
prefix=prefix, guard=guardname(h_file)))
# One visitor (definitions + declarations) per schema expression.
exprs = parse_schema(sys.stdin)
for expr in exprs:
if expr.has_key('type'):
ret = generate_visit_struct(expr['type'], expr['data'])
ret += generate_visit_list(expr['type'], expr['data'])
fdef.write(ret)
ret = generate_declaration(expr['type'], expr['data'])
fdecl.write(ret)
elif expr.has_key('union'):
ret = generate_visit_union(expr['union'], expr['data'])
ret += generate_visit_list(expr['union'], expr['data'])
fdef.write(ret)
ret = generate_decl_enum('%sKind' % expr['union'], expr['data'].keys())
ret += generate_declaration(expr['union'], expr['data'])
fdecl.write(ret)
elif expr.has_key('enum'):
ret = generate_visit_enum(expr['enum'], expr['data'])
fdef.write(ret)
ret = generate_decl_enum(expr['enum'], expr['data'])
fdecl.write(ret)
fdecl.write('''
#endif
''')
fdecl.flush()
fdecl.close()
fdef.flush()
fdef.close()
|
mithleshvrts/qemu-kvm-rhel6
|
scripts/qapi-visit.py
|
Python
|
gpl-2.0
| 8,655
|
# coding=utf-8
import unittest
"""782. Transform to Chessboard
https://leetcode.com/problems/transform-to-chessboard/description/
An N x N `board` contains only `0`s and `1`s. In each move, you can swap any 2
rows with each other, or any 2 columns with each other.
What is the minimum number of moves to transform the board into a "chessboard"
\- a board where no `0`s and no `1`s are 4-directionally adjacent? If the task
is impossible, return -1.
**Examples:**
**Input:** board = [[0,1,1,0],[0,1,1,0],[1,0,0,1],[1,0,0,1]]
**Output:** 2
**Explanation:**
One potential sequence of moves is shown below, from left to right:
0110 1010 1010
0110 -- > 1010 --> 0101
1001 0101 1010
1001 0101 0101
The first move swaps the first and second column.
The second move swaps the second and third row.
**Input:** board = [[0, 1], [1, 0]]
**Output:** 0
**Explanation:**
Also note that the board with 0 in the top left corner,
01
10
is also a valid chessboard.
**Input:** board = [[1, 0], [1, 0]]
**Output:** -1
**Explanation:**
No matter what sequence of moves you make, you cannot end with a valid chessboard.
**Note:**
* `board` will have the same number of rows and columns, a number in the range `[2, 30]`.
* `board[i][j]` will be only `0`s or `1`s.
Similar Questions:
"""
class Solution(object):
    def movesToChessboard(self, board):
        """
        :type board: List[List[int]]
        :rtype: int

        Minimum number of row/column swaps to reach a chessboard, or -1.
        (The original stub had no implementation and returned None.)

        A reachable board has only two kinds of rows — bitwise complements
        of each other — each appearing N//2 or (N+1)//2 times, and the
        same for columns. Rows and columns can then be fixed independently
        by counting positions that disagree with the alternating target
        pattern; each swap repairs two mismatched positions.
        """
        n = len(board)
        # Every 2x2 sub-rectangle must xor to 0, i.e. each row is either
        # equal or complementary to the first row (and same for columns).
        for i in range(n):
            for j in range(n):
                if board[0][0] ^ board[i][0] ^ board[0][j] ^ board[i][j]:
                    return -1
        row_ones = sum(board[0])
        col_ones = sum(board[i][0] for i in range(n))
        if not n // 2 <= row_ones <= (n + 1) // 2:
            return -1
        if not n // 2 <= col_ones <= (n + 1) // 2:
            return -1
        # Count positions matching the pattern that starts with 0 — these
        # are mismatches against the pattern starting with 1.
        row_mism = sum(board[i][0] == i % 2 for i in range(n))
        col_mism = sum(board[0][j] == j % 2 for j in range(n))
        if n % 2:
            # Odd n: only one parity is achievable; take the even count.
            if row_mism % 2:
                row_mism = n - row_mism
            if col_mism % 2:
                col_mism = n - col_mism
        else:
            # Even n: both target patterns work; take the cheaper one.
            row_mism = min(row_mism, n - row_mism)
            col_mism = min(col_mism, n - col_mism)
        return (row_mism + col_mism) // 2

    def test(self):
        pass


if __name__ == "__main__":
    unittest.main()
|
openqt/algorithms
|
leetcode/python/lc782-transform-to-chessboard.py
|
Python
|
gpl-3.0
| 1,708
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-11 11:19
from __future__ import unicode_literals
from django.db import migrations, models
import yksi.custom_storages
class Migration(migrations.Migration):
    """Initial schema migration: creates the ``Candidate`` model."""

    # First migration of this app, so there is nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Candidate',
            fields=[
                # Standard auto-incrementing surrogate primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
                # Optional photo stored with the default storage backend.
                ('photo', models.FileField(blank=True, null=True, upload_to='candidate-photos')),
                # Optional photo stored via the project's SecureStorage backend.
                # NOTE(review): access semantics live in yksi.custom_storages —
                # confirm there what "secure" guarantees.
                ('secure_photo', models.FileField(blank=True, null=True, storage=yksi.custom_storages.SecureStorage(), upload_to='candidate-photos-secure')),
            ],
        ),
    ]
|
vchrisb/django-apps
|
yksi/django/candidate/migrations/0001_initial.py
|
Python
|
mit
| 904
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for tensor_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import sys
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class TensorUtilTest(test.TestCase, parameterized.TestCase):
def testFloat(self):
value = 10.0
t = tensor_util.make_tensor_proto(value)
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape {}
float_val: %.1f
""" % value, t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array(value, dtype=np.float32), a)
def testFloatN(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0])
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTyped(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], dtype=dtypes.float32)
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTypeCoerce(self):
t = tensor_util.make_tensor_proto([10, 20, 30], dtype=dtypes.float32)
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatTypeCoerceNdarray(self):
arr = np.asarray([10, 20, 30], dtype="int")
t = tensor_util.make_tensor_proto(arr, dtype=dtypes.float32)
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testFloatSizes(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[1, 3])
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([[10.0, 20.0, 30.0]], dtype=np.float32), a)
def testFloatSizes2(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], shape=[3, 1])
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } dim { size: 1 } }
tensor_content: "A \000\000A\240\000\000A\360\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_FLOAT
tensor_shape { dim { size: 3 } dim { size: 1 } }
tensor_content: "\000\000 A\000\000\240A\000\000\360A"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([[10.0], [20.0], [30.0]], dtype=np.float32), a)
def testFloatSizesLessValues(self):
t = tensor_util.make_tensor_proto(10.0, shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_FLOAT
tensor_shape { dim { size: 1 } dim { size: 3 } }
float_val: 10.0
""", t)
# No conversion to Ndarray for this one: not enough values.
def testFloatNpArrayFloat64(self):
t = tensor_util.make_tensor_proto(
np.array([[10.0, 20.0, 30.0]], dtype=np.float64))
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_DOUBLE
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "@$\000\000\000\000\000\000@4\000\000\000\000\000\000@>\000\000\000\000\000\000"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_DOUBLE
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000$@\000\000\000\000\000\0004@\000\000\000\000\000\000>@"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float64, a.dtype)
self.assertAllClose(
np.array([[10.0, 20.0, 30.0]], dtype=np.float64),
tensor_util.MakeNdarray(t))
  def testFloatTypesWithImplicitRepeat(self):
    """A value list shorter than the requested shape is padded by repeating
    the last supplied element (implicit repeat), for both float dtypes."""
    for dtype, nptype in [(dtypes.float32, np.float32),
                          (dtypes.float64, np.float64)]:
      t = tensor_util.make_tensor_proto([10.0], shape=[3, 4], dtype=dtype)
      a = tensor_util.MakeNdarray(t)
      # The single supplied value 10.0 fills the entire 3x4 array.
      self.assertAllClose(
          np.array(
              [[10.0, 10.0, 10.0, 10.0],
               [10.0, 10.0, 10.0, 10.0],
               [10.0, 10.0, 10.0, 10.0]],
              dtype=nptype),
          a)
  def testFloatMutateArray(self):
    """Mutating the ndarray returned by MakeNdarray must not write back
    into the TensorProto it came from."""
    t = tensor_util.make_tensor_proto([10.0, 20.0, 30.0], dtype=dtypes.float32)
    a = tensor_util.MakeNdarray(t)
    # Mutate the decoded array...
    a[0] = 5.0
    self.assertEqual(np.float32, a.dtype)
    self.assertAllClose(np.array([5.0, 20.0, 30.0], dtype=np.float32), a)
    # ...and verify the proto still encodes the original 10/20/30 bytes
    # (byte order of tensor_content depends on the host endianness).
    if sys.byteorder == "big":
      self.assertProtoEquals(r"""
        dtype: DT_FLOAT
        tensor_shape { dim { size: 3 } }
        tensor_content: "A \000\000A\240\000\000A\360\000\000"
        """, t)
    else:
      self.assertProtoEquals(r"""
        dtype: DT_FLOAT
        tensor_shape { dim { size: 3 } }
        tensor_content: "\000\000 A\000\000\240A\000\000\360A"
        """, t)
def testHalf(self):
t = tensor_util.make_tensor_proto(np.array([10.0, 20.0], dtype=np.float16))
self.assertProtoEquals(
"""
dtype: DT_HALF
tensor_shape { dim { size: 2 } }
tensor_content: "\000I\000M"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float16, a.dtype)
self.assertAllClose(np.array([10.0, 20.0], dtype=np.float16), a)
def testBfloat16(self):
test_type = dtypes.bfloat16.as_numpy_dtype
t = tensor_util.make_tensor_proto(np.array([10.0, 20.0], dtype=test_type))
# 10.0: 16672 = 010000010(130) 0100000: (1+0/2+1/4) * 2^(130-127)
# 20.0: 16800 = 010000011(131) 0100000: (1+0/2+1/4) * 2^(131-127)
self.assertProtoEquals("""
dtype: DT_BFLOAT16
tensor_shape {
dim {
size: 2
}
}
half_val: 16672
half_val: 16800
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(test_type, a.dtype)
self.assertAllClose(np.array([10.0, 20.0], dtype=test_type), a)
def testInt(self):
t = tensor_util.make_tensor_proto(10)
self.assertProtoEquals("""
dtype: DT_INT32
tensor_shape {}
int_val: 10
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.int32, a.dtype)
self.assertAllClose(np.array(10, dtype=np.int32), a)
def testLargeInt(self):
value = np.iinfo(np.int64).max
t = tensor_util.make_tensor_proto(value)
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
int64_val: %d
""" % value, t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.int64, a.dtype)
self.assertAllClose(np.array(value, dtype=np.int64), a)
  def testLargeNegativeInt(self):
    """A near-minimal int64 value round-trips as DT_INT64."""
    # We don't use the min np.int64 value here
    # because it breaks np.abs().
    #
    # np.iinfo(np.int64).min = -9223372036854775808
    # np.iinfo(np.int64).max = 9223372036854775807
    # np.abs(-9223372036854775808) = -9223372036854775808
    value = np.iinfo(np.int64).min + 1
    t = tensor_util.make_tensor_proto(value)
    self.assertProtoEquals("""
      dtype: DT_INT64
      tensor_shape {}
      int64_val: %d
      """ % value, t)
    a = tensor_util.MakeNdarray(t)
    self.assertEqual(np.int64, a.dtype)
    self.assertAllClose(np.array(value, dtype=np.int64), a)
def testIntNDefaultType(self):
t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_INT32
tensor_shape { dim { size: 2 } dim { size: 2 } }
tensor_content: "\000\000\000\n\000\000\000\024\000\000\000\036\000\000\000("
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_INT32
tensor_shape { dim { size: 2 } dim { size: 2 } }
tensor_content: "\n\000\000\000\024\000\000\000\036\000\000\000(\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.int32, a.dtype)
self.assertAllClose(np.array([[10, 20], [30, 40]], dtype=np.int32), a)
@parameterized.named_parameters(
("_int8", dtypes.int8, np.int8), ("_int16", dtypes.int16, np.int16),
("_int32", dtypes.int32, np.int32), ("_int64", dtypes.int64, np.int64),
("_uint8", dtypes.uint8, np.uint8), ("_uint16", dtypes.uint16, np.uint16),
("_uint32", dtypes.uint32, np.uint32),
("_uint64", dtypes.uint64, np.uint64))
def testIntTypes(self, dtype, nptype):
# Test with array.
t = tensor_util.make_tensor_proto([10, 20, 30], dtype=dtype)
self.assertEqual(dtype, t.dtype)
self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
a = tensor_util.MakeNdarray(t)
self.assertEqual(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
# Test with ndarray.
t = tensor_util.make_tensor_proto(np.array([10, 20, 30], dtype=nptype))
self.assertEqual(dtype, t.dtype)
self.assertProtoEquals("dim { size: 3 }", t.tensor_shape)
a = tensor_util.MakeNdarray(t)
self.assertEqual(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
@parameterized.named_parameters(
("_int8", dtypes.int8, np.int8), ("_int16", dtypes.int16, np.int16),
("_int32", dtypes.int32, np.int32), ("_int64", dtypes.int64, np.int64),
("_uint8", dtypes.uint8, np.uint8), ("_uint16", dtypes.uint16, np.uint16),
("_uint32", dtypes.uint32, np.uint32),
("_uint64", dtypes.uint64, np.uint64))
def testIntTypesWithImplicitRepeat(self, dtype, nptype):
self.assertAllEqual(
np.array([[10, 11, 12, 12], [12, 12, 12, 12], [12, 12, 12, 12]],
dtype=nptype),
tensor_util.MakeNdarray(
tensor_util.make_tensor_proto([10, 11, 12],
shape=[3, 4],
dtype=dtype)))
def testIntMixedWithDimension(self):
# Github issue: 11974
dtype = dtypes.int32
nptype = np.int32
t = tensor_util.make_tensor_proto(
[10, tensor_shape.Dimension(20), 30], dtype=dtype)
self.assertEqual(dtype, t.dtype)
a = tensor_util.MakeNdarray(t)
self.assertEqual(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
@parameterized.named_parameters(
("_int64", dtypes.int64, np.int64, "DT_INT64", "int64_val"),
("_uint64", dtypes.uint64, np.uint64, "DT_UINT64", "uint64_val"))
def testLong(self, dtype, nptype, proto_dtype, proto_value_name):
t = tensor_util.make_tensor_proto(10, dtype=dtype)
self.assertProtoEquals(
"""
dtype: %s
tensor_shape {}
%s: 10
""" % (proto_dtype, proto_value_name), t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(nptype, a.dtype)
self.assertAllClose(np.array(10, dtype=nptype), a)
@parameterized.named_parameters(
("_int64", dtypes.int64, np.int64, "DT_INT64"),
("_uint64", dtypes.uint64, np.uint64, "DT_UINT64"))
def testLongN(self, dtype, nptype, proto_dtype):
t = tensor_util.make_tensor_proto([10, 20, 30], shape=[1, 3], dtype=dtype)
if sys.byteorder == "big":
# pylint: disable=line-too-long
self.assertProtoEquals(
r"""
dtype: %s
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000\000\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036"
""" % proto_dtype, t)
# pylint: enable=line-too-long
else:
# pylint: disable=line-too-long
self.assertProtoEquals(
r"""
dtype: %s
tensor_shape { dim { size: 1 } dim { size: 3 } }
tensor_content: "\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000"
""" % proto_dtype, t)
# pylint: enable=line-too-long
a = tensor_util.MakeNdarray(t)
self.assertEqual(nptype, a.dtype)
self.assertAllClose(np.array([[10, 20, 30]], dtype=nptype), a)
@parameterized.named_parameters(("_int64", np.int64, "DT_INT64"),
("_uint64", np.uint64, "DT_UINT64"))
def testLongNpArray(self, nptype, proto_dtype):
t = tensor_util.make_tensor_proto(np.array([10, 20, 30], dtype=nptype))
if sys.byteorder == "big":
# pylint: disable=line-too-long
self.assertProtoEquals(
r"""
dtype: %s
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000\000\000\000\000\000\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036"
""" % proto_dtype, t)
# pylint: enable=line-too-long
else:
# pylint: disable=line-too-long
self.assertProtoEquals(
r"""
dtype: %s
tensor_shape { dim { size: 3 } }
tensor_content: "\n\000\000\000\000\000\000\000\024\000\000\000\000\000\000\000\036\000\000\000\000\000\000\000"
""" % proto_dtype, t)
# pylint: enable=line-too-long
a = tensor_util.MakeNdarray(t)
self.assertEqual(nptype, a.dtype)
self.assertAllClose(np.array([10, 20, 30], dtype=nptype), a)
def testQuantizedTypes(self):
# Test with array.
data = [(21,), (22,), (23,)]
t = tensor_util.make_tensor_proto(data, dtype=dtypes.qint32)
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_QINT32
tensor_shape { dim { size: 3 } }
tensor_content: "\000\000\000\025\000\000\000\026\000\000\000\027"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_QINT32
tensor_shape { dim { size: 3 } }
tensor_content: "\025\000\000\000\026\000\000\000\027\000\000\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(dtypes.qint32.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.quint8)
self.assertProtoEquals(r"""
dtype: DT_QUINT8
tensor_shape { dim { size: 3 } }
tensor_content: "\025\026\027"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(dtypes.quint8.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.qint8)
self.assertProtoEquals(r"""
dtype: DT_QINT8
tensor_shape { dim { size: 3 } }
tensor_content: "\025\026\027"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(dtypes.qint8.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.quint16)
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_QUINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\000\025\000\026\000\027"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_QUINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\025\000\026\000\027\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(dtypes.quint16.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
t = tensor_util.make_tensor_proto(data, dtype=dtypes.qint16)
if sys.byteorder == "big":
self.assertProtoEquals(r"""
dtype: DT_QINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\000\025\000\026\000\027"
""", t)
else:
self.assertProtoEquals(r"""
dtype: DT_QINT16
tensor_shape { dim { size: 3 } }
tensor_content: "\025\000\026\000\027\000"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(dtypes.qint16.as_numpy_dtype, a.dtype)
self.assertAllEqual(np.array(data, dtype=a.dtype), a)
def testString(self):
t = tensor_util.make_tensor_proto("foo")
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape {}
string_val: "foo"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertEqual([b"foo"], a)
def testStringWithImplicitRepeat(self):
t = tensor_util.make_tensor_proto(["f", "g"], shape=[3, 4])
a = tensor_util.MakeNdarray(t)
self.assertAllEqual(
np.array([[b"f", b"g", b"g", b"g"], [b"g", b"g", b"g", b"g"],
[b"g", b"g", b"g", b"g"]],
dtype=np.object_), a)
def testStringN(self):
t = tensor_util.make_tensor_proto([b"foo", b"bar", b"baz"], shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 1 } dim { size: 3 } }
string_val: "foo"
string_val: "bar"
string_val: "baz"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
def testStringNpArray(self):
t = tensor_util.make_tensor_proto(
np.array([[b"a", b"ab"], [b"abc", b"abcd"]]))
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 2 } dim { size: 2 } }
string_val: "a"
string_val: "ab"
string_val: "abc"
string_val: "abcd"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertAllEqual(np.array([[b"a", b"ab"], [b"abc", b"abcd"]]), a)
def testArrayMethod(self):
class Wrapper(object):
def __array__(self):
return np.array([b"foo", b"bar", b"baz"])
t = tensor_util.make_tensor_proto(Wrapper(), shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 1 } dim { size: 3 } }
string_val: "foo"
string_val: "bar"
string_val: "baz"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
def testArrayInterface(self):
class Wrapper(object):
@property
def __array_interface__(self):
return np.array([b"foo", b"bar", b"baz"]).__array_interface__
t = tensor_util.make_tensor_proto(Wrapper(), shape=[1, 3])
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 1 } dim { size: 3 } }
string_val: "foo"
string_val: "bar"
string_val: "baz"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertAllEqual(np.array([[b"foo", b"bar", b"baz"]]), a)
def testStringTuple(self):
t = tensor_util.make_tensor_proto((b"a", b"ab", b"abc", b"abcd"))
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 4 } }
string_val: "a"
string_val: "ab"
string_val: "abc"
string_val: "abcd"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertAllEqual(np.array((b"a", b"ab", b"abc", b"abcd")), a)
def testStringNestedTuple(self):
t = tensor_util.make_tensor_proto(((b"a", b"ab"), (b"abc", b"abcd")))
self.assertProtoEquals("""
dtype: DT_STRING
tensor_shape { dim { size: 2 } dim { size: 2 } }
string_val: "a"
string_val: "ab"
string_val: "abc"
string_val: "abcd"
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.object_, a.dtype)
self.assertAllEqual(np.array(((b"a", b"ab"), (b"abc", b"abcd"))), a)
def testComplex64(self):
t = tensor_util.make_tensor_proto((1 + 2j), dtype=dtypes.complex64)
self.assertProtoEquals("""
dtype: DT_COMPLEX64
tensor_shape {}
scomplex_val: 1
scomplex_val: 2
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.complex64, a.dtype)
self.assertAllEqual(np.array(1 + 2j), a)
def testComplex128(self):
t = tensor_util.make_tensor_proto((1 + 2j), dtype=dtypes.complex128)
self.assertProtoEquals("""
dtype: DT_COMPLEX128
tensor_shape {}
dcomplex_val: 1
dcomplex_val: 2
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.complex128, a.dtype)
self.assertAllEqual(np.array(1 + 2j), a)
def testComplexWithImplicitRepeat(self):
for dtype, np_dtype in [(dtypes.complex64, np.complex64),
(dtypes.complex128, np.complex128)]:
t = tensor_util.make_tensor_proto((1 + 1j), shape=[3, 4], dtype=dtype)
a = tensor_util.MakeNdarray(t)
self.assertAllClose(
np.array(
[[(1 + 1j), (1 + 1j), (1 + 1j), (1 + 1j)],
[(1 + 1j), (1 + 1j), (1 + 1j), (1 + 1j)],
[(1 + 1j), (1 + 1j), (1 + 1j), (1 + 1j)]],
dtype=np_dtype),
a)
def testComplex64N(self):
t = tensor_util.make_tensor_proto(
[(1 + 2j), (3 + 4j), (5 + 6j)], shape=[1, 3], dtype=dtypes.complex64)
self.assertProtoEquals("""
dtype: DT_COMPLEX64
tensor_shape { dim { size: 1 } dim { size: 3 } }
scomplex_val: 1
scomplex_val: 2
scomplex_val: 3
scomplex_val: 4
scomplex_val: 5
scomplex_val: 6
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.complex64, a.dtype)
self.assertAllEqual(np.array([[(1 + 2j), (3 + 4j), (5 + 6j)]]), a)
def testComplex128N(self):
t = tensor_util.make_tensor_proto(
[(1 + 2j), (3 + 4j), (5 + 6j)], shape=[1, 3], dtype=dtypes.complex128)
self.assertProtoEquals("""
dtype: DT_COMPLEX128
tensor_shape { dim { size: 1 } dim { size: 3 } }
dcomplex_val: 1
dcomplex_val: 2
dcomplex_val: 3
dcomplex_val: 4
dcomplex_val: 5
dcomplex_val: 6
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.complex128, a.dtype)
self.assertAllEqual(np.array([[(1 + 2j), (3 + 4j), (5 + 6j)]]), a)
def testComplex64NpArray(self):
t = tensor_util.make_tensor_proto(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]),
dtype=dtypes.complex64)
# scomplex_val are real_0, imag_0, real_1, imag_1, ...
self.assertProtoEquals("""
dtype: DT_COMPLEX64
tensor_shape { dim { size: 2 } dim { size: 2 } }
scomplex_val: 1
scomplex_val: 2
scomplex_val: 3
scomplex_val: 4
scomplex_val: 5
scomplex_val: 6
scomplex_val: 7
scomplex_val: 8
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.complex64, a.dtype)
self.assertAllEqual(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]), a)
def testComplex128NpArray(self):
t = tensor_util.make_tensor_proto(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]),
dtype=dtypes.complex128)
# scomplex_val are real_0, imag_0, real_1, imag_1, ...
self.assertProtoEquals("""
dtype: DT_COMPLEX128
tensor_shape { dim { size: 2 } dim { size: 2 } }
dcomplex_val: 1
dcomplex_val: 2
dcomplex_val: 3
dcomplex_val: 4
dcomplex_val: 5
dcomplex_val: 6
dcomplex_val: 7
dcomplex_val: 8
""", t)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.complex128, a.dtype)
self.assertAllEqual(
np.array([[(1 + 2j), (3 + 4j)], [(5 + 6j), (7 + 8j)]]), a)
def testNestedNumpyArrayWithoutDType(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, np.array(30.0)])
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testNestedNumpyArrayWithDType(self):
t = tensor_util.make_tensor_proto([10.0, 20.0, np.array(30.0)],
dtype=dtypes.float32)
a = tensor_util.MakeNdarray(t)
self.assertEqual(np.float32, a.dtype)
self.assertAllClose(np.array([10.0, 20.0, 30.0], dtype=np.float32), a)
def testUnsupportedDTypes(self):
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto(np.array([1]), 0)
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto(3, dtype=dtypes.qint8)
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto([3], dtype=dtypes.qint8)
# Validate the helpful error message when trying to convert an
# unconvertible list as strings.
with self.assertRaisesRegex(TypeError, "Failed to convert object"):
tensor_util.make_tensor_proto([tensor_shape.Dimension(1)])
def testTensorShapeVerification(self):
array = np.array([[1], [2]])
correct_shape = (2, 1)
incorrect_shape = (1, 2)
tensor_util.make_tensor_proto(array, shape=correct_shape, verify_shape=True)
with self.assertRaises(TypeError):
tensor_util.make_tensor_proto(
array, shape=incorrect_shape, verify_shape=True)
def testShapeTooLarge(self):
with self.assertRaises(ValueError):
tensor_util.make_tensor_proto(np.array([1, 2]), shape=[1])
def testLowRankSupported(self):
t = tensor_util.make_tensor_proto(np.array(7))
self.assertProtoEquals("""
dtype: DT_INT64
tensor_shape {}
int64_val: 7
""", t)
def testShapeEquals(self):
t = tensor_util.make_tensor_proto([10, 20, 30, 40], shape=[2, 2])
self.assertTrue(tensor_util.ShapeEquals(t, [2, 2]))
self.assertTrue(tensor_util.ShapeEquals(t, (2, 2)))
self.assertTrue(
tensor_util.ShapeEquals(t, tensor_shape.as_shape([2, 2]).as_proto()))
self.assertFalse(tensor_util.ShapeEquals(t, [5, 3]))
self.assertFalse(tensor_util.ShapeEquals(t, [1, 4]))
self.assertFalse(tensor_util.ShapeEquals(t, [4]))
@test_util.run_all_in_graph_and_eager_modes
class IsTensorTest(test.TestCase):
  """Tests for tensor_util.is_tf_type: TF-native objects (Tensor,
  RaggedTensor, SparseTensor, IndexedSlices, Variable) are recognized,
  while their evaluated numpy/value counterparts are not."""

  def testConstantTensor(self):
    np_val = np.random.rand(3).astype(np.int32)
    tf_val = constant_op.constant(np_val)
    # The raw ndarray is not a TF type; the constant tensor is.
    self.assertFalse(tensor_util.is_tf_type(np_val))
    self.assertTrue(tensor_util.is_tf_type(tf_val))

  def testRaggedTensor(self):
    rt = ragged_factory_ops.constant([[1, 2], [3]])
    rt_value = self.evaluate(rt)
    self.assertTrue(tensor_util.is_tf_type(rt))
    self.assertFalse(tensor_util.is_tf_type(rt_value))

  def testSparseTensor(self):
    st = sparse_tensor.SparseTensor([[1, 2]], [3], [10, 10])
    st_value = self.evaluate(st)
    self.assertTrue(tensor_util.is_tf_type(st))
    self.assertFalse(tensor_util.is_tf_type(st_value))

  def testIndexedSlices(self):
    x = indexed_slices.IndexedSlices(
        constant_op.constant([1, 2, 3]), constant_op.constant([10, 20, 30]))
    x_value = indexed_slices.IndexedSlicesValue(
        np.array([1, 2, 3]), np.array([10, 20, 30]), np.array([100]))
    self.assertTrue(tensor_util.is_tf_type(x))
    self.assertFalse(tensor_util.is_tf_type(x_value))

  def testVariable(self):
    v = variables.Variable([1, 2, 3])
    self.assertTrue(tensor_util.is_tf_type(v))
class ConstantValueTest(test.TestCase):
def testConstant(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
tf_val = constant_op.constant(np_val)
self.assertAllClose(np_val, tensor_util.constant_value(tf_val))
np_val = np.random.rand(3, 0, 7).astype(np.float32)
tf_val = constant_op.constant(np_val)
self.assertAllClose(np_val, tensor_util.constant_value(tf_val))
def testUnknown(self):
with ops.Graph().as_default():
tf_val = gen_state_ops.variable(
shape=[3, 4, 7],
dtype=dtypes.float32,
name="tf_val",
container="",
shared_name="")
self.assertIs(None, tensor_util.constant_value(tf_val))
def testShape(self):
np_val = np.array([1, 2, 3], dtype=np.int32)
tf_val = array_ops.shape(constant_op.constant(0.0, shape=[1, 2, 3]))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(np_val, c_val)
self.assertEqual(np.int32, c_val.dtype)
def testFill(self):
np_val = np.array([-1, -1, -1], dtype=np.float32)
tf_val = array_ops.fill([3], constant_op.constant(-1.0))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(np_val, c_val)
self.assertEqual(np.float32, c_val.dtype)
def testSize(self):
tf_val = array_ops.size(constant_op.constant(0.0, shape=[1, 2, 3]))
c_val = tensor_util.constant_value(tf_val)
self.assertEqual(6, c_val)
def testSizeOfScalar(self):
tf_val = array_ops.size(constant_op.constant(0.0))
c_val = tensor_util.constant_value(tf_val)
self.assertEqual(1, c_val)
self.assertIn(type(c_val), [np.ndarray, np.int32])
  def testRank(self):
    """constant_value folds array_ops.rank of a constant to the scalar 3."""
    tf_val = array_ops.rank(constant_op.constant(0.0, shape=[1, 2, 3]))
    c_val = tensor_util.constant_value(tf_val)
    self.assertIn(type(c_val), [np.ndarray, np.int32])
    # Rank is a scalar (shape ()), value 3 for a [1, 2, 3] tensor.
    self.assertEqual((), c_val.shape)
    self.assertEqual(3, c_val)
    # Repeat test using array_ops.rank_internal to avoid the optimization that
    # happens in the rank function.
    tf_val = array_ops.rank_internal(
        constant_op.constant(
            0.0, shape=[1, 2, 3]), optimize=False)
    c_val = tensor_util.constant_value(tf_val)
    self.assertIn(type(c_val), [np.ndarray, np.int32])
    self.assertEqual((), c_val.shape)
    self.assertEqual(3, c_val)
    # Also holds under list/array broadcast comparison.
    self.assertEqual([3], c_val)
def testCast(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
tf_val = math_ops.cast(constant_op.constant(np_val), dtypes.float64)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val.astype(np.float64), c_val)
np_val = np.random.rand(3, 0, 7).astype(np.float32)
tf_val = math_ops.cast(constant_op.constant(np_val), dtypes.float64)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val.astype(np.float64), c_val)
def testConcat(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
tf_val = array_ops.concat(
[np_val[0:1, :, :], np_val[1:2, :, :], np_val[2:3, :, :]], 0)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val, c_val)
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
tf_val = array_ops.concat(
[np_val[0, :, :], np_val[1, :, :], np_val[2, :, :]],
array_ops.placeholder(dtypes.int32))
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
tf_val = array_ops.concat([
np_val[0, :, :],
array_ops.placeholder(dtypes.float32), np_val[2, :, :]
], 1)
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
def testPack_Axis0(self):
inputs = [np.random.rand(4, 7) for _ in range(3)]
np_val = np.array(inputs)
tf_val = array_ops.stack(inputs)
c_val = tensor_util.constant_value(tf_val)
self.assertAllClose(np_val, c_val)
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
tf_val = array_ops.stack(
[inputs[0],
array_ops.placeholder(dtypes.float32), inputs[2]])
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
def testPack_Axis1(self):
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
inputs = [np.random.rand(4, 7) for _ in range(3)]
tf_val = array_ops.stack(inputs, axis=1)
c_val = tensor_util.constant_value(tf_val)
self.assertIsNone(c_val)
tf_val = array_ops.stack(
[inputs[0],
array_ops.placeholder(dtypes.float32), inputs[2]], axis=1)
c_val = tensor_util.constant_value(tf_val)
self.assertIs(None, c_val)
def testPack_Partial_Axis0(self):
input_ = np.random.rand(4, 7)
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
tf_val = array_ops.stack([input_, array_ops.placeholder(dtypes.float32)])
c_val = tensor_util.constant_value(tf_val, partial=True)
self.assertAllClose(input_, c_val[0])
self.assertIsNone(c_val[1])
def testPack_Partial_Axis1(self):
input_ = np.random.rand(4, 7)
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
tf_val = array_ops.stack(
[input_, array_ops.placeholder(dtypes.float32)], axis=1)
c_val = tensor_util.constant_value(tf_val, partial=True)
self.assertIsNone(c_val)
def testUnpack_Axis0(self):
inputs = np.random.rand(3, 4, 7)
tf_vals = array_ops.unstack(inputs)
c_vals = [tensor_util.constant_value(x) for x in tf_vals]
self.assertAllClose(inputs, c_vals)
def testUnpack_Partial_Axis0(self):
input_ = np.random.rand(4, 7)
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
packed = array_ops.stack([input_, array_ops.placeholder(dtypes.float32)])
tf_vals = array_ops.unstack(packed)
c_vals = [tensor_util.constant_value(x, partial=True) for x in tf_vals]
self.assertAllClose(input_, c_vals[0])
self.assertIsNone(c_vals[1])
def testSplit_Axis0(self):
inputs = np.random.rand(6, 5, 7)
tf_vals = array_ops.split(inputs, 3)
c_vals = [tensor_util.constant_value(x) for x in tf_vals]
self.assertAllClose(np.split(inputs, 3), c_vals)
def testSplit_Partial_Axis0(self):
input_ = np.random.rand(4, 7)
# This test needs a placeholder which means we need to construct a graph.
with ops.Graph().as_default():
placeholder = array_ops.placeholder(dtypes.float32, shape=(4, 7))
# it'd be better to use concat here, but concat doesn't support partial
packed = array_ops.stack([input_, placeholder])
tf_vals = array_ops.split(packed, 2)
c_vals = [tensor_util.constant_value(x, partial=True) for x in tf_vals]
self.assertAllClose(input_, c_vals[0][0])
self.assertIsNone(c_vals[1][0])
def testEqual(self):
# Scalar inputs.
tf_val = math_ops.equal(constant_op.constant(1), constant_op.constant(1))
self.assertEqual(tensor_util.constant_value(tf_val), True)
tf_val = math_ops.equal(constant_op.constant(1), constant_op.constant(0))
self.assertEqual(tensor_util.constant_value(tf_val), False)
# Shaped inputs with broadcast semantics.
tf_val = math_ops.equal(constant_op.constant([[0, 1]]),
constant_op.constant([[0], [1]]))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(c_val, [[True, False], [False, True]])
def testNotEqual(self):
# Scalar inputs.
tf_val = math_ops.not_equal(constant_op.constant(1),
constant_op.constant(1))
self.assertEqual(tensor_util.constant_value(tf_val), False)
tf_val = math_ops.not_equal(constant_op.constant(1),
constant_op.constant(0))
self.assertEqual(tensor_util.constant_value(tf_val), True)
# Shaped inputs with broadcast semantics.
tf_val = math_ops.not_equal(constant_op.constant([[0, 1]]),
constant_op.constant([[0], [1]]))
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(c_val, [[False, True], [True, False]])
def testStopGradient(self):
input_ = np.random.rand(4, 7)
tf_val = array_ops.stop_gradient(input_)
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(input_, c_val)
def testIdentity(self):
input_ = np.random.rand(4, 7)
tf_val = array_ops.identity(input_)
c_val = tensor_util.constant_value(tf_val)
self.assertAllEqual(input_, c_val)
def testLiteral(self):
x = "hi"
self.assertIs(x, tensor_util.constant_value(x))
def testNumpyNdarray(self):
np_val = np.random.rand(3, 4, 7).astype(np.float32)
self.assertIs(np_val, tensor_util.constant_value(np_val))
def testVariable(self):
var = variables.Variable(1.0, name="variable_node")
self.assertIsNone(tensor_util.constant_value(var))
def testVariableV1(self):
var = variables.VariableV1(1.0, name="variable_node")
self.assertIsNone(tensor_util.constant_value(var))
class ConstantValueAsShapeTest(test.TestCase):
  """Tests for tensor_util.constant_value_as_shape.

  Each test builds a tensor whose value is (partially) known at graph
  construction time and checks the TensorShape inferred from it; -1 and
  placeholder entries are expected to become unknown (None) dimensions.
  """
  @test_util.run_in_graph_and_eager_modes
  def testConstant(self):
    np_val = np.random.rand(3).astype(np.int32)
    tf_val = constant_op.constant(np_val)
    self.assertEqual(
        tensor_shape.TensorShape(np_val),
        tensor_util.constant_value_as_shape(tf_val))
    tf_val = constant_op.constant([], dtype=dtypes.int32)
    self.assertEqual(
        tensor_shape.TensorShape([]),
        tensor_util.constant_value_as_shape(tf_val))
  @test_util.run_in_graph_and_eager_modes
  def testCast(self):
    # Shape inference should look through a dtype cast.
    tf_val = math_ops.cast(
        array_ops.shape(constant_op.constant(0.0, shape=[1, 2, 3])),
        dtypes.int64)
    c_val = tensor_util.constant_value_as_shape(tf_val)
    self.assertEqual(tensor_shape.TensorShape([1, 2, 3]), c_val)
  @test_util.run_in_graph_and_eager_modes
  def testCastWithUnknown(self):
    # -1 entries become unknown dimensions, even through a cast.
    tf_val = math_ops.cast(constant_op.constant([-1, 1, -1]), dtypes.int64)
    c_val = tensor_util.constant_value_as_shape(tf_val)
    self.assertEqual([None, 1, None], c_val.as_list())
  @test_util.run_in_graph_and_eager_modes
  def testShape(self):
    tf_val = array_ops.shape(constant_op.constant(0.0, shape=[1, 2, 3]))
    c_val = tensor_util.constant_value_as_shape(tf_val)
    self.assertEqual(tensor_shape.TensorShape([1, 2, 3]), c_val)
  @test_util.run_in_graph_and_eager_modes
  def testMinusOneBecomesNone(self):
    tf_val = constant_op.constant([-1, 1, -1], shape=[3])
    c_val = tensor_util.constant_value_as_shape(tf_val)
    self.assertEqual([None, 1, None], c_val.as_list())
  def testPack(self):
    # This test needs a placeholder which means we need to construct a graph.
    with ops.Graph().as_default():
      tf_val = array_ops.stack(
          [constant_op.constant(16), 37,
           array_ops.placeholder(dtypes.int32)])
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([16, 37, None], c_val.as_list())
  def testConcat(self):
    # This test needs a placeholder which means we need to construct a graph.
    with ops.Graph().as_default():
      tf_val = array_ops.concat(
          [[16, 37], array_ops.placeholder(dtypes.int32, shape=(2,))], 0)
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([16, 37, None, None], c_val.as_list())
      tf_val = array_ops.concat(
          [[16, 37],
           array_ops.placeholder(dtypes.int32, shape=(1,)), [48]], 0)
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([16, 37, None, 48], c_val.as_list())
  def testSlice(self):
    # This test needs a placeholder which means we need to construct a graph.
    with ops.Graph().as_default():
      tf_val = array_ops.placeholder(dtypes.int32, shape=(4,))[0:2]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([None, None], c_val.as_list())
      # begin:end
      tf_val = constant_op.constant([10, 20, 30])[1:3]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([20, 30], c_val.as_list())
      # begin:end:stride
      tf_val = array_ops.strided_slice(
          constant_op.constant([10, 20, 30]), [1], [3], strides=[2])
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([20], c_val.as_list())
    # [1, 2, 16, 37, None, 48]
    # This test needs a placeholder which means we need to construct a graph.
    with ops.Graph().as_default():
      tf_val_orig = array_ops.concat(
          [[1, 2, 16, 37],
           array_ops.placeholder(dtypes.int32, shape=(1,)), [48]], 0)
      # begin: no end
      tf_val = tf_val_orig[2:]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([16, 37, None, 48], c_val.as_list())
      # begin::negative slice
      tf_val = tf_val_orig[2::-1]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([16, 2, 1], c_val.as_list())
      # :end:negative slice
      tf_val = tf_val_orig[:1:-2]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([48, 37], c_val.as_list())
      # begin:end:negative slice
      tf_val = tf_val_orig[3:1:-1]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([37, 16], c_val.as_list())
      # begin:negative end:slice
      tf_val = tf_val_orig[1:-3:1]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([2, 16], c_val.as_list())
      # negative begin::slice
      tf_val = tf_val_orig[-3::1]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([37, None, 48], c_val.as_list())
      # negative begin::negative slice
      tf_val = tf_val_orig[-3::-1]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([37, 16, 2, 1], c_val.as_list())
      # negative begin:negative end:negative slice
      tf_val = tf_val_orig[-3:-5:-1]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([37, 16], c_val.as_list())
      # Do not support shape inference for additional arguments
      tf_val = constant_op.constant([10, 20, 30])[...]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual([None, None, None], c_val.as_list())
      # Do not support shape inference for tensor slices.
      tf_val = constant_op.constant(
          [10, 20, 30])[array_ops.placeholder(dtypes.int32, shape=()):]
      c_val = tensor_util.constant_value_as_shape(tf_val)
      self.assertEqual(tensor_shape.unknown_shape(), c_val)
      # Do not support shape inference for higher rank
      with self.assertRaises(ValueError):
        tf_val = constant_op.constant([[10], [20], [30]])[:, 0:]
        c_val = tensor_util.constant_value_as_shape(tf_val)
class MaybeSetStaticShapeTest(test.TestCase):
  """Tests that the maybe_set_static_shape optimization does not change
  the set of ops produced when tracing a function."""
  @contextlib.contextmanager
  def disableSetStaticShape(self):
    # Temporarily switch off the private module flag, restoring the
    # previous value even if the body raises.
    flag_old = tensor_util._ENABLE_MAYBE_SET_STATIC_SHAPE
    tensor_util._ENABLE_MAYBE_SET_STATIC_SHAPE = False
    try:
      yield
    finally:
      tensor_util._ENABLE_MAYBE_SET_STATIC_SHAPE = flag_old
  def testMaybeSetStaticShape(self):
    shape = constant_op.constant([2, 5], dtype=dtypes.int32)
    def reshape():
      v = array_ops.zeros([10])
      return array_ops.reshape(v, shape)
    # This test needs a placeholder which means we need to construct a graph.
    with ops.Graph().as_default():
      with self.disableSetStaticShape():
        graph_without_shape_propagation = func_graph.func_graph_from_py_func(
            "without_shape_propagation", reshape, [], {})
      graph_with_shape_propagation = func_graph.func_graph_from_py_func(
          "with_shape_propagation", reshape, [], {})
      # The optimization must not add or remove ops from the traced graph.
      self.assertCountEqual(
          [op.type for op in graph_without_shape_propagation.get_operations()],
          [op.type for op in graph_with_shape_propagation.get_operations()])
  def testMaybeSetStaticShapeScalarShape(self):
    def reshape():
      v = array_ops.placeholder(dtypes.float32)
      t = array_ops.reshape(v, [-1])
      return t
    with self.disableSetStaticShape():
      graph_without_shape_propagation = func_graph.func_graph_from_py_func(
          "without_shape_propagation", reshape, [], {})
    graph_with_shape_propagation = func_graph.func_graph_from_py_func(
        "with_shape_propagation", reshape, [], {})
    # The optimization must not add or remove ops from the traced graph.
    self.assertCountEqual(
        [op.type for op in graph_without_shape_propagation.get_operations()],
        [op.type for op in graph_with_shape_propagation.get_operations()])
class ShapeTensorTest(test_util.TensorFlowTestCase):
  """Tests for tensor_util.shape_tensor."""
  @test_util.run_in_graph_and_eager_modes
  def testConversion(self):
    """Make sure fully known TensorShape objects convert to Tensors."""
    known_shape = tensor_shape.TensorShape([1, tensor_shape.Dimension(2)])
    as_tensor = tensor_util.shape_tensor(known_shape)
    self.assertAllEqual((1, 2), as_tensor)
# Standard test-module entry point.
if __name__ == "__main__":
  test.main()
|
frreiss/tensorflow-fred
|
tensorflow/python/framework/tensor_util_test.py
|
Python
|
apache-2.0
| 47,223
|
#!/usr/bin/python
"""Simulate a single VM: pick a random VM id up to the given limit and
push its stats to the local collector over TCP.

Usage: vm-sim.py <IP> <max_vm_id>
"""
import socket
import time
import sys
import random
from Vms import Vms

IP = sys.argv[1]
# Renamed from `range`, which shadowed the builtin of the same name.
max_vm_id = int(sys.argv[2])

vmInst = Vms(IP, random.randint(1, max_vm_id))

s = socket.socket()
port = 8899
host = '127.0.0.1'
s.connect((host, port))
try:
    buf = vmInst.getStats()
    s.send(buf)
finally:
    # Always release the socket, even if getStats()/send() raises.
    s.close()
|
nnadarajah/VMstats
|
vm-sim.py
|
Python
|
apache-2.0
| 296
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2016 Moisés López <moylop260@vauxoo.com>
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""Unit tests for the return documentation checking in the
`DocstringChecker` in :mod:`pylint.extensions.check_docs`
"""
from __future__ import division, print_function, absolute_import
import astroid
from pylint.testutils import CheckerTestCase, Message, set_config
from pylint.extensions.docparams import DocstringParameterChecker
class TestDocstringCheckerReturn(CheckerTestCase):
    """Tests for the return documentation checks of DocstringParameterChecker.

    Covers missing/partial/redundant ``returns`` and return-type
    documentation in Sphinx, Google and Numpy docstring styles.
    """
    CHECKER_CLASS = DocstringParameterChecker
    def test_ignores_no_docstring(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            return False #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    @set_config(accept_no_return_doc=False)
    def test_warns_no_docstring(self):
        node = astroid.extract_node('''
        def my_func(self):
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node),
            Message(msg_id='missing-return-type-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_ignores_unknown_style(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring."""
            return False #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_warn_partial_sphinx_returns(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :returns: Always False
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-type-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_partial_sphinx_returns_type(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :rtype: bool
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_missing_sphinx_returns(self):
        node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            :param doc_type: Sphinx
            :type doc_type: str
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node),
            Message(msg_id='missing-return-type-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_partial_google_returns(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                Always False
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-type-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_partial_google_returns_type(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                bool:
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_missing_google_returns(self):
        node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            Parameters:
                doc_type (str): Google
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node),
            Message(msg_id='missing-return-type-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_partial_numpy_returns_type(self):
        node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            Arguments
            ---------
            doc_type : str
                Numpy
            Returns
            -------
            bool
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warn_missing_numpy_returns(self):
        node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            Arguments
            ---------
            doc_type : str
                Numpy
            """
            return False
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node),
            Message(msg_id='missing-return-type-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_find_sphinx_returns(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :return: Always False
            :rtype: bool
            """
            return False #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_find_google_returns(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                bool: Always False
            """
            return False #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_find_numpy_returns(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            bool
                Always False
            """
            return False #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_ignores_sphinx_return_none(self):
        return_node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            :param doc_type: Sphinx
            :type doc_type: str
            """
            return #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_ignores_google_return_none(self):
        return_node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            Args:
                doc_type (str): Google
            """
            return #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_ignores_numpy_return_none(self):
        return_node = astroid.extract_node('''
        def my_func(self, doc_type):
            """This is a docstring.
            Arguments
            ---------
            doc_type : str
                Numpy
            """
            return #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_finds_sphinx_return_custom_class(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :returns: An object
            :rtype: :class:`mymodule.Class`
            """
            return mymodule.Class() #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_finds_google_return_custom_class(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                mymodule.Class: An object
            """
            return mymodule.Class() #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_finds_numpy_return_custom_class(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            mymodule.Class
                An object
            """
            return mymodule.Class() #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_finds_sphinx_return_list_of_custom_class(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :returns: An object
            :rtype: list(:class:`mymodule.Class`)
            """
            return [mymodule.Class()] #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_finds_google_return_list_of_custom_class(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                list(:class:`mymodule.Class`): An object
            """
            return [mymodule.Class()] #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_finds_numpy_return_list_of_custom_class(self):
        return_node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            list(:class:`mymodule.Class`)
                An object
            """
            return [mymodule.Class()] #@
        ''')
        with self.assertNoMessages():
            self.checker.visit_return(return_node)
    def test_warns_sphinx_return_list_of_custom_class_without_description(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :rtype: list(:class:`mymodule.Class`)
            """
            return [mymodule.Class()]
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warns_google_return_list_of_custom_class_without_description(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                list(:class:`mymodule.Class`):
            """
            return [mymodule.Class()]
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warns_numpy_return_list_of_custom_class_without_description(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            list(:class:`mymodule.Class`)
            """
            return [mymodule.Class()]
        ''')
        return_node = node.body[0]
        with self.assertAddsMessages(
            Message(msg_id='missing-return-doc', node=node)):
            self.checker.visit_return(return_node)
    def test_warns_sphinx_redundant_return_doc(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :returns: One
            """
            return None
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_warns_sphinx_redundant_rtype_doc(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :rtype: int
            """
            return None
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_warns_google_redundant_return_doc(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                One
            """
            return None
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_warns_google_redundant_rtype_doc(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                int:
            """
            return None
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_warns_numpy_redundant_return_doc(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            int
                One
            """
            return None
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_warns_numpy_redundant_rtype_doc(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            int
            """
            return None
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_ignores_sphinx_redundant_return_doc_multiple_returns(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            :returns: One
            :rtype: int
            :returns: None sometimes
            :rtype: None
            """
            if a_func():
                return None
            return 1
        ''')
        with self.assertNoMessages():
            self.checker.visit_functiondef(node)
    def test_ignores_google_redundant_return_doc_multiple_returns(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                int or None: One, or sometimes None.
            """
            if a_func():
                return None
            return 1
        ''')
        with self.assertNoMessages():
            self.checker.visit_functiondef(node)
    def test_ignores_numpy_redundant_return_doc_multiple_returns(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            int
                One
            None
                Sometimes
            """
            if a_func():
                return None
            return 1
        ''')
        with self.assertNoMessages():
            self.checker.visit_functiondef(node)
    def test_ignore_sphinx_redundant_return_doc_yield(self):
        node = astroid.extract_node('''
        def my_func_with_yield(self):
            """This is a docstring.
            :returns: One
            :rtype: generator
            """
            for value in range(3):
                yield value
        ''')
        with self.assertNoMessages():
            self.checker.visit_functiondef(node)
    def test_warns_google_redundant_return_doc_yield(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns:
                int: One
            """
            yield 1
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
    def test_warns_numpy_redundant_return_doc_yield(self):
        node = astroid.extract_node('''
        def my_func(self):
            """This is a docstring.
            Returns
            -------
            int
                One
            """
            yield 1
        ''')
        with self.assertAddsMessages(
            Message(msg_id='redundant-returns-doc', node=node)):
            self.checker.visit_functiondef(node)
|
arju88nair/projectCulminate
|
venv/lib/python3.5/site-packages/pylint/test/extensions/test_check_return_docs.py
|
Python
|
apache-2.0
| 17,279
|
from datetime import datetime
from _datetime import timedelta
from AlarmEntity import AlarmEntity
class AlarmTimeEntity():
    """An alarm occurrence built from a DAO row.

    Computes the next datetime (today or later this ISO week, rolling over
    to next week if the weekday has passed) at which the alarm fires.

    Expected positional row layout (from the visible accesses):
      [0] id, [1] ISO weekday (1=Mon..7=Sun), [2] time-of-day encoded as an
      integer H(H)MMSS, [4]/[5] values forwarded to AlarmEntity.
    """
    def __init__(self, alarmTimeDao):
        # Prefer the public module over the private `_datetime` C module
        # imported at file level.
        from datetime import timedelta

        self.id = alarmTimeDao[0]
        self.alarm = AlarmEntity((alarmTimeDao[4], alarmTimeDao[5]))
        dayAlarm = alarmTimeDao[1]
        # The time is stored as an integer, so leading zeros are lost
        # (e.g. 00:30:45 -> "3045").  Re-pad to 6 digits and parse
        # uniformly; this replaces the old len==6/len==5 two-branch logic
        # (same results for 5- and 6-digit values) and fixes hours == 0.
        timeAlarm = str(alarmTimeDao[2]).zfill(6)
        hoursAlarm = int(timeAlarm[:2])
        minutesAlarm = int(timeAlarm[2:4])
        secondsAlarm = int(timeAlarm[4:6])  # typo fix: was `secondssAlarm`
        self.datetime = datetime.today()
        self.datetime = datetime(self.datetime.year, self.datetime.month,
                    self.datetime.day, hoursAlarm, minutesAlarm, secondsAlarm)
        todayWeekday = self.datetime.isoweekday()
        # if the alarm is for a day in the next week
        if dayAlarm < todayWeekday:
            self.datetime += timedelta(days=((7 - todayWeekday) + dayAlarm))
        elif dayAlarm > todayWeekday:
            self.datetime += timedelta(days=dayAlarm - todayWeekday)

    def __str__(self):
        return "AlarmTimeEntity:: id: " + str(self.id) + "; datetime: " + str(self.datetime)
|
Guervyl/GVPAlarm-manager
|
AlarmTimeEntity.py
|
Python
|
gpl-3.0
| 1,357
|
#!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""A tool to aggregate data about Ansible source and testing into a sqlite DB for reporting."""
from __future__ import (absolute_import, print_function)
import argparse
import json
import os
import sqlite3
import sys
import yaml
# Location of the generated sqlite report database.
DATABASE_PATH = os.path.expanduser('~/.ansible/report.db')
# Repository root (parent of this script's directory), with a trailing slash.
BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) + '/'
ANSIBLE_PATH = os.path.join(BASE_PATH, 'lib')
ANSIBLE_TEST_PATH = os.path.join(BASE_PATH, 'test/lib')
# Make the in-tree ansible and ansible-test packages importable.
if ANSIBLE_PATH not in sys.path:
    sys.path.insert(0, ANSIBLE_PATH)
if ANSIBLE_TEST_PATH not in sys.path:
    sys.path.insert(0, ANSIBLE_TEST_PATH)
from ansible.module_utils.urls import open_url
from ansible.parsing.plugin_docs import read_docstring
from ansible_test._internal.target import walk_integration_targets
def main():
    """Entry point: chdir to the repo root and run the selected subcommand."""
    os.chdir(BASE_PATH)
    parse_args().func()
def parse_args():
    """Build the CLI parser and return the parsed arguments."""
    try:
        import argcomplete
    except ImportError:
        argcomplete = None

    parser = argparse.ArgumentParser()

    commands = parser.add_subparsers(metavar='COMMAND')
    commands.required = True  # work-around for python 3 bug which makes subparsers optional

    commands.add_parser(
        'populate',
        help='populate report database').set_defaults(func=populate_database)
    commands.add_parser(
        'query',
        help='query report database').set_defaults(func=query_database)

    if argcomplete:
        argcomplete.autocomplete(parser)

    return parser.parse_args()
def query_database():
    """Open an interactive sqlite3 shell on the report database."""
    if os.path.exists(DATABASE_PATH):
        # Replace the current process with the sqlite3 CLI; never returns.
        os.execvp('sqlite3', ('sqlite3', DATABASE_PATH))
    sys.exit('error: Database not found. Did you run `report.py populate` first?')
def populate_database():
    """Populate the report database from all data sources."""
    populate_modules()
    populate_coverage()
    populate_integration_targets()
def populate_modules():
    """Scan the module tree and fill the `modules` and `module_statuses` tables."""
    module_dir = os.path.join(BASE_PATH, 'lib/ansible/modules/')

    modules_rows = []
    module_statuses_rows = []

    for root, _dir_names, file_names in os.walk(module_dir):
        for file_name in file_names:
            module, extension = os.path.splitext(file_name)

            if module == '__init__' or extension != '.py':
                continue

            # Deprecated modules carry a leading underscore in the file name.
            if module.startswith('_'):
                module = module[1:]

            # Dotted namespace relative to the modules directory.
            # (Was os.path.join(root.replace(...)) -- a single-argument
            # os.path.join is a no-op, so it is dropped.)
            namespace = root.replace(module_dir, '').replace('/', '.')
            path = os.path.join(root, file_name)

            result = read_docstring(path)

            metadata = result['metadata']
            doc = result['doc']

            if not metadata:
                if module == 'async_wrapper':
                    continue
                raise Exception('no metadata for: %s' % path)

            modules_rows.append(dict(
                module=module,
                namespace=namespace,
                path=path.replace(BASE_PATH, ''),
                supported_by=metadata['supported_by'],
                version_added=str(doc.get('version_added', '')) if doc else '',
            ))

            for status in metadata['status']:
                module_statuses_rows.append(dict(
                    module=module,
                    status=status,
                ))

    populate_data(dict(
        modules=dict(
            rows=modules_rows,
            schema=(
                ('module', 'TEXT'),
                ('namespace', 'TEXT'),
                ('path', 'TEXT'),
                ('supported_by', 'TEXT'),
                ('version_added', 'TEXT'),
            )),
        module_statuses=dict(
            rows=module_statuses_rows,
            schema=(
                ('module', 'TEXT'),
                ('status', 'TEXT'),
            )),
    ))
def populate_coverage():
    """Fetch per-file coverage from codecov.io and fill the `coverage` table."""
    response = open_url('https://codecov.io/api/gh/ansible/ansible/tree/devel/?src=extension')
    files = json.load(response)['commit']['report']['files']

    coverage_rows = []
    for file_path, file_data in files.items():
        totals = file_data['t']
        coverage_rows.append(dict(
            path=file_path,
            coverage=float(totals['c']),
            lines=totals['n'],
            hit=totals['h'],
            partial=totals['p'],
            missed=totals['m'],
        ))

    populate_data(dict(
        coverage=dict(
            rows=coverage_rows,
            schema=(
                ('path', 'TEXT'),
                ('coverage', 'REAL'),
                ('lines', 'INTEGER'),
                ('hit', 'INTEGER'),
                ('partial', 'INTEGER'),
                ('missed', 'INTEGER'),
            )),
    ))
def populate_integration_targets():
    """Collect integration test targets and fill the three target tables."""
    targets = list(walk_integration_targets())

    target_rows = []
    alias_rows = []
    module_rows = []

    # Single pass over targets; per-table row order matches a per-table scan.
    for target in targets:
        target_rows.append(dict(
            target=target.name,
            type=target.type,
            path=target.path,
            script_path=target.script_path,
        ))
        for alias in target.aliases:
            alias_rows.append(dict(target=target.name, alias=alias))
        for module in target.modules:
            module_rows.append(dict(target=target.name, module=module))

    populate_data(dict(
        integration_targets=dict(
            rows=target_rows,
            schema=(
                ('target', 'TEXT'),
                ('type', 'TEXT'),
                ('path', 'TEXT'),
                ('script_path', 'TEXT'),
            )),
        integration_target_aliases=dict(
            rows=alias_rows,
            schema=(
                ('target', 'TEXT'),
                ('alias', 'TEXT'),
            )),
        integration_target_modules=dict(
            rows=module_rows,
            schema=(
                ('target', 'TEXT'),
                ('module', 'TEXT'),
            )),
    ))
def create_table(cursor, name, columns):
    """Drop and recreate table `name` from (column, type) pairs."""
    column_defs = ', '.join('%s %s' % pair for pair in columns)
    cursor.execute('DROP TABLE IF EXISTS %s' % name)
    cursor.execute('CREATE TABLE %s (%s)' % (name, column_defs))
def populate_table(cursor, rows, name, columns):
    """Recreate table `name` and insert every row (dicts keyed by column)."""
    create_table(cursor, name, columns)
    # Named-style placeholders matching the row dict keys.
    placeholders = ', '.join(':%s' % column_name for column_name, _column_type in columns)
    for row in rows:
        cursor.execute('INSERT INTO %s VALUES (%s)' % (name, placeholders), row)
def populate_data(data):
    """Write every table described in `data` to the report database."""
    connection = sqlite3.connect(DATABASE_PATH)
    cursor = connection.cursor()

    for name, table in data.items():
        populate_table(cursor, table['rows'], name, table['schema'])

    connection.commit()
    connection.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
anryko/ansible
|
hacking/report.py
|
Python
|
gpl-3.0
| 6,781
|
# a waf tool to add autoconf-like macros to the configure section
import os, sys
import Build, Options, preproc, Logs
from Configure import conf
from TaskGen import feature
from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p
# Headers a previous CHECK_HEADER call failed to find; consulted to skip
# re-testing known-missing headers (unless a specific lib is supplied).
missing_headers = set()
####################################################
# some autoconf like helpers, to make the transition
# to waf a bit easier for those used to autoconf
# m4 files
@conf
def DEFINE(conf, d, v, add_to_cflags=False, quote=False):
    '''define a config option, optionally mirroring it into CCDEFINES'''
    conf.define(d, v, quote=quote)
    if not add_to_cflags:
        return
    conf.env.append_value('CCDEFINES', '%s=%s' % (d, str(v)))
def hlist_to_string(conf, headers=None):
    '''convert a headers list to a set of #include lines'''
    include_list = conf.env.hlist
    if headers:
        # Extend a copy so the configure state is left untouched.
        include_list = include_list[:] + TO_LIST(headers)
    return ''.join('#include <%s>\n' % h for h in include_list)
@conf
def COMPOUND_START(conf, msg):
    '''start a compound test'''
    # Null replacements that silence the per-subtest progress messages
    # while a compound test is in progress.
    def null_check_message_1(self,*k,**kw):
        return
    def null_check_message_2(self,*k,**kw):
        return
    # in_compound acts as a nesting counter; only the outermost call
    # prints the message and swaps in the null printers.
    v = getattr(conf.env, 'in_compound', [])
    if v != [] and v != 0:
        conf.env.in_compound = v + 1
        return
    conf.check_message_1(msg)
    # Save the real printers so COMPOUND_END can restore them.
    conf.saved_check_message_1 = conf.check_message_1
    conf.check_message_1 = null_check_message_1
    conf.saved_check_message_2 = conf.check_message_2
    conf.check_message_2 = null_check_message_2
    conf.env.in_compound = 1
@conf
def COMPOUND_END(conf, result):
    '''end a compound test and report its overall result'''
    conf.env.in_compound -= 1
    if conf.env.in_compound != 0:
        # Still inside a nested compound test; only the outermost call reports.
        return
    # Restore the real progress printers saved by COMPOUND_START.
    conf.check_message_1 = conf.saved_check_message_1
    conf.check_message_2 = conf.saved_check_message_2
    p = conf.check_message_2
    if result is True:
        p('ok')
    elif not result:
        p('not found', 'YELLOW')
    else:
        p(result)
@feature('nolink')
def nolink(self):
    '''using the nolink type in conf.check() allows us to avoid
    the link stage of a test, thus speeding it up for tests
    that where linking is not needed'''
    # Intentionally empty: registering the feature is all that is needed.
    pass
def CHECK_HEADER(conf, h, add_headers=False, lib=None):
    '''check for a header

    Defines HAVE_<HEADER> on success.  If add_headers is True the header is
    appended to conf.env.hlist so later checks include it.  Results are
    cached: known-missing headers are skipped (unless lib is given) and
    already-defined headers return immediately.
    '''
    if h in missing_headers and lib is None:
        return False
    # Derive the config define name, e.g. sys/socket.h -> HAVE_SYS_SOCKET_H.
    d = h.upper().replace('/', '_')
    d = d.replace('.', '_')
    d = d.replace('-', '_')
    d = 'HAVE_%s' % d
    if CONFIG_SET(conf, d):
        # Already checked successfully in an earlier run.
        if add_headers:
            if not h in conf.env.hlist:
                conf.env.hlist.append(h)
        return True
    (ccflags, ldflags, cpppath) = library_flags(conf, lib)
    hdrs = hlist_to_string(conf, headers=h)
    if lib is None:
        lib = ""
    # Compile-only probe ('nolink' feature skips the link stage).
    ret = conf.check(fragment='%s\nint main(void) { return 0; }' % hdrs,
                     type='nolink',
                     execute=0,
                     ccflags=ccflags,
                     mandatory=False,
                     includes=cpppath,
                     uselib=lib.upper(),
                     msg="Checking for header %s" % h)
    if not ret:
        missing_headers.add(h)
        return False
    conf.DEFINE(d, 1)
    if add_headers and not h in conf.env.hlist:
        conf.env.hlist.append(h)
    return ret
@conf
def CHECK_HEADERS(conf, headers, add_headers=False, together=False, lib=None):
    '''check for a list of headers

    when together==True, then the headers accumulate within this test.
    This is useful for interdependent headers
    '''
    accumulate = together and not add_headers
    if accumulate:
        # Accumulate found headers only for the duration of this call.
        saved_hlist = conf.env.hlist[:]
    effective_add = True if accumulate else add_headers
    ok = True
    for header in TO_LIST(headers):
        if not CHECK_HEADER(conf, header, effective_add, lib=lib):
            ok = False
    if accumulate:
        conf.env.hlist = saved_hlist
    return ok
def header_list(conf, headers=None, lib=None):
    '''form a list of headers which exist, as a string'''
    found = []
    if headers is not None:
        # keep only the headers that actually compile
        found = [h for h in TO_LIST(headers)
                 if CHECK_HEADER(conf, h, add_headers=False, lib=lib)]
    return hlist_to_string(conf, headers=found)
@conf
def CHECK_TYPE(conf, t, alternate=None, headers=None, define=None, lib=None, msg=None):
    '''check for a single type, optionally defining an alternate when absent'''
    define_name = define if define is not None else 'HAVE_' + t.upper().replace(' ', '_')
    check_msg = msg if msg is not None else 'Checking for %s' % t
    found = CHECK_CODE(conf,
                       '%s _x' % t,
                       define_name,
                       execute=False,
                       headers=headers,
                       local_include=False,
                       msg=check_msg,
                       lib=lib,
                       link=False)
    if alternate and not found:
        # fall back to defining the missing type as its alternate spelling
        conf.DEFINE(t, alternate)
    return found
@conf
def CHECK_TYPES(conf, list, headers=None, define=None, alternate=None, lib=None):
    '''check for a list of types; True only when every type is found'''
    results = [CHECK_TYPE(conf, t, headers=headers,
                          define=define, alternate=alternate, lib=lib)
               for t in TO_LIST(list)]
    return all(results)
@conf
def CHECK_TYPE_IN(conf, t, headers=None, alternate=None, define=None):
    '''check for a single type, pulling in the given headers first'''
    return CHECK_TYPE(conf, t,
                      headers=headers,
                      alternate=alternate,
                      define=define)
@conf
def CHECK_VARIABLE(conf, v, define=None, always=False,
                   headers=None, msg=None, lib=None):
    '''check for a variable declaration (or define)
    v      : the variable (or macro) name to look for
    define : config define to set on success (defaults to HAVE_<V>)
    always : if True, also define it to 0 when not found
    '''
    if define is None:
        define = 'HAVE_%s' % v.upper()
    if msg is None:
        msg="Checking for variable %s" % v
    return CHECK_CODE(conf,
                      # we need to make sure the compiler doesn't
                      # optimize it out...
                      '''
                      #ifndef %s
                      void *_x; _x=(void *)&%s; return (int)_x;
                      #endif
                      return 0
                      ''' % (v, v),
                      execute=False,
                      link=False,
                      msg=msg,
                      local_include=False,
                      lib=lib,
                      headers=headers,
                      define=define,
                      always=always)
@conf
def CHECK_DECLS(conf, vars, reverse=False, headers=None, always=False):
    '''check a list of variable declarations, using the HAVE_DECL_xxx form
    of define
    When reverse==True then use HAVE_xxx_DECL instead of HAVE_DECL_xxx
    '''
    ret = True
    for v in TO_LIST(vars):
        if not reverse:
            define='HAVE_DECL_%s' % v.upper()
        else:
            define='HAVE_%s_DECL' % v.upper()
        if not CHECK_VARIABLE(conf, v,
                              define=define,
                              headers=headers,
                              msg='Checking for declaration of %s' % v,
                              always=always):
            # the name may be an enumerator rather than a variable:
            # retry using it as a plain rvalue expression
            if not CHECK_CODE(conf,
                      '''
                      return (int)%s;
                      ''' % (v),
                      execute=False,
                      link=False,
                      msg='Checking for declaration of %s (as enum)' % v,
                      local_include=False,
                      headers=headers,
                      define=define,
                      always=always):
                ret = False
    return ret
def CHECK_FUNC(conf, f, link=True, lib=None, headers=None):
    '''check for a function
    Strategy, in order: (1) an autoconf-style link test against a fake
    declaration, (2) a compile test in case the name is a macro, and
    (3) a declaration-only check when linking is disabled.
    Defines HAVE_<F> on success.
    '''
    define='HAVE_%s' % f.upper()
    ret = False
    conf.COMPOUND_START('Checking for %s' % f)
    if link is None or link:
        ret = CHECK_CODE(conf,
                         # this is based on the autoconf strategy
                         '''
                         #define %s __fake__%s
                         #ifdef HAVE_LIMITS_H
                         # include <limits.h>
                         #else
                         # include <assert.h>
                         #endif
                         #undef %s
                         #if defined __stub_%s || defined __stub___%s
                         #error "bad glibc stub"
                         #endif
                         extern char %s();
                         int main() { return %s(); }
                         ''' % (f, f, f, f, f, f, f),
                         execute=False,
                         link=True,
                         addmain=False,
                         add_headers=False,
                         define=define,
                         local_include=False,
                         lib=lib,
                         headers=headers,
                         msg='Checking for %s' % f)
        if not ret:
            ret = CHECK_CODE(conf,
                             # it might be a macro
                             # we need to make sure the compiler doesn't
                             # optimize it out...
                             'void *__x = (void *)%s; return (int)__x' % f,
                             execute=False,
                             link=True,
                             addmain=True,
                             add_headers=True,
                             define=define,
                             local_include=False,
                             lib=lib,
                             headers=headers,
                             msg='Checking for macro %s' % f)
    if not ret and (link is None or not link):
        # no link stage requested: settle for a declaration check
        ret = CHECK_VARIABLE(conf, f,
                             define=define,
                             headers=headers,
                             msg='Checking for declaration of %s' % f)
    conf.COMPOUND_END(ret)
    return ret
@conf
def CHECK_FUNCS(conf, list, link=True, lib=None, headers=None):
    '''check for a list of functions; True only if all are present'''
    results = [CHECK_FUNC(conf, f, link=link, lib=lib, headers=headers)
               for f in TO_LIST(list)]
    return all(results)
@conf
def CHECK_SIZEOF(conf, vars, headers=None, define=None, critical=True):
    '''check the size of a type
    vars     : one or more type names (e.g. 'long int')
    define   : override the SIZEOF_xxx define name
    critical : abort the configure run when a size cannot be determined
    Uses the compile-time negative-array-size trick against each
    power-of-two candidate up to 32.
    '''
    for v in TO_LIST(vars):
        v_define = define
        ret = False
        if v_define is None:
            v_define = 'SIZEOF_%s' % v.upper().replace(' ', '_')
        # iterate the tuple directly - the original wrapped it in a
        # pointless list() call
        for size in (1, 2, 4, 8, 16, 32):
            # the array size is negative (compile error) unless sizeof(v) <= size
            if CHECK_CODE(conf,
                          'static int test_array[1 - 2 * !(((long int)(sizeof(%s))) <= %d)];' % (v, size),
                          define=v_define,
                          quote=False,
                          headers=headers,
                          local_include=False,
                          msg="Checking if size of %s == %d" % (v, size)):
                conf.DEFINE(v_define, size)
                ret = True
                break
        if not ret and critical:
            Logs.error("Couldn't determine size of '%s'" % v)
            sys.exit(1)
    # NOTE(review): only the result of the *last* type is returned; failures
    # of earlier types with critical=False are not reported - confirm intent
    return ret
@conf
def CHECK_VALUEOF(conf, v, headers=None, define=None):
    '''check the value of a variable/define
    Runs a small program that prints the value, and returns it as an int,
    or None when the check fails.
    '''
    ret = True  # NOTE(review): this local is never used afterwards
    v_define = define
    if v_define is None:
        v_define = 'VALUEOF_%s' % v.upper().replace(' ', '_')
    if CHECK_CODE(conf,
                  'printf("%%u", (unsigned)(%s))' % v,
                  define=v_define,
                  execute=True,
                  define_ret=True,
                  quote=False,
                  headers=headers,
                  local_include=False,
                  msg="Checking value of %s" % v):
        # define_ret=True stored the captured program output in conf.env
        return int(conf.env[v_define])
    return None
@conf
def CHECK_CODE(conf, code, define,
               always=False, execute=False, addmain=True,
               add_headers=True, mandatory=False,
               headers=None, msg=None, cflags='', includes='# .',
               local_include=True, lib=None, link=True,
               define_ret=False, quote=False,
               on_target=True):
    '''check if some code compiles and/or runs
    code       : the C fragment to test
    define     : config define set on success (or to 0 when always=True)
    execute    : also run the resulting program (on the target when
                 cross-compiling and on_target=True)
    addmain    : wrap the fragment in a main() function
    add_headers: prepend all previously-found headers
    define_ret : store the program output in conf.env[define]
    Returns True on success, False otherwise.
    '''
    if CONFIG_SET(conf, define):
        # cached from a previous configure run
        return True
    if headers is not None:
        CHECK_HEADERS(conf, headers=headers, lib=lib)
    if add_headers:
        hdrs = header_list(conf, headers=headers, lib=lib)
    else:
        hdrs = ''
    # conf.check() expects 0/1 rather than a bool here
    if execute:
        execute = 1
    else:
        execute = 0
    defs = conf.get_config_header()
    if addmain:
        fragment='%s\n%s\n int main(void) { %s; return 0; }\n' % (defs, hdrs, code)
    else:
        fragment='%s\n%s\n%s\n' % (defs, hdrs, code)
    if msg is None:
        msg="Checking for %s" % define
    cflags = TO_LIST(cflags)
    if local_include:
        cflags.append('-I%s' % conf.curdir)
    if not link:
        type='nolink'
    else:
        type='cprogram'
    uselib = TO_LIST(lib)
    (ccflags, ldflags, cpppath) = library_flags(conf, uselib)
    includes = TO_LIST(includes)
    includes.extend(cpppath)
    uselib = [l.upper() for l in uselib]
    cflags.extend(ccflags)
    if on_target:
        # when cross-compiling, run the test binary via the cross-execute helper
        exec_args = conf.SAMBA_CROSS_ARGS(msg=msg)
    else:
        exec_args = []
    conf.COMPOUND_START(msg)
    ret = conf.check(fragment=fragment,
                     execute=execute,
                     define_name = define,
                     mandatory = mandatory,
                     ccflags=cflags,
                     ldflags=ldflags,
                     includes=includes,
                     uselib=uselib,
                     type=type,
                     msg=msg,
                     quote=quote,
                     exec_args=exec_args,
                     define_ret=define_ret)
    if not ret and CONFIG_SET(conf, define):
        # sometimes conf.check() returns false, but it
        # sets the define. Maybe a waf bug?
        ret = True
    if ret:
        if not define_ret:
            conf.DEFINE(define, 1)
            conf.COMPOUND_END(True)
        else:
            # report the captured value instead of a plain 'ok'
            conf.COMPOUND_END(conf.env[define])
        return True
    if always:
        conf.DEFINE(define, 0)
    conf.COMPOUND_END(False)
    return False
@conf
def CHECK_STRUCTURE_MEMBER(conf, structname, member,
                           always=False, define=None, headers=None):
    '''check for a structure member
    Compiles a fragment that takes the member's address, defining
    HAVE_<MEMBER> (or the given define) on success.
    '''
    if define is None:
        define = 'HAVE_%s' % member.upper()
    return CHECK_CODE(conf,
                      '%s s; void *_x; _x=(void *)&s.%s' % (structname, member),
                      define,
                      execute=False,
                      link=False,
                      always=always,
                      headers=headers,
                      local_include=False,
                      msg="Checking for member %s in %s" % (member, structname))
@conf
def CHECK_CFLAGS(conf, cflags, fragment='int main(void) { return 0; }\n'):
    '''check if the given cflags are accepted by the compiler
    '''
    check_msg = "Checking compiler accepts %s" % cflags
    # compile-only (nolink) so only the compiler front-end is exercised
    return conf.check(fragment=fragment, execute=0, type='nolink',
                      ccflags=cflags, msg=check_msg)
@conf
def CHECK_LDFLAGS(conf, ldflags):
    '''check if the given ldflags are accepted by the linker
    '''
    check_msg = "Checking linker accepts %s" % ldflags
    return conf.check(fragment='int main(void) { return 0; }\n',
                      execute=0, ldflags=ldflags,
                      mandatory=False, msg=check_msg)
@conf
def CONFIG_GET(conf, option):
    '''return the value of a configuration option, or None if it is unset
    (the previous docstring wrongly claimed a boolean return)'''
    if (option in conf.env):
        return conf.env[option]
    else:
        return None
@conf
def CONFIG_SET(conf, option):
    '''return True if a configuration option was found and is non-empty'''
    if option not in conf.env:
        return False
    value = conf.env[option]
    # None, an empty list and an empty tuple all count as "not set"
    return value is not None and value != [] and value != ()
@conf
def CONFIG_RESET(conf, option):
    '''remove a configuration option from conf.env, if present'''
    if option not in conf.env:
        return
    del conf.env[option]
# expose the configuration helpers on the build context so build rules
# can call bld.CONFIG_SET() etc at build time
Build.BuildContext.CONFIG_RESET = CONFIG_RESET
Build.BuildContext.CONFIG_SET = CONFIG_SET
Build.BuildContext.CONFIG_GET = CONFIG_GET
def library_flags(self, libs):
    '''work out flags from pkg_config
    Collects the per-library CCFLAGS_x/LDFLAGS_x/CPPPATH_x variables
    (as stored by pkg-config checks) and returns de-duplicated
    (ccflags, ldflags, cpppath) lists.
    '''
    ccflags = []
    ldflags = []
    cpppath = []
    for lib in TO_LIST(libs):
        # note that we do not add the -I and -L in here, as that is added by the waf
        # core. Adding it here would just change the order that it is put on the link line
        # which can cause system paths to be added before internal libraries
        extra_ccflags = TO_LIST(getattr(self.env, 'CCFLAGS_%s' % lib.upper(), []))
        extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), []))
        extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), []))
        ccflags.extend(extra_ccflags)
        ldflags.extend(extra_ldflags)
        cpppath.extend(extra_cpppath)
    if 'EXTRA_LDFLAGS' in self.env:
        ldflags.extend(self.env['EXTRA_LDFLAGS'])
    # de-duplicate while preserving order
    ccflags = unique_list(ccflags)
    ldflags = unique_list(ldflags)
    cpppath = unique_list(cpppath)
    return (ccflags, ldflags, cpppath)
@conf
def CHECK_LIB(conf, libs, mandatory=False, empty_decl=True, set_target=True, shlib=False):
    '''check if a set of libraries exist as system libraries
    returns the sublist of libs that do exist as a syslib or []
    '''
    fragment= '''
int foo()
{
    int v = 2;
    return v*2;
}
'''
    ret = []
    liblist = TO_LIST(libs)
    for lib in liblist[:]:
        if GET_TARGET_TYPE(conf, lib) == 'SYSLIB':
            # already classified as a system library by an earlier check
            ret.append(lib)
            continue
        (ccflags, ldflags, cpppath) = library_flags(conf, lib)
        if shlib:
            res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
        else:
            res = conf.check(lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
        if not res:
            if mandatory:
                # bugfix: the old message interpolated the *builtin* `list`
                # type into "for functions '%s'", printing garbage
                Logs.error("Mandatory library '%s' not found" % lib)
                sys.exit(1)
            if empty_decl:
                # if it isn't a mandatory library, then remove it from dependency lists
                if set_target:
                    SET_TARGET_TYPE(conf, lib, 'EMPTY')
        else:
            conf.define('HAVE_LIB%s' % lib.upper().replace('-','_').replace('.','_'), 1)
            conf.env['LIB_' + lib.upper()] = lib
            if set_target:
                conf.SET_TARGET_TYPE(lib, 'SYSLIB')
            ret.append(lib)
    return ret
@conf
def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
                   headers=None, link=True, empty_decl=True, set_target=True):
    """
    check that the functions in 'list' are available in 'library'
    if they are, then make that library available as a dependency
    if the library is not available and mandatory==True, then
    raise an error.
    If the library is not available and mandatory==False, then
    add the library to the list of dependencies to remove from
    build rules
    optionally check for the functions first in libc
    """
    remaining = TO_LIST(list)
    liblist = TO_LIST(library)
    # check if some already found
    for f in remaining[:]:
        if CONFIG_SET(conf, 'HAVE_%s' % f.upper()):
            remaining.remove(f)
    # see if the functions are in libc
    if checklibc:
        for f in remaining[:]:
            if CHECK_FUNC(conf, f, link=True, headers=headers):
                remaining.remove(f)
    if remaining == []:
        # everything already found: mark the libraries as unneeded
        for lib in liblist:
            if GET_TARGET_TYPE(conf, lib) != 'SYSLIB' and empty_decl:
                SET_TARGET_TYPE(conf, lib, 'EMPTY')
        return True
    checklist = conf.CHECK_LIB(liblist, empty_decl=empty_decl, set_target=set_target)
    for lib in liblist[:]:
        if not lib in checklist and mandatory:
            Logs.error("Mandatory library '%s' not found for functions '%s'" % (lib, list))
            sys.exit(1)
    ret = True
    # finally check for the remaining functions against the found libraries
    for f in remaining:
        if not CHECK_FUNC(conf, f, lib=' '.join(checklist), headers=headers, link=link):
            ret = False
    return ret
@conf
def IN_LAUNCH_DIR(conf):
    '''return True if this rule is being run from the launch directory'''
    return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir)
# also make the helper available on the options handler
Options.Handler.IN_LAUNCH_DIR = IN_LAUNCH_DIR
@conf
def SAMBA_CONFIG_H(conf, path=None):
    '''write out config.h in the right directory
    Also configures the compiler hardening / developer warning flags,
    since those must be decided before the header is written.
    '''
    # we don't want to produce a config.h in places like lib/replace
    # when we are building projects that depend on lib/replace
    if not IN_LAUNCH_DIR(conf):
        return
    # we need to build real code that can't be optimized away to test
    if conf.check(fragment='''
        #include <stdio.h>
        int main(void)
        {
            char t[100000];
            while (fgets(t, sizeof(t), stdin));
            return 0;
        }
        ''',
        execute=0,
        ccflags='-fstack-protector',
        ldflags='-fstack-protector',
        mandatory=False,
        msg='Checking if toolchain accepts -fstack-protector'):
        conf.ADD_CFLAGS('-fstack-protector')
        conf.ADD_LDFLAGS('-fstack-protector')
    if Options.options.debug:
        conf.ADD_CFLAGS('-g', testflags=True)
    if Options.options.developer:
        conf.env.DEVELOPER_MODE = True
        # developer mode: turn on a wide set of warnings (each flag is
        # individually tested against the compiler via testflags=True)
        conf.ADD_CFLAGS('-g', testflags=True)
        conf.ADD_CFLAGS('-Wall', testflags=True)
        conf.ADD_CFLAGS('-Wshadow', testflags=True)
        conf.ADD_CFLAGS('-Wmissing-prototypes', testflags=True)
        conf.ADD_CFLAGS('-Wcast-align -Wcast-qual', testflags=True)
        conf.ADD_CFLAGS('-fno-common', testflags=True)
        conf.ADD_CFLAGS('-Werror=address', testflags=True)
        # we add these here to ensure that -Wstrict-prototypes is not set during configure
        conf.ADD_CFLAGS('-Werror=strict-prototypes -Wstrict-prototypes',
                        testflags=True)
        conf.ADD_CFLAGS('-Werror=write-strings -Wwrite-strings',
                        testflags=True)
        conf.ADD_CFLAGS('-Werror-implicit-function-declaration',
                        testflags=True)
        conf.ADD_CFLAGS('-Werror=pointer-arith -Wpointer-arith',
                        testflags=True)
        conf.ADD_CFLAGS('-Werror=declaration-after-statement -Wdeclaration-after-statement',
                        testflags=True)
        conf.ADD_CFLAGS('-Werror=return-type -Wreturn-type',
                        testflags=True)
        conf.ADD_CFLAGS('-Werror=uninitialized -Wuninitialized',
                        testflags=True)
        conf.ADD_CFLAGS('-Wformat=2 -Wno-format-y2k', testflags=True)
        # This check is because for ldb_search(), a NULL format string
        # is not an error, but some compilers complain about that.
        if CHECK_CFLAGS(conf, ["-Werror=format", "-Wformat=2"], '''
int testformat(char *format, ...) __attribute__ ((format (__printf__, 1, 2)));
int main(void) {
        testformat(0);
        return 0;
}
'''):
            if not 'EXTRA_CFLAGS' in conf.env:
                conf.env['EXTRA_CFLAGS'] = []
            conf.env['EXTRA_CFLAGS'].extend(TO_LIST("-Werror=format"))
    if Options.options.picky_developer:
        conf.ADD_NAMED_CFLAGS('PICKY_CFLAGS', '-Werror -Wno-error=deprecated-declarations', testflags=True)
    if Options.options.fatal_errors:
        conf.ADD_CFLAGS('-Wfatal-errors', testflags=True)
    if Options.options.pedantic:
        conf.ADD_CFLAGS('-W', testflags=True)
    if Options.options.address_sanitizer:
        conf.ADD_CFLAGS('-fno-omit-frame-pointer -O1 -fsanitize=address', testflags=True)
        conf.ADD_LDFLAGS('-fsanitize=address', testflags=True)
        conf.env['ADDRESS_SANITIZER'] = True
    # Let people pass an additional ADDITIONAL_{CFLAGS,LDFLAGS}
    # environment variables which are only used the for final build.
    #
    # The CFLAGS and LDFLAGS environment variables are also
    # used for the configure checks which might impact their results.
    conf.add_os_flags('ADDITIONAL_CFLAGS')
    if conf.env.ADDITIONAL_CFLAGS and conf.CHECK_CFLAGS(conf.env['ADDITIONAL_CFLAGS']):
        conf.env['EXTRA_CFLAGS'].extend(conf.env['ADDITIONAL_CFLAGS'])
    conf.add_os_flags('ADDITIONAL_LDFLAGS')
    if conf.env.ADDITIONAL_LDFLAGS and conf.CHECK_LDFLAGS(conf.env['ADDITIONAL_LDFLAGS']):
        conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS'])
    if path is None:
        conf.write_config_header('config.h', top=True)
    else:
        conf.write_config_header(path)
    conf.SAMBA_CROSS_CHECK_COMPLETE()
@conf
def CONFIG_PATH(conf, name, default):
    '''setup a configurable path
    name    : the configuration variable to set (only if not already set)
    default : absolute paths are used as-is; relative ones are appended
              to the configured PREFIX
    '''
    if not name in conf.env:
        # use startswith() so an empty default cannot raise IndexError
        if default.startswith('/'):
            conf.env[name] = default
        else:
            conf.env[name] = conf.env['PREFIX'] + default
@conf
def ADD_NAMED_CFLAGS(conf, name, flags, testflags=False):
    '''add some CFLAGS to the command line
    optionally set testflags to ensure all the flags work
    name : the conf.env variable to accumulate the flags into
    '''
    if testflags:
        # keep only the flags this compiler actually accepts
        ok_flags=[]
        for f in flags.split():
            if CHECK_CFLAGS(conf, f):
                ok_flags.append(f)
        flags = ok_flags
    if not name in conf.env:
        conf.env[name] = []
    conf.env[name].extend(TO_LIST(flags))
@conf
def ADD_CFLAGS(conf, flags, testflags=False):
    '''add some CFLAGS to the command line
    optionally set testflags to ensure all the flags work
    (convenience wrapper accumulating into EXTRA_CFLAGS)
    '''
    ADD_NAMED_CFLAGS(conf, 'EXTRA_CFLAGS', flags, testflags=testflags)
@conf
def ADD_LDFLAGS(conf, flags, testflags=False):
    '''add some LDFLAGS to the command line
    optionally set testflags to ensure all the flags work
    this will return the flags that are added, if any
    '''
    if testflags:
        # keep only the flags the linker actually accepts
        ok_flags=[]
        for f in flags.split():
            if CHECK_LDFLAGS(conf, f):
                ok_flags.append(f)
        flags = ok_flags
    if not 'EXTRA_LDFLAGS' in conf.env:
        conf.env['EXTRA_LDFLAGS'] = []
    conf.env['EXTRA_LDFLAGS'].extend(TO_LIST(flags))
    return flags
@conf
def ADD_EXTRA_INCLUDES(conf, includes):
    '''add some extra include directories to all builds'''
    if 'EXTRA_INCLUDES' not in conf.env:
        conf.env['EXTRA_INCLUDES'] = []
    conf.env['EXTRA_INCLUDES'].extend(TO_LIST(includes))
def CURRENT_CFLAGS(bld, target, cflags, allow_warnings=False, hide_symbols=False):
    '''work out the current flags. local flags are added first
    target         : unused here, kept for interface compatibility
    allow_warnings : when False, the strict PICKY_CFLAGS are appended
    hide_symbols   : append the visibility flags when supported
    '''
    ret = TO_LIST(cflags)
    # fixes: drop the builtin-shadowing `list` local, the stray
    # semicolons, and the pointless empty-list branch
    if 'EXTRA_CFLAGS' in bld.env:
        ret.extend(bld.env['EXTRA_CFLAGS'])
    if not allow_warnings and 'PICKY_CFLAGS' in bld.env:
        ret.extend(bld.env['PICKY_CFLAGS'])
    if hide_symbols and bld.env.HAVE_VISIBILITY_ATTR:
        ret.append(bld.env.VISIBILITY_CFLAGS)
    return ret
@conf
def CHECK_CC_ENV(conf):
    """trim whitespaces from 'CC'.
    The build farm sometimes puts a space at the start"""
    if os.environ.get('CC'):
        # TO_LIST splits on whitespace, discarding the stray spaces
        conf.env.CC = TO_LIST(os.environ.get('CC'))
        if len(conf.env.CC) == 1:
            # make for nicer logs if just a single command
            conf.env.CC = conf.env.CC[0]
@conf
def SETUP_CONFIGURE_CACHE(conf, enable):
    '''enable/disable cache of configure results'''
    if enable:
        # when -C is chosen, we will use a private cache and will
        # not look into system includes. This roughtly matches what
        # autoconf does with -C
        cache_path = os.path.join(conf.blddir, '.confcache')
        mkdir_p(cache_path)
        Options.cache_global = os.environ['WAFCACHE'] = cache_path
    else:
        # when -C is not chosen we will not cache configure checks
        # We set the recursion limit low to prevent waf from spending
        # a lot of time on the signatures of the files.
        Options.cache_global = os.environ['WAFCACHE'] = ''
        preproc.recursion_limit = 1
    # in either case we don't need to scan system includes
    preproc.go_absolute = False
@conf
def SAMBA_CHECK_UNDEFINED_SYMBOL_FLAGS(conf):
    '''probe the linker flags controlling undefined-symbol resolution'''
    # we don't want any libraries or modules to rely on runtime
    # resolution of symbols
    if not sys.platform.startswith("openbsd"):
        conf.env.undefined_ldflags = conf.ADD_LDFLAGS('-Wl,-no-undefined', testflags=True)
    if not sys.platform.startswith("openbsd") and conf.env.undefined_ignore_ldflags == []:
        # darwin-style flag pair allowing deliberate undefined symbols
        if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']):
            conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup']
@conf
def CHECK_CFG(self, *k, **kw):
    '''thin pass-through wrapper around waf's check_cfg (pkg-config support)'''
    return self.check_cfg(*k, **kw)
|
jelmer/samba
|
buildtools/wafsamba/samba_autoconf.py
|
Python
|
gpl-3.0
| 28,806
|
from __future__ import absolute_import
from sentry.lang.native.utils import get_sdk_from_event, cpu_name_from_data, \
version_build_from_data
def test_get_sdk_from_event():
    """get_sdk_from_event reads explicit debug_meta info, or derives it
    from the 'os' context (splitting the version string)."""
    explicit_event = {
        'debug_meta': {
            'sdk_info': {
                'dsym_type': 'macho',
                'sdk_name': 'iOS',
                'version_major': 9,
                'version_minor': 3,
                'version_patchlevel': 0,
            }
        }
    }
    expected = {
        'dsym_type': 'macho',
        'sdk_name': 'iOS',
        'version_major': 9,
        'version_minor': 3,
        'version_patchlevel': 0,
    }
    sdk_info = get_sdk_from_event(explicit_event)
    for key in expected:
        assert sdk_info[key] == expected[key]
    derived_event = {
        'contexts': {
            'os': {
                'type': 'os',
                'name': 'iOS',
                'version': '9.3.1.1234',
            }
        }
    }
    sdk_info = get_sdk_from_event(derived_event)
    assert sdk_info['dsym_type'] == 'macho'
    assert sdk_info['sdk_name'] == 'iOS'
    assert (sdk_info['version_major'],
            sdk_info['version_minor'],
            sdk_info['version_patchlevel']) == (9, 3, 1)
def test_cpu_name_from_data():
    """With several device contexts, the arch of the first matching one wins."""
    payload = {
        'contexts': {
            'device': {'type': 'device', 'arch': 'arm64'},
            'device2': {'type': 'device', 'arch': 'arm7'},
        }
    }
    assert cpu_name_from_data(payload) == 'arm64'
def test_version_build_from_data():
    """version_build_from_data returns app info only when the 'app' context
    carries both app_version and app_build; otherwise it returns None."""
    # complete app context -> populated AppInfo
    app_info = version_build_from_data({
        'contexts': {
            'app': {
                'app_build': "2",
                'device_app_hash': "18482a73f96d2ed3f4ce8d73fa9942744bff3598",
                'app_id': "45BA82DF-F3E3-37F7-9D88-12A1AAB719E7",
                'app_version': "1.0",
                'app_identifier': "com.rokkincat.SentryExample",
                'app_name': "SwiftExample",
                'app_start_time': "2017-03-28T15:14:01Z",
                'type': "app",
                'build_type': "simulator"
            }
        }
    })
    assert app_info.version == '1.0'
    assert app_info.build == '2'
    assert app_info.name == 'SwiftExample'
    assert app_info.id == 'com.rokkincat.SentryExample'
    # missing app_build -> None
    app_info = version_build_from_data({
        'contexts': {
            'app': {
                'device_app_hash': "18482a73f96d2ed3f4ce8d73fa9942744bff3598",
                'app_id': "45BA82DF-F3E3-37F7-9D88-12A1AAB719E7",
                'app_version': "1.0",
                'app_identifier': "com.rokkincat.SentryExample",
                'app_name': "SwiftExample",
                'app_start_time': "2017-03-28T15:14:01Z",
                'type': "app",
                'build_type': "simulator"
            }
        }
    })
    assert app_info is None
    # missing app_version as well -> None
    app_info = version_build_from_data({
        'contexts': {
            'app': {
                'device_app_hash': "18482a73f96d2ed3f4ce8d73fa9942744bff3598",
                'app_id': "45BA82DF-F3E3-37F7-9D88-12A1AAB719E7",
                'app_identifier': "com.rokkincat.SentryExample",
                'app_name': "SwiftExample",
                'app_start_time': "2017-03-28T15:14:01Z",
                'type': "app",
                'build_type': "simulator"
            }
        }
    })
    assert app_info is None
    # no 'app' context at all -> None
    app_info = version_build_from_data({
        'contexts': {
            'bal': {
                'device_app_hash': "18482a73f96d2ed3f4ce8d73fa9942744bff3598",
            }
        }
    })
    assert app_info is None
def test_cpu_name_from_data_inferred_type():
    """The context key name is irrelevant: type == 'device' is what matters."""
    payload = {
        'contexts': {
            'some_device': {'type': 'device', 'arch': 'arm64'}
        }
    }
    assert cpu_name_from_data(payload) == 'arm64'
|
JackDanger/sentry
|
tests/sentry/lang/native/test_utils.py
|
Python
|
bsd-3-clause
| 3,840
|
# coding=utf-8
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
from flask.ext.login import UserMixin
from flask.ext.principal import Identity
import hashlib
import os
import yaml
from octoprint.settings import settings
class UserManager(object):
    """Abstract base for user management backends.
    The mutation/lookup methods are no-ops here so a deployment with
    access control disabled still works; subclasses override them.
    """
    # the closed set of roles a user may hold
    valid_roles = ["user", "admin"]
    @staticmethod
    def createPasswordHash(password):
        # NOTE(review): a single application-wide salt with plain sha512 -
        # presumably kept for compatibility with existing users.yaml files;
        # a per-user salt + KDF would be preferable - confirm before changing
        return hashlib.sha512(password + "mvBUTvwzBzD3yPwvnJ4E4tXNf3CGJvvW").hexdigest()
    def addUser(self, username, password, active, roles):
        pass
    def changeUserActivation(self, username, active):
        pass
    def changeUserRoles(self, username, roles):
        pass
    def addRolesToUser(self, username, roles):
        pass
    def removeRolesFromUser(self, username, roles):
        pass
    def changeUserPassword(self, username, password):
        pass
    def removeUser(self, username):
        pass
    def findUser(self, username=None):
        return None
    def getAllUsers(self):
        return []
##~~ FilebasedUserManager, takes available users from users.yaml file
class FilebasedUserManager(UserManager):
    """UserManager backed by a users.yaml file on disk.
    Mutations mark the store dirty and immediately persist + reload.
    """
    def __init__(self):
        UserManager.__init__(self)
        userfile = settings().get(["accessControl", "userfile"])
        if userfile is None:
            userfile = os.path.join(settings().settings_dir, "users.yaml")
        self._userfile = userfile
        self._users = {}
        self._dirty = False
        self._load()
    def _load(self):
        """Populate self._users from the user file, or seed a default admin."""
        if os.path.exists(self._userfile) and os.path.isfile(self._userfile):
            with open(self._userfile, "r") as f:
                data = yaml.safe_load(f)
                for name in data.keys():
                    attributes = data[name]
                    self._users[name] = User(name, attributes["password"], attributes["active"], attributes["roles"])
        else:
            # no user file yet: fall back to a single default admin account
            self._users["admin"] = User("admin", "7557160613d5258f883014a7c3c0428de53040fc152b1791f1cc04a62b428c0c2a9c46ed330cdce9689353ab7a5352ba2b2ceb459b96e9c8ed7d0cb0b2c0c076", True, ["user", "admin"])
    def _save(self, force=False):
        """Write all users back to the user file when dirty (or forced)."""
        if not self._dirty and not force:
            return
        data = {}
        for name in self._users.keys():
            user = self._users[name]
            data[name] = {
                "password": user._passwordHash,
                "active": user._active,
                "roles": user._roles
            }
        with open(self._userfile, "wb") as f:
            yaml.safe_dump(data, f, default_flow_style=False, indent="    ", allow_unicode=True)
        self._dirty = False
        self._load()
    def addUser(self, username, password, active=False, roles=None):
        # bugfix: the previous mutable default (roles=["user"]) was shared
        # between calls, and addRolesToUser mutates the stored list in place,
        # silently granting roles to every later default-role user
        if roles is None:
            roles = ["user"]
        if username in self._users.keys():
            raise UserAlreadyExists(username)
        self._users[username] = User(username, UserManager.createPasswordHash(password), active, roles)
        self._dirty = True
        self._save()
    def changeUserActivation(self, username, active):
        if not username in self._users.keys():
            raise UnknownUser(username)
        if self._users[username]._active != active:
            self._users[username]._active = active
            self._dirty = True
            self._save()
    def changeUserRoles(self, username, roles):
        """Reconcile the user's roles with the given set (add + remove)."""
        if not username in self._users.keys():
            raise UnknownUser(username)
        user = self._users[username]
        removedRoles = set(user._roles) - set(roles)
        self.removeRolesFromUser(username, removedRoles)
        addedRoles = set(roles) - set(user._roles)
        self.addRolesToUser(username, addedRoles)
    def addRolesToUser(self, username, roles):
        if not username in self._users.keys():
            raise UnknownUser(username)
        user = self._users[username]
        for role in roles:
            if not role in user._roles:
                user._roles.append(role)
                self._dirty = True
        self._save()
    def removeRolesFromUser(self, username, roles):
        if not username in self._users.keys():
            raise UnknownUser(username)
        user = self._users[username]
        for role in roles:
            if role in user._roles:
                user._roles.remove(role)
                self._dirty = True
        self._save()
    def changeUserPassword(self, username, password):
        if not username in self._users.keys():
            raise UnknownUser(username)
        passwordHash = UserManager.createPasswordHash(password)
        user = self._users[username]
        if user._passwordHash != passwordHash:
            user._passwordHash = passwordHash
            self._dirty = True
            self._save()
    def removeUser(self, username):
        if not username in self._users.keys():
            raise UnknownUser(username)
        del self._users[username]
        self._dirty = True
        self._save()
    def findUser(self, username=None):
        if username is None:
            return None
        if username not in self._users.keys():
            return None
        return self._users[username]
    def getAllUsers(self):
        return map(lambda x: x.asDict(), self._users.values())
##~~ Exceptions
class UserAlreadyExists(Exception):
    """Raised when trying to add a user whose name is already taken."""
    def __init__(self, username):
        super(UserAlreadyExists, self).__init__("User %s already exists" % username)
class UnknownUser(Exception):
    """Raised when an operation references a user that does not exist."""
    def __init__(self, username):
        super(UnknownUser, self).__init__("Unknown user: %s" % username)
class UnknownRole(Exception):
    """Raised when an operation references a role that is not defined."""
    def __init__(self, role):
        # bugfix: the original defined `_init_` (missing dunder underscores),
        # so this constructor never ran and the message was lost
        Exception.__init__(self, "Unknown role: %s" % role)
##~~ User object
class User(UserMixin):
    """A single account: name, password hash, active flag and role list."""
    def __init__(self, username, passwordHash, active, roles):
        self._username = username
        self._passwordHash = passwordHash
        self._active = active
        self._roles = roles
    def asDict(self):
        # serializable summary used by the API layer
        return dict(
            name=self._username,
            active=self.is_active(),
            admin=self.is_admin(),
            user=self.is_user(),
        )
    def check_password(self, passwordHash):
        return passwordHash == self._passwordHash
    def get_id(self):
        return self._username
    def get_name(self):
        return self._username
    def is_active(self):
        return self._active
    def is_user(self):
        return "user" in self._roles
    def is_admin(self):
        return "admin" in self._roles
##~~ DummyUser object to use when accessControl is disabled
class DummyUser(User):
def __init__(self):
User.__init__(self, "dummy", "", True, UserManager.valid_roles)
def check_password(self, passwordHash):
return True
class DummyIdentity(Identity):
    """Flask-Principal identity matching the dummy user."""
    def __init__(self):
        Identity.__init__(self, "dummy")
def dummy_identity_loader():
    # identity loader hook returning the dummy identity (access control off)
    return DummyIdentity()
|
3dprintcanalhouse/octoprint2
|
octoprint/users.py
|
Python
|
agpl-3.0
| 5,919
|
def test_pass():
    """Trivial always-passing test used as fixture data for the test runner."""
    assert True
|
bbiskup/pytest-purkinje
|
testdata/testproj/singlepass/simple_test.py
|
Python
|
mit
| 33
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration renaming Article.published to Article.is_published."""
    def forwards(self, orm):
        # Deleting field 'Article.published'
        db.delete_column(u'articles_article', 'published')
        # Adding field 'Article.is_published'
        # NOTE(review): no data copy from the old column - any previously
        # set published flags are reset to False; confirm this was intended
        db.add_column(u'articles_article', 'is_published',
                      self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True),
                      keep_default=False)
    def backwards(self, orm):
        # Adding field 'Article.published'
        db.add_column(u'articles_article', 'published',
                      self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True),
                      keep_default=False)
        # Deleting field 'Article.is_published'
        db.delete_column(u'articles_article', 'is_published')
    # frozen model state used by south's orm accessor (auto-generated)
    models = {
        u'articles.article': {
            'Meta': {'object_name': 'Article'},
            'content_detail': ('ckeditor.fields.RichTextField', [], {}),
            'content_list': ('ckeditor.fields.RichTextField', [], {}),
            'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'meta_description': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'blank': 'True', 'populate_from': "'title'", 'unique': 'True', 'null': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'})
        }
    }
    complete_apps = ['articles']
|
AmandaCMS/amanda-cms
|
amanda/articles/migrations/0002_auto__del_field_article_published__add_field_article_is_published.py
|
Python
|
mit
| 2,096
|
"""
"""
from ._utfiles import *
from ._utconfig import *
from ._uttime import *
|
abantos/bolt
|
bolt/utils/__init__.py
|
Python
|
mit
| 79
|
import argparse
import time
import logging
import json
import typing
from datetime import datetime
import apache_beam as beam
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.transforms.combiners import CountCombineFn
from apache_beam.runners import DataflowRunner, DirectRunner
# ### functions and classes
class CommonLog(typing.NamedTuple):
    """Typed schema for one web-server log record parsed from Pub/Sub JSON."""
    ip: str
    user_id: str
    lat: float
    lng: float
    timestamp: str
    http_request: str
    http_response: int
    num_bytes: int
    user_agent: str
# register a RowCoder so Beam can treat CommonLog as a schema'd row type
beam.coders.registry.register_coder(CommonLog, beam.coders.RowCoder)
def parse_json(element):
    """Decode a UTF-8 JSON payload from Pub/Sub into a CommonLog row."""
    fields = json.loads(element.decode('utf-8'))
    return CommonLog(**fields)
def add_processing_timestamp(element):
    """Convert a named-tuple row to a dict, renaming 'timestamp' to
    'event_timestamp' and stamping the wall-clock processing time."""
    record = element._asdict()
    record['event_timestamp'] = record.pop('timestamp')
    record['processing_timestamp'] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return record
class GetTimestampFn(beam.DoFn):
    """DoFn pairing an aggregated page-view count with its window start time."""
    def process(self, element, window=beam.DoFn.WindowParam):
        # window start rendered as an ISO-8601-style string (no tz suffix)
        window_start = window.start.to_utc_datetime().strftime("%Y-%m-%dT%H:%M:%S")
        output = {'page_views': element, 'timestamp': window_start}
        yield output
# ### main
def run():
    """Build and run the streaming pipeline.

    Reads JSON log events from Pub/Sub, writes every raw event to one
    BigQuery table, and writes per-window page-view counts to a second
    BigQuery table.

    The original lab file left the Pub/Sub read, the windowing/count steps,
    and the aggregate BigQuery sink as TODOs (which also made the function
    a syntax error); they are filled in here.
    """
    # Command line arguments
    parser = argparse.ArgumentParser(description='Load from Json from Pub/Sub into BigQuery')
    parser.add_argument('--project', required=True, help='Specify Google Cloud project')
    parser.add_argument('--region', required=True, help='Specify Google Cloud region')
    parser.add_argument('--staging_location', required=True, help='Specify Cloud Storage bucket for staging')
    parser.add_argument('--temp_location', required=True, help='Specify Cloud Storage bucket for temp')
    parser.add_argument('--runner', required=True, help='Specify Apache Beam Runner')
    parser.add_argument('--input_topic', required=True, help='Input Pub/Sub Topic')
    parser.add_argument('--agg_table_name', required=True, help='BigQuery table name for aggregate results')
    parser.add_argument('--raw_table_name', required=True, help='BigQuery table name for raw inputs')
    parser.add_argument('--window_duration', required=True, help='Window duration')

    opts = parser.parse_args()

    # Setting up the Beam pipeline options
    options = PipelineOptions(save_main_session=True, streaming=True)
    options.view_as(GoogleCloudOptions).project = opts.project
    options.view_as(GoogleCloudOptions).region = opts.region
    options.view_as(GoogleCloudOptions).staging_location = opts.staging_location
    options.view_as(GoogleCloudOptions).temp_location = opts.temp_location
    options.view_as(GoogleCloudOptions).job_name = '{0}{1}'.format('streaming-minute-traffic-pipeline-', time.time_ns())
    options.view_as(StandardOptions).runner = opts.runner

    input_topic = opts.input_topic
    raw_table_name = opts.raw_table_name
    agg_table_name = opts.agg_table_name
    window_duration = opts.window_duration

    # Table schema for BigQuery
    agg_table_schema = {
        "fields": [
            {
                "name": "page_views",
                "type": "INTEGER"
            },
            {
                "name": "timestamp",
                "type": "STRING"
            },
        ]
    }

    raw_table_schema = {
        "fields": [
            {
                "name": "ip",
                "type": "STRING"
            },
            {
                "name": "user_id",
                "type": "STRING"
            },
            {
                "name": "user_agent",
                "type": "STRING"
            },
            {
                "name": "lat",
                "type": "FLOAT",
                "mode": "NULLABLE"
            },
            {
                "name": "lng",
                "type": "FLOAT",
                "mode": "NULLABLE"
            },
            {
                "name": "event_timestamp",
                "type": "STRING"
            },
            {
                "name": "processing_timestamp",
                "type": "STRING"
            },
            {
                "name": "http_request",
                "type": "STRING"
            },
            {
                "name": "http_response",
                "type": "INTEGER"
            },
            {
                "name": "num_bytes",
                "type": "INTEGER"
            }
        ]
    }

    # Create the pipeline
    p = beam.Pipeline(options=options)

    parsed_msgs = (p
                   | 'ReadFromPubSub' >> beam.io.ReadFromPubSub(input_topic)
                   | 'ParseJson' >> beam.Map(parse_json).with_output_types(CommonLog))

    # Branch 1: append every raw event to the raw BigQuery table.
    (parsed_msgs
        | "AddProcessingTimestamp" >> beam.Map(add_processing_timestamp)
        | 'WriteRawToBQ' >> beam.io.WriteToBigQuery(
            raw_table_name,
            schema=raw_table_schema,
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND
            )
    )

    # Branch 2: count events per fixed window (window_duration is in minutes)
    # and append the per-window counts to the aggregate table.
    (parsed_msgs
        | "WindowByMinute" >> beam.WindowInto(beam.window.FixedWindows(60 * int(window_duration)))
        | "CountPerMinute" >> beam.CombineGlobally(CountCombineFn()).without_defaults()
        | "AddWindowTimestamp" >> beam.ParDo(GetTimestampFn())
        | 'WriteAggToBQ' >> beam.io.WriteToBigQuery(
            agg_table_name,
            schema=agg_table_schema,
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND
            )
    )

    logging.getLogger().setLevel(logging.INFO)
    logging.info("Building pipeline ...")

    p.run().wait_until_finish()


if __name__ == '__main__':
    run()
|
GoogleCloudPlatform/training-data-analyst
|
quests/dataflow_python/5_Streaming_Analytics/lab/streaming_minute_traffic_pipeline.py
|
Python
|
apache-2.0
| 5,609
|
#
# Copyright (c) 2015-2018 Nest Labs, Inc.
# Copyright (c) 2019-2020 Google LLC.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# BLE Central support for Weave Device Manager via OSX CoreBluetooth APIs.
#
from __future__ import absolute_import
from __future__ import print_function
import abc
import logging
import select
import socket
import sys
import six.moves.queue
import subprocess
import threading
import time
import binascii
from ctypes import *
import readline
from Foundation import *
import objc
from PyObjCTools import AppHelper
from .WeaveBleUtility import *
from .WeaveUtility import WeaveUtility
from .WeaveBleBase import WeaveBleBase
# CoreBluetooth lived inside the IOBluetooth umbrella framework on older
# macOS releases; try that path first, then fall back to the standalone
# framework path. Narrowed the bare `except:` to `except Exception:` so
# KeyboardInterrupt/SystemExit are not swallowed during import.
try:
    objc.loadBundle("CoreBluetooth", globals(),
                    bundle_path=objc.pathForFramework(u'/System/Library/Frameworks/IOBluetooth.framework/Versions/A/Frameworks/CoreBluetooth.framework'))
except Exception:
    objc.loadBundle("CoreBluetooth", globals(),
                    bundle_path=objc.pathForFramework(u'/System/Library/Frameworks/CoreBluetooth.framework'))
# Weave BLE service UUID (0xFEAF) in 128-bit and 16-bit short form; both are
# passed to scans/retrievals below since peripherals may advertise either.
weave_service = CBUUID.UUIDWithString_(u'0000FEAF-0000-1000-8000-00805F9B34FB')
weave_service_short = CBUUID.UUIDWithString_(u'FEAF')
# Weave data characteristic UUIDs; characteristic discovery in the manager
# class below is limited to exactly this pair.
weave_tx = CBUUID.UUIDWithString_(u'18EE2EF5-263D-4559-959F-4F9C429F9D11')
weave_rx = CBUUID.UUIDWithString_(u'18EE2EF5-263D-4559-959F-4F9C429F9D12')
# Chromecast setup service (0xFEA0), included so scans also surface those devices.
chromecast_setup_service = CBUUID.UUIDWithString_(u'0000FEA0-0000-1000-8000-00805F9B34FB')
chromecast_setup_service_short = CBUUID.UUIDWithString_(u'FEA0')
def _VoidPtrToCBUUID(ptr, len):
    """Convert a raw UUID (void pointer + length) into a CBUUID.

    Reads `len` bytes from `ptr`, hex-encodes them, and inserts the dashes of
    the canonical 8-4-4-4-12 UUID text form. Returns the CBUUID, or None if
    conversion fails. (Narrowed the original bare `except:` to `Exception`,
    and stopped reusing one variable for three unrelated values.)
    """
    try:
        raw = WeaveUtility.VoidPtrToByteArray(ptr, len)
        hexstr = WeaveUtility.Hexlify(raw)
        uuid_str = '-'.join(
            (hexstr[:8], hexstr[8:12], hexstr[12:16], hexstr[16:20], hexstr[20:]))
        return CBUUID.UUIDWithString_(uuid_str)
    except Exception:
        print("ERROR: failed to convert void * to CBUUID")
        return None
class CoreBluetoothManager(WeaveBleBase):
    """BLE central for the Weave Device Manager, built on OSX CoreBluetooth.

    All CoreBluetooth delegate callbacks arrive on the default NSRunLoop,
    which this class pumps via runLoopUntil()/the readline input hook, and
    it forwards BLE events to the Weave thread through `weave_queue` +
    `devMgr.DriveBleIO()`.

    Fixes relative to the original: the received-bytes debug log passed the
    builtin `len` (logging its repr) instead of the payload length, and two
    locals shadowed the builtin `bytes`.
    """

    def __init__(self, devMgr, logger=None):
        if logger:
            self.logger = logger
        else:
            self.logger = logging.getLogger('WeaveBLEMgr')
            logging.basicConfig(
                level=logging.INFO,
                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')

        self.manager = None
        self.peripheral = None
        self.service = None
        self.scan_quiet = False
        self.characteristics = {}
        self.peripheral_list = []
        self.bg_peripheral_name = None
        self.weave_queue = six.moves.queue.Queue()
        self.manager = CBCentralManager.alloc()
        self.manager.initWithDelegate_queue_options_(self, None, None)
        self.ready_condition = False
        self.loop_condition = False  # indicates whether the cmd requirement has been met in the runloop.
        self.connect_state = False  # reflects whether or not there is a connection.
        self.send_condition = False
        self.subscribe_condition = False

        self.runLoopUntil(("ready", time.time(), 10.0))

        self.orig_input_hook = None
        self.hookFuncPtr = None

        self.setInputHook(self.readlineCB)
        self.devMgr = devMgr
        self.devMgr.SetBlockingCB(self.devMgrCB)

        # Closures handed to the Weave C layer; each simply trampolines back
        # into this instance.
        def HandleBleEventCB():
            return self.GetBleEvent()

        def HandleBleWriteCharCB(connObj, svcId, charId, buffer, length):
            return self.WriteBleCharacteristic(connObj, svcId, charId, buffer, length)

        def HandleBleSubscribeCB(connObj, svcId, charId, subscribe):
            return self.SubscribeBleCharacteristic(connObj, svcId, charId, subscribe)

        def HandleBleCloseCB(connObj):
            return self.CloseBle(connObj)

        self.devMgr.SetBleEventCB(HandleBleEventCB)
        self.devMgr.SetBleWriteCharCB(HandleBleWriteCharCB)
        self.devMgr.SetBleSubscribeCharCB(HandleBleSubscribeCB)
        self.devMgr.SetBleCloseCB(HandleBleCloseCB)

        # test if any connections currently exist (left around from a previous run) and disconnect if need be.
        peripherals = self.manager.retrieveConnectedPeripheralsWithServices_([weave_service_short, weave_service])
        if peripherals and len(peripherals):
            for periph in peripherals:
                self.logger.info("disconnecting old connection.")
                self.loop_condition = False
                self.manager.cancelPeripheralConnection_(periph)
                self.runLoopUntil(("disconnect", time.time(), 5.0))

            self.connect_state = False
            self.loop_condition = False

    def __del__(self):
        self.disconnect()
        self.setInputHook(self.orig_input_hook)
        self.devMgr.SetBlockingCB(None)
        self.devMgr.SetBleEventCB(None)

    def devMgrCB(self):
        """ A callback used by WeaveDeviceMgr.py to drive the OSX runloop while the
        main thread waits for the Weave thread to complete its operation."""
        runLoop = NSRunLoop.currentRunLoop()
        nextfire = runLoop.limitDateForMode_(NSDefaultRunLoopMode)

    def readlineCB(self):
        """ A callback used by readline to drive the OSX runloop while the main thread
        waits for commandline input from the user."""
        runLoop = NSRunLoop.currentRunLoop()
        nextfire = runLoop.limitDateForMode_(NSDefaultRunLoopMode)

        if self.orig_input_hook:
            self.orig_input_hook()

    def setInputHook(self, hookFunc):
        """Set the PyOS_InputHook to call the specific function."""
        hookFunctionType = CFUNCTYPE(None)
        self.hookFuncPtr = hookFunctionType(hookFunc)
        pyos_inputhook_ptr = c_void_p.in_dll(pythonapi, "PyOS_InputHook")
        # save the original so that on del we can revert it back to the way it was.
        self.orig_input_hook = cast(pyos_inputhook_ptr.value, PYFUNCTYPE(c_int))
        # set the new hook. readLine will call this periodically as it polls for input.
        pyos_inputhook_ptr.value = cast(self.hookFuncPtr, c_void_p).value

    def shouldLoop(self, should_tuple):
        """ Used by runLoopUntil to determine whether it should exit the runloop.

        should_tuple is (condition_name, start_time, timeout[, peripheral_name]).
        Returns True while the named condition is still unmet and the timeout
        has not expired."""
        result = False
        time_expired = time.time() >= should_tuple[1] + should_tuple[2]

        if should_tuple[0] == "ready":
            if not self.ready_condition and not time_expired:
                result = True
        elif should_tuple[0] == "scan":
            if not time_expired:
                result = True
                # Stop early if a specifically-named peripheral shows up.
                for peripheral in self.peripheral_list:
                    if should_tuple[3] and str(peripheral._.name) == should_tuple[3]:
                        result = False
                        break
        elif should_tuple[0] == "connect":
            if not self.loop_condition and not time_expired:
                result = True
        elif should_tuple[0] == "disconnect":
            if not self.loop_condition and not time_expired:
                result = True
        elif should_tuple[0] == "send":
            if not self.send_condition and not time_expired:
                result = True
        elif should_tuple[0] == "subscribe":
            if not self.subscribe_condition and not time_expired:
                result = True
        elif should_tuple[0] == "unsubscribe":
            if self.subscribe_condition and not time_expired:
                result = True

        return result

    def runLoopUntil(self, should_tuple):
        """ Helper function to drive OSX runloop until an expected event is received or
        the timeout expires."""
        runLoop = NSRunLoop.currentRunLoop()
        nextfire = 1

        while nextfire and self.shouldLoop(should_tuple):
            nextfire = runLoop.limitDateForMode_(NSDefaultRunLoopMode)

    def centralManagerDidUpdateState_(self, manager):
        """ IO Bluetooth initialization is successful."""
        state = manager.state()
        string = "BLE is ready!" if state > 4 else "BLE is not ready!"
        self.logger.info(string)
        self.manager = manager
        self.ready_condition = True if state > 4 else False

    def centralManager_didDiscoverPeripheral_advertisementData_RSSI_(self, manager, peripheral, data, rssi):
        """ Called for each peripheral discovered during scan."""
        if self.bg_peripheral_name is None:
            # Interactive scan: collect every new peripheral, logging unless quiet.
            if peripheral not in self.peripheral_list:
                if not self.scan_quiet:
                    self.logger.info("adding to scan list:")
                    self.logger.info("")
                    self.logger.info("{0:<10}{1:<80}".format("Name =", str(peripheral._.name)))
                    self.logger.info("{0:<10}{1:<80}".format("ID =", str(peripheral._.identifier.UUIDString())))
                    self.logger.info("{0:<10}{1:<80}".format("RSSI =", rssi))
                    self.logger.info("ADV data: " + repr(data))
                    self.logger.info("")

                self.peripheral_list.append(peripheral)
        else:
            # Background scan: only keep the peripheral we were told to wait for.
            if peripheral._.name == self.bg_peripheral_name:
                if len(self.peripheral_list) == 0:
                    self.logger.info("found background peripheral")
                self.peripheral_list = [peripheral]

    def centralManager_didConnectPeripheral_(self, manager, peripheral):
        """Called by CoreBluetooth via runloop when a connection succeeds."""
        self.logger.debug(repr(peripheral))
        # make this class the delegate for peripheral events.
        self.peripheral.setDelegate_(self)
        # invoke service discovery on the periph.
        self.logger.info("Discovering services")
        self.peripheral.discoverServices_([weave_service_short, weave_service])

    def centralManager_didFailToConnectPeripheral_error_(self, manager, peripheral, error):
        """Called by CoreBluetooth via runloop when a connection fails."""
        self.logger.info("Failed to connect error = " + repr(error))
        self.loop_condition = True
        self.connect_state = False

    def centralManager_didDisconnectPeripheral_error_(self, manager, peripheral, error):
        """Called by CoreBluetooth via runloop when a disconnect completes. error = None on success."""
        self.loop_condition = True
        self.connect_state = False
        if self.devMgr:
            self.logger.info("BLE disconnected, error = " + repr(error))
            dcEvent = BleDisconnectEvent(BLE_ERROR_REMOTE_DEVICE_DISCONNECTED)
            self.weave_queue.put(dcEvent)
            self.devMgr.DriveBleIO()

    def peripheral_didDiscoverServices_(self, peripheral, services):
        """Called by CoreBluetooth via runloop when peripheral services are discovered."""
        if len(self.peripheral.services()) == 0:
            self.logger.error("Weave service not found")
            self.connect_state = False
        else:
            # in debugging, we found connect being called twice. This
            # would trigger discovering the services twice, and
            # consequently, discovering characteristics twice. We use the
            # self.service as a flag to indicate whether the
            # characteristics need to be invalidated immediately.
            if (self.service == self.peripheral.services()[0]):
                self.logger.debug("didDiscoverServices already happened")
            else:
                self.service = self.peripheral.services()[0]
                self.characteristics[self.service.UUID()] = []
            # NOTE: currently limiting discovery to only the pair of Weave characteristics.
            self.peripheral.discoverCharacteristics_forService_([weave_rx, weave_tx], self.service)

    def peripheral_didDiscoverCharacteristicsForService_error_(self, peripheral, service, error):
        """Called by CoreBluetooth via runloop when a characteristic for a service is discovered."""
        self.logger.debug("didDiscoverCharacteristicsForService:error " + str(repr(peripheral)) + " " + str(repr(service)))
        self.logger.debug(repr(service))
        self.logger.debug(repr(error))

        if not error:
            self.characteristics[service.UUID()] = [char for char in self.service.characteristics()]
            self.connect_state = True
        else:
            self.logger.error("ERROR: failed to discover characteristics for service.")
            self.connect_state = False

        self.loop_condition = True

    def peripheral_didWriteValueForCharacteristic_error_(self, peripheral, characteristic, error):
        """ Called by CoreBluetooth via runloop when a write to characteristic
        operation completes. error = None on success."""
        self.logger.debug("didWriteValue error = " + repr(error))
        self.send_condition = True
        charId = bytearray(characteristic.UUID().data().bytes().tobytes())
        svcId = bytearray(weave_service.data().bytes().tobytes())

        if self.devMgr:
            txEvent = BleTxEvent(charId=charId, svcId=svcId, status=True if not error else False)
            self.weave_queue.put(txEvent)
            self.devMgr.DriveBleIO()

    def peripheral_didUpdateNotificationStateForCharacteristic_error_(self, peripheral, characteristic, error):
        """ Called by CoreBluetooth via runloop when a subscribe for notification operation completes.
        Error = None on success."""
        self.logger.debug("Receiving notifications")
        charId = bytearray(characteristic.UUID().data().bytes().tobytes())
        svcId = bytearray(weave_service.data().bytes().tobytes())
        # look at error and send True/False on Success/Failure
        success = True if not error else False
        if characteristic.isNotifying():
            operation = BleSubscribeOperation_Subscribe
            self.subscribe_condition = True
        else:
            operation = BleSubscribeOperation_Unsubscribe
            self.subscribe_condition = False

        self.logger.debug("Operation = " + repr(operation))
        self.logger.debug("success = " + repr(success))

        if self.devMgr:
            subscribeEvent = BleSubscribeEvent(charId=charId, svcId=svcId, status=success, operation=operation)
            self.weave_queue.put(subscribeEvent)
            self.devMgr.DriveBleIO()

    def peripheral_didUpdateValueForCharacteristic_error_(self, peripheral, characteristic, error):
        """ Called by CoreBluetooth via runloop when a new characteristic value is received for a
        characteristic to which this device has subscribed."""
        # Renamed from `bytes` (shadowed the builtin).
        data = bytearray(characteristic.value().bytes().tobytes())
        charId = bytearray(characteristic.UUID().data().bytes().tobytes())
        svcId = bytearray(weave_service.data().bytes().tobytes())

        # Kick Weave thread to retrieve the saved packet.
        if self.devMgr:
            # Save buffer, length, service UUID and characteristic UUID
            rxEvent = BleRxEvent(charId=charId, svcId=svcId, buffer=data)
            self.weave_queue.put(rxEvent)
            self.devMgr.DriveBleIO()

        self.logger.debug("received")
        # BUG FIX: the original passed the builtin `len` here (logging its
        # repr) instead of the payload length.
        self.logger.debug("received (" + str(len(data)) + ") bytes: " + repr(characteristic.value().bytes().tobytes()))

    def GetBleEvent(self):
        """ Called by WeaveDeviceMgr.py on behalf of Weave to retrieve a queued message."""
        if not self.weave_queue.empty():
            ev = self.weave_queue.get()

            if isinstance(ev, BleRxEvent):
                eventStruct = BleRxEventStruct.fromBleRxEvent(ev)
                return cast(pointer(eventStruct), c_void_p).value
            elif isinstance(ev, BleTxEvent):
                eventStruct = BleTxEventStruct.fromBleTxEvent(ev)
                return cast(pointer(eventStruct), c_void_p).value
            elif isinstance(ev, BleSubscribeEvent):
                eventStruct = BleSubscribeEventStruct.fromBleSubscribeEvent(ev)
                return cast(pointer(eventStruct), c_void_p).value
            elif isinstance(ev, BleDisconnectEvent):
                eventStruct = BleDisconnectEventStruct.fromBleDisconnectEvent(ev)
                return cast(pointer(eventStruct), c_void_p).value

        return None

    def scan(self, line):
        """ API to initiate BLE scanning for -t user_timeout seconds."""
        args = self.ParseInputLine(line, "scan")

        if not args:
            return
        self.scan_quiet = args[1]
        self.bg_peripheral_name = None
        del self.peripheral_list[:]
        self.peripheral_list = []
        # Filter on the service UUID Array or None to accept all scan results.
        self.manager.scanForPeripheralsWithServices_options_([weave_service_short, weave_service, chromecast_setup_service_short, chromecast_setup_service], None)

        self.runLoopUntil(("scan", time.time(), args[0], args[2]))

        self.manager.stopScan()
        self.logger.info("scanning stopped")

    def bgScanStart(self, name):
        """ API to initiate background BLE scanning."""
        self.logger.info("scanning started")
        self.bg_peripheral_name = name
        del self.peripheral_list[:]
        self.peripheral_list = []
        # Filter on the service UUID Array or None to accept all scan results.
        self.manager.scanForPeripheralsWithServices_options_([weave_service_short, weave_service, chromecast_setup_service_short, chromecast_setup_service], None)

    def bgScanStop(self):
        """ API to stop background BLE scanning."""
        self.manager.stopScan()
        self.bg_peripheral_name = None
        self.logger.info("scanning stopped")

    def connect(self, identifier):
        """ API to initiate BLE connection to peripheral device whose identifier == identifier."""
        self.logger.info("trying to connect to " + identifier)

        if self.connect_state:
            self.logger.error("ERROR: Connection to a BLE device already exists!")
        else:
            for p in self.peripheral_list:
                p_id = str(p.identifier().UUIDString())
                p_name = str(p.name())
                self.logger.debug(p_id + " vs " + str(identifier))
                self.logger.debug(p_name + " vs " + str(identifier))
                if p_id == str(identifier) or p_name == str(identifier):
                    self.loop_condition = False
                    self.peripheral = p
                    self.manager.connectPeripheral_options_(p, None)

                    self.runLoopUntil(("connect", time.time(), 15.0))

                    # Cleanup when the connect fails due to timeout,
                    # otherwise CoreBluetooth will continue to try to connect after this
                    # API exits.
                    if not self.connect_state:
                        self.manager.cancelPeripheralConnection_(p)
                        self.peripheral = None
                    break

        ret = True if self.loop_condition and self.connect_state else False

        resString = "connect " + ("success" if ret else "fail")
        self.logger.info(resString)

        return ret

    def disconnect(self):
        """ API to initiate BLE disconnect procedure."""
        self.logger.info("disconnecting")

        if self.peripheral and self.peripheral.state() != BlePeripheralState_Disconnected:
            self.loop_condition = False
            self.manager.cancelPeripheralConnection_(self.peripheral)

            self.runLoopUntil(("disconnect", time.time(), 10.0))

            resString = "disconnect " + ("success" if self.loop_condition and not self.connect_state else "fail")
            self.logger.info(resString)

        self.characteristics = {}
        self.peripheral = None
        self.service = None

    def scan_connect(self, line):
        """ API to perform both scan and connect operations in one call."""
        args = self.ParseInputLine(line, "scan-connect")

        if not args:
            return

        self.scan_quiet = args[1]
        self.scan(line)
        if len(self.peripheral_list):
            return self.connect(args[2])
        else:
            self.logger.info("Failed to scan device named: " + args[2] + ". Connection skipped.")
            return False

    def isConnected(self):
        # Connected means we hold a peripheral that CoreBluetooth does not
        # report as disconnected.
        if self.peripheral and self.peripheral.state() != BlePeripheralState_Disconnected:
            return True
        return False

    def WriteBleCharacteristic(self, connObj, svcId, charId, buffer, length):
        """ Called by WeaveDeviceMgr.py to satisfy a request by Weave to transmit a packet over BLE."""
        result = False
        # Renamed from `bytes` (shadowed the builtin).
        payload = WeaveUtility.VoidPtrToByteArray(buffer, length)
        payload = NSData.dataWithBytes_length_(payload, len(payload))  # convert bytearray to NSData

        svcId = _VoidPtrToCBUUID(svcId, 16)
        charId = _VoidPtrToCBUUID(charId, 16)

        if self.peripheral and self.peripheral.state() != BlePeripheralState_Disconnected:
            for char in self.characteristics[svcId]:
                if char.UUID() == charId:
                    self.peripheral.writeValue_forCharacteristic_type_(payload, char, CBCharacteristicWriteWithResponse)
                    result = True
                    break
        else:
            self.logger.warning("WARNING: peripheral is no longer connected.")

        return result

    def SubscribeBleCharacteristic(self, connObj, svcId, charId, subscribe):
        """ Called by Weave to (un-)subscribe to a characteristic of a service."""
        result = False
        svcId = _VoidPtrToCBUUID(svcId, 16)
        charId = _VoidPtrToCBUUID(charId, 16)

        if self.peripheral and self.peripheral.state() != BlePeripheralState_Disconnected:
            for char in self.characteristics[svcId]:
                if char.UUID() == charId:
                    self.peripheral.setNotifyValue_forCharacteristic_(True if subscribe else False, char)
                    result = True
                    break
        else:
            self.logger.warning("WARNING: peripheral is no longer connected.")

        return result

    def ble_debug_log(self, line):
        # Toggle this manager's log level: "1" enables debug logging,
        # anything else restores info.
        args = self.ParseInputLine(line)
        if int(args[0]) == 1:
            self.logger.setLevel(logging.DEBUG)
            self.logger.debug("current logging level is debug")
        else:
            self.logger.setLevel(logging.INFO)
            self.logger.info("current logging level is info")
        return True

    def CloseBle(self, connObj):
        """ Called by Weave to close the BLE connection."""
        if self.peripheral:
            self.manager.cancelPeripheralConnection_(self.peripheral)
            self.characteristics = {}
            self.peripheral = None
            self.service = None
            self.connect_state = False

        return True

    def updateCharacteristic(self, bytes, svcId, charId):
        # TODO: implement this for Peripheral support.
        return False
|
openweave/openweave-core
|
src/device-manager/python/openweave/WeaveCoreBluetoothMgr.py
|
Python
|
apache-2.0
| 23,625
|
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License"""
import django
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
# add_to_builtins moved between modules across Django releases.
try:
    from django.template.base import add_to_builtins
except ImportError:  # Django < 1.7
    from django.template.loader import add_to_builtins

if django.VERSION < (1, 5):  # load the "future" {% url %} tag
    add_to_builtins('django.templatetags.future')

if django.VERSION < (1, 7):
    # Django doing autodiscover automatically:
    # https://docs.djangoproject.com/en/dev/releases/1.7/#app-loading-refactor
    admin.autodiscover()

# App URL routes. String view references and patterns() are the legacy
# (pre-Django-1.10) style used consistently here.
graphite_urls = patterns(
    '',
    ('^admin/', include(admin.site.urls)),
    ('^render/?', include('graphite.render.urls')),
    ('^composer/?', include('graphite.composer.urls')),
    ('^metrics/?', include('graphite.metrics.urls')),
    ('^browser/?', include('graphite.browser.urls')),
    ('^account/', include('graphite.account.urls')),
    ('^dashboard/?', include('graphite.dashboard.urls')),
    ('^whitelist/?', include('graphite.whitelist.urls')),
    ('^version/', include('graphite.version.urls')),
    ('^events/', include('graphite.events.urls')),
    url('^s/(?P<path>.*)',
        'graphite.url_shortener.views.shorten', name='shorten'),
    url('^S/(?P<link_id>[a-zA-Z0-9]+)/?$',
        'graphite.url_shortener.views.follow', name='follow'),
    url('^$', 'graphite.browser.views.browser', name='browser'),
)

# Mount everything under settings.URL_PREFIX (normalized to a single
# trailing slash), or at the root when no prefix is configured.
url_prefix = ''
if settings.URL_PREFIX.strip('/'):
    url_prefix = '{0}/'.format(settings.URL_PREFIX.strip('/'))

urlpatterns = patterns(
    '',
    (r'^{0}'.format(url_prefix), include(graphite_urls)),
)

handler500 = 'graphite.views.server_error'
|
pu239ppy/graphite-web
|
webapp/graphite/urls.py
|
Python
|
apache-2.0
| 2,222
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras 3D transposed convolution layer (sometimes called deconvolution)."""
# pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import
from keras import activations
from keras import constraints
from keras import initializers
from keras import regularizers
from keras.engine.input_spec import InputSpec
from keras.layers.convolutional.conv3d import Conv3D
from keras.utils import conv_utils
import tensorflow.compat.v2 as tf
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.layers.Conv3DTranspose',
'keras.layers.Convolution3DTranspose')
class Conv3DTranspose(Conv3D):
"""Transposed convolution layer (sometimes called Deconvolution).
The need for transposed convolutions generally arises
from the desire to use a transformation going in the opposite direction
of a normal convolution, i.e., from something that has the shape of the
output of some convolution to something that has the shape of its input
while maintaining a connectivity pattern that is compatible with
said convolution.
When using this layer as the first layer in a model,
provide the keyword argument `input_shape`
(tuple of integers or `None`, does not include the sample axis),
e.g. `input_shape=(128, 128, 128, 3)` for a 128x128x128 volume with 3 channels
if `data_format="channels_last"`.
Args:
filters: Integer, the dimensionality of the output space
(i.e. the number of output filters in the convolution).
kernel_size: An integer or tuple/list of 3 integers, specifying the
depth, height and width of the 3D convolution window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the convolution along the depth, height
and width.
Can be a single integer to specify the same value for
all spatial dimensions.
Specifying any stride value != 1 is incompatible with specifying
any `dilation_rate` value != 1.
padding: one of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding with zeros evenly
to the left/right or up/down of the input such that output has the same
height/width dimension as the input.
output_padding: An integer or tuple/list of 3 integers,
specifying the amount of padding along the depth, height, and
width.
Can be a single integer to specify the same value for all
spatial dimensions.
The amount of output padding along a given dimension must be
lower than the stride along that same dimension.
If set to `None` (default), the output shape is inferred.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch_size, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch_size, channels, depth, height, width)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
dilation_rate: an integer or tuple/list of 3 integers, specifying
the dilation rate to use for dilated convolution.
Can be a single integer to specify the same value for
all spatial dimensions.
Currently, specifying any `dilation_rate` value != 1 is
incompatible with specifying any stride value != 1.
activation: Activation function to use.
If you don't specify anything, no activation is applied
(see `keras.activations`).
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the `kernel` weights matrix
(see `keras.initializers`). Defaults to 'glorot_uniform'.
bias_initializer: Initializer for the bias vector
(see `keras.initializers`). Defaults to 'zeros'.
kernel_regularizer: Regularizer function applied to
the `kernel` weights matrix
(see `keras.regularizers`).
bias_regularizer: Regularizer function applied to the bias vector
(see `keras.regularizers`).
activity_regularizer: Regularizer function applied to
the output of the layer (its "activation")
(see `keras.regularizers`).
kernel_constraint: Constraint function applied to the kernel matrix
(see `keras.constraints`).
bias_constraint: Constraint function applied to the bias vector
(see `keras.constraints`).
Input shape:
5D tensor with shape:
`(batch_size, channels, depth, rows, cols)` if data_format='channels_first'
or 5D tensor with shape:
`(batch_size, depth, rows, cols, channels)` if data_format='channels_last'.
Output shape:
5D tensor with shape:
`(batch_size, filters, new_depth, new_rows, new_cols)` if
data_format='channels_first'
or 5D tensor with shape:
`(batch_size, new_depth, new_rows, new_cols, filters)` if
data_format='channels_last'.
`depth` and `rows` and `cols` values might have changed due to padding.
If `output_padding` is specified::
```
new_depth = ((depth - 1) * strides[0] + kernel_size[0] - 2 * padding[0] +
output_padding[0])
new_rows = ((rows - 1) * strides[1] + kernel_size[1] - 2 * padding[1] +
output_padding[1])
new_cols = ((cols - 1) * strides[2] + kernel_size[2] - 2 * padding[2] +
output_padding[2])
```
Returns:
A tensor of rank 5 representing
`activation(conv3dtranspose(inputs, kernel) + bias)`.
Raises:
ValueError: if `padding` is "causal".
ValueError: when both `strides` > 1 and `dilation_rate` > 1.
References:
- [A guide to convolution arithmetic for deep
learning](https://arxiv.org/abs/1603.07285v1)
- [Deconvolutional
Networks](https://www.matthewzeiler.com/mattzeiler/deconvolutionalnetworks.pdf)
"""
  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1, 1),
               padding='valid',
               output_padding=None,
               data_format=None,
               dilation_rate=(1, 1, 1),
               activation=None,
               use_bias=True,
               kernel_initializer='glorot_uniform',
               bias_initializer='zeros',
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               bias_constraint=None,
               **kwargs):
    # All arguments are documented in the class docstring. The string/None
    # forms of activation, initializers, regularizers, and constraints are
    # resolved to callables via the keras getter helpers before being passed
    # to the Conv3D base class.
    super(Conv3DTranspose, self).__init__(
        filters=filters,
        kernel_size=kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        activation=activations.get(activation),
        use_bias=use_bias,
        kernel_initializer=initializers.get(kernel_initializer),
        bias_initializer=initializers.get(bias_initializer),
        kernel_regularizer=regularizers.get(kernel_regularizer),
        bias_regularizer=regularizers.get(bias_regularizer),
        activity_regularizer=regularizers.get(activity_regularizer),
        kernel_constraint=constraints.get(kernel_constraint),
        bias_constraint=constraints.get(bias_constraint),
        **kwargs)

    # Normalize output_padding to a 3-tuple (zeros allowed) and validate
    # that each entry is strictly smaller than the matching stride, which
    # the transposed-convolution shape arithmetic requires.
    self.output_padding = output_padding
    if self.output_padding is not None:
      self.output_padding = conv_utils.normalize_tuple(
          self.output_padding, 3, 'output_padding', allow_zero=True)
      for stride, out_pad in zip(self.strides, self.output_padding):
        if out_pad >= stride:
          raise ValueError('Strides must be greater than output padding. '
                           f'Received strides={self.strides}, '
                           f'output_padding={self.output_padding}.')
def build(self, input_shape):
  """Create the transposed-convolution `kernel` and `bias` weights."""
  input_shape = tf.TensorShape(input_shape)
  # Inputs must be 5-D: batch + 3 spatial dims + channels (in either order).
  if len(input_shape) != 5:
    raise ValueError('Inputs should have rank 5. '
                     f'Received input_shape={input_shape}.')
  channel_axis = self._get_channel_axis()
  if input_shape.dims[channel_axis].value is None:
    raise ValueError('The channel dimension of the inputs '
                     'to `Conv3DTranspose` should be defined. '
                     f'The input_shape received is {input_shape}, '
                     f'where axis {channel_axis} (0-based) '
                     'is the channel dimension, which found to be `None`.')
  input_dim = int(input_shape[channel_axis])
  # Note the channel order: transposed convolution kernels are
  # (kd, kh, kw, output_filters, input_channels) — the reverse of Conv3D.
  kernel_shape = self.kernel_size + (self.filters, input_dim)
  # Pin the channel count so later calls with a different one fail fast.
  self.input_spec = InputSpec(ndim=5, axes={channel_axis: input_dim})
  self.kernel = self.add_weight(
      'kernel',
      shape=kernel_shape,
      initializer=self.kernel_initializer,
      regularizer=self.kernel_regularizer,
      constraint=self.kernel_constraint,
      trainable=True,
      dtype=self.dtype)
  if self.use_bias:
    self.bias = self.add_weight(
        'bias',
        shape=(self.filters,),
        initializer=self.bias_initializer,
        regularizer=self.bias_regularizer,
        constraint=self.bias_constraint,
        trainable=True,
        dtype=self.dtype)
  else:
    self.bias = None
  self.built = True
def call(self, inputs):
  """Apply the transposed 3-D convolution (plus optional bias/activation)."""
  # Dynamic shape: spatial dims may be unknown at graph-build time.
  inputs_shape = tf.shape(inputs)
  batch_size = inputs_shape[0]
  if self.data_format == 'channels_first':
    d_axis, h_axis, w_axis = 2, 3, 4
  else:
    d_axis, h_axis, w_axis = 1, 2, 3

  depth = inputs_shape[d_axis]
  height = inputs_shape[h_axis]
  width = inputs_shape[w_axis]

  kernel_d, kernel_h, kernel_w = self.kernel_size
  stride_d, stride_h, stride_w = self.strides

  if self.output_padding is None:
    out_pad_d = out_pad_h = out_pad_w = None
  else:
    out_pad_d, out_pad_h, out_pad_w = self.output_padding

  # Infer the dynamic output shape:
  out_depth = conv_utils.deconv_output_length(depth,
                                              kernel_d,
                                              padding=self.padding,
                                              output_padding=out_pad_d,
                                              stride=stride_d)
  out_height = conv_utils.deconv_output_length(height,
                                               kernel_h,
                                               padding=self.padding,
                                               output_padding=out_pad_h,
                                               stride=stride_h)
  out_width = conv_utils.deconv_output_length(width,
                                              kernel_w,
                                              padding=self.padding,
                                              output_padding=out_pad_w,
                                              stride=stride_w)
  if self.data_format == 'channels_first':
    output_shape = (batch_size, self.filters, out_depth, out_height,
                    out_width)
    strides = (1, 1, stride_d, stride_h, stride_w)
  else:
    output_shape = (batch_size, out_depth, out_height, out_width,
                    self.filters)
    strides = (1, stride_d, stride_h, stride_w, 1)
  # conv3d_transpose needs the target shape as a tensor (dims are dynamic).
  output_shape_tensor = tf.stack(output_shape)
  outputs = tf.nn.conv3d_transpose(
      inputs,
      self.kernel,
      output_shape_tensor,
      strides,
      data_format=conv_utils.convert_data_format(self.data_format, ndim=5),
      padding=self.padding.upper())

  if not tf.executing_eagerly():
    # Infer the static output shape:
    out_shape = self.compute_output_shape(inputs.shape)
    outputs.set_shape(out_shape)

  if self.use_bias:
    outputs = tf.nn.bias_add(
        outputs,
        self.bias,
        # NOTE(review): ndim=4 (not 5) — bias_add only distinguishes
        # channels-first vs channels-last, so the 4-D format string appears
        # to suffice for 5-D inputs; confirm before "fixing" this.
        data_format=conv_utils.convert_data_format(self.data_format, ndim=4))

  if self.activation is not None:
    return self.activation(outputs)
  return outputs
def compute_output_shape(self, input_shape):
  """Compute the static output shape from a (possibly partial) input shape."""
  input_shape = tf.TensorShape(input_shape).as_list()
  output_shape = list(input_shape)
  if self.data_format == 'channels_first':
    c_axis, d_axis, h_axis, w_axis = 1, 2, 3, 4
  else:
    c_axis, d_axis, h_axis, w_axis = 4, 1, 2, 3

  kernel_d, kernel_h, kernel_w = self.kernel_size
  stride_d, stride_h, stride_w = self.strides

  if self.output_padding is None:
    out_pad_d = out_pad_h = out_pad_w = None
  else:
    out_pad_d, out_pad_h, out_pad_w = self.output_padding

  # Channel count becomes the number of filters; each spatial dim follows
  # the standard transposed-convolution length formula.
  output_shape[c_axis] = self.filters
  output_shape[d_axis] = conv_utils.deconv_output_length(
      output_shape[d_axis],
      kernel_d,
      padding=self.padding,
      output_padding=out_pad_d,
      stride=stride_d)
  output_shape[h_axis] = conv_utils.deconv_output_length(
      output_shape[h_axis],
      kernel_h,
      padding=self.padding,
      output_padding=out_pad_h,
      stride=stride_h)
  output_shape[w_axis] = conv_utils.deconv_output_length(
      output_shape[w_axis],
      kernel_w,
      padding=self.padding,
      output_padding=out_pad_w,
      stride=stride_w)
  return tf.TensorShape(output_shape)
def get_config(self):
  """Return the layer config, dropping the unused `dilation_rate` entry."""
  base_config = super(Conv3DTranspose, self).get_config()
  # dilation is not supported by the transposed op; output_padding is.
  del base_config['dilation_rate']
  base_config['output_padding'] = self.output_padding
  return base_config
# Alias
Convolution3DTranspose = Conv3DTranspose
|
keras-team/keras
|
keras/layers/convolutional/conv3d_transpose.py
|
Python
|
apache-2.0
| 14,187
|
from __future__ import annotations
import pathlib
import random
import sys
import typing
from typing import (
AbstractSet,
Container,
FrozenSet,
Iterable,
Tuple,
)
import gidgethub.abc
import gidgethub.httpx
import gidgethub.actions
import httpx
import trio
import yaml
class ConfigData(typing.TypedDict):
    """Dict representation of assign-reviewers.yml."""

    # Total number of reviewers each PR should end up with.
    numberOfReviewers: int
    # GitHub logins of project team members.
    team: list[str]
    # GitHub logins of people eligible to be asked for review.
    reviewers: list[str]
async def already_reviewed(gh: gidgethub.abc.GitHubAPI) -> FrozenSet[str]:
    """Return the set of users who have already left a real review."""
    event = gidgethub.actions.event()
    review_pages = gh.getiter(
        "/repos/{owner}/{repo}/pulls/{pull_number}/reviews",
        url_vars={
            "owner": event["repository"]["owner"]["login"],
            "repo": event["repository"]["name"],
            "pull_number": event["pull_request"]["number"],
        },
    )
    # GitHub provides the complete review history in chronological order;
    # plain comments ("COMMENTED") do not count as reviews.
    return frozenset({
        review["user"]["login"]
        async for review in review_pages
        if review["state"] != "COMMENTED"
    })
def select_reviewers(
    *,
    author: str,
    available_reviewers: AbstractSet[str],
    assigned_reviewers: AbstractSet[str],
    already_reviewers: AbstractSet[str],
    count: int,
) -> Tuple[FrozenSet[str], FrozenSet[str]]:
    """Select people to review the PR.

    The PR author never reviews their own PR, and every eligible person who
    is already assigned or has already reviewed counts against the requested
    number of reviewers.  Returns (all reviewers, newly selected reviewers).
    """
    # Eligible people already involved, either by assignment or past review.
    already_reviewing = frozenset(
        available_reviewers & (assigned_reviewers | already_reviewers)
    )
    candidates = set(available_reviewers) - already_reviewing
    candidates.discard(author)
    print("Potential reviewers (left):", candidates)
    print(f"Want {count} reviewers")
    remaining = count - len(already_reviewing)
    print(f"Need {remaining} more reviewers")
    picks = []
    while remaining > 0 and candidates:
        pick = random.choice(list(candidates))
        candidates.discard(pick)
        picks.append(pick)
        remaining -= 1
    chosen = frozenset(picks)
    print("Reviewers to add:", chosen)
    return already_reviewing | chosen, chosen
async def add_assignee(
    gh: gidgethub.abc.GitHubAPI, team: Container[str], reviewers: Iterable[str]
) -> None:
    """Assign the PR.

    Team members get assigned to their own PR; an external PR is assigned
    to a randomly chosen reviewer.
    """
    event = gidgethub.actions.event()
    author = event["pull_request"]["user"]["login"]
    assignee = author if author in team else random.choice(list(reviewers))
    await gh.post(
        "/repos/{owner}/{repo}/issues/{issue_number}/assignees",
        url_vars={
            "owner": event["repository"]["owner"]["login"],
            "repo": event["repository"]["name"],
            "issue_number": event["pull_request"]["number"],
        },
        data={"assignees": [assignee]},
    )
async def add_reviewers(
    gh: gidgethub.abc.GitHubAPI, reviewers_to_add: Iterable[str]
) -> None:
    """Request reviews from the given users on the current PR."""
    event = gidgethub.actions.event()
    repository = event["repository"]
    url_vars = {
        "owner": repository["owner"]["login"],
        "repo": repository["name"],
        "pull_number": event["pull_request"]["number"],
    }
    await gh.post(
        "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers",
        url_vars=url_vars,
        data={"reviewers": list(reviewers_to_add)},
    )
async def main(token: str):
    """Action entry point: read config, then assign the PR if unassigned.

    Reviewer selection is currently disabled (kept commented out below);
    only the assignment step is active.
    """
    config_file = pathlib.Path(__file__).parent.parent / "assign-reviewers.yml"
    with config_file.open(encoding="utf-8") as file:
        config: ConfigData = yaml.safe_load(file)
    event = gidgethub.actions.event()
    # author = event["pull_request"]["user"]["login"]
    # available_reviewers = frozenset(config["reviewers"])
    # print("Available reviewers:", available_reviewers)
    # assigned_reviewers = {
    #     reviewer["login"] for reviewer in event["pull_request"]["requested_reviewers"]
    # }
    # print("Reviewers already requested:", assigned_reviewers)
    async with httpx.AsyncClient(timeout=None) as client:
        gh = gidgethub.httpx.GitHubAPI(
            client, event["repository"]["full_name"], oauth_token=token
        )
        # already_reviewers = await already_reviewed(gh)
        # print("People who have already reviewed:", already_reviewers)
        # team_reviewers, reviewers_to_add = select_reviewers(
        #     author=author,
        #     available_reviewers=available_reviewers,
        #     assigned_reviewers=assigned_reviewers,
        #     already_reviewers=already_reviewers,
        #     count=int(config["numberOfReviewers"]),
        # )
        async with trio.open_nursery() as nursery:
            # Only assign when nobody is assigned yet.
            if not event["pull_request"]["assignee"]:
                nursery.start_soon(
                    add_assignee, gh, frozenset(config["team"]), config["reviewers"]
                )
            # if reviewers_to_add and not event["pull_request"]["draft"]:
            #     nursery.start_soon(add_reviewers, gh, reviewers_to_add)
            # else:
            #     print("No reviewers to add or PR is in draft")
if __name__ == "__main__":
    # The workflow passes a GitHub token as the sole CLI argument.
    trio.run(main, sys.argv[1])
|
DonJayamanne/pythonVSCode
|
.github/assign-reviewers/__main__.py
|
Python
|
mit
| 5,762
|
#
# users.py: Code for creating user accounts and setting the root password
#
# Copyright (C) 2006, 2007, 2008 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Chris Lumens <clumens@redhat.com>
#
import libuser
import string
import crypt
import random
import tempfile
import os
import os.path
from pyanaconda import iutil
import pwquality
from pyanaconda.iutil import strip_accents
from pyanaconda.i18n import _
from pyanaconda.constants import PASSWORD_MIN_LEN
import logging
log = logging.getLogger("anaconda")
def createLuserConf(instPath, algoname='sha512'):
    """ Writes a libuser.conf for instPath.

        This must be called before User() is instantiated the first time
        so that libuser.admin will use the temporary config file.
    """
    createTmp = False
    try:
        # Reuse the path from a previous call if possible, replacing any
        # stale file there; KeyError/IOError/OSError fall back to a temp file.
        fn = os.environ["LIBUSER_CONF"]
        if os.access(fn, os.F_OK):
            log.info("removing libuser.conf at %s", os.getenv("LIBUSER_CONF"))
            os.unlink(fn)
        log.info("created new libuser.conf at %s with instPath=\"%s\"", fn, instPath)
        fd = open(fn, 'w')
    except (OSError, IOError, KeyError):
        createTmp = True

    if createTmp:
        (fp, fn) = tempfile.mkstemp(prefix="libuser.")
        log.info("created new libuser.conf at %s with instPath=\"%s\"", fn, instPath)
        fd = os.fdopen(fp, 'w')

    # %-substitute the target root and crypt algorithm into the config body.
    buf = """
[defaults]
skeleton = %(instPath)s/etc/skel
mailspooldir = %(instPath)s/var/mail
crypt_style = %(algo)s
modules = files shadow
create_modules = files shadow
[files]
directory = %(instPath)s/etc
[shadow]
directory = %(instPath)s/etc
""" % {"instPath": instPath, "algo": algoname}
    fd.write(buf)
    fd.close()
    # Point libuser at the file we just wrote.
    os.environ["LIBUSER_CONF"] = fn

    return fn
def getPassAlgo(authconfigStr):
    """ Reads the auth string and returns a string indicating our desired
        password encoding algorithm, or None when no algorithm is requested.
    """
    # MD5 takes precedence when both flags appear, matching authconfig.
    if "--enablemd5" in authconfigStr or "--passalgo=md5" in authconfigStr:
        return 'md5'
    if "--passalgo=sha256" in authconfigStr:
        return 'sha256'
    if "--passalgo=sha512" in authconfigStr:
        return 'sha512'
    return None
# These are explained in crypt/crypt-entry.c in glibc's code. The prefixes
# we use for the different crypt salts:
# $1$ MD5
# $5$ SHA256
# $6$ SHA512
def cryptPassword(password, algo=None):
    """Hash *password* for /etc/shadow with the requested crypt(3) algorithm.

    :param password: the clear-text password to hash
    :param algo: 'md5', 'sha256' or 'sha512' (the default when None)
    :returns: the crypt(3) hash string (salt prefix selects the algorithm)
    :raises KeyError: for an unrecognized *algo* value
    """
    salts = {'md5': '$1$', 'sha256': '$5$', 'sha512': '$6$'}
    saltlen = 2

    if algo is None:
        algo = 'sha512'

    if algo in salts:
        saltlen = 16

    saltstr = salts[algo]
    # Use string.ascii_letters rather than the Python-2-only, locale-dependent
    # string.letters: crypt(3) salts must stay within [a-zA-Z0-9./].
    salt_chars = string.ascii_letters + string.digits + './'
    for _i in range(saltlen):
        saltstr = saltstr + random.choice(salt_chars)

    return crypt.crypt(password, saltstr)
def validatePassword(pw, user="root", settings=None):
    """Check the quality of a password.

    This function does three things: given a password and an optional
    username, it will tell if this password can be used at all, how
    strong the password is on a scale of 1-100, and, if the password is
    unusable, why it is unusable.

    This function uses libpwquality to check the password strength.
    pwquality will raise a PWQError on a weak password, which, honestly,
    is kind of dumb behavior. A weak password isn't exceptional, it's what
    we're asking about! Anyway, this function does not raise PWQError. If
    the password fails the PWQSettings conditions, the first member of the
    return tuple will be False and the second member of the tuple will be 0.

    :param pw: the password to check
    :type pw: string

    :param user: the username for which the password is being set. If no
                 username is provided, "root" will be used. Use user=None
                 to disable the username check.
    :type user: string

    :param settings: an optional PWQSettings object
    :type settings: pwquality.PWQSettings

    :returns: A tuple containing (bool(valid), int(score), str(message))
    :rtype: tuple
    """
    valid = True
    message = None
    strength = 0

    if settings is None:
        # Generate a default PWQSettings once and save it as a member of this function
        if not hasattr(validatePassword, "pwqsettings"):
            validatePassword.pwqsettings = pwquality.PWQSettings()
            validatePassword.pwqsettings.read_config()
            validatePassword.pwqsettings.minlen = PASSWORD_MIN_LEN
        settings = validatePassword.pwqsettings

    # Reject anything outside printable ASCII before asking pwquality.
    legal = string.digits + string.ascii_letters + string.punctuation + " "
    for letter in pw:
        if letter not in legal:
            message = _("Requested password contains "
                        "non-ASCII characters, which are "
                        "not allowed.")
            valid = False
            break

    if valid:
        try:
            strength = settings.check(pw, None, user)
        except pwquality.PWQError as e:
            # Leave valid alone here: the password is weak but can still
            # be accepted.
            # PWQError values are built as a tuple of (int, str)
            # NOTE(review): indexing the exception (e[1]) is Python 2
            # behavior; e.args[1] would be the portable spelling.
            message = e[1]

    return (valid, strength, message)
def guess_username(fullname):
    """Derive a login name from a person's full name.

    Uses the lower-cased last name word, prefixed with the initial of the
    first name when more than one word is present, with accents stripped.

    NOTE(review): the .decode("utf-8")/.encode("utf-8") round-trip assumes
    *fullname* is a byte string — Python 2 semantics; str.decode does not
    exist on Python 3.
    """
    fullname = fullname.split()

    # use last name word (at the end in most of the western countries..)
    if len(fullname) > 0:
        username = fullname[-1].decode("utf-8").lower()
    else:
        username = u""

    # and prefix it with the first name initial
    if len(fullname) > 1:
        username = fullname[0].decode("utf-8")[0].lower() + username

    username = strip_accents(username).encode("utf-8")
    return username
class Users:
    """Create users and groups on the installed system via libuser.

    Mutating operations fork a child that (optionally) chroots into the
    target root, re-initializes libuser there, performs the change and
    reports success/failure through its exit status.
    """
    def __init__ (self):
        self.admin = libuser.admin()

    def createGroup (self, group_name, **kwargs):
        """Create a new group on the system with the given name. Optional kwargs:

           gid       -- The GID for the new group. If none is given, the next
                        available one is used.
           root      -- The directory of the system to create the new group
                        in.  Defaults to /mnt/sysimage.
        """
        childpid = os.fork()
        root = kwargs.get("root", "/mnt/sysimage")

        if not childpid:
            # Child: chroot into the target system (unless installing into
            # the live root), then talk to libuser inside it.
            if not root in ["","/"]:
                os.chroot(root)
                os.chdir("/")
                # Drop the installer's temporary config so libuser reads
                # the chrooted system's own configuration.
                del(os.environ["LIBUSER_CONF"])

            self.admin = libuser.admin()

            if self.admin.lookupGroupByName(group_name):
                log.error("Group %s already exists, not creating.", group_name)
                os._exit(1)

            groupEnt = self.admin.initGroup(group_name)

            if kwargs.get("gid", -1) >= 0:
                groupEnt.set(libuser.GIDNUMBER, kwargs["gid"])

            try:
                self.admin.addGroup(groupEnt)
            except RuntimeError as e:
                log.critical("Error when creating new group: %s", e)
                os._exit(1)

            os._exit(0)

        # Parent: the child's exit status is the success indicator.
        try:
            status = os.waitpid(childpid, 0)[1]
        except OSError as e:
            log.critical("exception from waitpid while creating a group: %s %s", e.errno, e.strerror)
            return False

        if os.WIFEXITED(status) and (os.WEXITSTATUS(status) == 0):
            return True
        else:
            return False

    def createUser (self, user_name, *args, **kwargs):
        """Create a new user on the system with the given name.  Optional kwargs:

           algo      -- The password algorithm to use in case isCrypted=True.
                        If none is given, the cryptPassword default is used.
           gecos     -- The GECOS information (full name, office, phone, etc.).
                        Defaults to "".
           groups    -- A list of existing group names the user should be
                        added to.  Defaults to [].
           homedir   -- The home directory for the new user.  Defaults to
                        /home/<name>.
           isCrypted -- Is the password kwargs already encrypted?  Defaults
                        to False.
           lock      -- Is the new account locked by default?  Defaults to
                        False.
           password  -- The password.  See isCrypted for how this is interpreted.
                        If the password is "" then the account is created
                        with a blank password. If None or False the account will
                        be left in its initial state (locked)
           root      -- The directory of the system to create the new user
                        in.  homedir will be interpreted relative to this.
                        Defaults to /mnt/sysimage.
           shell     -- The shell for the new user.  If none is given, the
                        libuser default is used.
           uid       -- The UID for the new user.  If none is given, the next
                        available one is used.
           gid       -- The GID for the new user.  If none is given, the next
                        available one is used.
        """
        childpid = os.fork()
        root = kwargs.get("root", "/mnt/sysimage")

        if not childpid:
            # Child: chroot into the target system before touching libuser.
            if not root in ["","/"]:
                os.chroot(root)
                os.chdir("/")
                del(os.environ["LIBUSER_CONF"])

            self.admin = libuser.admin()

            if self.admin.lookupUserByName(user_name):
                log.error("User %s already exists, not creating.", user_name)
                os._exit(1)

            userEnt = self.admin.initUser(user_name)
            # A private per-user group with the same name.
            groupEnt = self.admin.initGroup(user_name)

            if kwargs.get("gid", -1) >= 0:
                groupEnt.set(libuser.GIDNUMBER, kwargs["gid"])

            # Resolve requested supplementary groups, dropping unknown ones.
            # NOTE(review): relies on Python 2 filter/map returning lists
            # (grpLst.append and list + map below would fail on Python 3).
            grpLst = filter(lambda grp: grp,
                            map(self.admin.lookupGroupByName, kwargs.get("groups", [])))
            userEnt.set(libuser.GIDNUMBER, [groupEnt.get(libuser.GIDNUMBER)[0]] +
                        map(lambda grp: grp.get(libuser.GIDNUMBER)[0], grpLst))

            if kwargs.get("homedir", False):
                userEnt.set(libuser.HOMEDIRECTORY, kwargs["homedir"])
            else:
                iutil.mkdirChain(root+'/home')
                userEnt.set(libuser.HOMEDIRECTORY, "/home/" + user_name)

            if kwargs.get("shell", False):
                userEnt.set(libuser.LOGINSHELL, kwargs["shell"])

            if kwargs.get("uid", -1) >= 0:
                userEnt.set(libuser.UIDNUMBER, kwargs["uid"])

            if kwargs.get("gecos", False):
                userEnt.set(libuser.GECOS, kwargs["gecos"])

            # need to create home directory for the user or does it already exist?
            # userEnt.get returns lists (usually with a single item)
            mk_homedir = not os.path.exists(userEnt.get(libuser.HOMEDIRECTORY)[0])

            try:
                self.admin.addUser(userEnt, mkmailspool=kwargs.get("mkmailspool", True),
                                   mkhomedir=mk_homedir)
            except RuntimeError as e:
                log.critical("Error when creating new user: %s", e)
                os._exit(1)

            try:
                self.admin.addGroup(groupEnt)
            except RuntimeError as e:
                log.critical("Error when creating new group: %s", e)
                os._exit(1)

            if not mk_homedir:
                try:
                    stats = os.stat(userEnt.get(libuser.HOMEDIRECTORY)[0])
                    orig_uid = stats.st_uid
                    orig_gid = stats.st_gid

                    log.info("Home directory for the user %s already existed, "
                             "fixing the owner.", user_name)
                    # home directory already existed, change owner of it properly
                    iutil.chown_dir_tree(userEnt.get(libuser.HOMEDIRECTORY)[0],
                                         userEnt.get(libuser.UIDNUMBER)[0],
                                         groupEnt.get(libuser.GIDNUMBER)[0],
                                         orig_uid, orig_gid)
                except OSError as e:
                    # NOTE(review): this logs the os.strerror *function*,
                    # not the error text — e.strerror was probably intended.
                    log.critical("Unable to change owner of existing home directory: %s",
                                 os.strerror)
                    os._exit(1)

            pw = kwargs.get("password", False)
            try:
                if pw:
                    if kwargs.get("isCrypted", False):
                        password = kwargs["password"]
                    else:
                        password = cryptPassword(kwargs["password"], algo=kwargs.get("algo", None))
                    self.admin.setpassUser(userEnt, password, True)
                    # Clear the "last changed" field so it is set on first use.
                    userEnt.set(libuser.SHADOWLASTCHANGE, "")
                    self.admin.modifyUser(userEnt)
                elif pw == "":
                    # Setup the account with *NO* password
                    self.admin.unlockUser(userEnt)
                    log.info("user account %s setup with no password", user_name)

                if kwargs.get("lock", False):
                    self.admin.lockUser(userEnt)
                    log.info("user account %s locked", user_name)
            # setpassUser raises SystemError on failure, while unlockUser and lockUser
            # raise RuntimeError
            except (RuntimeError, SystemError) as e:
                log.critical("Unable to set password for new user: %s", e)
                os._exit(1)

            # Add the user to all the groups they should be part of.
            grpLst.append(self.admin.lookupGroupByName(user_name))
            try:
                for grp in grpLst:
                    grp.add(libuser.MEMBERNAME, user_name)
                    self.admin.modifyGroup(grp)
            except RuntimeError as e:
                log.critical("Unable to add user to groups: %s", e)
                os._exit(1)

            os._exit(0)

        # Parent: translate the child's exit status into True/False.
        try:
            status = os.waitpid(childpid, 0)[1]
        except OSError as e:
            log.critical("exception from waitpid while creating a user: %s %s", e.errno, e.strerror)
            return False

        if os.WIFEXITED(status) and (os.WEXITSTATUS(status) == 0):
            return True
        else:
            return False

    def checkUserExists(self, username, root="/mnt/sysimage"):
        """Return True if *username* exists on the system rooted at *root*."""
        childpid = os.fork()

        if not childpid:
            # Child: look up the user inside the (possibly chrooted) system.
            if not root in ["","/"]:
                os.chroot(root)
                os.chdir("/")
                del(os.environ["LIBUSER_CONF"])

            self.admin = libuser.admin()
            if self.admin.lookupUserByName(username):
                os._exit(0)
            else:
                os._exit(1)

        try:
            status = os.waitpid(childpid, 0)[1]
        except OSError as e:
            log.critical("exception from waitpid while creating a user: %s %s", e.errno, e.strerror)
            return False

        if os.WIFEXITED(status) and (os.WEXITSTATUS(status) == 0):
            return True
        else:
            return False

    def setUserPassword(self, username, password, isCrypted, lock, algo=None):
        """Set (and optionally lock) the password of an existing user."""
        user = self.admin.lookupUserByName(username)

        if isCrypted:
            self.admin.setpassUser(user, password, True)
        else:
            self.admin.setpassUser(user, cryptPassword(password, algo=algo), True)

        if lock:
            self.admin.lockUser(user)

        # Clear the "last changed" field so it is updated on first login.
        user.set(libuser.SHADOWLASTCHANGE, "")
        self.admin.modifyUser(user)

    def setRootPassword(self, password, isCrypted=False, isLocked=False, algo=None):
        """Convenience wrapper: set the root account's password."""
        return self.setUserPassword("root", password, isCrypted, isLocked, algo)
|
Sabayon/anaconda
|
pyanaconda/users.py
|
Python
|
gpl-2.0
| 16,470
|
## www.pubnub.com - PubNub Real-time push service in the cloud.
# coding=utf8
## PubNub Real-time Push APIs and Notifications Framework
## Copyright (c) 2010 Stephen Blum
## http://www.pubnub.com/
## -----------------------------------
## PubNub 3.0 Real-time Push Cloud API
## -----------------------------------
try: import json
except ImportError: import simplejson as json
import time
import hashlib
import urllib2
class Pubnub():
    """Blocking PubNub 3.0 client: publish, subscribe, history and time.

    NOTE(review): Python 2 code throughout (urllib2, dict.has_key, md5
    signing of str values).
    """
    def __init__(
        self,
        publish_key,
        subscribe_key,
        secret_key = False,
        ssl_on = False,
        origin = 'pubsub.pubnub.com'
    ) :
        """
        #**
        #* Pubnub
        #*
        #* Init the Pubnub Client API
        #*
        #* @param string publish_key required key to send messages.
        #* @param string subscribe_key required key to receive messages.
        #* @param string secret_key required key to sign messages.
        #* @param boolean ssl required for 2048 bit encrypted messages.
        #* @param string origin PUBNUB Server Origin.
        #**

        ## Initiate the Class
        pubnub = Pubnub( 'PUBLISH-KEY', 'SUBSCRIBE-KEY', 'SECRET-KEY', False )
        """
        self.origin        = origin
        self.limit         = 1800
        self.publish_key   = publish_key
        self.subscribe_key = subscribe_key
        self.secret_key    = secret_key
        self.ssl           = ssl_on

        # Prefix the origin with the scheme matching the SSL setting.
        if self.ssl :
            self.origin = 'https://' + self.origin
        else :
            self.origin = 'http://' + self.origin

    def publish( self, args ) :
        """
        #**
        #* Publish
        #*
        #* Send a message to a channel.
        #*
        #* @param array args with channel and message.
        #* @return array success information.
        #**

        ## Publish Example
        info = pubnub.publish({
            'channel' : 'hello_world',
            'message' : {
                'some_text' : 'Hello my World'
            }
        })
        print(info)
        """
        ## Fail if bad input.
        if not (args['channel'] and args['message']) :
            return [ 0, 'Missing Channel or Message' ]

        ## Capture User Input
        channel = args['channel']
        # Compact JSON (no whitespace) so the signature matches the payload.
        message = json.dumps(args['message'], separators=(',',':'))

        ## Sign Message
        if self.secret_key :
            signature = hashlib.md5('/'.join([
                self.publish_key,
                self.subscribe_key,
                self.secret_key,
                channel,
                message
            ])).hexdigest()
        else :
            signature = '0'

        ## Send Message
        return self._request([
            'publish',
            self.publish_key,
            self.subscribe_key,
            signature,
            channel,
            '0',
            message
        ])

    def subscribe( self, args ) :
        """
        #**
        #* Subscribe
        #*
        #* This is BLOCKING.
        #* Listen for a message on a channel.
        #*
        #* @param array args with channel and message.
        #* @return false on fail, array on success.
        #**

        ## Subscribe Example
        def receive(message) :
            print(message)
            return True

        pubnub.subscribe({
            'channel'  : 'hello_world',
            'callback' : receive
        })
        """
        ## Fail if missing channel
        if not 'channel' in args :
            raise Exception('Missing Channel.')
            return False  # NOTE(review): unreachable — the raise exits first.

        ## Fail if missing callback
        if not 'callback' in args :
            raise Exception('Missing Callback.')
            return False  # NOTE(review): unreachable — the raise exits first.

        ## Capture User Input
        channel  = args['channel']
        callback = args['callback']

        ## Begin Subscribe
        while True :
            # and/or idiom: missing key yields the initial timetoken 0.
            timetoken = 'timetoken' in args and args['timetoken'] or 0
            try :
                ## Wait for Message (long-poll against the subscribe endpoint)
                response = self._request([
                    'subscribe',
                    self.subscribe_key,
                    channel,
                    '0',
                    str(timetoken)
                ])

                messages          = response[0]
                # Remember the server cursor for the next long-poll.
                args['timetoken'] = response[1]

                ## If it was a timeout
                if not len(messages) :
                    continue

                ## Run user Callback and Reconnect if user permits.
                for message in messages :
                    if not callback(message) :
                        return

            except Exception:
                # Back off briefly on any request/parse failure, then retry.
                time.sleep(1)

        return True  # NOTE(review): unreachable — the while loop never breaks.

    def history( self, args ) :
        """
        #**
        #* History
        #*
        #* Load history from a channel.
        #*
        #* @param array args with 'channel' and 'limit'.
        #* @return mixed false on fail, array on success.
        #*

        ## History Example
        history = pubnub.history({
            'channel' : 'hello_world',
            'limit'   : 1
        })
        print(history)
        """
        ## Capture User Input
        # NOTE(review): Python 2 has_key + and/or idiom — an explicit
        # limit of 0 silently falls back to the default of 10.
        limit   = args.has_key('limit') and int(args['limit']) or 10
        channel = args['channel']

        ## Fail if bad input.
        if not channel :
            raise Exception('Missing Channel')
            return False  # NOTE(review): unreachable — the raise exits first.

        ## Get History
        return self._request([
            'history',
            self.subscribe_key,
            channel,
            '0',
            str(limit)
        ]);

    def time(self) :
        """
        #**
        #* Time
        #*
        #* Timestamp from PubNub Cloud.
        #*
        #* @return int timestamp.
        #*

        ## PubNub Server Time Example
        timestamp = pubnub.time()
        print(timestamp)
        """
        return self._request([
            'time',
            '0'
        ])[0]

    def _encode( self, request ) :
        # Percent-encode the characters PubNub treats as special in URL
        # path segments; everything else passes through unchanged.
        return [
            "".join([ ' ~`!@#$%^&*()+=[]\\{}|;\':",./<>?'.find(ch) > -1 and
                hex(ord(ch)).replace( '0x', '%' ).upper() or
                ch for ch in list(bit)
            ]) for bit in request]

    def _request( self, request, origin = None, encode = True ) :
        ## Build URL from the (optionally percent-encoded) path segments.
        url = (origin or self.origin) + '/' + "/".join(
            encode and self._encode(request) or request
        )

        ## Send Request Expecting JSONP Response
        try:
            # Older urllib2 lacks the timeout parameter — fall back without it.
            try: usock = urllib2.urlopen( url, None, 200 )
            except TypeError: usock = urllib2.urlopen( url, None )
            response = usock.read()
            usock.close()
            return json.loads( response )
        except:
            # NOTE(review): bare except — any network/JSON error becomes None.
            return None
|
joeshaw/pubnub-python
|
Pubnub.py
|
Python
|
mit
| 6,723
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2017 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
import os
import sys
import unicodedata
from django.core.cache import cache
from django.http import HttpResponseRedirect
from django.shortcuts import resolve_url, render as django_render, redirect
from django.utils.encoding import force_text
from django.utils.http import is_safe_url
from django.utils.translation import ugettext as _, ugettext_lazy
try:
import pyuca # pylint: disable=import-error
HAS_PYUCA = True
except ImportError:
HAS_PYUCA = False
import six
from six.moves.urllib.parse import urlparse
from weblate.trans.data import data_dir
# Separator used to join/split the plural forms stored in a single string.
PLURAL_SEPARATOR = '\x1e\x1e'

# Default domain names about which to warn the user.
DEFAULT_DOMAINS = ('example.net', 'example.com')

# Translation priority choices (lower number means higher priority).
PRIORITY_CHOICES = (
    (60, ugettext_lazy('Very high')),
    (80, ugettext_lazy('High')),
    (100, ugettext_lazy('Medium')),
    (120, ugettext_lazy('Low')),
    (140, ugettext_lazy('Very low')),
)
def is_plural(text):
    """Check whether the string carries multiple plural forms."""
    return PLURAL_SEPARATOR in text
def split_plural(text):
    """Split a joined plural string into the list of its forms."""
    return text.split(PLURAL_SEPARATOR)
def join_plural(text):
    """Join an iterable of plural forms into a single string."""
    return PLURAL_SEPARATOR.join(text)
def get_string(text):
    """Return correctly formatted string from ttkit unit data."""
    # Null target happens with XLIFF.
    if text is None:
        return ''
    # Plain strings pass through; multistrings expose forms via `strings`.
    if not hasattr(text, 'strings'):
        return text
    return join_plural(text.strings)
def is_repo_link(val):
    """Check whether the repository is just a link to another one."""
    return val[:10] == 'weblate://'
def get_distinct_translations(units):
    """Return list of units with distinct targets, keeping first occurrences.

    It should be possible to use distinct('target') since Django 1.4, but it
    is not supported with MySQL, so let's emulate that based on the
    presumption we won't get too many results.
    """
    # Use a real set for the seen targets instead of a dict with dummy
    # values — same O(1) membership test, clearer intent.
    seen_targets = set()
    result = []
    for unit in units:
        if unit.target in seen_targets:
            continue
        seen_targets.add(unit.target)
        result.append(unit)
    return result
def translation_percent(translated, total):
    """Return the translation percentage rounded to one decimal place."""
    if total is None or total == 0:
        return 0.0
    percent = round(1000 * translated / total) / 10.0
    if percent == 0.0 and translated != 0:
        # Some work was done — never display a misleading rounded 0.0%.
        return 0.1
    if percent == 100.0 and translated < total:
        # Not fully complete — never display a misleading rounded 100.0%.
        return 99.9
    return percent
def add_configuration_error(name, message):
    """Record a configuration error in the shared cache."""
    entry = {'name': name, 'message': message}
    errors = cache.get('configuration-errors', [])
    errors.append(entry)
    cache.set('configuration-errors', errors)
def get_configuration_errors():
    """Return all configuration errors recorded so far (empty list if none)."""
    return cache.get('configuration-errors', [])
def get_clean_env(extra=None):
    """Return cleaned up environment for subprocess execution."""
    environ = {
        'LANG': 'en_US.UTF-8',
        'HOME': data_dir('home'),
    }
    if extra is not None:
        environ.update(extra)
    # Carry over only the variables external tools actually need.
    for var in ('PATH', 'LD_LIBRARY_PATH'):
        if var in os.environ:
            environ[var] = os.environ[var]
    # Python 2 on Windows doesn't handle Unicode objects in environment
    # even if they can be converted to ASCII string, let's fix it here
    if six.PY2 and sys.platform == 'win32':
        return {str(key): str(val) for key, val in environ.items()}
    return environ
def cleanup_repo_url(url):
    """Remove credentials from repository URL."""
    parsed = urlparse(url)
    if not parsed.username:
        return url
    if parsed.password:
        credentials = '{0}:{1}@'.format(
            parsed.username,
            parsed.password
        )
    else:
        credentials = '{0}@'.format(
            parsed.username,
        )
    return url.replace(credentials, '')
def redirect_param(location, params, *args, **kwargs):
    """Redirect to a URL with appended query parameters."""
    target = resolve_url(location, *args, **kwargs)
    return HttpResponseRedirect(target + params)
def cleanup_path(path):
    """Remove a leading ./ and/or / from *path*."""
    # Order matters: './/' style paths shed './' first, then the slash.
    for prefix, length in (('./', 2), ('/', 1)):
        if path.startswith(prefix):
            path = path[length:]
    return path
def get_project_description(project):
    """Return verbose description for project translation"""
    language_count = project.get_language_count()
    return _(
        '{0} is translated into {1} languages using Weblate. '
        'Join the translation or start translating your own project.',
    ).format(project, language_count)
def render(request, template, context=None, status=None):
    """Wrapper around Django render that auto-fills the project description."""
    if context is None:
        context = {}
    project = context.get('project')
    if project is not None:
        context['description'] = get_project_description(project)
    return django_render(request, template, context, status=status)
def path_separator(path):
    """Always use / as path separator for consistency."""
    if os.path.sep == '/':
        return path
    return path.replace(os.path.sep, '/')
def sort_unicode(choices, key):
    """Sort *choices* by ``key`` with Unicode-aware collation when possible.

    Uses the pyuca collator when the library is installed; otherwise falls
    back to an accent-stripped, lowercased comparison.
    """
    if HAS_PYUCA:
        collator = pyuca.Collator()

        def sort_key(item):
            return collator.sort_key(force_text(key(item)))
    else:
        def sort_key(item):
            return remove_accents(key(item)).lower()
    return sorted(choices, key=sort_key)
def remove_accents(input_str):
    """Strip accents from a string.

    Decomposes the text (NFKD) and drops every non-ASCII code point.
    Note: the result is an ASCII byte string, not text.
    """
    decomposed = unicodedata.normalize('NFKD', force_text(input_str))
    return decomposed.encode('ASCII', 'ignore')
def sort_choices(choices):
    """Sort (value, label) choice pairs alphabetically by their label.

    Delegates to sort_unicode(), which picks pyuca collation when available.
    """
    return sort_unicode(choices, lambda choice: choice[1])
def sort_objects(objects):
    """Sort objects alphabetically by their text representation (force_text)."""
    return sort_unicode(objects, force_text)
def check_domain(domain):
    """Check whether the site domain is correctly set.

    A valid domain is not one of the placeholder DEFAULT_DOMAINS, carries
    no scheme prefix, and has no trailing slash.
    """
    if domain in DEFAULT_DOMAINS:
        return False
    if domain.startswith(('http:', 'https:')):
        return False
    return not domain.endswith('/')
def redirect_next(next_url, fallback):
    """Redirect to next URL from request after validating it.

    Rejects a missing target, anything is_safe_url() flags, and any target
    that is not an absolute path — guarding against open redirects — and
    falls back to the ``fallback`` view in those cases.
    """
    # NOTE(review): is_safe_url is called without an allowed-hosts argument
    # here; presumably the startswith('/') check is what pins the target to
    # this site — confirm against the Django version in use.
    if (next_url is None or
            not is_safe_url(next_url) or
            not next_url.startswith('/')):
        return redirect(fallback)
    return HttpResponseRedirect(next_url)
|
lem9/weblate
|
weblate/trans/util.py
|
Python
|
gpl-3.0
| 7,459
|
#!/usr/bin/python
# Exercise 4 - variables and names

# Fleet and demand figures.
cars = 100
space_in_a_car = 4
drivers = 30
passengers = 90

# Derived quantities: every driver takes one car; the rest stay parked.
cars_not_driven = cars - drivers
cars_driven = drivers
carpool_capacity = cars_driven * space_in_a_car
average_passengers_per_car = passengers / cars_driven

print(f"There are {cars} cars available.")
print(f"There are only {drivers} drivers available.")
print(f"There will be {cars_not_driven} empty cars today.")
print(f"We can transport {carpool_capacity} people today.")
print(f"We have {passengers} to carpool today.")
print(f"We need to put about {average_passengers_per_car} in each car.")
|
Baumelbi/IntroPython2016
|
students/sheree/session_01/homework/LPTHW-EXC-04.py
|
Python
|
unlicense
| 616
|
# -*- coding: utf-8 -*-
#
# Ulakbus Documents documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 28 14:30:19 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'wiki'
# General information about the project.
project = u'Ulakbus'
copyright = u'2015 Zetaops'
author = u'Mustafa Tola, Bahadir Kinali, Cem Guresci, Ali Riza Keles'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'tr'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bizstyle'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_logo = "ulakbus.png"
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'UlakbusDocumentsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'UlakbusDocuments.tex', u'Ulakbus Documents Documentation',
u'Mustafa Tola, Osman Sonmezturk, Ali Riza Keles', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ulakbusdocuments', u'Ulakbus Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'UlakbusDocuments', u'Ulakbus Documentation',
author, 'UlakbusDocuments', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
|
hiorws/ulakbus.org
|
source/conf.py
|
Python
|
gpl-3.0
| 11,687
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.regions import forms
from openstack_dashboard.test.integration_tests.regions import tables
class SecurityGroupsTable(tables.TableRegion):
    """Page-object region for the "security_groups" table."""

    name = "security_groups"
    # Field names of the create-security-group modal form.
    CREATE_SECURITYGROUP_FORM_FIELDS = ("name", "description")

    @tables.bind_table_action('create')
    def create_group(self, create_button):
        """Open the create dialog and return its form region."""
        create_button.click()
        return forms.FormRegion(
            self.driver, self.conf,
            field_mappings=self.CREATE_SECURITYGROUP_FORM_FIELDS)

    @tables.bind_table_action('delete')
    def delete_group(self, delete_button):
        """Open the delete confirmation dialog and return its form region."""
        delete_button.click()
        return forms.BaseFormRegion(self.driver, self.conf, None)
class SecuritygroupsPage(basepage.BaseNavigationPage):
    """Page object for the "Access & Security" security-groups page."""

    # Column used to locate a security group row by name.
    SECURITYGROUPS_TABLE_NAME_COLUMN = 'name'

    def __init__(self, driver, conf):
        super(SecuritygroupsPage, self).__init__(driver, conf)
        self._page_title = "Access & Security"

    def _get_row_with_securitygroup_name(self, name):
        """Return the table row whose name column matches, or None."""
        return self.securitygroups_table.get_row(
            self.SECURITYGROUPS_TABLE_NAME_COLUMN, name)

    @property
    def securitygroups_table(self):
        # Re-created on each access so the region reflects the live DOM.
        return SecurityGroupsTable(self.driver, self.conf)

    def create_securitygroup(self, name, description=None):
        """Create a security group via the UI dialog."""
        create_securitygroups_form = self.securitygroups_table.create_group()
        create_securitygroups_form.name.text = name
        if description is not None:
            create_securitygroups_form.description.text = description
        create_securitygroups_form.submit()

    def delete_securitygroup(self, name):
        """Delete the named security group via the UI dialog."""
        row = self._get_row_with_securitygroup_name(name)
        row.mark()
        modal_confirmation_form = self.securitygroups_table.delete_group()
        modal_confirmation_form.submit()

    def is_securitygroup_present(self, name):
        """Return True if a row with the given name exists in the table."""
        return bool(self._get_row_with_securitygroup_name(name))
|
ankur-gupta91/horizon-net-ip
|
openstack_dashboard/test/integration_tests/pages/project/compute/access_and_security/securitygroupspage.py
|
Python
|
apache-2.0
| 2,566
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test RPC calls related to blockchain state. Tests correspond to code in
# rpcblockchain.cpp.
#
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.util import (
initialize_chain,
assert_equal,
assert_raises,
assert_is_hex_string,
assert_is_hash_string,
start_nodes,
connect_nodes_bi,
)
class BlockchainTest(BitcoinTestFramework):
    """
    Test blockchain-related RPC calls:

        - gettxoutsetinfo
        - getblockheader
    """

    def setup_chain(self):
        """Initialize the cached regtest chain in the test directory."""
        print("Initializing test directory " + self.options.tmpdir)
        initialize_chain(self.options.tmpdir)

    def setup_network(self, split=False):
        """Start two connected nodes; the network is never split."""
        self.nodes = start_nodes(2, self.options.tmpdir)
        connect_nodes_bi(self.nodes, 0, 1)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        self._test_gettxoutsetinfo()
        self._test_getblockheader()

    def _test_gettxoutsetinfo(self):
        """Check UTXO-set statistics on the 200-block cached chain."""
        node = self.nodes[0]
        res = node.gettxoutsetinfo()
        assert_equal(res[u'total_amount'], Decimal('8725.00000000'))
        assert_equal(res[u'transactions'], 200)
        assert_equal(res[u'height'], 200)
        assert_equal(res[u'txouts'], 200)
        # BUG FIX: a stray trailing comma previously turned this statement
        # into a tuple expression; the assertion itself still ran, but the
        # comma was a latent editing mistake.
        assert_equal(res[u'bytes_serialized'], 13924)
        assert_equal(len(res[u'bestblock']), 64)
        assert_equal(len(res[u'hash_serialized']), 64)

    def _test_getblockheader(self):
        """Validate getblockheader fields for the current tip."""
        node = self.nodes[0]
        # An invalid hash must raise instead of returning data.
        assert_raises(
            JSONRPCException, lambda: node.getblockheader('nonsense'))
        besthash = node.getbestblockhash()
        secondbesthash = node.getblockhash(199)
        header = node.getblockheader(besthash)
        assert_equal(header['hash'], besthash)
        assert_equal(header['height'], 200)
        assert_equal(header['confirmations'], 1)
        assert_equal(header['previousblockhash'], secondbesthash)
        assert_is_hex_string(header['chainwork'])
        assert_is_hash_string(header['hash'])
        assert_is_hash_string(header['previousblockhash'])
        assert_is_hash_string(header['merkleroot'])
        # 'bits' is hex but not a fixed-length hash.
        assert_is_hash_string(header['bits'], length=None)
        assert isinstance(header['time'], int)
        assert isinstance(header['mediantime'], int)
        assert isinstance(header['nonce'], int)
        assert isinstance(header['version'], int)
        assert isinstance(int(header['versionHex'], 16), int)
        assert isinstance(header['difficulty'], Decimal)
# Run the test directly when invoked as a script.
if __name__ == '__main__':
    BlockchainTest().main()
|
janko33bd/bitcoin
|
qa/rpc-tests/blockchain.py
|
Python
|
mit
| 2,833
|
#!/usr/bin/python
#
# currentsensor.py API for reading current from the ACS712 sensor
#
# Aug2018 tests show readings are generally within 60mA of the actual current
# e.g. idle (w pigpio daemon running) .31-33 Amps shows 337-379mA
# loaded w three systemtests/loadcpu_fib.py .68-.70 Amps shows 664-741mA
#
import sys
sys.path.insert(0, '/home/pi/RWPi/rwpilib')
import PDALib
import myPDALib
import myPyLib
import math
import time
import traceback
# Current sensor is wired to channel 7 (0..7) of the ADC.
ACS712PIN = 7

# Two-point calibration: raw 12-bit ADC readings paired with known currents.
# Point 1 - idle reference.
refReading = 2015.0
refCurrent = 218.0     # mA
# Point 2 - loaded reference (two fibonacci loops plus this sensor running).
refReading2 = 1988.0
refCurrent2 = 397.0    # mA

# Slope of the calibration line in mA per raw ADC count.
mAperDelta = (refCurrent2 - refCurrent) / (refReading - refReading2)
print("mAperDelta: {:.2f}".format(mAperDelta))


def current(reading):
    """Convert a raw ADC reading to current in mA via the calibration line."""
    return (refReading - reading) * mAperDelta + refCurrent
# current_sense(10) readings:
#
#
# ###### CURRENT_SENSE(readings=75)
#
# reads current sensor directly
def current_sense(readings=75, debug=0):
    """Read the ACS712 sensor and return the current in mA.

    With readings > 1 the raw channel is sampled that many times (5 ms
    apart) and the samples are averaged; otherwise a single raw reading is
    used.  Set debug to a non-zero value to print the raw value and result.
    """
    if readings > 1:
        samples = []
        for _ in range(readings):
            samples.append(myPDALib.analogRead12bit(ACS712PIN))
            time.sleep(0.005)
        # Sorting does not change the mean; kept from the original code.
        samples.sort()
        pin_value = sum(samples) / float(len(samples))
    else:
        pin_value = myPDALib.analogRead12bit(ACS712PIN)
    current_now = (refReading - pin_value) * mAperDelta + refCurrent
    if debug != 0:
        print("reading: %d current: %.0f mA" % (pin_value, current_now))
    return current_now
def main():
    """Print a sensor reading once per second until interrupted."""
    myPyLib.set_cntl_c_handler()  # Set CNTL-C handler
    while True:
        try:
            # 200 samples per reading, debug output enabled.
            current_sense(200, 1)
            time.sleep(1)
        except SystemExit:
            # Raised by the CNTL-C handler: release hardware and exit cleanly.
            myPDALib.PiExit()
            print("currentsensor: Bye Bye")
            break
        except:
            # NOTE(review): bare except — any error ends the loop after a
            # traceback rather than crashing the process.
            print("Exception Raised")
            traceback.print_exc()
            break


if __name__ == "__main__":
    main()
|
slowrunner/RWPi
|
litst/currentsensor.py
|
Python
|
gpl-3.0
| 2,432
|
from enigma import eDVBFrontendParametersSatellite, eDVBFrontendParametersTerrestrial, eDVBFrontendParametersCable, eDVBFrontendParameters, eDVBResourceManager, eTimer
class Tuner:
    """Thin wrapper around an eDVB frontend for tuning DVB-S/T/C transponders.

    ``frontend`` may be None when allocation failed; every method guards on
    it.  ``ignore_rotor`` is forwarded to setDVBS() for satellite tuning.
    """

    def __init__(self, frontend, ignore_rotor=False):
        self.frontend = frontend
        self.ignore_rotor = ignore_rotor

    # transponder = (frequency, symbolrate, polarisation, fec, inversion, orbpos, system, modulation, rolloff, pilot, tsid, onid)
    #                0          1           2             3    4          5       6       7           8        9      10    11
    def tune(self, transponder):
        """Tune to a DVB-S transponder given as the tuple documented above."""
        if self.frontend:
            print "[TuneTest] tuning to transponder with data", transponder
            parm = eDVBFrontendParametersSatellite()
            # Values are scaled by 1000 here — presumably kHz/kSym in the
            # tuple, Hz/Sym expected by the driver; confirm against callers.
            parm.frequency = transponder[0] * 1000
            parm.symbol_rate = transponder[1] * 1000
            parm.polarisation = transponder[2]
            parm.fec = transponder[3]
            parm.inversion = transponder[4]
            parm.orbital_position = transponder[5]
            parm.system = transponder[6]
            parm.modulation = transponder[7]
            parm.rolloff = transponder[8]
            parm.pilot = transponder[9]
            self.tuneSatObj(parm)

    def tuneSatObj(self, transponderObj):
        """Tune using an already-built satellite parameter object."""
        if self.frontend:
            feparm = eDVBFrontendParameters()
            feparm.setDVBS(transponderObj, self.ignore_rotor)
            # Remember the request so retune() can repeat it.
            self.lastparm = feparm
            self.frontend.tune(feparm)

    def tuneTerr(self, frequency,
                 inversion=2, bandwidth=7000000, fechigh=6, feclow=6,
                 modulation=2, transmission=2, guard=4,
                 hierarchy=4, system=0, plpid=0):
        """Tune to a DVB-T/T2 transponder from individual parameters."""
        if self.frontend:
            print "[TuneTest] tuning to transponder with data", [frequency, inversion, bandwidth, fechigh, feclow, modulation, transmission, guard, hierarchy, system, plpid]
            parm = eDVBFrontendParametersTerrestrial()
            parm.frequency = frequency
            parm.inversion = inversion
            parm.bandwidth = bandwidth
            parm.code_rate_HP = fechigh
            parm.code_rate_LP = feclow
            parm.modulation = modulation
            parm.transmission_mode = transmission
            parm.guard_interval = guard
            parm.hierarchy = hierarchy
            parm.system = system
            parm.plpid = plpid
            self.tuneTerrObj(parm)

    def tuneTerrObj(self, transponderObj):
        """Tune using an already-built terrestrial parameter object."""
        if self.frontend:
            feparm = eDVBFrontendParameters()
            feparm.setDVBT(transponderObj)
            self.lastparm = feparm
            self.frontend.tune(feparm)

    def tuneCab(self, transponder):
        """Tune to a DVB-C transponder given as a parameter tuple."""
        if self.frontend:
            print "[TuneTest] tuning to transponder with data", transponder
            parm = eDVBFrontendParametersCable()
            parm.frequency = transponder[0]
            parm.symbol_rate = transponder[1]
            parm.modulation = transponder[2]
            parm.fec_inner = transponder[3]
            parm.inversion = transponder[4]
            #parm.system = transponder[5]
            self.tuneCabObj(parm)

    def tuneCabObj(self, transponderObj):
        """Tune using an already-built cable parameter object."""
        if self.frontend:
            feparm = eDVBFrontendParameters()
            feparm.setDVBC(transponderObj)
            self.lastparm = feparm
            self.frontend.tune(feparm)

    def retune(self):
        """Repeat the most recent tune request."""
        if self.frontend:
            self.frontend.tune(self.lastparm)

    def getTransponderData(self):
        """Return the frontend's current transponder data as a dict."""
        ret = { }
        if self.frontend:
            self.frontend.getTransponderData(ret, True)
        return ret
# tunes a list of transponders and checks, if they lock and optionally checks the onid/tsid combination
# 1) add transponders with addTransponder()
# 2) call run(<checkPIDs = True>)
# 3) finishedChecking() is called, when the run is finished
class TuneTest:
def __init__(self, feid, stopOnSuccess = -1, stopOnError = -1):
self.stopOnSuccess = stopOnSuccess
self.stopOnError = stopOnError
self.feid = feid
self.transponderlist = []
self.currTuned = None
print "TuneTest for feid %d" % self.feid
if not self.openFrontend():
self.oldref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.session.nav.stopService() # try to disable foreground service
if not self.openFrontend():
if self.session.pipshown: # try to disable pip
if hasattr(self.session, 'infobar'):
if self.session.infobar.servicelist.dopipzap:
self.session.infobar.servicelist.togglePipzap()
if hasattr(self.session, 'pip'):
del self.session.pip
self.session.pipshown = False
if not self.openFrontend():
self.frontend = None # in normal case this should not happen
self.tuner = Tuner(self.frontend)
self.timer = eTimer()
self.timer.callback.append(self.updateStatus)
def gotTsidOnid(self, tsid, onid):
print "******** got tsid, onid:", tsid, onid
if tsid is not -1 and onid is not -1:
self.pidStatus = self.INTERNAL_PID_STATUS_SUCCESSFUL
self.tsid = tsid
self.onid = onid
else:
self.pidStatus = self.INTERNAL_PID_STATUS_FAILED
self.tsid = -1
self.onid = -1
self.timer.start(100, True)
def updateStatus(self):
dict = {}
self.frontend.getFrontendStatus(dict)
stop = False
print "status:", dict
if dict["tuner_state"] == "TUNING":
print "TUNING"
self.timer.start(100, True)
self.progressCallback((self.getProgressLength(), self.tuningtransponder, self.STATUS_TUNING, self.currTuned))
elif self.checkPIDs and self.pidStatus == self.INTERNAL_PID_STATUS_NOOP:
print "2nd choice"
if dict["tuner_state"] == "LOCKED":
print "acquiring TSID/ONID"
self.raw_channel.receivedTsidOnid.get().append(self.gotTsidOnid)
self.raw_channel.requestTsidOnid()
self.pidStatus = self.INTERNAL_PID_STATUS_WAITING
else:
self.pidStatus = self.INTERNAL_PID_STATUS_FAILED
elif self.checkPIDs and self.pidStatus == self.INTERNAL_PID_STATUS_WAITING:
print "waiting for pids"
else:
if dict["tuner_state"] == "LOSTLOCK" or dict["tuner_state"] == "FAILED":
self.tuningtransponder = self.nextTransponder()
self.failedTune.append([self.currTuned, self.oldTuned, "tune_failed", dict]) # last parameter is the frontend status)
if self.stopOnError != -1 and self.stopOnError <= len(self.failedTune):
stop = True
elif dict["tuner_state"] == "LOCKED":
pidsFailed = False
if self.checkPIDs:
if self.currTuned is not None:
if self.tsid != self.currTuned[10] or self.onid != self.currTuned[11]:
self.failedTune.append([self.currTuned, self.oldTuned, "pids_failed", {"real": (self.tsid, self.onid), "expected": (self.currTuned[10], self.currTuned[11])}, dict]) # last parameter is the frontend status
pidsFailed = True
else:
self.successfullyTune.append([self.currTuned, self.oldTuned, dict]) # 3rd parameter is the frontend status
if self.stopOnSuccess != -1 and self.stopOnSuccess <= len(self.successfullyTune):
stop = True
elif not self.checkPIDs or (self.checkPids and not pidsFailed):
self.successfullyTune.append([self.currTuned, self.oldTuned, dict]) # 3rd parameter is the frontend status
if self.stopOnSuccess != -1 and self.stopOnSuccess <= len(self.successfullyTune):
stop = True
self.tuningtransponder = self.nextTransponder()
else:
print "************* tuner_state:", dict["tuner_state"]
self.progressCallback((self.getProgressLength(), self.tuningtransponder, self.STATUS_NOOP, self.currTuned))
if not stop:
self.tune()
if self.tuningtransponder < len(self.transponderlist) and not stop:
if self.pidStatus != self.INTERNAL_PID_STATUS_WAITING:
self.timer.start(100, True)
print "restart timer"
else:
print "not restarting timers (waiting for pids)"
else:
self.progressCallback((self.getProgressLength(), len(self.transponderlist), self.STATUS_DONE, self.currTuned))
print "finishedChecking"
self.finishedChecking()
def firstTransponder(self):
print "firstTransponder:"
index = 0
if self.checkPIDs:
print "checkPIDs-loop"
# check for tsid != -1 and onid != -1
print "index:", index
print "len(self.transponderlist):", len(self.transponderlist)
while index < len(self.transponderlist) and (self.transponderlist[index][10] == -1 or self.transponderlist[index][11] == -1):
index += 1
print "FirstTransponder final index:", index
return index
def nextTransponder(self):
print "getting next transponder", self.tuningtransponder
index = self.tuningtransponder + 1
if self.checkPIDs:
print "checkPIDs-loop"
# check for tsid != -1 and onid != -1
print "index:", index
print "len(self.transponderlist):", len(self.transponderlist)
while index < len(self.transponderlist) and (self.transponderlist[index][10] == -1 or self.transponderlist[index][11] == -1):
index += 1
print "next transponder index:", index
return index
def finishedChecking(self):
print "finished testing"
print "successfull:", self.successfullyTune
print "failed:", self.failedTune
def openFrontend(self):
res_mgr = eDVBResourceManager.getInstance()
if res_mgr:
self.raw_channel = res_mgr.allocateRawChannel(self.feid)
if self.raw_channel:
self.frontend = self.raw_channel.getFrontend()
if self.frontend:
return True
else:
print "getFrontend failed"
else:
print "getRawChannel failed"
else:
print "getResourceManager instance failed"
return False
def tune(self):
print "tuning to", self.tuningtransponder
if self.tuningtransponder < len(self.transponderlist):
self.pidStatus = self.INTERNAL_PID_STATUS_NOOP
self.oldTuned = self.currTuned
self.currTuned = self.transponderlist[self.tuningtransponder]
self.tuner.tune(self.transponderlist[self.tuningtransponder])
INTERNAL_PID_STATUS_NOOP = 0
INTERNAL_PID_STATUS_WAITING = 1
INTERNAL_PID_STATUS_SUCCESSFUL = 2
INTERNAL_PID_STATUS_FAILED = 3
def run(self, checkPIDs = False):
self.checkPIDs = checkPIDs
self.pidStatus = self.INTERNAL_PID_STATUS_NOOP
self.failedTune = []
self.successfullyTune = []
self.tuningtransponder = self.firstTransponder()
self.tune()
self.progressCallback((self.getProgressLength(), self.tuningtransponder, self.STATUS_START, self.currTuned))
self.timer.start(100, True)
# transponder = (frequency, symbolrate, polarisation, fec, inversion, orbpos, <system>, <modulation>, <rolloff>, <pilot>, <tsid>, <onid>)
# 0 1 2 3 4 5 6 7 8 9 10 11
def addTransponder(self, transponder):
self.transponderlist.append(transponder)
def clearTransponder(self):
self.transponderlist = []
def getProgressLength(self):
count = 0
if self.stopOnError == -1:
count = len(self.transponderlist)
else:
if count < self.stopOnError:
count = self.stopOnError
if self.stopOnSuccess == -1:
count = len(self.transponderlist)
else:
if count < self.stopOnSuccess:
count = self.stopOnSuccess
return count
STATUS_START = 0
STATUS_TUNING = 1
STATUS_DONE = 2
STATUS_NOOP = 3
# can be overwritten
# progress = (range, value, status, transponder)
def progressCallback(self, progress):
pass
|
popazerty/EG-2
|
lib/python/Components/TuneTest.py
|
Python
|
gpl-2.0
| 10,730
|
from line import *
from numba import jit
'''
INPUT
lines - a list of LineSegment objects to merge into a graph
params - a list of tuning parameters, which includes
dist_merge - the minimum distance below which line segments are merged
angle_merge - angle below which line segments are merged
'''
#@jit
def lines_to_graph(lines, params):
    """Clean up raw LineSegments into a set of bond lines.

    Runs four passes: merge near-collinear segments, split segments at
    intersections, collapse parallel segments into higher-order bonds,
    and split segments at nearby endpoints so shared nodes are explicit.

    lines  -- list of LineSegment objects (the list is rebuilt in place)
    params -- sequence of 8 numeric tuning parameters, unpacked below

    Returns the cleaned list of LineSegments.
    NOTE(review): relies on combineLines/combinePoints/plotLines from the
    `line` module, whose exact semantics are not visible here.
    """
    # define parameters
    min_dist_merge = params[0]
    min_angle_merge = params[1]
    min_width_merge = params[2]
    split_tol = params[3]
    min_dist_bond = params[4]
    max_dist_bond = params[5]
    max_angle_bond = params[6]
    node_radius = params[7]
    i=0
    # merge lines
    # Pass 1: merge nearby, nearly-collinear segment pairs.  Any merge
    # restarts the whole scan (i is set to -1 and incremented back to 0).
    while i < len(lines):
        j = i + 1
        while j < len(lines):
            didmerge = False
            line1 = lines[i]
            line2 = lines[j]
            # the merged segment keeps the higher bond order of the pair
            order = max(line1.order,line2.order)
            dist, angle, width = line1.getDifference(line2)
            if dist < min_dist_merge and angle < min_angle_merge and width < min_width_merge:
                merged = combineLines([line1, line2])
                merged.order = order
                lines[i] = merged
                del lines[j]
                didmerge = True
            if didmerge:
                i = -1
                break
            else:
                j += 1
        i += 1
    # deal with intersections
    # Pass 2: break crossing segments at their intersection point, then
    # discard fragments shorter than split_tol relative to their parent.
    i = 0
    while i < len(lines) - 1:
        j = i + 1
        while j < len(lines):
            line1 = lines[i]
            line2 = lines[j]
            broken = line1.breakAtIntersection(line2)
            brokenlengths = []
            # NOTE(review): the fixed indices [0..3] below assume
            # breakAtIntersection returns exactly 4 fragments whenever it
            # does not return 2 -- confirm against the `line` module.
            if len(broken) != 2:
                for a in broken:
                    brokenlengths.append(a.length)
                # normalise fragment lengths by the parent line's length
                brokenlengths[0] /= line1.length
                brokenlengths[1] /= line1.length
                brokenlengths[2] /= line2.length
                brokenlengths[3] /= line2.length
                # iterate indices high-to-low so deletions don't shift them
                for k in [3,2,1,0]:
                    if brokenlengths[k] < split_tol:
                        del broken[k]
                lines[i] = broken[0]
                lines[j] = broken[1]
                # insert any extra surviving fragments just after j
                for k in reversed(range(2,len(broken))):
                    lines.insert(j+1, broken[k])
                j += len(broken) - 1
            else:
                j += 1
        i += 1
    plotLines(lines)
    # deal with high order bonds
    # Pass 3: close parallel segments with sufficient projected overlap are
    # treated as one double/triple bond: drop the shorter segment and bump
    # the survivor's order.  Any change restarts the scan.
    i = 0
    while i < len(lines) - 1:
        j = i + 1
        while j < len(lines):
            line1 = lines[i]
            line2 = lines[j]
            dist, angle, width = line1.getDifference(line2)
            proj = line1.getProjOverlap(line2)
            if min_dist_bond < dist and dist < max_dist_bond and angle < max_angle_bond and proj > 0.2:
                # remove the shorter of the lines and increment order
                if line1.length < line2.length:
                    lines[j].order += 1
                    del lines[i]
                else:
                    lines[i].order += 1
                    del lines[j]
                i = -1
                break
            else:
                j += 1
        i += 1
    # join bond endpoints together (note: despite the original comment, no
    # adjacency matrix is built here -- the cleaned line list is returned)
    lines = combinePoints(lines, node_radius)
    # Pass 4: split lines at endpoints of other lines that land within
    # node_radius, so shared nodes become explicit.  Hard-capped at 1000
    # iterations as a guard against non-termination.
    iters = 0
    i = 0
    while i < len(lines) - 1:
        j = i + 1
        while j < len(lines):
            iters += 1
            if iters > 1000: #give up
                return lines
            line1 = lines[i]
            line2 = lines[j]
            a, b = line1.pts
            c, d = line2.pts
            L1c = line1.getShortestDistToPoint(c)
            L1d = line1.getShortestDistToPoint(d)
            L2a = line2.getShortestDistToPoint(a)
            L2b = line2.getShortestDistToPoint(b)
            # the 1e-6 floor ignores points already lying on the line
            if L1c > 1e-6 and L1c < node_radius:
                newlines = line1.pointSplit(c)
                lines[i] = newlines[0]
                lines.append(newlines[1])
                i = -1
                break
            elif L1d > 1e-6 and L1d < node_radius:
                newlines = line1.pointSplit(d)
                lines[i] = newlines[0]
                lines.append(newlines[1])
                i = -1
                break
            elif L2a > 1e-6 and L2a < node_radius:
                newlines = line2.pointSplit(a)
                lines[j] = newlines[0]
                lines.append(newlines[1])
            elif L2b > 1e-6 and L2b < node_radius:
                newlines = line2.pointSplit(b)
                lines[j] = newlines[0]
                lines.append(newlines[1])
            else:
                j += 1
        i += 1
    lines = combinePoints(lines, node_radius)
    # Deduplicate lines whose endpoints coincide (in either orientation) by
    # aliasing duplicates to one object, then uniquifying via set() of
    # 1-tuples (LineSegment itself is presumably hashable by identity --
    # TODO confirm).
    for i,line1 in enumerate(lines):
        for j,line2 in enumerate(lines):
            if line1 == line2:
                continue
            elif line1.pts[0] == line2.pts[0] and line1.pts[1] == line2.pts[1]:
                lines[i] = line2
            elif line1.pts[1] == line2.pts[0] and line1.pts[0] == line2.pts[1]:
                lines[i] = line2
    lineList = []
    for line in lines:
        lineList.append((line,))
    lineList = list(set(lineList))
    lines = []
    for line in lineList:
        lines.append(line[0])
    plotLines(lines)
    return lines
|
Molecular-Image-Recognition/Molecular-Image-Recognition
|
code/lines_to_graph.py
|
Python
|
mit
| 5,383
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.tasks_v2.types import cloudtasks
from google.cloud.tasks_v2.types import queue
from google.cloud.tasks_v2.types import queue as gct_queue
from google.cloud.tasks_v2.types import task
from google.cloud.tasks_v2.types import task as gct_task
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import CloudTasksTransport, DEFAULT_CLIENT_INFO
class CloudTasksGrpcTransport(CloudTasksTransport):
    """gRPC backend transport for CloudTasks.
    Cloud Tasks allows developers to manage the execution of
    background work in their applications.
    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.
    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    # Lazily-populated cache of RPC stub callables, keyed by RPC name;
    # filled in by the per-RPC properties below on first access.
    _stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "cloudtasks.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "cloudtasks.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        # Always set by __init__ (either the caller-supplied channel or one
        # built via create_channel), so no None check is needed here.
        return self._grpc_channel
@property
def list_queues(
self,
) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]:
r"""Return a callable for the list queues method over gRPC.
Lists queues.
Queues are returned in lexicographical order.
Returns:
Callable[[~.ListQueuesRequest],
~.ListQueuesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_queues" not in self._stubs:
self._stubs["list_queues"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/ListQueues",
request_serializer=cloudtasks.ListQueuesRequest.serialize,
response_deserializer=cloudtasks.ListQueuesResponse.deserialize,
)
return self._stubs["list_queues"]
@property
def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]:
r"""Return a callable for the get queue method over gRPC.
Gets a queue.
Returns:
Callable[[~.GetQueueRequest],
~.Queue]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_queue" not in self._stubs:
self._stubs["get_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/GetQueue",
request_serializer=cloudtasks.GetQueueRequest.serialize,
response_deserializer=queue.Queue.deserialize,
)
return self._stubs["get_queue"]
@property
def create_queue(
self,
) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]:
r"""Return a callable for the create queue method over gRPC.
Creates a queue.
Queues created with this method allow tasks to live for a
maximum of 31 days. After a task is 31 days old, the task will
be deleted regardless of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if
you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
to manage your queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
before using this method.
Returns:
Callable[[~.CreateQueueRequest],
~.Queue]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_queue" not in self._stubs:
self._stubs["create_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/CreateQueue",
request_serializer=cloudtasks.CreateQueueRequest.serialize,
response_deserializer=gct_queue.Queue.deserialize,
)
return self._stubs["create_queue"]
@property
def update_queue(
self,
) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]:
r"""Return a callable for the update queue method over gRPC.
Updates a queue.
This method creates the queue if it does not exist and updates
the queue if it does exist.
Queues created with this method allow tasks to live for a
maximum of 31 days. After a task is 31 days old, the task will
be deleted regardless of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if
you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
to manage your queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
before using this method.
Returns:
Callable[[~.UpdateQueueRequest],
~.Queue]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_queue" not in self._stubs:
self._stubs["update_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/UpdateQueue",
request_serializer=cloudtasks.UpdateQueueRequest.serialize,
response_deserializer=gct_queue.Queue.deserialize,
)
return self._stubs["update_queue"]
@property
def delete_queue(
self,
) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]:
r"""Return a callable for the delete queue method over gRPC.
Deletes a queue.
This command will delete the queue even if it has tasks in it.
Note: If you delete a queue, a queue with the same name can't be
created for 7 days.
WARNING: Using this method may have unintended side effects if
you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
to manage your queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
before using this method.
Returns:
Callable[[~.DeleteQueueRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_queue" not in self._stubs:
self._stubs["delete_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/DeleteQueue",
request_serializer=cloudtasks.DeleteQueueRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_queue"]
@property
def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]:
r"""Return a callable for the purge queue method over gRPC.
Purges a queue by deleting all of its tasks.
All tasks created before this method is called are
permanently deleted.
Purge operations can take up to one minute to take
effect. Tasks might be dispatched before the purge takes
effect. A purge is irreversible.
Returns:
Callable[[~.PurgeQueueRequest],
~.Queue]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "purge_queue" not in self._stubs:
self._stubs["purge_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/PurgeQueue",
request_serializer=cloudtasks.PurgeQueueRequest.serialize,
response_deserializer=queue.Queue.deserialize,
)
return self._stubs["purge_queue"]
@property
def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]:
r"""Return a callable for the pause queue method over gRPC.
Pauses the queue.
If a queue is paused then the system will stop dispatching tasks
until the queue is resumed via
[ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue].
Tasks can still be added when the queue is paused. A queue is
paused if its [state][google.cloud.tasks.v2.Queue.state] is
[PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
Returns:
Callable[[~.PauseQueueRequest],
~.Queue]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "pause_queue" not in self._stubs:
self._stubs["pause_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/PauseQueue",
request_serializer=cloudtasks.PauseQueueRequest.serialize,
response_deserializer=queue.Queue.deserialize,
)
return self._stubs["pause_queue"]
@property
def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]:
r"""Return a callable for the resume queue method over gRPC.
Resume a queue.
This method resumes a queue after it has been
[PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or
[DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The
state of a queue is stored in the queue's
[state][google.cloud.tasks.v2.Queue.state]; after calling this
method it will be set to
[RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING].
WARNING: Resuming many high-QPS queues at the same time can lead
to target overloading. If you are resuming high-QPS queues,
follow the 500/50/5 pattern described in `Managing Cloud Tasks
Scaling
Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__.
Returns:
Callable[[~.ResumeQueueRequest],
~.Queue]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "resume_queue" not in self._stubs:
self._stubs["resume_queue"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/ResumeQueue",
request_serializer=cloudtasks.ResumeQueueRequest.serialize,
response_deserializer=queue.Queue.deserialize,
)
return self._stubs["resume_queue"]
@property
def get_iam_policy(
self,
) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the get iam policy method over gRPC.
Gets the access control policy for a
[Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if
the resource exists and does not have a policy set.
Authorization requires the following `Google
IAM <https://cloud.google.com/iam>`__ permission on the
specified resource parent:
- ``cloudtasks.queues.getIamPolicy``
Returns:
Callable[[~.GetIamPolicyRequest],
~.Policy]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_iam_policy" not in self._stubs:
self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/GetIamPolicy",
request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["get_iam_policy"]
@property
def set_iam_policy(
self,
) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
r"""Return a callable for the set iam policy method over gRPC.
Sets the access control policy for a
[Queue][google.cloud.tasks.v2.Queue]. Replaces any existing
policy.
Note: The Cloud Console does not check queue-level IAM
permissions yet. Project-level permissions are required to use
the Cloud Console.
Authorization requires the following `Google
IAM <https://cloud.google.com/iam>`__ permission on the
specified resource parent:
- ``cloudtasks.queues.setIamPolicy``
Returns:
Callable[[~.SetIamPolicyRequest],
~.Policy]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "set_iam_policy" not in self._stubs:
self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/SetIamPolicy",
request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=policy_pb2.Policy.FromString,
)
return self._stubs["set_iam_policy"]
@property
def test_iam_permissions(
self,
) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
iam_policy_pb2.TestIamPermissionsResponse,
]:
r"""Return a callable for the test iam permissions method over gRPC.
Returns permissions that a caller has on a
[Queue][google.cloud.tasks.v2.Queue]. If the resource does not
exist, this will return an empty set of permissions, not a
[NOT_FOUND][google.rpc.Code.NOT_FOUND] error.
Note: This operation is designed to be used for building
permission-aware UIs and command-line tools, not for
authorization checking. This operation may "fail open" without
warning.
Returns:
Callable[[~.TestIamPermissionsRequest],
~.TestIamPermissionsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "test_iam_permissions" not in self._stubs:
self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/TestIamPermissions",
request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
)
return self._stubs["test_iam_permissions"]
@property
def list_tasks(
self,
) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]:
r"""Return a callable for the list tasks method over gRPC.
Lists the tasks in a queue.
By default, only the
[BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved
due to performance considerations;
[response_view][google.cloud.tasks.v2.ListTasksRequest.response_view]
controls the subset of information which is returned.
The tasks may be returned in any order. The ordering may change
at any time.
Returns:
Callable[[~.ListTasksRequest],
~.ListTasksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_tasks" not in self._stubs:
self._stubs["list_tasks"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/ListTasks",
request_serializer=cloudtasks.ListTasksRequest.serialize,
response_deserializer=cloudtasks.ListTasksResponse.deserialize,
)
return self._stubs["list_tasks"]
@property
def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]:
r"""Return a callable for the get task method over gRPC.
Gets a task.
Returns:
Callable[[~.GetTaskRequest],
~.Task]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_task" not in self._stubs:
self._stubs["get_task"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/GetTask",
request_serializer=cloudtasks.GetTaskRequest.serialize,
response_deserializer=task.Task.deserialize,
)
return self._stubs["get_task"]
@property
def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]:
r"""Return a callable for the create task method over gRPC.
Creates a task and adds it to a queue.
Tasks cannot be updated after creation; there is no UpdateTask
command.
- The maximum task size is 100KB.
Returns:
Callable[[~.CreateTaskRequest],
~.Task]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_task" not in self._stubs:
self._stubs["create_task"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/CreateTask",
request_serializer=cloudtasks.CreateTaskRequest.serialize,
response_deserializer=gct_task.Task.deserialize,
)
return self._stubs["create_task"]
@property
def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]:
r"""Return a callable for the delete task method over gRPC.
Deletes a task.
A task can be deleted if it is scheduled or dispatched.
A task cannot be deleted if it has executed successfully
or permanently failed.
Returns:
Callable[[~.DeleteTaskRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_task" not in self._stubs:
self._stubs["delete_task"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/DeleteTask",
request_serializer=cloudtasks.DeleteTaskRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_task"]
@property
def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]:
r"""Return a callable for the run task method over gRPC.
Forces a task to run now.
When this method is called, Cloud Tasks will dispatch the task,
even if the task is already running, the queue has reached its
[RateLimits][google.cloud.tasks.v2.RateLimits] or is
[PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
This command is meant to be used for manual debugging. For
example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can
be used to retry a failed task after a fix has been made or to
manually force a task to be dispatched now.
The dispatched task is returned. That is, the task that is
returned contains the [status][Task.status] after the task is
dispatched but before the task is received by its target.
If Cloud Tasks receives a successful response from the task's
target, then the task will be deleted; otherwise the task's
[schedule_time][google.cloud.tasks.v2.Task.schedule_time] will
be reset to the time that
[RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called
plus the retry delay specified in the queue's
[RetryConfig][google.cloud.tasks.v2.RetryConfig].
[RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns
[NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a
task that has already succeeded or permanently failed.
Returns:
Callable[[~.RunTaskRequest],
~.Task]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "run_task" not in self._stubs:
self._stubs["run_task"] = self.grpc_channel.unary_unary(
"/google.cloud.tasks.v2.CloudTasks/RunTask",
request_serializer=cloudtasks.RunTaskRequest.serialize,
response_deserializer=task.Task.deserialize,
)
return self._stubs["run_task"]
def close(self):
self.grpc_channel.close()
__all__ = ("CloudTasksGrpcTransport",)
|
googleapis/python-tasks
|
google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py
|
Python
|
apache-2.0
| 33,575
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# fsfs-reshard.py REPOS_PATH MAX_FILES_PER_SHARD
#
# Perform an offline conversion of an FSFS repository between linear (format
# 2, usable by Subversion 1.4+) and sharded (format 3, usable by Subversion
# 1.5+) layouts.
#
# The MAX_FILES_PER_SHARD argument specifies the maximum number of files
# that will be stored in each shard (directory), or zero to specify a linear
# layout. Subversion 1.5 uses a default value of 1000 files per shard.
#
# As the repository will not be valid while the conversion is in progress,
# the repository administrator must ensure that access to the repository is
# blocked for the duration of the conversion.
#
# In the event that the conversion is interrupted, the repository will be in
# an inconsistent state. The repository administrator should then re-run
# this tool to completion.
#
#
# Note that, currently, resharding from one sharded layout to another is
# likely to be an extremely slow process. To reshard, we convert from a
# sharded to linear layout and then to the new sharded layout. The problem
# is that the initial conversion to the linear layout triggers exactly the
# same 'large number of files in a directory' problem that sharding is
# intended to solve.
#
# ====================================================================
# Copyright (c) 2007-2008 CollabNet. All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://subversion.tigris.org/license-1.html.
# If newer versions of this license are posted there, you may use a
# newer version instead, at your option.
#
# This software consists of voluntary contributions made by many
# individuals. For exact contribution history, see the revision
# history and logs, available at http://subversion.tigris.org/.
# ====================================================================
#
# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.6.x/tools/server-side/fsfs-reshard.py $
# $LastChangedDate: 2008-12-24 01:58:11 +0000 (Wed, 24 Dec 2008) $
# $LastChangedBy: arfrever $
# $LastChangedRevision: 874985 $
import os, stat, sys
from errno import EEXIST
def usage():
    """Write the command-line usage text to stdout and exit with status 1."""
    message = """usage: %s REPOS_PATH MAX_FILES_PER_SHARD [START END]
Perform an offline conversion of an FSFS repository between linear
(readable by Subversion 1.4 or later) and sharded (readable by
Subversion 1.5 or later) layouts.
The MAX_FILES_PER_SHARD argument specifies the maximum number of
files that will be stored in each shard (directory), or zero to
specify a linear layout. Subversion 1.5 uses a default value of
1000 files per shard.
Convert revisions START through END inclusive if specified, or all
revisions if unspecified.
""" % sys.argv[0]
    print(message)
    sys.exit(1)
def incompatible_repos_format(repos_path, format):
    """Report that REPOS_PATH has unsupported repository format FORMAT on
    stderr, then exit with status 1."""
    message = """error: unable to convert repository '%s'.
This repository is not compatible with this tool. Valid
repository formats are '3' or '5'; this repository is
format '%s'.
""" % (repos_path, format)
    sys.stderr.write(message)
    sys.stderr.flush()
    sys.exit(1)
def incompatible_fs_format(repos_path, format):
    """Report that REPOS_PATH contains an unsupported filesystem format
    FORMAT on stderr, then exit with status 1."""
    message = """error: unable to convert repository '%s'.
This repository contains a filesystem that is not compatible with
this tool. Valid filesystem formats are '1', '2', or '3'; this
repository contains a filesystem with format '%s'.
""" % (repos_path, format)
    sys.stderr.write(message)
    sys.stderr.flush()
    sys.exit(1)
def unexpected_fs_format_options(repos_path):
    """Report that REPOS_PATH's filesystem format file carries unexpected
    option lines, then exit with status 1."""
    message = """error: unable to convert repository '%s'.
This repository contains a filesystem that appears to be invalid -
there is unexpected data after the filesystem format number.
""" % repos_path
    sys.stderr.write(message)
    sys.stderr.flush()
    sys.exit(1)
def incompatible_fs_format_option(repos_path, option):
    """Report that REPOS_PATH's filesystem declares an unrecognised format
    OPTION on stderr, then exit with status 1."""
    message = """error: unable to convert repository '%s'.
This repository contains a filesystem that is not compatible with
this tool. This tool recognises the 'layout' option but the
filesystem uses the '%s' option.
""" % (repos_path, option)
    sys.stderr.write(message)
    sys.stderr.flush()
    sys.exit(1)
def warn_about_fs_format_1(repos_path, format_path):
    """Warn on stderr that REPOS_PATH holds a format 1 FSFS filesystem whose
    conversion would be irreversible, then exit with status 1."""
    message = """warning: conversion of '%s' will be one-way.
This repository is currently readable by Subversion 1.1 or later.
This tool can convert this repository to one that is readable by
either Subversion 1.4 (or later) or Subversion 1.5 (or later),
but it is not able to convert it back to the original format - a
separate dump/load step would be required.
If you would like to upgrade this repository anyway, delete the
file '%s' and re-run this tool.
""" % (repos_path, format_path)
    sys.stderr.write(message)
    sys.stderr.flush()
    sys.exit(1)
def check_repos_format(repos_path):
    """Check that REPOS_PATH contains a repository with a suitable format;
    print a message and exit if not.

    Accepts repository format '3' (Subversion 1.4) or '5' (Subversion 1.5).
    """
    format_path = os.path.join(repos_path, 'format')
    try:
        # 'with' guarantees the file handle is closed; the original code
        # leaked it.
        with open(format_path) as format_file:
            format = format_file.readline()
            if not format.endswith('\n'):
                incompatible_repos_format(repos_path, format + ' <missing newline>')
            format = format.rstrip('\n')
            if format not in ('3', '5'):
                incompatible_repos_format(repos_path, format)
    except IOError:
        # In all likelihood, the file doesn't exist.
        incompatible_repos_format(repos_path, '<unreadable>')
def check_fs_format(repos_path):
    """Check that REPOS_PATH contains a filesystem with a suitable format,
    or that it contains no format file; print a message and exit if neither
    is true. Return bool whether the filesystem is sharded."""
    sharded = False
    db_path = os.path.join(repos_path, 'db')
    format_path = os.path.join(db_path, 'format')
    try:
        format_file = open(format_path)
        # First line is the filesystem format number.
        format = format_file.readline()
        if not format.endswith('\n'):
            incompatible_fs_format(repos_path, format + ' <missing newline>')
        format = format.rstrip('\n')
        if format == '1':
            # This is a format 1 (svndiff0 only) filesystem. We can upgrade it,
            # but we can't downgrade again (since we can't uncompress any of the
            # svndiff1 deltas that may have been written). Warn the user and exit.
            warn_about_fs_format_1(repos_path, format_path)
        if format == '2':
            pass
        elif format == '3':
            pass
        else:
            incompatible_fs_format(repos_path, format)
        # Remaining lines are format options.  A format 2 file must not have
        # any; a format 3 file may declare its layout ('linear' or
        # 'sharded N').
        for line in format_file:
            if format == '2':
                unexpected_fs_format_options(repos_path)
            line = line.rstrip('\n')
            if line == 'layout linear':
                pass
            elif line.startswith('layout sharded '):
                sharded = True
            else:
                incompatible_fs_format_option(repos_path, line)
        format_file.close()
    except IOError:
        # The format file might not exist if we've previously been interrupted,
        # or if the user is following our advice about upgrading a format 1
        # repository. In both cases, we'll just assume the format was
        # compatible.
        pass
    return sharded
def current_file(repos_path):
    """Return the whitespace-split fields [revision, next_node_id,
    next_copy_id] of the first line of REPOS_PATH/db/current ."""
    # Use a context manager so the file handle is closed promptly; the
    # original left the handle to the garbage collector.
    with open(os.path.join(repos_path, 'db', 'current')) as f:
        return f.readline().split()
def remove_fs_format(repos_path):
    """Delete REPOS_PATH/db/format if present; do nothing when it is
    already missing."""
    target = os.path.join(repos_path, 'db', 'format')
    try:
        mode = os.stat(target).st_mode
    except OSError:
        # The file probably doesn't exist: nothing to do.
        return
    # On Windows a read-only file cannot be removed, so make it
    # user-writable before deleting.
    os.chmod(target, mode | stat.S_IWUSR)
    os.remove(target)
def write_fs_format(repos_path, contents):
    """Create REPOS_PATH/db/format containing CONTENTS, then mark the file
    read-only for owner and group."""
    target = os.path.join(repos_path, 'db', 'format')
    with open(target, 'wb') as out:
        out.write(contents)
    os.chmod(target, stat.S_IRUSR | stat.S_IRGRP)
def linearise(path):
    """Flatten PATH: move every file found in an immediate subdirectory of
    PATH into PATH itself and remove the (then empty) subdirectories.

    To avoid a clash between a subdirectory's name and a file it contains,
    each subdirectory is first renamed to carry a '.shard' suffix (the
    files are assumed not to have that suffix).  Aborts if a subdirectory
    contains a further subdirectory.
    """
    # Pass 1: give every subdirectory a '.shard' suffix.
    for entry in os.listdir(path):
        full = os.path.join(path, entry)
        if os.path.isdir(full) and not entry.endswith('.shard'):
            os.rename(full, full + '.shard')
    # Pass 2: hoist each shard's contents into PATH and drop the shard.
    for current, subdirs, files in os.walk(path):
        if current == path:
            continue
        if subdirs:
            sys.stderr.write("error: directory '%s' contains other unexpected directories.\n" \
                % current)
            sys.stderr.flush()
            sys.exit(1)
        for entry in files:
            os.rename(os.path.join(current, entry), os.path.join(path, entry))
        os.rmdir(current)
def shard(path, max_files_per_shard, start, end):
    """Move the files for revisions START to END inclusive in PATH into
    subdirectories of PATH, such that subdirectory '0' holds the files
    named [0, MAX_FILES_PER_SHARD), subdirectory '1' the next
    MAX_FILES_PER_SHARD files, and so on.

    The work is staged in a sibling directory PATH + '.reshard' so an
    interrupted run can be re-executed.  Unexpected (non-'.shard') entries
    found in the staging area are warned about and left in place.
    """
    tmp = path + '.reshard'
    try:
        os.mkdir(tmp)
    except OSError as e:
        # Python 3 compatible form (was the Py2-only 'except OSError, e').
        # An existing staging directory is fine: we may be resuming.
        if e.errno != EEXIST:
            raise
    # Move all entries into shards named N.shard.
    for rev in range(start, end + 1):
        name = str(rev)
        shard_name = '%d.shard' % (rev // max_files_per_shard)
        from_path = os.path.join(path, name)
        to_path = os.path.join(tmp, shard_name, name)
        try:
            os.rename(from_path, to_path)
        except OSError:
            # The most likely explanation is that the shard directory doesn't
            # exist. Let's create it and retry the rename.
            os.mkdir(os.path.join(tmp, shard_name))
            os.rename(from_path, to_path)
    # Now rename all the shards to remove the suffix.
    skipped = 0
    for name in os.listdir(tmp):
        if not name.endswith('.shard'):
            sys.stderr.write("warning: ignoring unexpected subdirectory '%s'.\n" \
                % os.path.join(tmp, name))
            sys.stderr.flush()
            skipped += 1
            continue
        from_path = os.path.join(tmp, name)
        to_path = os.path.join(path, os.path.basename(from_path)[:-6])
        os.rename(from_path, to_path)
    # Only remove the staging directory when nothing was left behind.
    # (The original expressed this as 'skipped == 0 and os.rmdir(tmp)',
    # using a boolean expression for control flow.)
    if skipped == 0:
        os.rmdir(tmp)
def main():
    """Command-line entry point: validate arguments and run the conversion."""
    if len(sys.argv) < 3:
        usage()
    repos_path = sys.argv[1]
    max_files_per_shard = sys.argv[2]
    try:
        start = int(sys.argv[3])
        end = int(sys.argv[4])
    except IndexError:
        # START/END not supplied: convert every revision.
        start = 0
        end = int(current_file(repos_path)[0])
    # Validate the command-line arguments.
    db_path = os.path.join(repos_path, 'db')
    current_path = os.path.join(db_path, 'current')
    if not os.path.exists(current_path):
        sys.stderr.write("error: '%s' doesn't appear to be a Subversion FSFS repository.\n" \
            % repos_path)
        sys.stderr.flush()
        sys.exit(1)
    try:
        max_files_per_shard = int(max_files_per_shard)
    # BUG FIX: the original read 'except ValueError, OverflowError', which
    # catches only ValueError and binds it to the name OverflowError; the
    # tuple form catches both exception types.
    except (ValueError, OverflowError):
        sys.stderr.write("error: maximum files per shard ('%s') is not a valid number.\n" \
            % max_files_per_shard)
        sys.stderr.flush()
        sys.exit(1)
    if max_files_per_shard < 0:
        sys.stderr.write("error: maximum files per shard ('%d') must not be negative.\n" \
            % max_files_per_shard)
        sys.stderr.flush()
        sys.exit(1)
    # Check the format of the repository.
    check_repos_format(repos_path)
    sharded = check_fs_format(repos_path)
    # Let the user know what's going on.
    if max_files_per_shard > 0:
        print("Converting '%s' to a sharded structure with %d files per directory" \
            % (repos_path, max_files_per_shard))
        if sharded:
            print('(will convert to a linear structure first)')
    else:
        print("Converting '%s' to a linear structure" % repos_path)
    # Prevent access to the repository for the duration of the conversion.
    # There's no clean way to do this, but since the format of the repository
    # is indeterminate, let's remove the format file while we're converting.
    print('- marking the repository as invalid')
    remove_fs_format(repos_path)
    # First, convert to a linear scheme (this makes recovery easier because
    # it's easier to reason about the behaviour on restart).
    if sharded:
        print('- linearising db/revs')
        linearise(os.path.join(repos_path, 'db', 'revs'))
        print('- linearising db/revprops')
        linearise(os.path.join(repos_path, 'db', 'revprops'))
    if max_files_per_shard == 0:
        # We're done. Stamp the filesystem with a format 2 db/format file.
        print('- marking the repository as a valid linear repository')
        write_fs_format(repos_path, '2\n')
    else:
        print('- sharding db/revs')
        shard(os.path.join(repos_path, 'db', 'revs'), max_files_per_shard,
              start, end)
        print('- sharding db/revprops')
        shard(os.path.join(repos_path, 'db', 'revprops'), max_files_per_shard,
              start, end)
        # We're done. Stamp the filesystem with a format 3 db/format file.
        print('- marking the repository as a valid sharded repository')
        write_fs_format(repos_path, '3\nlayout sharded %d\n' % max_files_per_shard)
    print('- done.')
    sys.exit(0)
if __name__ == '__main__':
    # Deliberate guard: the tool is known to be unfinished, so refuse to run
    # at all rather than risk corrupting a live repository; main() below is
    # intentionally unreachable.
    raise Exception("""This script is unfinished and not ready to be used on live data.
Trust us.""")
    main()
|
bdmod/extreme-subversion
|
BinarySourcce/subversion-1.6.17/tools/server-side/fsfs-reshard.py
|
Python
|
gpl-2.0
| 14,407
|
# stdlib
import csv
import io
import tarfile
from typing import Callable
from typing import Dict as TypeDict
from typing import List
from typing import Type
from typing import Union
# third party
from nacl.signing import VerifyKey
import numpy as np
import torch as th
# relative
from ...... import deserialize
from ......util import get_tracer
from .....common.group import VERIFYALL
from .....common.message import ImmediateSyftMessageWithReply
from .....common.uid import UID
from .....store.storeable_object import StorableObject
from ....domain.domain_interface import DomainInterface
from ...exceptions import AuthorizationError
from ...exceptions import DatasetNotFoundError
from ...node_table.utils import model_to_json
from ..auth import service_auth
from ..node_service import ImmediateNodeServiceWithReply
from ..success_resp_message import SuccessResponseMessage
from .dataset_manager_messages import CreateDatasetMessage
from .dataset_manager_messages import DeleteDatasetMessage
from .dataset_manager_messages import GetDatasetMessage
from .dataset_manager_messages import GetDatasetResponse
from .dataset_manager_messages import GetDatasetsMessage
from .dataset_manager_messages import GetDatasetsResponse
from .dataset_manager_messages import UpdateDatasetMessage
# Character encoding name; defined as a module constant (not referenced in
# the code visible here).
ENCODING = "UTF-8"
# Shared tracer used to create instrumentation spans in the handlers below.
tracer = get_tracer()
def _handle_dataset_creation_grid_ui(
    msg: CreateDatasetMessage, node: DomainInterface, verify_key: VerifyKey
) -> None:
    """Register a dataset uploaded from the grid UI.

    msg.dataset is a tar archive of CSV files; every non-directory member
    is parsed as CSV, turned into a float32 torch tensor, stored in the
    node's object store, and recorded against the newly registered dataset.
    """
    file_obj = io.BytesIO(msg.dataset)
    tar_obj = tarfile.open(fileobj=file_obj)
    # SECURITY NOTE(review): extractall() on an untrusted archive is
    # vulnerable to path traversal via '../' member names; members should
    # be validated before extraction.
    tar_obj.extractall()
    dataset_id = node.datasets.register(**msg.metadata)
    for item in tar_obj.getmembers():
        if not item.isdir():
            extracted_file = tar_obj.extractfile(item.name)
            if not extracted_file:
                # TODO: raise CustomError
                raise ValueError("Dataset Tar corrupted")
            reader = csv.reader(
                extracted_file.read().decode().split("\n"),
                delimiter=",",
            )
            # Keep only non-empty rows.
            rows = [row for row in reader if len(row) != 0]
            # BUG FIX: np.float was a deprecated alias for the builtin float
            # (deprecated in NumPy 1.20, removed in 1.24); np.float64 is the
            # equivalent concrete dtype.
            dataset = np.array(rows, dtype=np.float64)
            df = th.tensor(dataset, dtype=th.float32)
            id_at_location = UID()
            # Step 2: create message which contains object to send
            storable = StorableObject(
                id=id_at_location,
                data=df,
                # Tag with the file's base name.
                tags=["#" + item.name.split("/")[-1]],
                # Anyone may discover the object; only the node and the
                # uploader may read or write it.
                search_permissions={VERIFYALL: None},
                read_permissions={node.verify_key: node.id, verify_key: None},
                write_permissions={node.verify_key: node.id, verify_key: None},
            )
            node.store[storable.id] = storable
            node.datasets.add(
                name=item.name,
                dataset_id=str(dataset_id),
                obj_id=str(id_at_location.value),
                dtype=df.__class__.__name__,
                shape=str(tuple(df.shape)),
            )
def _handle_dataset_creation_syft(
    msg: CreateDatasetMessage, node: DomainInterface, verify_key: VerifyKey
) -> None:
    """Register a dataset uploaded via the syft client.

    msg.dataset deserializes to a mapping of table-name -> table; each
    table is wrapped in a StorableObject, saved to the node's store, and
    recorded against the dataset.
    """
    with tracer.start_as_current_span("_handle_dataset_creation_syft"):
        with tracer.start_as_current_span("deserialization"):
            result = deserialize(msg.dataset, from_bytes=True)
        # Reuse a client-supplied dataset id when present; otherwise
        # register a new dataset from the remaining metadata.
        dataset_id = msg.metadata.get("dataset_id")
        if not dataset_id:
            dataset_id = node.datasets.register(**msg.metadata)
        for table_name, table in result.items():
            id_at_location = UID()
            storable = StorableObject(
                id=id_at_location,
                data=table,
                tags=[f"#{table_name}"],
                # Anyone may discover the object; only the node and the
                # uploader may read or write it.
                search_permissions={VERIFYALL: None},
                read_permissions={node.verify_key: node.id, verify_key: None},
                write_permissions={node.verify_key: node.id, verify_key: None},
            )
            with tracer.start_as_current_span("save to DB"):
                node.store[storable.id] = storable
            node.datasets.add(
                name=table_name,
                dataset_id=str(dataset_id),
                obj_id=str(id_at_location.value),
                dtype=str(table.__class__.__name__),
                shape=str(table.shape),
            )
def create_dataset_msg(
    msg: CreateDatasetMessage,
    node: DomainInterface,
    verify_key: VerifyKey,
) -> SuccessResponseMessage:
    """Create a dataset, dispatching on msg.platform ('syft' or 'grid-ui')."""
    # Check key permissions
    if not node.users.can_upload_data(verify_key=verify_key):
        raise AuthorizationError("You're not allowed to upload data!")
    handlers = {
        "syft": _handle_dataset_creation_syft,
        "grid-ui": _handle_dataset_creation_grid_ui,
    }
    handler = handlers.get(msg.platform)
    # Unknown platforms are silently ignored, matching the original
    # if/elif chain with no else branch.
    if handler is not None:
        handler(msg, node, verify_key)
    return SuccessResponseMessage(
        address=msg.reply_to,
        resp_msg="Dataset Created Successfully!",
    )
def get_dataset_metadata_msg(
    msg: GetDatasetMessage,
    node: DomainInterface,
    verify_key: VerifyKey,
) -> GetDatasetResponse:
    """Return one dataset's metadata plus a summary of each stored object."""
    ds, objs = node.datasets.get(msg.dataset_id)
    if not ds:
        raise DatasetNotFoundError
    dataset_json = model_to_json(ds)
    # these types seem broken
    summaries = []
    for obj in objs:
        summaries.append(
            {"name": obj.name, "id": obj.obj, "dtype": obj.dtype, "shape": obj.shape}  # type: ignore
        )
    dataset_json["data"] = summaries
    return GetDatasetResponse(
        address=msg.reply_to,
        metadata=dataset_json,
    )
def get_all_datasets_metadata_msg(
    msg: GetDatasetsMessage,
    node: DomainInterface,
    verify_key: VerifyKey,
) -> GetDatasetsResponse:
    """Return metadata summaries for every dataset known to the node."""

    def _describe(obj):
        # One stored object -> its summary dict.
        return {
            "name": obj.name,  # type: ignore
            "id": obj.obj,  # type: ignore
            "dtype": obj.dtype,  # type: ignore
            "shape": obj.shape,  # type: ignore
        }

    metadatas = []
    for dataset in node.datasets.all():
        entry = model_to_json(dataset)
        _, objs = node.datasets.get(dataset.id)
        # these types seem broken
        entry["data"] = [_describe(obj) for obj in objs]
        metadatas.append(entry)
    return GetDatasetsResponse(
        address=msg.reply_to,
        metadatas=metadatas,
    )
def update_dataset_msg(
    msg: UpdateDatasetMessage,
    node: DomainInterface,
    verify_key: VerifyKey,
) -> SuccessResponseMessage:
    """Update a dataset's metadata after checking upload permission."""
    # Get Payload Content
    _allowed = node.users.can_upload_data(verify_key=verify_key)
    if not _allowed:
        raise AuthorizationError("You're not allowed to upload data!")
    # Drop unset (None) entries and upcast the rest.  The original iterated
    # .items() but ignored the bound value and re-indexed msg.metadata[key]
    # twice per entry.
    metadata = {
        key: value.upcast()
        for key, value in msg.metadata.items()
        if value is not None
    }
    node.datasets.set(dataset_id=msg.dataset_id, metadata=metadata)
    return SuccessResponseMessage(
        address=msg.reply_to,
        resp_msg="Dataset updated successfully!",
    )
def delete_dataset_msg(
    msg: DeleteDatasetMessage,
    node: DomainInterface,
    verify_key: VerifyKey,
) -> SuccessResponseMessage:
    """Delete one bin object from a dataset, or the whole dataset."""
    if not node.users.can_upload_data(verify_key=verify_key):
        raise AuthorizationError("You're not allowed to delete data!")
    if msg.bin_object_id:
        # A specific bin object was named: delete just that object.
        node.store.delete(UID(msg.bin_object_id))  # type: ignore
    else:
        # No bin object named: delete the dataset and everything in it.
        ds, objs = node.datasets.get(msg.dataset_id)
        if not ds:
            raise DatasetNotFoundError
        # Delete all the bin objects related to the dataset
        for obj in objs:
            node.store.delete(UID(obj.obj))  # type: ignore
        node.datasets.delete(id=msg.dataset_id)
    return SuccessResponseMessage(
        address=msg.reply_to,
        resp_msg="Dataset deleted successfully!",
    )
class DatasetManagerService(ImmediateNodeServiceWithReply):
    """Node service that routes dataset-management messages to the
    module-level handler functions defined above."""

    # Union of the message *classes* this service accepts (dict key type).
    INPUT_TYPE = Union[
        Type[CreateDatasetMessage],
        Type[GetDatasetMessage],
        Type[GetDatasetsMessage],
        Type[UpdateDatasetMessage],
        Type[DeleteDatasetMessage],
    ]
    # Union of the message *instances* process() accepts.
    INPUT_MESSAGES = Union[
        CreateDatasetMessage,
        GetDatasetMessage,
        GetDatasetsMessage,
        UpdateDatasetMessage,
        DeleteDatasetMessage,
    ]
    # Union of the possible reply types produced by the handlers.
    OUTPUT_MESSAGES = Union[
        SuccessResponseMessage, GetDatasetResponse, GetDatasetsResponse
    ]
    # Dispatch table: message class -> handler function.
    msg_handler_map: TypeDict[INPUT_TYPE, Callable[..., OUTPUT_MESSAGES]] = {
        CreateDatasetMessage: create_dataset_msg,
        GetDatasetMessage: get_dataset_metadata_msg,
        GetDatasetsMessage: get_all_datasets_metadata_msg,
        UpdateDatasetMessage: update_dataset_msg,
        DeleteDatasetMessage: delete_dataset_msg,
    }

    @staticmethod
    @service_auth(guests_welcome=True)
    def process(
        node: DomainInterface,
        msg: INPUT_MESSAGES,
        verify_key: VerifyKey,
    ) -> OUTPUT_MESSAGES:
        # Authenticate (guests allowed) then dispatch by concrete msg type.
        return DatasetManagerService.msg_handler_map[type(msg)](
            msg=msg, node=node, verify_key=verify_key
        )

    @staticmethod
    def message_handler_types() -> List[Type[ImmediateSyftMessageWithReply]]:
        # Advertise the message types this service handles.
        return [
            CreateDatasetMessage,
            GetDatasetMessage,
            GetDatasetsMessage,
            UpdateDatasetMessage,
            DeleteDatasetMessage,
        ]
|
OpenMined/PySyft
|
packages/syft/src/syft/core/node/common/node_service/dataset_manager/dataset_manager_service.py
|
Python
|
apache-2.0
| 9,520
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Tests for qutebrowser.browser.webkit.http."""
import logging
import pytest
import hypothesis
from hypothesis import strategies
from PyQt5.QtCore import QUrl
from qutebrowser.browser.webkit import http
@pytest.mark.parametrize('url, expected', [
    # Filename in the URL
    ('http://example.com/path', 'path'),
    ('http://example.com/foo/path', 'path'),
    # No filename at all
    ('http://example.com', 'qutebrowser-download'),
    ('http://example.com/', 'qutebrowser-download'),
])
def test_no_content_disposition(stubs, url, expected):
    """Without a Content-Disposition header the name comes from the URL."""
    reply = stubs.FakeNetworkReply(url=QUrl(url))
    is_inline, name = http.parse_content_disposition(reply)
    assert is_inline
    assert name == expected
@pytest.mark.parametrize('template', [
    '{}',
    'attachment; filename="{}"',
    'inline; {}',
    'attachment; {}="foo"',
    "attachment; filename*=iso-8859-1''{}",
    'attachment; filename*={}',
])
@hypothesis.given(strategies.text(alphabet=[chr(x) for x in range(255)]))
def test_parse_content_disposition_hypothesis(caplog, template, stubs, s):
    """Fuzz the parser: arbitrary text filled into each header template
    must never raise — at worst it logs an error."""
    reply = stubs.FakeNetworkReply(
        headers={'Content-Disposition': template.format(s)})
    with caplog.at_level(logging.ERROR, 'network'):
        http.parse_content_disposition(reply)
@hypothesis.given(strategies.binary())
def test_content_disposition_directly_hypothesis(s):
    """Fuzz rfc6266 parsing directly: arbitrary bytes must either parse
    or raise ContentDispositionError — nothing else."""
    try:
        http.ContentDisposition.parse(s).filename()
    except http.ContentDispositionError:
        pass
@pytest.mark.parametrize('content_type, expected_mimetype, expected_rest', [
    (None, None, None),
    ('image/example', 'image/example', None),
    ('', '', None),
    ('image/example; encoding=UTF-8', 'image/example', ' encoding=UTF-8'),
])
def test_parse_content_type(stubs, content_type, expected_mimetype,
                            expected_rest):
    """Content-Type splits into the mimetype and whatever follows ';'."""
    if content_type is None:
        # No Content-Type header at all.
        reply = stubs.FakeNetworkReply()
    else:
        reply = stubs.FakeNetworkReply(
            headers={'Content-Type': content_type})
    mimetype, rest = http.parse_content_type(reply)
    assert (mimetype, rest) == (expected_mimetype, expected_rest)
@hypothesis.given(strategies.text())
def test_parse_content_type_hypothesis(stubs, s):
    # Fuzz: any unicode Content-Type value must be handled without raising.
    reply = stubs.FakeNetworkReply(headers={'Content-Type': s})
    http.parse_content_type(reply)
|
qutebrowser/qutebrowser
|
tests/unit/browser/webkit/http/test_http.py
|
Python
|
gpl-3.0
| 3,271
|
from django.contrib.postgres.fields import JSONField
from django.db.models.aggregates import Aggregate
__all__ = [
'ArrayAgg', 'BitAnd', 'BitOr', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg',
]
class ArrayAgg(Aggregate):
    """PostgreSQL ARRAY_AGG: collect the expression's values into an array."""
    function = 'ARRAY_AGG'
    template = '%(function)s(%(distinct)s%(expressions)s)'

    def __init__(self, expression, distinct=False, **extra):
        prefix = 'DISTINCT ' if distinct else ''
        super().__init__(expression, distinct=prefix, **extra)

    def convert_value(self, value, expression, connection, context):
        # A NULL aggregate (no rows) becomes an empty list.
        return value or []
class BitAnd(Aggregate):
    """PostgreSQL BIT_AND: bitwise AND of all non-null input values."""
    function = 'BIT_AND'


class BitOr(Aggregate):
    """PostgreSQL BIT_OR: bitwise OR of all non-null input values."""
    function = 'BIT_OR'


class BoolAnd(Aggregate):
    """PostgreSQL BOOL_AND: true if every input value is true."""
    function = 'BOOL_AND'


class BoolOr(Aggregate):
    """PostgreSQL BOOL_OR: true if any input value is true."""
    function = 'BOOL_OR'
class JSONBAgg(Aggregate):
    """PostgreSQL JSONB_AGG: aggregate values into a JSONB array."""
    function = 'JSONB_AGG'
    _output_field = JSONField()

    def convert_value(self, value, expression, connection, context):
        # A NULL aggregate (no rows) becomes an empty list.
        return value or []
class StringAgg(Aggregate):
    """PostgreSQL STRING_AGG: concatenate values joined by DELIMITER."""
    function = 'STRING_AGG'
    template = "%(function)s(%(distinct)s%(expressions)s, '%(delimiter)s')"

    def __init__(self, expression, delimiter, distinct=False, **extra):
        super().__init__(
            expression,
            delimiter=delimiter,
            distinct='DISTINCT ' if distinct else '',
            **extra,
        )

    def convert_value(self, value, expression, connection, context):
        # A NULL aggregate (no rows) becomes an empty string.
        return value or ''
|
Beauhurst/django
|
django/contrib/postgres/aggregates/general.py
|
Python
|
bsd-3-clause
| 1,509
|
from django.core.management.base import BaseCommand
from django.db.models import get_apps, get_app
from django_evolution.models import Evolution
class Command(BaseCommand):
    """Lists the applied evolutions for one or more apps."""

    def handle(self, *app_labels, **options):
        # No explicit apps given: fall back to every installed app.  The
        # label is the second-to-last component of the app module's dotted
        # path (e.g. 'myapp' from 'myapp.models').
        if not app_labels:
            app_labels = [app.__name__.split('.')[-2] for app in get_apps()]
        for app_label in app_labels:
            evolutions = list(Evolution.objects.filter(app_label=app_label))
            if evolutions:
                # NOTE: Python 2 print statements — this module predates
                # Python 3 and will not parse under it.
                print "Applied evolutions for '%s':" % app_label
                for evolution in evolutions:
                    print ' %s' % evolution.label
                print
|
shash/IconDB
|
djev/management/commands/list-evolutions.py
|
Python
|
agpl-3.0
| 718
|
from django.db import models
from cms.models import CMSPlugin
# Allowed values for Style.class_name, expanded into Django-style
# (value, label) choice pairs.
CLASS_CHOICES = ['container', 'content', 'teaser']
CLASS_CHOICES = tuple((entry, entry) for entry in CLASS_CHOICES)
# HTML elements the plugin may render, likewise expanded into choices.
TAG_CHOICES = [
    'div', 'article', 'section', 'header', 'footer', 'aside',
    'h1', 'h2', 'h3', 'h4', 'h5', 'h6'
]
TAG_CHOICES = tuple((entry, entry) for entry in TAG_CHOICES)
class Style(CMSPlugin):
    """
    Renders a given ``TAG_CHOICES`` element with additional attributes
    """
    # Optional display name shown in the structure board.
    label = models.CharField(
        verbose_name='Label',
        blank=True,
        max_length=255,
        help_text='Overrides the display name in the structure mode.',
    )
    # HTML element to render (restricted to TAG_CHOICES).
    tag_type = models.CharField(
        verbose_name='Tag type',
        choices=TAG_CHOICES,
        default=TAG_CHOICES[0][0],
        max_length=255,
    )
    # Primary CSS class (restricted to CLASS_CHOICES; may be blank).
    class_name = models.CharField(
        verbose_name='Class name',
        choices=CLASS_CHOICES,
        default=CLASS_CHOICES[0][0],
        blank=True,
        max_length=255,
    )
    # Free-form, comma-separated extra CSS classes.
    additional_classes = models.CharField(
        verbose_name='Additional classes',
        blank=True,
        max_length=255,
    )

    def __str__(self):
        return self.label or self.tag_type or str(self.pk)

    def get_short_description(self):
        """Build the structure-board summary string.

        Display format:  Style label <tag> .list.of.classes
        """
        # display format:
        # Style label <tag> .list.of.classes #id
        display = []
        classes = []
        if self.label:
            display.append(self.label)
        if self.tag_type:
            display.append('<{0}>'.format(self.tag_type))
        if self.class_name:
            classes.append(self.class_name)
        if self.additional_classes:
            classes.extend(item.strip() for item in self.additional_classes.split(',') if item.strip())
        # BUG FIX: the joined class list was previously appended only when
        # additional_classes was non-empty, so a class_name with no
        # additional classes was silently dropped from the summary.
        if classes:
            display.append('.{0}'.format('.'.join(classes)))
        return ' '.join(display)

    def get_additional_classes(self):
        """Return additional_classes normalised to a space-separated string."""
        return ' '.join(item.strip() for item in self.additional_classes.split(',') if item.strip())
|
rsalmaso/django-cms
|
cms/test_utils/project/pluginapp/plugins/style/models.py
|
Python
|
bsd-3-clause
| 1,967
|
import os
import re
import sys
"""
* Perform initial configuration to ensure that the server is set up to work with Burton's format
sudo chown -R ubuntu:ubuntu /var/www
mkdir -p /var/www/default/public_html
mv /var/www/html/index.html /var/www/default/public_html # Ubuntu >=14.04
mv /var/www/index.html /var/www/default/public_html # Ubuntu <14.04
rm -rf /var/www/html
sudo vim /etc/apache2/sites-available/000-default.conf # Ubuntu >=14.04
sudo vim /etc/apache2/sites-available/default # Ubuntu <14.04
sudo a2enmod ssl
sudo service apache2 restart
* Enable / disable .htaccess for a site
* PHP configuration
"""
environment = ''
def main(env):
    """Top-level menu loop.

    Stores ENV in the module-global `environment`, then prompts repeatedly
    until the user goes back ('0', returns True) or exits ('-').
    """
    global environment
    environment = env
    actions = {
        '1': restart_apache,
        '2': add_website,
        '3': add_ssl,
    }
    while True:
        print("\nConfigure Websites\n")
        print("Please select an operation:")
        print("   1. Restart Apache")
        print("   2. Add a new website")
        print("   3. Add SSL to website")
        print("   0. Go Back")
        print("   -. Exit")
        choice = input(environment.prompt)
        if choice == '0':
            return True
        if choice == '-':
            sys.exit()
        action = actions.get(choice)
        if action is None:
            print("Invalid input.")
        else:
            action()
def restart_apache():
    """Restart Apache via the service command and echo the exit status;
    always returns True."""
    print("\nAttempting to restart Apache:")
    # TODO: Print an error when the user does not have permissions to perform the action.
    print(os.system("sudo service apache2 restart"))
    return True
def add_website():
    """Interactively create an Apache vhost: write the site config from the
    template, create the web root and log directories, enable the site,
    and restart Apache.  Always returns True."""
    global environment
    print('\nAdd website.\n')
    with open('./example-files/apache-site', 'r') as input_file:
        input_file_text = input_file.read()
    site_name = input('Website name (without www or http)' + environment.prompt)
    new_filename = '/etc/apache2/sites-available/%s.conf' % (site_name,)
    tmp_filename = '/tmp/%s.conf' % (site_name,)
    # TODO: Check that site_name is legal for both a domain name and a filename.
    while os.path.isfile(new_filename):
        print('Site exists! Please choose another.')
        site_name = input('Website name (without www or http)' + environment.prompt)
        new_filename = '/etc/apache2/sites-available/%s.conf' % (site_name,)
        tmp_filename = '/tmp/%s.conf' % (site_name,)
    new_config = re.sub('SITE', site_name, input_file_text)
    # BUG FIX: tmp_move was read after the try/except even when the
    # exception fired before it was assigned (NameError); initialise it so
    # the failure branch below runs instead of crashing.
    tmp_move = None
    try:
        with open(tmp_filename, 'w') as output_file:
            output_file.write(new_config)
        tmp_move = os.system("sudo mv %s %s" % (tmp_filename, new_filename))
    except PermissionError:
        print('\n\nError!')
        print('The current user does not have permission to perform this action.')
        #print('Please run Burton with elevated permissions to resolve this error.\n\n')
    if tmp_move != 0:
        print('\n\nError!')
        print('The current user does not have permission to perform this action.')
        #print('Please run Burton with elevated permissions to resolve this error.\n\n')
    current_user = str(os.getuid())
    result = os.system('sudo mkdir -p /var/www/%s/public_html/' % (site_name,))
    result = os.system('sudo mkdir -p /var/www/%s/logs/' % (site_name,))
    # BUG FIX: the chown format string has three placeholders but only two
    # arguments were supplied (TypeError at runtime); add site_name.
    result = os.system('sudo chown -R %s:%s /var/www/%s/'
                       % (current_user, current_user, site_name))
    result = os.system('sudo a2ensite %s.conf' % (site_name,))
    restart_apache()
    return True
def add_ssl():
    """Print the openssl command for generating a key + CSR for a site
    (optionally wildcard); always returns True."""
    global environment
    print("\nAdd SSL to website.\n")
    print("Please enter the URL of the website.\n")
    site_name = input(environment.prompt)
    print("Is this a wildcard certificate? (y/N)\n")
    if input(environment.prompt).lower() == 'y':
        print("Generating wildcard cert for *.%s" % (site_name,))
        wildcard = '*.'
    else:
        print("Generating cert for %s" % (site_name,))
        wildcard = ''
    # http://serverfault.com/questions/649990/non-interactive-creation-of-ssl-certificate-requests
    #command_template = 'openssl req -new -newkey rsa:2048 -nodes -sha256 -keyout foobar.com.key -out foobar.com.csr -subj "/C=US/ST=New foobar/L=foobar/O=foobar foobar, Inc./CN=foobar.com/emailAddress=foobar@foobar.com"'
    command_template = "openssl req -new -newkey rsa:2048 -nodes -sha256 -keyout %s.key -out %s.csr -subj \"/CN=%s%s\""
    print(command_template % (site_name, site_name, wildcard, site_name))
    return True
|
dotancohen/burton
|
configure_websites.py
|
Python
|
gpl-3.0
| 4,169
|
# Copyright (c) MetaCommunications, Inc. 2003-2007
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import xml.sax.saxutils
import zipfile
import ftplib
import time
import stat
import xml.dom.minidom
import xmlrpclib
import httplib
import os.path
import string
import sys
import re
import urlparse
def process_xml_file( input_file, output_file ):
    # Append INPUT_FILE's lines to OUTPUT_FILE after scrubbing each line
    # through utils.char_translation_table.
    # NOTE(review): 'utils' is not imported in the visible header — it is
    # presumably imported elsewhere in this script; verify before running.
    utils.log( 'Processing test log "%s"' % input_file )
    f = open( input_file, 'r' )
    xml = f.readlines()
    f.close()
    # Python 2 string.translate() (module function form, removed in Py3).
    for i in range( 0, len(xml)):
        xml[i] = string.translate( xml[i], utils.char_translation_table )
    output_file.writelines( xml )
def process_test_log_files( output_file, dir, names ):
    """os.path.walk visitor: feed every 'test_log.xml' in NAMES (relative
    to DIR) into OUTPUT_FILE."""
    for entry in names:
        if os.path.basename(entry) != 'test_log.xml':
            continue
        process_xml_file(os.path.join(dir, entry), output_file)
def collect_test_logs( input_dirs, test_results_writer ):
    # Walk every directory in INPUT_DIRS and funnel all test_log.xml files
    # found into TEST_RESULTS_WRITER via process_test_log_files.
    # __log__ gates utils.log verbosity (module convention — presumably
    # read by utils.log; verify).
    __log__ = 1
    utils.log( 'Collecting test logs ...' )
    for input_dir in input_dirs:
        utils.log( 'Walking directory "%s" ...' % input_dir )
        # NOTE: os.path.walk is Python 2 only (removed in Python 3).
        os.path.walk( input_dir, process_test_log_files, test_results_writer )
# Map Boost test-log result strings to Dart/CTest status values.
dart_status_from_result = {
    'succeed': 'passed',
    'fail': 'failed',
    'note': 'passed',
    '': 'notrun'
}
# Map source tag to Dart project name ('' is the default).
dart_project = {
    'trunk': 'Boost_HEAD',
    '': 'Boost_HEAD'
}
# Map run type to Dart submission track ('' is the default).
dart_track = {
    'full': 'Nightly',
    'incremental': 'Continuous',
    '': 'Experimental'
}
# 256-entry translation table: keeps '\n'/'\r', maps other control and
# non-ASCII bytes to '?', and passes printable ASCII through unchanged.
ascii_only_table = ""
for i in range(0,256):
    if chr(i) == '\n' or chr(i) == '\r':
        ascii_only_table += chr(i)
    elif i < 32 or i >= 0x80:
        ascii_only_table += '?'
    else:
        ascii_only_table += chr(i)
class xmlrpcProxyTransport(xmlrpclib.Transport):
    # XML-RPC transport that routes requests through an HTTP proxy
    # (Python 2 xmlrpclib/httplib API).
    def __init__(self, proxy):
        # NOTE(review): xmlrpclib.Transport.__init__ is not called, so any
        # state the base class initialises is missing — verify intended.
        self.proxy = proxy
    def make_connection(self, host):
        # Remember the real target host and open the connection to the
        # proxy instead.
        self.realhost = host
        return httplib.HTTP(self.proxy)
    def send_request(self, connection, handler, request_body):
        # Absolute-URI request line, as required when talking to a proxy.
        connection.putrequest('POST','http://%s%s' % (self.realhost,handler))
    def send_host(self, connection, host):
        # The Host header must name the real target, not the proxy.
        connection.putheader('Host',self.realhost)
def publish_test_logs(
    input_dirs,
    runner_id, tag, platform, comment_file, timestamp, user, source, run_type,
    dart_server = None,
    http_proxy = None,
    **unused
    ):
    """Push every test_log.xml found under *input_dirs* to a Dart server.

    One DartSubmission DOM is accumulated per toolset and submitted via
    XML-RPC (optionally through *http_proxy*).  This is a no-op unless
    *dart_server* is given; submission errors are logged, not raised.
    """
    __log__ = 1
    utils.log( 'Publishing test logs ...' )
    dart_rpc = None
    dart_dom = {}
    # os.path.walk visitor: parse one test_log.xml and merge its result
    # elements into the per-toolset DartSubmission document.
    def _publish_test_log_files_ ( unused, dir, names ):
        for file in names:
            if os.path.basename( file ) == 'test_log.xml':
                utils.log( 'Publishing test log "%s"' % os.path.join(dir,file) )
                if dart_server:
                    # Strip non-ASCII bytes before XML parsing.
                    log_xml = open(os.path.join(dir,file)).read().translate(ascii_only_table)
                    #~ utils.log( '--- XML:\n%s' % log_xml)
                    #~ It seems possible to get an empty XML result file :-(
                    if log_xml == "": continue
                    log_dom = xml.dom.minidom.parseString(log_xml)
                    test = {
                        'library': log_dom.documentElement.getAttribute('library'),
                        'test-name': log_dom.documentElement.getAttribute('test-name'),
                        'toolset': log_dom.documentElement.getAttribute('toolset')
                        }
                    if not test['test-name'] or test['test-name'] == '':
                        test['test-name'] = 'unknown'
                    if not test['toolset'] or test['toolset'] == '':
                        test['toolset'] = 'unknown'
                    # Lazily create one submission document per toolset.
                    if not dart_dom.has_key(test['toolset']):
                        dart_dom[test['toolset']] = xml.dom.minidom.parseString(
'''<?xml version="1.0" encoding="UTF-8"?>
<DartSubmission version="2.0" createdby="collect_and_upload_logs.py">
    <Site>%(site)s</Site>
    <BuildName>%(buildname)s</BuildName>
    <Track>%(track)s</Track>
    <DateTimeStamp>%(datetimestamp)s</DateTimeStamp>
</DartSubmission>
''' % {
                            'site': runner_id,
                            'buildname': "%s -- %s (%s)" % (platform,test['toolset'],run_type),
                            'track': dart_track[run_type],
                            'datetimestamp' : timestamp
                            } )
                    submission_dom = dart_dom[test['toolset']]
                    # One <Test> element per child element of the log.
                    for node in log_dom.documentElement.childNodes:
                        if node.nodeType == xml.dom.Node.ELEMENT_NODE:
                            if node.firstChild:
                                log_data = xml.sax.saxutils.escape(node.firstChild.data)
                            else:
                                log_data = ''
                            test_dom = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
<Test>
    <Name>.Test.Boost.%(tag)s.%(library)s.%(test-name)s.%(type)s</Name>
    <Status>%(result)s</Status>
    <Measurement name="Toolset" type="text/string">%(toolset)s</Measurement>
    <Measurement name="Timestamp" type="text/string">%(timestamp)s</Measurement>
    <Measurement name="Log" type="text/text">%(log)s</Measurement>
</Test>
''' % {
                                'tag': tag,
                                'library': test['library'],
                                'test-name': test['test-name'],
                                'toolset': test['toolset'],
                                'type': node.nodeName,
                                'result': dart_status_from_result[node.getAttribute('result')],
                                'timestamp': node.getAttribute('timestamp'),
                                'log': log_data
                                })
                            submission_dom.documentElement.appendChild(
                                test_dom.documentElement.cloneNode(1) )
    for input_dir in input_dirs:
        utils.log( 'Walking directory "%s" ...' % input_dir )
        os.path.walk( input_dir, _publish_test_log_files_, None )
    if dart_server:
        try:
            rpc_transport = None
            if http_proxy:
                rpc_transport = xmlrpcProxyTransport(http_proxy)
            dart_rpc = xmlrpclib.ServerProxy(
                'http://%s/%s/Command/' % (dart_server,dart_project[tag]),
                rpc_transport )
            # Submit one document per toolset.
            for dom in dart_dom.values():
                #~ utils.log('Dart XML: %s' % dom.toxml('utf-8'))
                dart_rpc.Submit.put(xmlrpclib.Binary(dom.toxml('utf-8')))
        except Exception, e:
            # Best-effort: publication failure must not abort log collection.
            utils.log('Dart server error: %s' % e)
def upload_to_ftp( tag, results_file, ftp_proxy, debug_level, ftp_url ):
    """Upload *results_file* into the <ftp_url>/<tag> directory.

    When *ftp_url* is empty, falls back to the anonymous account on the
    dedicated boost.cowic.de results host.  Creates the tag directory
    chain on first upload.
    """
    if not ftp_url:
        ftp_host = 'boost.cowic.de'
        ftp_url = ''.join(['ftp','://anonymous','@',ftp_host,'/boost/do-not-publish-this-url/results/'])
    utils.log( 'Uploading log archive "%s" to %s/%s' % ( results_file, ftp_url, tag ) )
    # Split user/password/host out of the URL; password defaults to
    # 'anonymous' when the netloc has no ':password' part.
    ftp_parts = urlparse.urlparse(ftp_url)
    ftp_netloc = re.split('[@]',ftp_parts[1])
    ftp_user = re.split('[:]',ftp_netloc[0])[0]
    ftp_password = re.split('[:]',ftp_netloc[0]+':anonymous')[1]
    ftp_site = re.split('[:]',ftp_netloc[1])[0]
    ftp_path = ftp_parts[2]
    if not ftp_proxy:
        ftp = ftplib.FTP( ftp_site )
        ftp.set_debuglevel( debug_level )
        ftp.login( ftp_user, ftp_password )
    else:
        utils.log( '    Connecting through FTP proxy server "%s"' % ftp_proxy )
        ftp = ftplib.FTP( ftp_proxy )
        ftp.set_debuglevel( debug_level )
        ftp.set_pasv (0) # turn off PASV mode
        # Proxy login convention: user@realhost.
        ftp.login( '%s@%s' % (ftp_user,ftp_site), ftp_password )
    ftp.cwd( ftp_path )
    try:
        ftp.cwd( tag )
    except ftplib.error_perm:
        # Tag directory does not exist yet: create each path component.
        for dir in tag.split( '/' ):
            ftp.mkd( dir )
            ftp.cwd( dir )
    f = open( results_file, 'rb' )
    ftp.storbinary( 'STOR %s' % os.path.basename( results_file ), f )
    ftp.quit()
def copy_comments( results_xml, comment_file ):
    """Emit a <comment> element containing *comment_file* (if present)
    followed by an HTML rendering of this script's command line with
    FTP credentials masked."""
    results_xml.startElement( 'comment', {} )
    if os.path.exists( comment_file ):
        utils.log( 'Reading comments file "%s"...' % comment_file )
        f = open( comment_file, 'r' )
        try:
            results_xml.characters( f.read() )
        finally:
            f.close()
    else:
        utils.log( 'Warning: comment file "%s" is not found.' % comment_file )
    # Reconstruct the command line, wrapped near 80 columns with aligned
    # continuation backslashes, for inclusion in the report.
    lines = ['']
    for arg in sys.argv:
        # Make sure that the ftp details are hidden
        arg = re.sub( 'ftp://.*$', 'ftp://XXXXX', arg )
        # Escape quotes
        arg = re.sub( r'(\\|")', r'\\\1', arg )
        # Quote arguments if needed
        if arg.find( ' ' ) != -1:
            arg = '"%s"' % arg
        if len( lines[-1] ) + len( arg ) + 2 >= 80:
            # align backslashes
            lines[-1] += ' ' * ( 79 - len( lines[-1] ) )
            # indent lines after the first
            lines.append( '    ' )
        lines[-1] += ( arg + ' ' )
    results_xml.characters( '<hr>' )
    results_xml.characters( '<dl>' )
    results_xml.characters( '<dt>Command Line</dt>' )
    results_xml.characters( '<dd>' )
    results_xml.characters( '<pre>' )
    results_xml.characters( '\\\n'.join(lines) )
    results_xml.characters( '</pre>' )
    results_xml.characters( '</dd>' )
    results_xml.characters( '</dl>\n' )
    results_xml.endElement( 'comment' )
def compress_file( file_path, archive_path ):
utils.log( 'Compressing "%s"...' % file_path )
try:
z = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED )
z.write( file_path, os.path.basename( file_path ) )
z.close()
utils.log( 'Done writing "%s".'% archive_path )
except Exception, msg:
utils.log( 'Warning: Compressing falied (%s)' % msg )
utils.log( ' Trying to compress using a platform-specific tool...' )
try: import zip_cmd
except ImportError:
script_dir = os.path.dirname( os.path.abspath( sys.argv[0] ) )
utils.log( 'Could not find \'zip_cmd\' module in the script directory (%s).' % script_dir )
raise Exception( 'Compressing failed!' )
else:
if os.path.exists( archive_path ):
os.unlink( archive_path )
utils.log( 'Removing stale "%s".' % archive_path )
zip_cmd.main( file_path, archive_path )
utils.log( 'Done compressing "%s".' % archive_path )
def read_timestamp( file ):
    """Return the modification time of *file* as a UTC struct_time.

    Falls back to the current UTC time (with a warning) when the file
    does not exist.
    """
    if os.path.exists( file ):
        return time.gmtime( os.stat( file ).st_mtime )
    result = time.gmtime()
    utils.log( 'Warning: timestamp file "%s" does not exist'% file )
    utils.log( 'Using current UTC time (%s)' % result )
    return result
def collect_logs(
      results_dir
    , runner_id
    , tag
    , platform
    , comment_file
    , timestamp_file
    , user
    , source
    , run_type
    , dart_server = None
    , http_proxy = None
    , revision = ''
    , **unused
    ):
    """Merge all test_log.xml files under *results_dir* into a single
    '<runner_id>.xml' <test-run> document and zip it.

    When *dart_server* is set, the individual logs are also published to
    Dart first.  The run timestamp is *timestamp_file*'s mtime in UTC.
    """
    timestamp = time.strftime( '%Y-%m-%dT%H:%M:%SZ', read_timestamp( timestamp_file ) )
    if dart_server:
        publish_test_logs( [ results_dir ],
            runner_id, tag, platform, comment_file, timestamp, user, source, run_type,
            dart_server = dart_server,
            http_proxy = http_proxy )
    results_file = os.path.join( results_dir, '%s.xml' % runner_id )
    results_writer = open( results_file, 'w' )
    utils.log( 'Collecting test logs into "%s"...' % results_file )
    # Wrap the collected logs in a <test-run> root carrying run metadata.
    results_xml = xml.sax.saxutils.XMLGenerator( results_writer )
    results_xml.startDocument()
    results_xml.startElement(
          'test-run'
        , {
              'tag': tag
            , 'platform': platform
            , 'runner': runner_id
            , 'timestamp': timestamp
            , 'source': source
            , 'run-type': run_type
            , 'revision': revision
            }
        )
    copy_comments( results_xml, comment_file )
    collect_test_logs( [ results_dir ], results_writer )
    results_xml.endElement( "test-run" )
    results_xml.endDocument()
    results_writer.close()
    utils.log( 'Done writing "%s".' % results_file )
    compress_file(
          results_file
        , os.path.join( results_dir,'%s.zip' % runner_id )
        )
def upload_logs(
      results_dir
    , runner_id
    , tag
    , user
    , ftp_proxy
    , debug_level
    , send_bjam_log = False
    , timestamp_file = None
    , dart_server = None
    , ftp_url = None
    , **unused
    ):
    """Upload this runner's zipped results archive (and optionally the
    raw bjam log) to the results FTP site under *tag*."""
    logs_archive = os.path.join( results_dir, '%s.zip' % runner_id )
    upload_to_ftp( tag, logs_archive, ftp_proxy, debug_level, ftp_url )
    if send_bjam_log:
        bjam_log_path = os.path.join( results_dir, 'bjam.log' )
        if not timestamp_file:
            timestamp_file = bjam_log_path
        # Stamp the bjam archive name so successive runs do not overwrite it.
        timestamp = time.strftime( '%Y-%m-%d-%H-%M-%S', read_timestamp( timestamp_file ) )
        logs_archive = os.path.join( results_dir, '%s.%s.log.zip' % ( runner_id, timestamp ) )
        compress_file( bjam_log_path, logs_archive )
        upload_to_ftp( '%s/logs' % tag, logs_archive, ftp_proxy, debug_level, ftp_url )
def collect_and_upload_logs(
      results_dir
    , runner_id
    , tag
    , platform
    , comment_file
    , timestamp_file
    , user
    , source
    , run_type
    , revision = None
    , ftp_proxy = None
    , debug_level = 0
    , send_bjam_log = False
    , dart_server = None
    , http_proxy = None
    , ftp_url = None
    , **unused
    ):
    """Collect the test logs into one archive, then upload it.

    Thin wrapper chaining collect_logs() and upload_logs().
    """
    collect_logs(
        results_dir, runner_id, tag, platform, comment_file,
        timestamp_file, user, source, run_type,
        revision = revision,
        dart_server = dart_server,
        http_proxy = http_proxy,
        )
    upload_logs(
        results_dir, runner_id, tag, user, ftp_proxy, debug_level,
        send_bjam_log, timestamp_file,
        dart_server = dart_server,
        ftp_url = ftp_url,
        )
def accept_args( args ):
    """Parse command-line *args* into the keyword arguments expected by
    the collect/upload entry points, applying defaults and delegating
    option validation to utils.accept_args."""
    args_spec = [
          'locate-root='
        , 'runner='
        , 'tag='
        , 'platform='
        , 'comment='
        , 'timestamp='
        , 'source='
        , 'run-type='
        , 'user='
        , 'ftp-proxy='
        , 'proxy='
        , 'debug-level='
        , 'send-bjam-log'
        , 'help'
        , 'dart-server='
        , 'revision='
        , 'ftp='
        ]
    options = {
          '--tag' : 'trunk'
        , '--platform' : sys.platform
        , '--comment' : 'comment.html'
        , '--timestamp' : 'timestamp'
        , '--user' : None
        , '--source' : 'SVN'
        , '--run-type' : 'full'
        , '--ftp-proxy' : None
        , '--proxy' : None
        , '--debug-level' : 0
        , '--dart-server' : 'beta.boost.org:8081'
        , '--revision' : None
        , '--ftp' : None
        }
    utils.accept_args( args_spec, args, options, usage )
    return {
          'results_dir' : options[ '--locate-root' ]
        , 'runner_id' : options[ '--runner' ]
        , 'tag' : options[ '--tag' ]
        , 'platform' : options[ '--platform']
        , 'comment_file' : options[ '--comment' ]
        , 'timestamp_file' : options[ '--timestamp' ]
        , 'user' : options[ '--user' ]
        , 'source' : options[ '--source' ]
        , 'run_type' : options[ '--run-type' ]
        , 'ftp_proxy' : options[ '--ftp-proxy' ]
        , 'http_proxy' : options[ '--proxy' ]
        , 'debug_level' : int(options[ '--debug-level' ])
        # dict.has_key is Python 2 only; flag option presence -> bool.
        , 'send_bjam_log' : options.has_key( '--send-bjam-log' )
        , 'dart_server' : options[ '--dart-server' ]
        , 'revision' : options[ '--revision' ]
        , 'ftp' : options[ '--ftp' ]
        }
# Sub-command dispatch table used by main() and the usage text.
commands = {
      'collect-and-upload' : collect_and_upload_logs
    , 'collect-logs' : collect_logs
    , 'upload-logs' : upload_logs
    }
def usage():
print 'Usage: %s [command] [options]' % os.path.basename( sys.argv[0] )
print '''
Commands:
\t%s
Options:
\t--locate-root directory to to scan for "test_log.xml" files
\t--runner runner ID (e.g. "Metacomm")
\t--timestamp path to a file which modification time will be used
\t as a timestamp of the run ("timestamp" by default)
\t--comment an HTML comment file to be inserted in the reports
\t ("comment.html" by default)
\t--tag the tag for the results ("trunk" by default)
\t--user SourceForge user name for a shell account (optional)
\t--source where Boost sources came from ("SVN" or "tarball";
\t "SVN" by default)
\t--run-type "incremental" or "full" ("full" by default)
\t--send-bjam-log in addition to regular XML results, send in full bjam
\t log of the regression run
\t--proxy HTTP proxy server address and port (e.g.
\t 'http://www.someproxy.com:3128', optional)
\t--ftp-proxy FTP proxy server (e.g. 'ftpproxy', optional)
\t--debug-level debugging level; controls the amount of debugging
\t output printed; 0 by default (no debug output)
\t--dart-server The dart server to send results to.
\t--ftp The ftp URL to upload results to.
''' % '\n\t'.join( commands.keys() )
def main():
    """Dispatch to the requested sub-command; default to collect-and-upload."""
    argv = sys.argv[1:]
    if argv and argv[0] in commands:
        command, args = argv[0], argv[1:]
    else:
        command, args = 'collect-and-upload', argv
    commands[ command ]( **accept_args( args ) )
# When imported as a module, just pull in the shared utils; when run as a
# script, first locate the enclosing 'xsl_reports' directory and add it to
# sys.path so 'import utils' resolves, then dispatch via main().
if __name__ != '__main__': import utils
else:
    # in absence of relative import...
    xsl_path = os.path.abspath( os.path.dirname( sys.argv[ 0 ] ) )
    while os.path.basename( xsl_path ) != 'xsl_reports': xsl_path = os.path.dirname( xsl_path )
    sys.path.append( xsl_path )
    import utils
    main()
|
NixaSoftware/CVis
|
venv/bin/tools/regression/src/collect_and_upload_logs.py
|
Python
|
apache-2.0
| 18,518
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import os
from asdf.extension import AsdfExtension, BuiltinExtension
from asdf.util import filepath_to_url
# Make sure that all tag implementations are imported by the time we create
# the extension class so that _astropy_asdf_types is populated correctly. We
# could do this using __init__ files, except it causes pytest import errors in
# the case that asdf is not installed.
from .tags.coordinates.angle import * # noqa
from .tags.coordinates.frames import * # noqa
from .tags.coordinates.earthlocation import * # noqa
from .tags.coordinates.skycoord import * # noqa
from .tags.coordinates.representation import * # noqa
from .tags.coordinates.spectralcoord import * # noqa
from .tags.fits.fits import * # noqa
from .tags.table.table import * # noqa
from .tags.time.time import * # noqa
from .tags.time.timedelta import * # noqa
from .tags.transform.basic import * # noqa
from .tags.transform.compound import * # noqa
from .tags.transform.functional_models import * # noqa
from .tags.transform.physical_models import * # noqa
from .tags.transform.math import * # noqa
from .tags.transform.polynomial import * # noqa
from .tags.transform.powerlaws import * # noqa
from .tags.transform.projections import * # noqa
from .tags.transform.spline import * # noqa
from .tags.transform.tabular import * # noqa
from .tags.unit.quantity import * # noqa
from .tags.unit.unit import * # noqa
from .tags.unit.equivalency import * # noqa
from .types import _astropy_types, _astropy_asdf_types
__all__ = ['AstropyExtension', 'AstropyAsdfExtension']
# Base URI under which all Astropy ASDF schemas are registered.
ASTROPY_SCHEMA_URI_BASE = 'http://astropy.org/schemas/'
# On-disk location of the schema files packaged with astropy.
SCHEMA_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), 'data', 'schemas'))
# Map schema URIs onto file:// URLs inside SCHEMA_PATH.
ASTROPY_URL_MAPPING = [
    (ASTROPY_SCHEMA_URI_BASE,
     filepath_to_url(
         os.path.join(SCHEMA_PATH, 'astropy.org')) +
     '/{url_suffix}.yaml')]
# This extension is used to register custom types that have both tags and
# schemas defined by Astropy.
class AstropyExtension(AsdfExtension):
    """ASDF extension for custom types whose tags *and* schemas are
    both defined by Astropy."""
    @property
    def types(self):
        # Populated by the tag-module imports at the top of this file.
        return _astropy_types
    @property
    def tag_mapping(self):
        return [('tag:astropy.org:astropy',
                 ASTROPY_SCHEMA_URI_BASE + 'astropy{tag_suffix}')]
    @property
    def url_mapping(self):
        return ASTROPY_URL_MAPPING
# This extension is used to register custom tag types that have schemas defined
# by ASDF, but have tag implementations defined in astropy.
class AstropyAsdfExtension(BuiltinExtension):
    """ASDF extension for tag types whose schemas are defined by ASDF
    itself but whose tag implementations live in astropy."""
    @property
    def types(self):
        # Populated by the tag-module imports at the top of this file.
        return _astropy_asdf_types
|
astropy/astropy
|
astropy/io/misc/asdf/extension.py
|
Python
|
bsd-3-clause
| 2,664
|
import os
import numpy as np
import nibabel as nb
from scipy.fftpack import fft, ifft
def ideal_bandpass(data, sample_period, bandpass_freqs):
    """Ideal (frequency-domain) bandpass filter of a 1-D time series.

    Parameters
    ----------
    data : 1-D ndarray
        The time series to filter.
    sample_period : float
        Sampling period (TR) in seconds.
    bandpass_freqs : tuple of (float or None, float or None)
        (LowCutoff, HighCutoff) in Hz.  None disables that cutoff.

    Returns
    -------
    ndarray
        The filtered series, same length as *data*.
    """
    # Derived from YAN Chao-Gan 120504 based on REST.
    sample_freq = 1. / sample_period
    sample_length = data.shape[0]

    # Zero-pad to the next power of two for the FFT.
    data_p = np.zeros(int(2**np.ceil(np.log2(sample_length))))
    data_p[:sample_length] = data

    LowCutoff, HighCutoff = bandpass_freqs

    if LowCutoff is None:  # No lower cutoff (low-pass filter)
        low_cutoff_i = 0
    elif LowCutoff > sample_freq / 2.:
        # Cutoff beyond Nyquist (all-stop filter)
        low_cutoff_i = int(data_p.shape[0] / 2)
    else:
        low_cutoff_i = np.ceil(
            LowCutoff * data_p.shape[0] * sample_period).astype('int')

    # Bug fix: test ``is None`` first.  The original evaluated
    # ``HighCutoff > sample_freq / 2.`` before the None check, which
    # raises TypeError on Python 3 when HighCutoff is None.
    if HighCutoff is None or HighCutoff > sample_freq / 2.:
        # No upper cutoff, or cutoff beyond Nyquist (high-pass filter)
        high_cutoff_i = int(data_p.shape[0] / 2)
    else:
        high_cutoff_i = np.fix(
            HighCutoff * data_p.shape[0] * sample_period).astype('int')

    # Symmetric mask: keep the selected bins and their mirrored
    # negative-frequency counterparts.
    freq_mask = np.zeros_like(data_p, dtype='bool')
    freq_mask[low_cutoff_i:high_cutoff_i + 1] = True
    freq_mask[
        data_p.shape[0] -
        high_cutoff_i:data_p.shape[0] + 1 - low_cutoff_i
    ] = True

    f_data = fft(data_p)
    f_data[~freq_mask] = 0.  # zero out the stop band
    data_bp = np.real_if_close(ifft(f_data)[:sample_length])
    return data_bp
def bandpass_voxels(realigned_file, regressor_file, bandpass_freqs,
                    sample_period=None):
    """Performs ideal bandpass filtering on each voxel time-series.
    Parameters
    ----------
    realigned_file : string
        Path of a realigned nifti file.
    regressor_file : string or None
        Path of a nuisance-regressor file (nifti, or a text file with a
        three-line header) to filter alongside the data.
    bandpass_freqs : tuple
        Tuple containing the bandpass frequencies. (LowCutoff_HighPass HighCutoff_LowPass)
    sample_period : float, optional
        Length of sampling period in seconds. If not specified,
        this value is read from the nifti file provided.
    Returns
    -------
    bandpassed_file : string
        Path of filtered output (nifti file).
    regressor_bandpassed_file : string or None
        Path of the filtered regressor output, or None when no
        regressor file was given.
    """
    nii = nb.load(realigned_file)
    data = nii.get_data().astype('float64')
    # Voxels that are zero at every timepoint lie outside the brain.
    mask = (data != 0).sum(-1) != 0
    Y = data[mask].T
    # Demean each voxel's time-series before filtering.
    Yc = Y - np.tile(Y.mean(0), (Y.shape[0], 1))
    if not sample_period:
        hdr = nii.get_header()
        sample_period = float(hdr.get_zooms()[3])
        # Sketchy check to convert TRs in millisecond units
        if sample_period > 20.0:
            sample_period /= 1000.0
    Y_bp = np.zeros_like(Y)
    # Filter one voxel column at a time.
    for j in range(Y.shape[1]):
        Y_bp[:, j] = ideal_bandpass(Yc[:, j], sample_period, bandpass_freqs)
    data[mask] = Y_bp.T
    img = nb.Nifti1Image(data, header=nii.get_header(),
                         affine=nii.get_affine())
    bandpassed_file = os.path.join(os.getcwd(),
                                   'bandpassed_demeaned_filtered.nii.gz')
    img.to_filename(bandpassed_file)
    regressor_bandpassed_file = None
    if regressor_file is not None:
        if regressor_file.endswith('.nii.gz') or regressor_file.endswith('.nii'):
            # Nifti regressors: apply the same per-voxel filtering.
            nii = nb.load(regressor_file)
            data = nii.get_data().astype('float64')
            mask = (data != 0).sum(-1) != 0
            Y = data[mask].T
            Yc = Y - np.tile(Y.mean(0), (Y.shape[0], 1))
            Y_bp = np.zeros_like(Y)
            for j in range(Y.shape[1]):
                Y_bp[:, j] = ideal_bandpass(Yc[:, j], sample_period, bandpass_freqs)
            data[mask] = Y_bp.T
            img = nb.Nifti1Image(data, header=nii.get_header(),
                                 affine=nii.get_affine())
            regressor_bandpassed_file = os.path.join(os.getcwd(),
                                                     'regressor_bandpassed_demeaned_filtered.nii.gz')
            img.to_filename(regressor_bandpassed_file)
        else:
            # Text regressors: first three lines are treated as a header.
            # NOTE(review): np.loadtxt is given the whole file -- assumes
            # the header lines are '#'-prefixed so loadtxt skips them;
            # confirm against the files produced upstream.
            with open(regressor_file, 'r') as f:
                header = [f.readline() for x in range(0,3)]
            regressor = np.loadtxt(regressor_file)
            Yc = regressor - np.tile(regressor.mean(0), (regressor.shape[0], 1))
            Y_bp = np.zeros_like(Yc)
            for j in range(regressor.shape[1]):
                Y_bp[:, j] = ideal_bandpass(Yc[:, j], sample_period,
                                            bandpass_freqs)
            regressor_bandpassed_file = os.path.join(os.getcwd(),
                                                     'regressor_bandpassed_demeaned_filtered.1D')
            with open(regressor_bandpassed_file, "w") as ofd:
                # write out the header information
                for line in header:
                    ofd.write(line)
                nuisance_regressors = np.array(Y_bp)
                np.savetxt(ofd, nuisance_regressors, fmt='%.18f',
                           delimiter='\t')
    return bandpassed_file, regressor_bandpassed_file
def afni_1dBandpass(in_file, highpass, lowpass, tr=1):
    '''
    Perform AFNI 1dBandpass
    Parameters
    ----------
    in_file : string
        Path of an input 1D file
    highpass : float
        LowCutoff/HighPass
    lowpass : float
        HighCutoff/LowPass
    tr : float, optional
        Sampling interval passed to 1dBandpass -dt (1 by default)
    Returns
    -------
    out_file : string
        Path of an output 1D file (written into the current directory)
    '''
    import os
    stem, extension = os.path.splitext(os.path.basename(in_file))
    out_file = os.path.join(os.getcwd(), stem + '_bp' + extension)
    # Shell out to AFNI's 1dBandpass, redirecting its stdout to out_file.
    os.system('1dBandpass -dt %f %f %f %s > %s' % (
        tr, highpass, lowpass, in_file, out_file))
    return out_file
|
FCP-INDI/C-PAC
|
CPAC/nuisance/bandpass.py
|
Python
|
bsd-3-clause
| 5,588
|
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2015 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <info@star2billing.com>
#
from django import forms
from django.forms import ModelForm, Textarea
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
from dialer_campaign.models import Campaign
from dialer_contact.forms import SearchForm
from survey.models import Survey_template, Section_template, \
Branching_template, Survey
from survey.constants import SECTION_TYPE
from audiofield.models import AudioFile
from mod_utils.forms import SaveUserModelForm, common_submit_buttons
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, Field, Fieldset, HTML
from crispy_forms.bootstrap import AppendedText
def get_audiofile_list(user):
    """Build the audio-file choice list for *user*, newest first,
    prefixed with an empty "none" option."""
    choices = [('', '---')]
    user_files = AudioFile.objects.values_list('id', 'name').filter(user=user).order_by('-id')
    choices.extend((file_id, file_name) for file_id, file_name in user_files)
    return choices
def get_section_question_list(survey_id, section_id):
    """Build the "goto section" choice list for a branching rule:
    every other section of the survey that is not already branched to,
    prefixed with a hangup option."""
    already_branched = Branching_template\
        .objects.values_list('section_id', flat=True)\
        .filter(section_id=section_id)
    choices = [('', _('hangup'))]
    candidate_sections = Section_template.objects.filter(survey_id=survey_id)\
        .exclude(pk=section_id).exclude(id__in=already_branched)
    for section in candidate_sections:
        # Fall back to the raw script when no question text was entered.
        label = section.question if section.question else section.script
        choices.append((section.id, "Goto: %s" % (label)))
    return choices
def get_multi_question_choice_list(section_id):
    """
    Get survey question list for the user with a default none option
    """
    # Keys that already have a branching rule attached to this section.
    keys_list = Branching_template.objects.values_list('keys', flat=True)\
        .filter(section_id=int(section_id)).exclude(keys='')
    list_sq = []
    obj_section = Section_template.objects.get(id=int(section_id))
    if keys_list:
        # Force evaluation of the queryset into a plain list.
        # NOTE(review): the membership test below compares an int against
        # the stored key values -- confirm the 'keys' column holds ints,
        # otherwise 'i not in keys_list' can never exclude anything.
        keys_list = [integral for integral in keys_list]
    for i in range(0, 10):
        # Offer only keypad digits that have a label and no existing rule.
        if (obj_section.__dict__['key_' + str(i)]
           and i not in keys_list):
            list_sq.append((i, '%s' % (obj_section.__dict__['key_' + str(i)])))
    list_sq.append(('any', _('Any other key')))
    list_sq.append(('invalid', _('Invalid')))
    return list_sq
def get_rating_choice_list(section_id):
    """
    Get survey rating laps for logged in user
    with default any other key option
    """
    # Keys already consumed by existing branching rules for this section.
    keys_list = Branching_template.objects.values_list('keys', flat=True)\
        .filter(section_id=int(section_id)).exclude(keys='')
    obj_section = Section_template.objects.get(id=int(section_id))
    if keys_list:
        # Force evaluation of the queryset into a plain list.
        keys_list = [integral for integral in keys_list]
    list_sq = []
    if obj_section.rating_laps:
        # One choice per rating value 1..rating_laps not already branched.
        for i in range(1, int(obj_section.rating_laps) + 1):
            if i not in keys_list:
                list_sq.append((i, '%s' % (str(i))))
    list_sq.append(('any', _('Any other key')))
    list_sq.append(('invalid', _('Invalid')))
    return list_sq
class SurveyForm(SaveUserModelForm):
    """Survey ModelForm: edits a survey's name, TTS language and
    description in a single three-column crispy-forms row."""
    class Meta:
        model = Survey_template
        exclude = ('user',)
    def __init__(self, *args, **kwargs):
        super(SurveyForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        css_class = 'col-md-4'
        self.helper.layout = Layout(
            Div(
                Div('name', css_class=css_class),
                Div('tts_language', css_class=css_class),
                Div('description', css_class=css_class),
                css_class='row'
            ),
        )
        # Single-line input instead of the model field's default textarea.
        self.fields['description'].widget = forms.TextInput()
html_code_of_completed_field = """
<label for="{{ form.completed.auto_id }}">%s</label><br/>
<div class="make-switch switch-small">
{{ form.completed }}
</div>
""" % (ugettext('Completed'))
append_html_code_to_audio_field = """<a href="#" id="helpover" rel="popover" data-placement="top" data-content="If an audio file is not selected, the script will be played using Text-To-Speech" data-original-title="information"><i class="fa-info"></i></a>"""
class PlayMessageSectionForm(ModelForm):
    """PlayMessageForm ModelForm: editor for 'play message' sections.

    The section script is rendered via TTS unless an audio file is set.
    """
    class Meta:
        model = Section_template
        # fields = ['type', 'survey', 'question', 'audiofile', 'completed'] # 'retries',
    def __init__(self, user, *args, **kwargs):
        super(PlayMessageSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Attach the TTS-fallback popover when an audio file is selected;
        # layout[2][1] is the 'audiofile' Div above (position-sensitive).
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        self.fields['question'].label = _('Section title')
        # Re-submit when the section type changes so the matching form
        # class gets rendered.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
class MultipleChoiceSectionForm(ModelForm):
    """MultipleChoiceSectionForm ModelForm: editor for multi-choice
    sections, with one label field per keypad digit 0-9."""
    class Meta:
        model = Section_template
        # fields = ['type', 'survey', 'question', 'retries', 'timeout', 'audiofile', 'invalid_audiofile',
        #           'key_0', 'key_1', 'key_2', 'key_3', 'key_4', 'key_5', 'key_6', 'key_7', 'key_8', 'key_9',
        #           'completed']
    def __init__(self, user, *args, **kwargs):
        super(MultipleChoiceSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(
                Div('retries', css_class='col-md-6 col-xs-10'),
                Div('timeout', css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
            Div(
                Div(
                    HTML("""%s""" % _('configure valid multi-choice options. The value of each field will be shown in the survey report')),
                    css_class='col-md-12 col-xs-12'
                ),
                css_class='row'
            ),
            Div(
                Div('key_0', css_class='col-xs-2'),
                Div('key_1', css_class='col-xs-2'),
                Div('key_2', css_class='col-xs-2'),
                Div('key_3', css_class='col-xs-2'),
                Div('key_4', css_class='col-xs-2'),
                Div('key_5', css_class='col-xs-2'),
                Div('key_6', css_class='col-xs-2'),
                Div('key_7', css_class='col-xs-2'),
                Div('key_8', css_class='col-xs-2'),
                Div('key_9', css_class='col-xs-2'),
                css_class='row'
            ),
            Div(Div('invalid_audiofile', css_class='col-md-6 col-xs-8'), css_class='row'),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Attach the TTS-fallback popover when an audio file is selected;
        # layout[2][1] is the 'audiofile' Div above (position-sensitive).
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
            self.fields['invalid_audiofile'].choices = self.fields['audiofile'].choices
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        # Re-submit when the section type changes.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
class RatingSectionForm(ModelForm):
    """RatingSectionForm ModelForm: editor for rating sections where the
    caller keys in a value between 1 and ``rating_laps``."""
    class Meta:
        model = Section_template
        # fields = ['type', 'survey', 'question', 'rating_laps',
        #           'retries', 'timeout', 'audiofile', 'invalid_audiofile', 'completed']
    def __init__(self, user, *args, **kwargs):
        super(RatingSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(
                Div('retries', css_class='col-md-4 col-xs-4'),
                Div('timeout', css_class='col-md-4 col-xs-4'),
                Div('rating_laps', css_class='col-md-4 col-xs-4'),
                css_class='row'
            ),
            Div(Div('invalid_audiofile', css_class='col-md-6 col-xs-8'), css_class='row'),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Attach the TTS-fallback popover when an audio file is selected;
        # layout[2][1] is the 'audiofile' Div above (position-sensitive).
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
            self.fields['invalid_audiofile'].choices = self.fields['audiofile'].choices
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        # Re-submit when the section type changes.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
        self.fields['rating_laps'].widget.attrs['maxlength'] = 3
class CaptureDigitsSectionForm(ModelForm):
    """CaptureDigitsSectionForm ModelForm: editor for sections capturing
    a fixed number of digits, optionally validated against a range."""
    class Meta:
        model = Section_template
        # fields = ['type', 'survey', 'question', 'validate_number', 'number_digits', 'min_number', 'max_number',
        #           'retries', 'timeout', 'audiofile', 'invalid_audiofile', 'completed']
    def __init__(self, user, *args, **kwargs):
        super(CaptureDigitsSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(
                Div('retries', css_class='col-md-6 col-xs-10'),
                Div('timeout', css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
            Div(
                Div(HTML("""
                <div class="btn-group" data-toggle="buttons">
                    <label for="{{ form.validate_number.auto_id }}">{{ form.validate_number.label }}</label><br/>
                    <div class="make-switch switch-small">
                        {{ form.validate_number }}
                    </div>
                </div>
            """), css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(
                Div('number_digits', css_class='col-md-6 col-xs-10'),
                Div('min_number', css_class='col-md-3 col-xs-10'),
                Div('max_number', css_class='col-md-3 col-xs-10'),
                css_class='row'
            ),
            Div(Div('invalid_audiofile', css_class='col-md-6 col-xs-8'), css_class='row'),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Attach the TTS-fallback popover when an audio file is selected;
        # layout[2][1] is the 'audiofile' Div above (position-sensitive).
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        # Re-submit when the section type changes.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
class RecordMessageSectionForm(ModelForm):
    """RecordMessageSectionForm ModelForm: editor for sections that
    record a message from the caller."""
    class Meta:
        model = Section_template
        # fields = ['type', 'survey', 'question', 'audiofile', 'completed']
    def __init__(self, user, *args, **kwargs):
        super(RecordMessageSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Attach the TTS-fallback popover when an audio file is selected;
        # layout[2][1] is the 'audiofile' Div above (position-sensitive).
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        self.fields['question'].label = _('Section title')
        # Re-submit when the section type changes.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
class ConferenceSectionForm(ModelForm):
    """ConferenceSectionForm ModelForm: editor for sections that drop
    the caller into a conference room."""
    class Meta:
        model = Section_template
        # fields = ['type', 'survey', 'question', 'audiofile', 'conference', 'completed']
    def __init__(self, user, *args, **kwargs):
        super(ConferenceSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                Div('conference', css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Attach the TTS-fallback popover when an audio file is selected;
        # layout[2][1] is the 'audiofile' Div above (position-sensitive).
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        # Re-submit when the section type changes.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
        self.fields['question'].label = _('Section title')
class CallTransferSectionForm(ModelForm):
    """Form for editing a call-transfer section of a survey."""
    class Meta:
        model = Section_template

    def __init__(self, user, *args, **kwargs):
        super(CallTransferSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        # Main input row: title, audio prompt, transfer target and confirm options.
        input_row = Div(
            Div('question', css_class='col-md-8 col-xs-12'),
            Div('audiofile', css_class='col-md-4 col-xs-12'),
            Div('phonenumber', css_class='col-md-6 col-xs-10'),
            Div('confirm_script', css_class='col-md-12 col-xs-10'),
            Div('confirm_key', css_class='col-md-6 col-xs-10'),
            css_class='row'
        )
        completed_row = Div(
            Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
            css_class='row'
        )
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            input_row,
            completed_row,
        )
        if self.instance.audiofile:
            # Replace the plain field with one that embeds an audio player.
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
        self.fields['survey'].widget = forms.HiddenInput()
        self.fields['script'].widget = forms.HiddenInput()
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
        self.fields['question'].label = _('Section title')
class SMSSectionForm(ModelForm):
    """Form for editing an SMS section of a survey."""
    class Meta:
        model = Section_template
        widgets = {
            'sms_text': Textarea(attrs={'cols': 23, 'rows': 2}),
        }

    def __init__(self, user, *args, **kwargs):
        super(SMSSectionForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Field('survey', 'script'),
            Div(Div('type', css_class='col-md-10 col-xs-12'), css_class='row'),
            Div(
                Div('question', css_class='col-md-8 col-xs-12'),
                Div('audiofile', css_class='col-md-4 col-xs-12'),
                css_class='row'
            ),
            Div(Div('retries', css_class='col-md-4 col-xs-4'), css_class='row'),
            Div(Div('sms_text', css_class='col-md-10 col-xs-10'), css_class='row'),
            Div(
                Div(HTML(html_code_of_completed_field), css_class='col-md-6 col-xs-10'),
                css_class='row'
            ),
        )
        # Show an inline player when an audio file is already attached.
        if self.instance.audiofile:
            self.helper.layout[2][1] = AppendedText('audiofile', append_html_code_to_audio_field)
        # Identifier fields are carried along but never edited directly.
        for hidden_name in ('survey', 'script'):
            self.fields[hidden_name].widget = forms.HiddenInput()
        # Re-submit on type change so the matching section form is rendered.
        self.fields['type'].widget.attrs['onchange'] = 'this.form.submit();'
        self.fields['question'].label = _('Section title')
        if user:
            self.fields['audiofile'].choices = get_audiofile_list(user)
class ScriptForm(ModelForm):
    """Form exposing only the free-text ``script`` field of a section."""
    class Meta:
        model = Section_template
        fields = ['script']

    def __init__(self, *args, **kwargs):
        super(ScriptForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        # Single full-width row for the script textarea.
        self.helper.layout = Layout(
            Div(Div('script', css_class='col-md-12'), css_class='row')
        )
class BranchingForm(ModelForm):
    """BranchingForm ModelForm

    Builds the "pressed key -> goto section" branching editor for one
    section of a survey. The available ``keys`` choices depend on the
    section type looked up from ``section_id``.
    """
    class Meta:
        model = Branching_template
        # fields = ['keys', 'section', 'goto']
    def __init__(self, survey_id, section_id, *args, **kwargs):
        super(BranchingForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        css_class = 'col-xs-6'
        self.helper.layout = Layout(
            Field('section'),
        )
        # instance = getattr(self, 'instance', None)
        # The owning section is fixed for this form; carry it hidden.
        self.fields['section'].widget = forms.HiddenInput()
        # multiple choice section
        obj_section = Section_template.objects.get(id=section_id)
        if obj_section.type == SECTION_TYPE.MULTI_CHOICE:
            self.fields['keys'] = forms.ChoiceField(
                choices=get_multi_question_choice_list(section_id),
                required=False)
        # rating section
        if obj_section.type == SECTION_TYPE.RATING_SECTION:
            self.fields['keys'] = forms.ChoiceField(
                choices=get_rating_choice_list(section_id),
                required=False)
        # Sections with no caller key input branch unconditionally:
        # hide "keys" (fixed to 0) and only expose the destination.
        if (obj_section.type == SECTION_TYPE.PLAY_MESSAGE
                or obj_section.type == SECTION_TYPE.RECORD_MSG
                or obj_section.type == SECTION_TYPE.CALL_TRANSFER
                or obj_section.type == SECTION_TYPE.CONFERENCE
                or obj_section.type == SECTION_TYPE.SMS):
            self.fields['keys'].initial = 0
            self.fields['keys'].widget = forms.HiddenInput()
            self.helper.layout.append(
                Div(
                    Field('keys'),
                    Div('goto', css_class=css_class),
                    css_class='row'
                )
            )
        else:
            # Key-driven sections (except hangup/DNC) get the
            # "any other key / invalid" shortcut buttons plus the editor.
            if obj_section.type != SECTION_TYPE.HANGUP_SECTION and obj_section.type != SECTION_TYPE.DNC:
                self.helper.layout.append(
                    Div(
                        Div(HTML(
                            """
                            <div class="btn-group" data-toggle="buttons">
                                <label class="btn btn-default">
                                    <input type="radio" name="keys_button" id="button-anything"> %s
                                </label>
                                <label class="btn btn-default">
                                    <input type="radio" name="keys_button" id="button-invalid"> %s
                                </label>
                            </div>
                            """ % (_('Any Other Key'), _('Invalid'))
                        ), css_class=css_class),
                        css_class='row'
                    )
                )
                self.helper.layout.append(
                    Div(
                        Div(Fieldset('', 'keys', 'goto'), css_class=css_class),
                        css_class='row'
                    )
                )
            else:
                # Hangup/DNC sections end the call: nothing to configure.
                self.helper.layout.append(
                    Div(
                        Div(HTML('%s' % _('no branching, this will terminate the call')), css_class=css_class),
                        css_class='row'
                    )
                )
        self.fields['goto'].choices = get_section_question_list(survey_id, section_id)
class SurveyReportForm(forms.Form):
    """Campaign selector used by the survey report page."""
    campaign = forms.ChoiceField(label=_('campaign'), required=False)

    def __init__(self, user, *args, **kwargs):
        super(SurveyReportForm, self).__init__(*args, **kwargs)
        self.fields.keyOrder = ['campaign']
        # Offer only started survey campaigns; non-superusers see their own.
        if user:
            base_qs = Campaign.objects.values_list('id', 'name')
            if user.is_superuser:
                campaigns = base_qs.filter(
                    content_type__model='survey', has_been_started=True).order_by('-id')
            else:
                campaigns = base_qs.filter(
                    user=user, content_type__model='survey', has_been_started=True).order_by('-id')
            choices = [(0, _('Select Campaign'))]
            choices.extend((camp_id, camp_name) for camp_id, camp_name in campaigns)
            self.fields['campaign'].choices = choices
class SurveyDetailReportForm(SearchForm):
    """Survey + date-range search form for the detailed survey report."""
    survey_id = forms.ChoiceField(label=_('Survey'), required=False)

    def __init__(self, user, *args, **kwargs):
        super(SurveyDetailReportForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'well'
        col = 'col-md-4'
        self.helper.layout = Layout(
            Div(
                Div('survey_id', css_class=col),
                Div('from_date', css_class=col),
                Div('to_date', css_class=col),
                css_class='row'
            ),
        )
        common_submit_buttons(self.helper.layout, 'search')
        # Populate the survey dropdown; non-superusers see their own surveys.
        if user:
            if user.is_superuser:
                survey_objs = Survey.objects.values_list(
                    'id', 'name', 'campaign__name').all().order_by('-id')
            else:
                survey_objs = Survey.objects.values_list(
                    'id', 'name', 'campaign__name').filter(user=user).order_by('-id')
            choices = [(0, _('Select Survey'))]
            for survey_pk, survey_name, campaign_name in survey_objs:
                # Show "survey : campaign" when the survey has a campaign.
                label = survey_name + " : " + campaign_name if campaign_name else survey_name
                choices.append((survey_pk, label))
            self.fields['survey_id'].choices = choices
class SurveyFileImport(forms.Form):
    """
    General Form : file upload

    Accepts a survey name and a text-file export of a survey.
    """
    name = forms.CharField(label=_('Survey Name'), required=True)
    survey_file = forms.FileField(label=_("Upload file"), required=True, help_text=_("browse text file"),
                                  error_messages={'required': 'please upload File'})

    def __init__(self, *args, **kwargs):
        super(SurveyFileImport, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        css_class = 'col-md-12'
        self.helper.layout = Layout(
            Div(
                Div('name', css_class=css_class),
                Div('survey_file', css_class=css_class),
                css_class='row'
            )
        )

    def clean_survey_file(self):
        """Form Validation : File extension Check

        Named ``clean_survey_file`` so Django actually invokes it for the
        ``survey_file`` field — the previous ``clean_csv_file`` name never
        matched a field, so the check was silently skipped.
        """
        filename = self.cleaned_data["survey_file"]
        file_exts = ["txt"]
        # Use the LAST dot-separated part so "my.survey.txt" validates, and
        # reject names with no extension (the old split(".")[1] raised
        # IndexError on those).
        parts = str(filename).rsplit(".", 1)
        if len(parts) == 2 and parts[1].lower() in file_exts:
            return filename
        # Translate the template first, then interpolate, so the msgid
        # matches the catalog entry.
        raise forms.ValidationError(_(u'document types accepted: %s') % ' '.join(file_exts))
class SealSurveyForm(forms.Form):
    """
    General Form : SealSurveyForm

    Asks for the survey name as confirmation before sealing.
    """
    name = forms.CharField(label=_('Survey Name'), required=True)

    def __init__(self, *args, **kwargs):
        super(SealSurveyForm, self).__init__(*args, **kwargs)
        self.fields.keyOrder = ['name']
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.layout = Layout(
            Div(Div('name', css_class='col-md-12'), css_class='row')
        )
|
saydulk/newfies-dialer
|
newfies/survey/forms.py
|
Python
|
mpl-2.0
| 27,466
|
#----------------------------------------------------------------------------
# Copyright (c) 2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
#----------------------------------------------------------------------------
import os
import shutil
import tempfile
import threading
import unittest
from traits.util.event_tracer import (
SentinelRecord, RecordContainer, MultiThreadRecordContainer)
class TestRecordContainers(unittest.TestCase):
    """Unit tests for RecordContainer and MultiThreadRecordContainer."""

    def setUp(self):
        # Fresh scratch directory per test; removed in tearDown.
        self.directory = tempfile.mkdtemp()
        self.filename = os.path.join(self.directory, 'myfile')

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_record_container(self):
        container = RecordContainer()
        # add records
        for i in range(7):
            container.record(SentinelRecord())
        self.assertEqual(len(container._records), 7)
        # save records
        container.save_to_file(self.filename)
        with open(self.filename, 'r') as handle:
            lines = handle.readlines()
        # A SentinelRecord serialises to an empty line.
        self.assertEqual(lines, ['\n'] * 7)

    def test_multi_thread_record_container(self):
        container = MultiThreadRecordContainer()

        def record(container):
            thread = threading.current_thread().name
            collector = container.get_change_event_collector(thread)
            collector.record(SentinelRecord())

        thread_1 = threading.Thread(target=record, args=(container,))
        thread_2 = threading.Thread(target=record, args=(container,))
        thread_1.start()
        thread_2.start()
        record(container)
        thread_2.join()
        thread_1.join()
        # One collector per thread: main + the two workers.
        self.assertEqual(len(container._record_containers), 3)
        # .values() (not the Python-2-only .itervalues()) keeps the test
        # working under both Python 2 and Python 3.
        for collector in container._record_containers.values():
            self.assertTrue(
                isinstance(collector._records[0], SentinelRecord))
            self.assertEqual(len(collector._records), 1)
        # save records
        container.save_to_directory(self.directory)
        for name in container._record_containers:
            filename = os.path.join(self.directory, '{0}.trace'.format(name))
            with open(filename, 'r') as handle:
                lines = handle.readlines()
            self.assertEqual(lines, ['\n'])
# Allow running this test module directly with `python test_record_containers.py`.
if __name__ == '__main__':
    unittest.main()
|
burnpanck/traits
|
traits/util/tests/test_record_containers.py
|
Python
|
bsd-3-clause
| 2,637
|
import sys, re
import xbmc, xbmcgui
import contextmenu
# Localisation helper and add-on path, supplied by the entry-point script.
__language__ = sys.modules[ "__main__" ].__language__
__cwd__ = sys.modules[ "__main__" ].__cwd__
# Action ids that dismiss the dialog, and the "show info" action id.
CANCEL_DIALOG = ( 9, 10, 92, 216, 247, 257, 275, 61467, 61448, )
ACTION_SHOW_INFO = ( 11, )
class GUI( xbmcgui.WindowXMLDialog ):
def __init__( self, *args, **kwargs ):
xbmcgui.WindowXMLDialog.__init__( self )
self.listitem = kwargs[ "listitem" ]
self.content = kwargs[ "content" ]
self.selected_source = None
def onInit( self ):
self._hide_controls()
self._show_info()
def _hide_controls( self ):
self.getControl( 110 ).setVisible( False )
self.getControl( 120 ).setVisible( False )
self.getControl( 130 ).setVisible( False )
self.getControl( 140 ).setVisible( False )
self.getControl( 150 ).setVisible( False )
self.getControl( 160 ).setVisible( False )
self.getControl( 170 ).setVisible( False )
self.getControl( 180 ).setVisible( False )
self.getControl( 191 ).setVisible( False )
self.getControl( 192 ).setVisible( False )
self.getControl( 193 ).setVisible( False )
def _show_info( self ):
self.getControl( 100 ).addItem( self.listitem )
if self.content == 'movies':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(208) )
self.getControl( 193 ).setLabel( xbmc.getLocalizedString(20410) )
self.getControl( 110 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
if self.listitem.getProperty('trailer'):
self.getControl( 193 ).setVisible( True )
elif self.content == 'tvshows':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(1024) )
self.getControl( 120 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
elif self.content == 'seasons':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(1024) )
self.getControl( 130 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
elif self.content == 'episodes':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(208) )
self.getControl( 140 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
elif self.content == 'musicvideos':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(208) )
self.getControl( 150 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
elif self.content == 'artists':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(1024) )
self.getControl( 160 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
elif self.content == 'albums':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(208) )
self.getControl( 193 ).setLabel( xbmc.getLocalizedString(1024) )
self.getControl( 170 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
self.getControl( 193 ).setVisible( True )
elif self.content == 'songs':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(208) )
self.getControl( 180 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
elif self.content == 'actors':
self.getControl( 192 ).setLabel( xbmc.getLocalizedString(208) )
self.getControl( 193 ).setLabel( xbmc.getLocalizedString(20410) )
self.getControl( 110 ).setVisible( True )
self.getControl( 191 ).setVisible( True )
self.getControl( 192 ).setVisible( True )
if self.listitem.getProperty('trailer'):
self.getControl( 193 ).setVisible( True )
self.setFocus( self.getControl( 191 ) )
def _close_dialog( self, action=None ):
self.action = action
self.close()
def onClick( self, controlId ):
if controlId == 191:
self._close_dialog()
elif controlId == 192:
if self.content == 'movies':
self._close_dialog( 'play_movie' )
elif self.content == 'tvshows':
self._close_dialog( 'browse_tvshow' )
elif self.content == 'seasons':
self._close_dialog( 'browse_season' )
elif self.content == 'episodes':
self._close_dialog( 'play_episode' )
elif self.content == 'musicvideos':
self._close_dialog( 'play_musicvideo' )
elif self.content == 'artists':
self._close_dialog( 'browse_artist' )
elif self.content == 'albums':
self._close_dialog( 'play_album' )
elif self.content == 'songs':
self._close_dialog( 'play_song' )
if self.content == 'actors':
self._close_dialog( 'play_movie_actors' )
elif controlId == 193:
if self.content == 'movies':
self._close_dialog( 'play_trailer' )
if self.content == 'albums':
self._close_dialog( 'browse_album' )
if self.content == 'actors':
self._close_dialog( 'play_trailer_actors' )
elif controlId == 194:
sources = self.listitem.getProperty('source_names').split(',')
print '-> sources: %s' % sources
context_menu = contextmenu.GUI( "script-globalsearch-contextmenu.xml" , __cwd__, "Default", labels=sources )
context_menu.doModal()
if context_menu.selection is not None:
self.selected_source = context_menu.selection
self.onClick( 192 )
del context_menu
def onFocus( self, controlId ):
pass
def onAction( self, action ):
if ( action.getId() in CANCEL_DIALOG ) or ( action.getId() in ACTION_SHOW_INFO ):
self._close_dialog()
|
arKtelix/plugin.program.1.search
|
resources/lib/infodialog.py
|
Python
|
gpl-2.0
| 6,445
|
class Reader:
    """Console-input helper for the sliding-puzzle game."""

    @staticmethod
    def get_number(size):
        """Prompt until the user types an integer in [0, size**2 - 1].

        0 is the quit sentinel; any other accepted value names a tile.
        """
        prompt = "Type the number you want to move (Type 0 to quit): "
        while True:
            text = input(prompt)
            if text.isdigit() and 0 <= int(text) <= size**2-1:
                return int(text)
|
imscs21/myuniv
|
1학기/programming/basic/파이썬/파이썬 과제/11/slidingPuzzle/SlidingView.py
|
Python
|
apache-2.0
| 306
|
# -*- coding:utf-8 -*-
"""
base.py
~~~~~~~
"""
import re
import datetime
import json
import requests
from torndb import Connection
from tornado.gen import coroutine
from tornado.concurrent import run_on_executor
from tornado.escape import to_unicode, url_escape
from concurrent.futures import ThreadPoolExecutor
from bs4 import BeautifulSoup
# import time
# import random
# from zh_auth import search_xsrf
# import sys
# reload(sys)
# sys.setdefaultencoding('utf8')
# Connection settings consumed by ZHBase.__init__ (keys: host, database,
# user, password). Left empty here, so the in-code defaults
# (127.0.0.1 / test / root / "") apply.
mysql_config = {
}
class ZHBase(object):
    """Shared state for the zhihu spider classes: database connection,
    worker pool and default request headers."""

    def __init__(self):
        # Pull connection settings from mysql_config, with local defaults.
        db_host = mysql_config.get('host', '127.0.0.1')
        db_name = mysql_config.get('database', 'test')
        db_user = mysql_config.get('user', 'root')
        db_password = mysql_config.get('password', '')
        self.db = Connection(host=db_host, database=db_name,
                             user=db_user, password=db_password)
        self.executor = ThreadPoolExecutor(max_workers=4)
        # Headers that make the requests look like a normal browser session.
        self.headers = {
            'Host': 'www.zhihu.com',
            'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:42.0) Gecko/20100101 Firefox/42.0',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'X-Requested-With': 'XMLHttpRequest',
            'Referer': 'http://www.zhihu.com/topics',
        }
class GetTopics(ZHBase):
def __init__(self, login):
ZHBase.__init__(self)
self.p_father = re.compile(r'<li data-id="(\d+)"><a href="(.*?)">(.*?)</a></li>')
self.father_topic_uri = 'http://www.zhihu.com/topics'
self.requests = login.requests
def get_father_topics(self):
try:
result = self.requests.get(url=self.father_topic_uri)
except RuntimeError as e:
print 'curl father topic failed!' # Write logging
print e
if result.status_code != 200:
print 'requests status code is {}'.format(result.status_code)
return
return self.p_father.findall(result.content)
def save_father_topics(self):
now = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
for topic in self.get_father_topics():
sql = u'INSERT INTO topics (data_id, topic_name, user_counts, is_deleted, last_update) ' \
u'VALUES (%d, "%s", %d, %d, "%s")' % (int(topic[0]), to_unicode(topic[2]), 0, 0, now)
print sql
try:
self.db.execute(sql)
print 'save {} success'.format(str(topic))
except RuntimeError as e:
print 'save failed : {}'.format(str(e))
class GetSubclassTopics(ZHBase):
    """Fetch second-level topics for each stored father topic and save them."""
    def __init__(self, login):
        ZHBase.__init__(self)
        self.uri = 'http://www.zhihu.com/node/TopicsPlazzaListV2'
        self.requests = login.requests
        self.get_xsrf = login.get_xsrf()
    def get_father_info(self):
        # Father-topic ids previously stored by GetTopics.
        sql = 'SELECT data_id FROM topics'
        try:
            return self.db.query(sql)
        except IOError as e:
            return []
    def get_subclass_topics(self):
        topics = self.get_father_info()
        if len(topics) == 0:
            return
        r = self.requests.get(url='http://www.zhihu.com/topics', headers=self.headers)
        if r.status_code != 200:
            print 'request topics failed!'
            return
        # with open('topics_home.html', 'wb') as fd:
        #     fd.write(r.content)
        #     fd.close()
        # The topics page embeds the session's user hash in inline JS.
        p = re.compile(r'"user_hash":"(.*?)"', re.M)
        user_hash = p.findall(r.content)
        if len(user_hash) == 0:
            print 'get user hash failed!'
            return
        hash_id = user_hash[0]
        xsrf = self.get_xsrf
        print hash_id
        # NOTE(review): only the first two father topics are crawled
        # ([:2]) — presumably a debugging limit; confirm before relying
        # on full coverage.
        for topic in topics[:2]:
            print 'now get topic number {}`s subclass'.format(str(topic))
            offset = 0
            while True:
                # NOTE(review): this handle is only closed at the bottom of
                # the loop body — the early ``return`` path above leaks it.
                fd = open('subtopics.txt', 'a')
                # uri = self.uri + '?_xsrf={}&method=next&'.format(xsrf) + params
                uri = self.uri + '?' + 'method=next¶ms=%7B%22topic_id%22%3A' + str(topic.get('data_id')) + \
                    '%2C%22offset%22%3A' + str(offset) + '%2C%22hash_id%22%3A%22' + str(hash_id) + '%22%7D&_xsrf=' + str(xsrf)
                r = self.requests.post(url=uri, data={}, headers=self.headers)
                if '"r":0' not in r.content:
                    print 'curl subclass topics failed!'
                    return
                contents = json.loads(r.content.replace('\\n', '').replace('\\"', '').replace('\\/', '/')).get('msg')
                if len(contents) == 0:
                    break
                for div in contents:
                    now = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
                    soup = BeautifulSoup(div, "lxml")
                    try:
                        item = soup.find(attrs={'class': 'blk'})
                        name = item.a.strong.get_text()
                        description = item.p.get_text()
                        # href looks like "/topic/<id>"; strip the prefix.
                        topic_id = item.a.get('href')[7:]
                        sql = 'INSERT INTO subclass_topics (topic_id, sub_topic_name, description, user_counts, ' \
                            'is_self, is_deleted, last_update, father_topic_id_id) VALUES ' \
                            '("%s", "%s", "%s", 0, 0, 0, "%s", 1)' % (topic_id, name, description, now)
                        print sql
                        try:
                            self.db.execute(sql)
                        except:
                            print 'failed!'
                            continue
                    except RuntimeError as e:
                        # print soup.find(attrs={'class': 'blk'})
                        print 'failed in {}'.format(str(div))
                        continue
                print '\n\n'
                # with open('sub_topics' + str(offset) + '.html', 'wb') as fd:
                #     fd.write(str(contents))
                fd.close()
                offset += 20
class GetSubclassFans(ZHBase):
    """Crawl the follower list of one topic page and store the users."""
    def __init__(self, uri, login):
        ZHBase.__init__(self)
        self.topic_uri = uri
        self.requests = login.requests
        self.get_xsrf = login.get_xsrf()
    def get_topic_fans(self):
        # get start mi-id.
        url = self.topic_uri + '/followers'
        r = self.requests.get(url, headers=self.headers)
        if r.status_code != 200:
            print 'get users failed!'
            return
        soup = BeautifulSoup(r.content, 'lxml')
        first_user = soup.find(attrs={'class': 'zm-person-item'})
        mi_id = first_user.get('id', None)
        if mi_id:
            account = first_user.h2.a.get('href', '')
            name = first_user.h2.a.get_text()
            print mi_id, account, name
            # Record where this crawl started so it can be resumed by hand.
            with open('mark_location.txt', 'a') as fd:
                fd.write('this loop begin at : \n{}'.format(str((mi_id, account, name))))
                fd.write('\n')
        else:
            return
        # began loop get users
        offset = 0
        _xsrf = self.get_xsrf
        n = 0
        # NOTE(review): hard cap of 11 pages (~220 users) — presumably a
        # throttling measure; confirm before relying on full coverage.
        while n < 11:
            print 'begin test.'
            post_data = {
                'offset': offset,
                '_xsrf': _xsrf,
                'start': mi_id[3:]
            }
            r = self.requests.post(url=url, data=post_data, headers=self.headers)
            if r.status_code != 200 and '"r":0' not in r.content:
                print r.content
                print r.status_code
                return
            soup_test = BeautifulSoup(r.content.replace('\\n', '').replace('\\"', '').replace('\\/', '/'), 'lxml')
            users_item = soup_test.find_all(attrs={'class': 'zm-person-item'})
            users_list = list()
            if users_item:
                for user_item in users_item:
                    # p = re.compile(r'mi-(\d+)')
                    mi_id = str(user_item.get('id', None))
                    user = user_item.find(attrs={'class': 'zm-list-content-title'})
                    account = user.a.get('href', None)
                    name = user.a.get_text()
                    print (mi_id, account, name.decode('raw_unicode_escape'))
                    users_list.append((mi_id, account, name.decode('raw_unicode_escape')))
                self.save_users(users_list)
            # time.sleep(random.randint(20, 30))
            offset += 20
            n += 1
    # @coroutine
    def save_users(self, users):
        # NOTE(review): ``yield`` without an active @coroutine makes this a
        # plain generator — the body never runs unless iterated; confirm
        # the decorator was meant to stay commented out.
        yield self.execute_sql(users)
    @run_on_executor
    def execute_sql(self, users):
        now = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
        for user in users:
            try:
                # NOTE(review): string-interpolated SQL from scraped values —
                # injection/quoting hazard; prefer parameterised queries.
                sql = 'INSERT INTO users (user_number, user_account, user_name, last_update, is_queried, is_deleted) ' \
                    'VALUES ("%s", "%s", "%s", "%s", 0, 0)' % (user[0], user[1], user[2], now)
                self.db.execute(sql)
                print sql
            except:
                # Record failed rows for later manual replay.
                with open('mark_location_failed.txt', 'a') as fd:
                    fd.write(str(user) + '\n')
                    fd.close()
class GetUserInfo(ZHBase):
    """Fetch each stored user's profile page and persist the parsed info."""
    def __init__(self, login):
        ZHBase.__init__(self)
        self.requests = login.requests
    def get_uid_account(self):
        sql = 'SELECT id, user_account FROM users'
        for row in self.db.query(sql):
            # Mark the row as queried before scraping so a crash does not
            # reprocess it on the next run.
            sql = 'UPDATE users SET is_queried=1 WHERE id=%d' % int(row.get('id'))
            self.db.execute(sql)
            self.get_user_info(row.get('id'), row.get('user_account'))
    @coroutine
    def get_user_info(self, uid, account):
        yield self.get_personal_information(uid, account)
    # @run_on_executor
    def save_user_info(self, info):
        print '*' * 32
        if info:
            now = str(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
            # NOTE(review): SQL built by string interpolation from scraped
            # profile text — quoting/injection hazard; parameterised queries
            # would be safer.
            sql = 'INSERT INTO info (sex, fans_number, follows_number, social_contact, location_by_self, abstract, ' \
                'employment, domicile, educational, last_update, user_id_id) VALUES (%s, %s, %s, "%s", "%s", "%s", ' \
                '"%s", "%s", "%s", "%s", %s)' % (info.get('sex', 2), info.get('fans', 0), info.get('follows', 0),
                info.get('social_contact', ''), info.get('location_by_self', ''), info.get('description', ''),
                info.get('employment', ''), info.get('domicile', ''), info.get('education', ''), now, info.get('user_id'))
            print sql
            self.db.execute(sql)
    @coroutine
    def get_personal_information(self, id, user):
        """Scrape one profile page and hand the parsed dict to save_user_info."""
        uri = 'http://www.zhihu.com' + str(user)
        print uri
        r = self.requests.get(uri, headers=self.headers)
        if r.status_code != 200:
            print 'get {} failed!'.format(uri)
            return
        soup = BeautifulSoup(r.content, "lxml")
        # get people main info
        main_info = soup.find(attrs={"class": 'zm-profile-header-main'})
        attention_info = soup.find(attrs={"class": 'zm-profile-side-following zg-clear'})
        # sex info: 1 = male, 0 = female, 2 = unknown (per the icon classes below)
        sex = 2
        if main_info.find(attrs={'class': 'icon icon-profile-male'}):
            sex = 1
        elif main_info.find(attrs={'class': 'icon icon-profile-female'}):
            sex = 0
        else:
            pass
        print 'sex : {}'.format(sex)
        # social contact (linked weibo account URL, if present)
        contact = ''
        contact_info = main_info.find(attrs={'class': 'zm-profile-header-user-weibo'})
        if contact_info:
            p = re.compile(r'href="(.*?)"', re.M)
            contact_info = str(contact_info)
            temp = p.findall(contact_info)
            if len(temp) == 0:
                pass
            else:
                contact = temp[0]
        print 'social contact(sina) : {}'.format(contact)
        # people's domicile
        domicile = ''
        domicile_info = main_info.find(attrs={'class': 'location item'})
        if domicile_info:
            domicile = domicile_info.get('title', '')
        print 'domicile : {}'.format(domicile)
        # location by self
        location_self = ''
        location_by_self = main_info.find(attrs={'class': 'business item'})
        if location_by_self:
            location_self = location_by_self.get('title', '')
        print 'location by self : {}'.format(location_self)
        # industry or employment - position
        industry = ''
        employment = ''
        position = ''
        employment_item = main_info.find(attrs={'class': 'employment item'})
        if employment_item:
            employment = employment_item.get('title', '')
        position_item = main_info.find(attrs={'class': 'position item'})
        if position_item:
            position = position_item.get('title', '')
        if True:
            industry = str(employment) + ' - ' + str(position)
        print 'employment : {}'.format(industry)
        # occupations
        # occupations = u'Now not need.'
        # print 'occupations : {}'.format(occupations)
        # education ("school - major" style combination)
        education_info = ''
        education = ''
        education_extra = ''
        education_item = main_info.find(attrs={'class': 'education item'})
        if education_item:
            education = education_item.get('title', '')
        education_extra_item = main_info.find(attrs={'class': 'education-extra item'})
        if education_extra_item:
            education_extra = education_extra_item.get('title', '')
        if True:
            education_info = str(education) + ' - ' + str(education_extra)
        print 'education information : {}'.format(education_info)
        # description
        description = ''
        description_info = main_info.find(attrs={'class': 'fold-item'})
        if description_info:
            description = description_info.span.get_text()
        print 'description : {}'.format(description)
        # fans follows numbers
        fans = 0
        follows = 0
        if attention_info:
            p = re.compile(r'<strong>(\d+)</strong>', re.M)
            numbers = p.findall(str(attention_info))
            if len(numbers) == 2:
                fans = numbers[0]
                follows = numbers[1]
        print 'fans number : {}'.format(fans)
        print 'follows number : {}'.format(follows)
        profile_info = {'user_id': id, 'sex': sex, 'social_contact': contact, 'domicile': domicile,
            'location_by_self': location_self, 'employment': industry, 'education': education_info,
            'description': description, 'fans': fans, 'follows': follows}
        self.save_user_info(profile_info)
        print profile_info
# if __name__ == '__main__':
# if login():
# print 'Spider start.'
# topics = GetTopics()
# topics.save_father_topics()
#
# sub_topics = GetSubclassTopics()
# sub_topics.get_subclass_topics()
#
# user_info = GetUserInfo()
# user_info.get_uid_account()
#
# get_users = GetSubclassFans('http://www.zhihu.com/topic/19550517')
# get_users.get_topic_fans()
|
richardGaoPy/NetSpider
|
zhihuspyder/base.py
|
Python
|
apache-2.0
| 15,004
|
# -*- coding: utf-8 -*-
import datetime
from django.db import models
from librehatti.catalog.models import Category, PurchaseOrder
from librehatti.voucher.models import FinancialSession
class SuspenseOrder(models.Model):
    """
    Stores order with over head costs.

    Ties a purchase order to the voucher number and financial session it
    was raised under.
    """
    voucher = models.IntegerField()
    purchase_order = models.ForeignKey(PurchaseOrder, on_delete=models.CASCADE)
    session_id = models.ForeignKey(FinancialSession, on_delete=models.CASCADE)
    # Defaults to 0 until a distance estimate is recorded.
    distance_estimated = models.IntegerField(default=0)
    is_cleared = models.BooleanField(default=False)
    def __str__(self):
        return "%s" % (self.id)
class SuspenseClearance(models.Model):
    """
    Stores clearance of suspense orders.

    Holds the per-voucher charge breakdown and the staff/date details
    recorded when a suspense order is cleared.
    """
    session = models.ForeignKey(FinancialSession, on_delete=models.CASCADE)
    voucher_no = models.IntegerField()
    work_charge = models.IntegerField(blank=True, null=True)
    labour_charge = models.IntegerField(blank=True, null=True)
    car_taxi_charge = models.IntegerField(blank=True, null=True)
    boring_charge_external = models.IntegerField(blank=True, null=True)
    boring_charge_internal = models.IntegerField(blank=True, null=True)
    lab_testing_staff = models.CharField(max_length=200, blank=True, null=True)
    field_testing_staff = models.CharField(
        max_length=200, blank=True, null=True
    )
    # Stored as text, not DateField — presumably to allow multiple dates
    # in one record; confirm against the writing code.
    test_date = models.CharField(max_length=600, blank=True, null=True)
    clear_date = models.DateField()
class Department(models.Model):
    """
    Stores department.
    """
    title = models.CharField(max_length=50)
    address = models.CharField(max_length=150)
    phone = models.CharField(max_length=20, blank=True)
    dean = models.CharField(max_length=50, blank=True)
    def __str__(self):
        return self.title
class StaffPosition(models.Model):
    """
    Stores position of staff.
    """
    position = models.CharField(max_length=50)
    # Numeric rank used to order positions relative to each other.
    rank = models.IntegerField()
    def __str__(self):
        return self.position
class Staff(models.Model):
    """
    Stores staff and map it with position.
    """
    department = models.ForeignKey(Department, on_delete=models.CASCADE)
    code = models.CharField(max_length=5)
    name = models.CharField(max_length=50)
    daily_ta_da = models.IntegerField(blank=True)
    position = models.ForeignKey(StaffPosition, on_delete=models.CASCADE)
    seniority_credits = models.IntegerField()
    always_included = models.BooleanField(default=True)
    # The lab this staff member belongs to (a catalog Category).
    lab = models.ForeignKey(Category, on_delete=models.CASCADE)
    email = models.EmailField(blank=True)
    class Meta:
        verbose_name_plural = "Staff"
    def __str__(self):
        return self.name
class TaDa(models.Model):
    """
    Model to store transport and daily allowances.

    One record covers a single site visit: travel times, allowance amounts
    (with and without tax) and the testing period/site/staff.
    """
    date_of_generation = models.DateField(default=datetime.date.today)
    voucher_no = models.IntegerField()
    session = models.IntegerField()
    departure_time_from_tcc = models.TimeField()
    arrival_time_at_site = models.TimeField()
    departure_time_from_site = models.TimeField()
    arrival_time_at_tcc = models.TimeField()
    tada_amount = models.IntegerField()
    tada_amount_without_tax = models.IntegerField()
    start_test_date = models.DateField()
    end_test_date = models.DateField()
    source_site = models.CharField(max_length=100, default="GNDEC, Ludhiana")
    testing_site = models.CharField(max_length=100)
    testing_staff = models.CharField(max_length=100)

    def __str__(self):
        # Bug fix: the model has no ``suspense`` field, so the previous
        # ``return self.suspense`` raised AttributeError whenever the object
        # was rendered (admin, shell, logs). Identify the record by voucher.
        return "%s" % (self.voucher_no)
class TaDa_Tax_Detail(models.Model):
    """Per-tax line item (name + amount) attached to a TaDa record."""
    tada = models.ForeignKey(TaDa, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    amount = models.IntegerField()
class QuotedSuspenseOrder(models.Model):
    """
    Stores Quoted suspense order.
    """
    # String reference avoids a circular import with the bills app.
    quoted_order = models.ForeignKey(
        "bills.QuotedOrder", on_delete=models.CASCADE
    )
    distance_estimated = models.IntegerField(default=0)
    is_cleared = models.BooleanField(default=False)
    def __str__(self):
        return "%s" % (self.id)
class Vehicle(models.Model):
    """
    Stores vehicle details.
    """
    vehicle_id = models.CharField(max_length=20)
    vehicle_no = models.CharField(max_length=20)
    vehicle_name = models.CharField(max_length=20)
    def __str__(self):
        return "%s" % (self.vehicle_no)
class Transport(models.Model):
    """
    Stores Transportation details.

    ``kilometer`` and ``date`` are stored as text — presumably serialized
    lists covering several trips per bill; confirm against the writing code.
    """
    vehicle = models.ForeignKey(Vehicle, on_delete=models.CASCADE)
    kilometer = models.CharField(max_length=500)
    # Per-kilometre rate applied when computing ``total``.
    rate = models.FloatField(default=10.0)
    date_of_generation = models.DateField()
    date = models.CharField(blank=True, max_length=600)
    total = models.IntegerField()
    voucher_no = models.IntegerField()
    session = models.ForeignKey(FinancialSession, on_delete=models.CASCADE)
    """def save(self, *args, **kwargs):
        # Now decode the kilometers
        jsonkilometer = simplejson.loads(self.kilometer)
        total_km = 0;
        # calculate the total kms
        for km in jsonkilometer:
            total_km += float(km)
        # Now calculate the total and save it in model
        self.total = total_km * self.rate
        super(Transport, self).save(*args, **kwargs)
    """
    class Meta:
        verbose_name_plural = "Transport"
    def __str__(self):
        return "%s" % (self.vehicle)
class TransportBillOfSession(models.Model):
    """Bill number of a Transport entry within a financial session."""
    transport = models.ForeignKey(Transport, on_delete=models.CASCADE)
    # per-session running bill number -- TODO confirm numbering scheme
    transportbillofsession = models.IntegerField()
    session = models.ForeignKey(FinancialSession, on_delete=models.CASCADE)
class SuspenseClearedRegister(models.Model):
    """Register entry recorded when a suspense is cleared in a session."""
    # serial number in the cleared-suspense register -- TODO confirm
    suspenseclearednumber = models.IntegerField()
    voucher_no = models.IntegerField()
    session = models.ForeignKey(FinancialSession, on_delete=models.CASCADE)
class CarTaxiAdvance(models.Model):
    """Advance, spend and balance record for car/taxi hire in a session."""
    voucher_no = models.IntegerField()
    session = models.ForeignKey(FinancialSession, on_delete=models.CASCADE)
    spent = models.IntegerField()
    advance = models.IntegerField()
    # presumably balance = advance - spent, computed by callers -- TODO confirm
    balance = models.IntegerField()
    receipt_no = models.IntegerField()
    # NOTE(review): plain IntegerField, not a ForeignKey to FinancialSession
    # like 'session' above -- verify this is intentional
    receipt_session = models.IntegerField()
|
GreatDevelopers/LibreHatti
|
src/librehatti/suspense/models.py
|
Python
|
gpl-2.0
| 6,188
|
def product(*iterables):
    """Lazily yield the cartesian product of the given iterables.

    Each combination is emitted as a list.  With no arguments a single
    empty list is produced (the identity element of the product), e.g.
    product(['a','b'], [1,2]) yields ['a',1], ['a',2], ['b',1], ['b',2].
    """
    # base case: empty product is one empty combination
    if not iterables:
        yield []
        return
    first, rest = iterables[0], iterables[1:]
    for item in first:
        # prepend the current item to every combination of the remainder
        for tail in product(*rest):
            yield [item] + tail
####
# Function generators
def make_test_function_template(INITIALIZE, TIME, FINALIZE):
    """Build a string.Template for a single timing function.

    Reads test_function_template.cxx (located next to this script),
    substitutes the $INITIALIZE/$TIME/$FINALIZE sections, and returns a
    Template whose remaining placeholders ($FUNCTION, $DESCRIPTION,
    per-variable names) are filled in later by make_test_function().
    """
    import string
    import os
    function_template_file = os.path.join( os.path.split(__file__)[0], 'test_function_template.cxx')
    # test_function_template has locations for $PREAMBLE $INITIALIZE etc.
    # Fix: close the file handle deterministically instead of leaking it.
    with open(function_template_file) as f:
        test_template = string.Template(f.read())
    sections = {'INITIALIZE' : INITIALIZE,
                'TIME'       : TIME,
                'FINALIZE'   : FINALIZE}
    # skeleton has supplied definitions for $INCLUDE and $PREAMBLE
    # and has locations for $InputType and $InputSize etc.
    # safe_substitute leaves unknown placeholders intact for later stages
    skeleton = test_template.safe_substitute(sections)
    return string.Template(skeleton)
def make_test_function(fname, TestVariablePairs, ftemplate):
    """Render one concrete test function from the function template.

    fname             -- name of the generated C++ function
    TestVariablePairs -- sequence of (variable name, value) pairs
    ftemplate         -- string.Template produced by make_test_function_template

    Fix: materialize the pairs once.  The original iterated the argument
    twice (list comprehension, then dict()), which silently produces an
    empty dict when a zip() iterator is passed on Python 3.
    """
    pairs = list(TestVariablePairs)
    VariableDescription = '\n'.join(['RECORD_VARIABLE("%s","%s");' % pair for pair in pairs])
    fmap = dict(pairs)
    fmap['DESCRIPTION'] = VariableDescription
    fmap['FUNCTION'] = fname
    return ftemplate.substitute(fmap)
def generate_functions(pname, TestVariables, INITIALIZE, TIME, FINALIZE):
    """Yield (function name, C++ source) pairs for the test-variable grid.

    TestVariables is a list of (name, value range) pairs; every point in
    the cartesian product of the ranges becomes one uniquely named test
    function rendered through the shared template.
    """
    ftemplate = make_test_function_template(INITIALIZE, TIME, FINALIZE)
    TestVariableNames  = [ pair[0] for pair in TestVariables]
    TestVariableRanges = [ pair[1] for pair in TestVariables]
    for n, values in enumerate(product(*TestVariableRanges)):
        converted_values = []
        for v in values:
            v = str(v)
            # replace C++ tokens we don't want inside an identifier
            for ch in ' .<>,:':
                v = v.replace(ch, "_")
            converted_values.append(v)
        fname = '_'.join( [pname] + converted_values )
        # materialize so the pairs survive both uses in make_test_function
        TestVariablePairs = list(zip(TestVariableNames, values))
        yield (fname, make_test_function(fname, TestVariablePairs, ftemplate))
####
# Program generators
def make_test_program(pname, functions, PREAMBLE = ""):
    """Assemble a complete C++ benchmark program as a single string.

    functions is a sequence of (name, source) pairs; each function body
    is emitted first, then a main() that runs every function inside a
    BEGIN_TESTSUITE/END_TESTSUITE bracket.
    """
    lines = ["#include <build/perftest.h>", PREAMBLE]
    # emit every generated test function definition
    for name, source in functions:
        lines.append(source)
    #TODO output TestVariables in <testsuite> somewhere
    lines.append("int main(int argc, char **argv)")
    lines.append("{")
    lines.append("PROCESS_ARGUMENTS(argc, argv);")
    lines.append('BEGIN_TESTSUITE("' + pname + '");')
    lines.append("RECORD_PLATFORM_INFO();")
    # then call each one from main()
    for name, source in functions:
        lines.append(name + "();")
    lines.append("END_TESTSUITE();")
    lines.append("}")
    lines.append("\n")
    return "\n".join(lines)
def generate_program(pname, TestVariables, PREAMBLE, INITIALIZE, TIME, FINALIZE):
    """Generate the complete benchmark program text for *pname*."""
    # expand the variable grid into concrete functions, then wrap in main()
    generated = list(generate_functions(pname, TestVariables, INITIALIZE, TIME, FINALIZE))
    return make_test_program(pname, generated, PREAMBLE)
###
# Test Input File -> Test Program
def process_test_file(filename):
    """Translate a .test input file into C++ benchmark program text.

    NOTE(review): Python 2 only.  The exec *statements* below rely on
    injecting TestVariables/PREAMBLE/INITIALIZE/TIME/FINALIZE into this
    function's local namespace -- a rewrite for Python 3 would need an
    explicit namespace dict.
    """
    import os
    # program name = input file name without directory or extension
    pname = os.path.splitext(os.path.split(filename)[1])[0]
    test_env_file = os.path.join( os.path.split(__file__)[0], 'test_env.py')
    # XXX why does execfile() not give us the right namespace?
    exec open(test_env_file)
    exec open(filename)
    return generate_program(pname, TestVariables, PREAMBLE, INITIALIZE, TIME, FINALIZE)
def compile_test(input_name, output_name):
    """Compiles a .test file into a .cu file.

    Writes the generated program text to *output_name*.  Fix: use a
    'with' block so the output file is flushed and closed even if
    process_test_file() raises, instead of leaking the handle.
    """
    with open(output_name, 'w') as out:
        out.write(process_test_file(input_name))
##
# Simple Driver script
if __name__ == '__main__':
import os, sys
if len(sys.argv) not in [2,3]:
print "usage: %s test_input.py [test_output.cu]" % (sys.argv[0],)
os.exit()
input_name = sys.argv[1]
if len(sys.argv) == 2:
# reduce.test -> reduce.cu
output_name = os.path.splitext(os.path.split(filename)[1])[0] + '.cu'
else:
output_name = sys.argv[2]
# process_test_file returns a string containing
# the whole test program (i.e. the text of a .cu file)
compile_test(input_name, output_name)
# this is just for show, scons integration would do this differently
#import subprocess
#subprocess.call('scons')
#subprocess.call('./' + pname)
#print "collecting data..."
#output = subprocess.Popen(['./' + pname], stdout=subprocess.PIPE).communicate()[0]
#print output
|
egaburov/thrust
|
performance/build/perftest.py
|
Python
|
apache-2.0
| 4,777
|
# -*- coding: utf-8 -*-
"""Module providing views for the site navigation root"""
from Acquisition import aq_inner
from Products.Five.browser import BrowserView
from Products.ZCatalog.interfaces import ICatalogBrain
from plone import api
from plone.app.contentlisting.interfaces import IContentListing
from plone.app.contentlisting.interfaces import IContentListingObject
from plone.app.contenttypes.interfaces import INewsItem
from zope.component import getMultiAdapter
from zope.component import getUtility
from eda.sitecontent.interfaces import IResponsiveImagesTool
# Inline data-URI GIF used as the fallback image when an object has no
# usable 'image' attribute (see FrontPageView._get_scaled_img)
IMG = 'data:image/gif;base64,R0lGODlhAQABAIAAAP///wAAACwAAAAAAQABAAACAkQBADs='
class FrontPageView(BrowserView):
    """ General purpose frontpage view """

    def __call__(self):
        # cache whether any published news exists for use by the template
        self.has_newsitems = len(self.recent_news()) > 0
        return self.render()

    def render(self):
        return self.index()

    def can_edit(self):
        """Return True for any authenticated (non-anonymous) user."""
        return not api.user.is_anonymous()

    def portal_id(self):
        """Return the id of the portal root object."""
        portal = api.portal.get()
        return portal.id

    def recent_news(self):
        """Return up to 3 most recently published news items."""
        catalog = api.portal.get_tool(name='portal_catalog')
        items = catalog(object_provides=INewsItem.__identifier__,
                        review_state='published',
                        sort_on='Date',
                        sort_order='reverse',
                        sort_limit=3)[:3]
        return IContentListing(items)

    def rendered_news_card(self, uuid):
        """Render the @@card-news-item view for the item with *uuid*."""
        item = api.content.get(UID=uuid)
        template = item.restrictedTraverse('@@card-news-item')()
        return template

    def section_preview(self, section):
        """Return a teaser dict (title/teaser/url/image/subitems) for the
        content item at path *section*; empty dict when nothing is found."""
        info = {}
        if section.startswith('/'):
            target = section
        else:
            target = '/{0}'.format(section)
        item = api.content.get(path=target)
        if item:
            info['title'] = item.Title()
            info['teaser'] = item.Description()
            info['url'] = item.absolute_url()
            info['image'] = self.image_tag(item)
            info['subitems'] = None
            # BUG fix: ('/news') is a plain string, so the original did a
            # substring test; a one-element tuple gives real membership.
            if target in ('/news',):
                info['subitems'] = self.recent_news()
        return info

    def get_image_data(self, uuid):
        """Return responsive image data for *uuid* via the images tool."""
        tool = getUtility(IResponsiveImagesTool)
        return tool.create(uuid)

    def image_tag(self, item):
        """Map size name -> 'URL WIDTHw' srcset fragment for *item*.

        (The original kept a dead 'idx' counter that was incremented by 0
        and never read; it has been removed.)
        """
        data = {}
        for size in ('small', 'medium', 'large'):
            img = self._get_scaled_img(item, size)
            data[size] = '{0} {1}w'.format(img['url'], img['width'])
        return data

    def _get_scaled_img(self, item, size):
        """Return {url, width, height} for *item* scaled to *size*.

        Falls back to the 1x1 data-URI GIF when the object has no image.
        """
        if (
            ICatalogBrain.providedBy(item) or
            IContentListingObject.providedBy(item)
        ):
            obj = item.getObject()
        else:
            obj = item
        info = {}
        if hasattr(obj, 'image'):
            scales = getMultiAdapter((obj, self.request), name='images')
            # BUG fix: the second test was a bare 'if', so the trailing
            # 'else' overwrote the just-computed 'small' scale with the
            # 900px one.  Use an elif chain so each size gets its scale.
            if size == 'small':
                scale = scales.scale('image', width=300, height=300)
            elif size == 'medium':
                scale = scales.scale('image', width=600, height=600)
            else:
                scale = scales.scale('image', width=900, height=900)
            if scale is not None:
                info['url'] = scale.url
                info['width'] = scale.width
                info['height'] = scale.height
        else:
            info['url'] = IMG
            info['width'] = '1px'
            info['height'] = '1px'
        return info
|
a25kk/eda
|
src/eda.sitecontent/eda/sitecontent/browser/frontpage.py
|
Python
|
mit
| 3,589
|
import numpy as np
import matplotlib.pyplot as plt
import sys
class Sudoku:
    """A 9x9 sudoku board for stochastic search.

    Every 3x3 subgrid is filled with a random permutation of 1..9, so the
    subgrid constraint always holds; only rows and columns can conflict,
    and cost() counts exactly those conflicts.
    """

    def __init__(self):
        # @N: board size, 9
        # @n: board subgrid size 3
        self.N = 9
        self.n = 3
        self.board = np.zeros((self.N, self.N), dtype=int)
        # fill every n x n block with a permutation of 1 to N
        for i in range(0, self.N, self.n):
            for j in range(0, self.N, self.n):
                self.board[i:i+self.n, j:j+self.n] = \
                    np.random.permutation(range(1, 1+self.N)).reshape(self.n, self.n)

    def __repr__(self):
        return('Sudoku(\nboard:\n%s\ncost:%d\n)'%(self.board, self.cost()))

    def printBoard(self):
        # print the board to screen with subgrid separators
        for i in range(self.n):
            sys.stdout.write('+-------+-------+-------+\n')
            for j in range(self.n):
                sys.stdout.write('|')
                for k in range(self.n):
                    for l in range(self.n):
                        # consistency fix: use self.n instead of literal 3
                        sys.stdout.write(' %d'%self.board[self.n*i+j, self.n*k+l])
                    sys.stdout.write(' |')
                sys.stdout.write('\n')
        sys.stdout.write('+-------+-------+-------+\n')

    def cost(self):
        # cost = number of duplicate entries across all rows and columns
        # (N - number of distinct values, summed per row and per column)
        costRow = sum([self.N-len(set(self.board[i,:])) for i in range(self.N)])
        costCol = sum([self.N-len(set(self.board[:,j])) for j in range(self.N)])
        return costRow + costCol

    def neighbor(self):
        """Return a new Sudoku with two cells of one random subgrid swapped.

        Swapping inside a subgrid preserves the permutation invariant.
        """
        newSdk = Sudoku()
        block = np.random.choice(range(self.N))
        switch = np.random.choice(range(self.N), 2, replace=False)
        newBoard = self.board.copy()
        # consistency fix: derive offsets from self.n rather than literal 3
        i1 = block//self.n*self.n + switch[0]//self.n
        j1 = block%self.n*self.n + switch[0]%self.n
        i2 = block//self.n*self.n + switch[1]//self.n
        j2 = block%self.n*self.n + switch[1]%self.n
        newBoard[i1,j1], newBoard[i2,j2] = newBoard[i2,j2], newBoard[i1,j1]
        newSdk.board = newBoard
        return newSdk
class SimulatedAnnealing:
    """Generic simulated-annealing driver with geometric cooling."""

    def __init__(self, tStart, tEnd, tAlpha, num):
        # start temperature, stop temperature, cooling factor, and the
        # number of candidate moves evaluated per temperature step
        self.Ts = tStart
        self.Te = tEnd
        self.alpha = tAlpha
        self.numIter = num

    def anneal(self, solution):
        """Anneal *solution*; return (final solution, temperatures, costs).

        Stops early as soon as a zero-cost solution is found.
        """
        current_cost = solution.cost()
        temperature = self.Ts
        temps = [temperature]
        costs = [current_cost]
        while temperature > self.Te:
            # evaluate a fixed number of candidate moves at this temperature
            for _ in range(self.numIter):
                candidate = solution.neighbor()
                candidate_cost = candidate.cost()
                accept = self.acceptance_probability(current_cost, candidate_cost, temperature)
                if accept > np.random.random():
                    solution = candidate
                    current_cost = candidate_cost
            print('(%f): %d'%(temperature,current_cost))
            # geometric cooling schedule
            temperature = temperature*self.alpha
            temps.append(temperature)
            costs.append(current_cost)
            if current_cost == 0:
                break
        return solution, temps, costs

    def acceptance_probability(self, old_cost, new_cost, T):
        """Metropolis criterion: always accept improvements, otherwise
        accept with probability exp((old - new) / T)."""
        if new_cost < old_cost:
            return 1.
        return np.exp((old_cost-new_cost)/T)
# Demo driver: anneal one random board from T=1 down to T=0.0001 with a
# 0.95 cooling factor and 100 moves per step, print the final board, and
# plot the cost-vs-temperature trajectory.
SA = SimulatedAnnealing(1., 0.0001, 0.95, 100)
sdk = Sudoku()
sol, t, c = SA.anneal(sdk)
sol.printBoard()
plt.plot(t, c, 'b.-')
plt.ylabel('cost')
plt.xlabel('temperature')
plt.show()
|
yu-H-ang/leetcode-backup
|
optimization/SimulatedAnnealing.py
|
Python
|
mit
| 3,369
|
from django.contrib import admin
from jobs.models import Job, JobParameter
# Expose the jobs models in the Django admin with default ModelAdmin options
admin.site.register(Job)
admin.site.register(JobParameter)
|
silps/solesite
|
jobs/admin.py
|
Python
|
bsd-3-clause
| 135
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.