max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
src/longmessage_room.py | ArkaneMoose/emailbot | 10 | 6612751 | from euphoria import connection as cn
from euphoria import room
class LongMessageRoom(room.Room):
    """A room that automatically re-fetches truncated messages.

    Euphoria truncates long messages in send-events.  This room watches
    every send-event and, when a message is marked as truncated, issues a
    get-message command so the full body is re-delivered to handle_chat().

    If this room is implemented, handle_chat(message) should be ignored if
    message.get('truncated') == True, as handle_chat(message) will be
    reissued with the full message by this room.
    """
    def __init__(self, roomname, password=None, attempts=None):
        # NOTE(review): the original source carried a redacted placeholder
        # (`<PASSWORD>`) here, which is not valid Python; None is the
        # conventional "no password" default.
        super().__init__(roomname, password, attempts)
        self.connection.add_callback("send-event", self.request_full_message)
        self.connection.add_callback("get-message-reply", self.handle_getmessagereply)
    def request_full_message(self, message):
        """
        request_full_message(message) -> None
        Checks if message is truncated; if so, issues a get-message command.
        """
        if message["data"].get("truncated"):
            self.connection.send_packet("get-message", dict(id=message["data"]["id"]))
    def handle_getmessagereply(self, message):
        """
        handle_getmessagereply(message) -> None
        Passes the full (untruncated) message to handle_chat().
        """
        self.handle_chat(message["data"])
    def handle_chat(self, message):
        """
        handle_chat(message) -> None
        Override this method to handle chats.
        """
        pass
| from euphoria import connection as cn
from euphoria import room
class LongMessageRoom(room.Room):
    """A room that automatically re-fetches truncated messages.

    Euphoria truncates long messages in send-events.  This room watches
    every send-event and, when a message is marked as truncated, issues a
    get-message command so the full body is re-delivered to handle_chat().

    If this room is implemented, handle_chat(message) should be ignored if
    message.get('truncated') == True, as handle_chat(message) will be
    reissued with the full message by this room.
    """
    def __init__(self, roomname, password=None, attempts=None):
        # NOTE(review): the original source carried a redacted placeholder
        # (`<PASSWORD>`) here, which is not valid Python; None is the
        # conventional "no password" default.
        super().__init__(roomname, password, attempts)
        self.connection.add_callback("send-event", self.request_full_message)
        self.connection.add_callback("get-message-reply", self.handle_getmessagereply)
    def request_full_message(self, message):
        """
        request_full_message(message) -> None
        Checks if message is truncated; if so, issues a get-message command.
        """
        if message["data"].get("truncated"):
            self.connection.send_packet("get-message", dict(id=message["data"]["id"]))
    def handle_getmessagereply(self, message):
        """
        handle_getmessagereply(message) -> None
        Passes the full (untruncated) message to handle_chat().
        """
        self.handle_chat(message["data"])
    def handle_chat(self, message):
        """
        handle_chat(message) -> None
        Override this method to handle chats.
        """
        pass
| en | 0.562009 | A long message room allows your bot to automatically retrieve long messages. If this room is implemented, handle_chat(message) should be ignored if message.get('truncated') == True, as the handle_chat(message) should be reissued with the full message by this room. request_full_message(message) -> None Checks if message is truncated; if so, issues a get-message command. handle_getmessagereply(message) -> None Passes message to handle_chat(). handle_chat(message) -> None Override this method to handle chats. | 3.117581 | 3 |
acra/views.py | Saibamen/credo-webapp | 2 | 6612752 | <reponame>Saibamen/credo-webapp
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from acra.models import CrashReport
@csrf_exempt  # ACRA clients cannot supply a Django CSRF token, so exempt this view
def report(request):
    """Receive an ACRA crash report and persist the raw POST body.

    Non-POST requests get HTTP 400 (NOTE(review): 405 would be the
    conventional status for a wrong method, but existing clients may
    depend on 400 -- confirm before changing).  A successful store
    returns HTTP 200.
    """
    if request.method != "POST":
        return HttpResponse(status=400)
    # Store the payload verbatim; parsing/validation happens elsewhere.
    CrashReport.objects.create(data=request.body)
    return HttpResponse(status=200)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from acra.models import CrashReport
@csrf_exempt
def report(request):
if request.method != "POST":
return HttpResponse(status=400)
CrashReport.objects.create(data=request.body)
return HttpResponse(status=200) | en | 0.769321 | # -*- coding: utf-8 -*- | 1.958937 | 2 |
crc16.py | robotika/husky | 3 | 6612753 | ################################################################################
# Checksum
## @defgroup crc CRC Generation
# @ingroup doc
#
# A 16-bit CRC using one of the CCITT polynomials is used to confirm message
# integrity. \n\n
#
# <i>Polynomial:</i> x16+x12+x5+1 (0x1021) \n
#
# <i>Initial value:</i> 0xFFFF \n
#
# <i>Check constant:</i> 0x1D0F \n\n
#
# The calculated CRC of the string '123456789' should be 0x29B1 \n
#
# To confirm CRC implementation, the following process can be used:
# -# Calculate the CRC of any message
# -# XOR it with 0xFFFF (bitwise inversion)
# -# Append it to the original message
# -# Perform the CRC calculation on the extended message
# -# Confirm that the new CRC is equal to the check constant (0x1D0F)
#
# \b Sample \b C \b Code \b for \b table-driven \b CRC \b computation: \n\n
# @code
# /*
# * crc.h
# */
#
# #ifndef __CRC16_H
# #define __CRC16_H
#
# /***----------Table-driven crc function----------***/
# /* Inputs: -size of the character array, */
# /* the CRC of which is being computed */
# /* - the initial value of the register to */
# /* be used in the calculation */
# /* - a pointer to the first element of */
# /* said character array */
# /* Outputs: the crc as an unsigned short int */
# unsigned short int crc16(int size, int init_val, char *data);
#
# #endif
#
# /*
# * crc.c
# */
#
# #include "crc.h"
#
# //CRC lookup table for polynomial 0x1021
# const unsigned short int table[256] =
# {0, 4129, 8258, 12387, 16516, 20645, 24774, 28903, 33032, 37161, 41290,
# 45419, 49548, 53677, 57806, 61935, 4657, 528, 12915, 8786, 21173,
# 17044, 29431, 25302, 37689, 33560, 45947, 41818, 54205, 50076, 62463,
# 58334, 9314, 13379, 1056, 5121, 25830, 29895, 17572, 21637, 42346,
# 46411, 34088, 38153, 58862, 62927, 50604, 54669, 13907, 9842, 5649,
# 1584, 30423, 26358, 22165, 18100, 46939, 42874, 38681, 34616, 63455,
# 59390, 55197, 51132, 18628, 22757, 26758, 30887, 2112, 6241, 10242,
# 14371, 51660, 55789, 59790, 63919, 35144, 39273, 43274, 47403, 23285,
# 19156, 31415, 27286, 6769, 2640, 14899, 10770, 56317, 52188, 64447,
# 60318, 39801, 35672, 47931, 43802, 27814, 31879, 19684, 23749, 11298,
# 15363, 3168, 7233, 60846, 64911, 52716, 56781, 44330, 48395, 36200,
# 40265, 32407, 28342, 24277, 20212, 15891, 11826, 7761, 3696, 65439,
# 61374, 57309, 53244, 48923, 44858, 40793, 36728, 37256, 33193, 45514,
# 41451, 53516, 49453, 61774, 57711, 4224, 161, 12482, 8419, 20484,
# 16421, 28742, 24679, 33721, 37784, 41979, 46042, 49981, 54044, 58239,
# 62302, 689, 4752, 8947, 13010, 16949, 21012, 25207, 29270, 46570,
# 42443, 38312, 34185, 62830, 58703, 54572, 50445, 13538, 9411, 5280,
# 1153, 29798, 25671, 21540, 17413, 42971, 47098, 34713, 38840, 59231,
# 63358, 50973, 55100, 9939, 14066, 1681, 5808, 26199, 30326, 17941,
# 22068, 55628, 51565, 63758, 59695, 39368, 35305, 47498, 43435, 22596,
# 18533, 30726, 26663, 6336, 2273, 14466, 10403, 52093, 56156, 60223,
# 64286, 35833, 39896, 43963, 48026, 19061, 23124, 27191, 31254, 2801,
# 6864, 10931, 14994, 64814, 60687, 56684, 52557, 48554, 44427, 40424,
# 36297, 31782, 27655, 23652, 19525, 15522, 11395, 7392, 3265, 61215,
# 65342, 53085, 57212, 44955, 49082, 36825, 40952, 28183, 32310, 20053,
# 24180, 11923, 16050, 3793, 7920};
#
# /***----------Table-driven crc function----------***/
# /* Inputs: - size of the character array, the CRC */
# /* of which is being computed */
# /* - the initial value of the register to */
# /* be used in the calculation */
# /* - a pointer to the first element of */
# /* said character array */
# /* Outputs: the crc as an unsigned short int */
# unsigned short int crc16(int size, int init_val, char *data)
# {
# unsigned short int crc = (unsigned short int) init_val;
# while(size--) {
# crc = (crc << 8) ^ table[((crc >> 8) ^ *data++) & 0xFFFF];
# }
# return crc;
# }
# @endcode
## CCITT CRC-16 lookup table (polynomial 0x1021), built at import time.
#
# The generated values are identical to the traditional hand-written
# 256-entry table for the MSB-first CRC with polynomial
# x^16 + x^12 + x^5 + 1.
def _build_ccitt_table(poly=0x1021):
    """Return the 256-entry MSB-first CRC-16 lookup table for *poly*."""
    entries = []
    for top_byte in range(256):
        reg = top_byte << 8
        for _ in range(8):
            if reg & 0x8000:
                reg = ((reg << 1) ^ poly) & 0xFFFF
            else:
                reg = (reg << 1) & 0xFFFF
        entries.append(reg)
    return tuple(entries)

CCIT_CRC_TABLE = _build_ccitt_table()

## Perform a 16-bit CRC with CCITT Polynomial 0x1021
#
# @param data A Byte List to checksum
# @param init_val The initial value to calculate the checksum with.
#                 The default value of 0xffff performs a proper checksum.
# @return Resultant Checksum (16-bits)
#
# @pydoc
def ccitt_checksum(data, init_val=0xFFFF):
    """Perform a 16-bit CRC with CCITT Polynomial 0x1021"""
    register = init_val
    for value in data:
        index = ((register >> 8) ^ value) & 0xFF
        register = ((register << 8) & 0xFF00) ^ CCIT_CRC_TABLE[index]
    return register
| ################################################################################
# Checksum
## @defgroup crc CRC Generation
# @ingroup doc
#
# A 16-bit CRC using one of the CCITT polynomials is used to confirm message
# integrity. \n\n
#
# <i>Polynomial:</i> x16+x12+x5+1 (0x1021) \n
#
# <i>Initial value:</i> 0xFFFF \n
#
# <i>Check constant:</i> 0x1D0F \n\n
#
# The calculated CRC of the string '123456789' should be 0x29B1 \n
#
# To confirm CRC implementation, the following process can be used:
# -# Calculate the CRC of any message
# -# XOR it with 0xFFFF (bitwise inversion)
# -# Append it to the original message
# -# Perform the CRC calculation on the extended message
# -# Confirm that the new CRC is equal to the check constant (0x1D0F)
#
# \b Sample \b C \b Code \b for \b table-driven \b CRC \b computation: \n\n
# @code
# /*
# * crc.h
# */
#
# #ifndef __CRC16_H
# #define __CRC16_H
#
# /***----------Table-driven crc function----------***/
# /* Inputs: -size of the character array, */
# /* the CRC of which is being computed */
# /* - the initial value of the register to */
# /* be used in the calculation */
# /* - a pointer to the first element of */
# /* said character array */
# /* Outputs: the crc as an unsigned short int */
# unsigned short int crc16(int size, int init_val, char *data);
#
# #endif
#
# /*
# * crc.c
# */
#
# #include "crc.h"
#
# //CRC lookup table for polynomial 0x1021
# const unsigned short int table[256] =
# {0, 4129, 8258, 12387, 16516, 20645, 24774, 28903, 33032, 37161, 41290,
# 45419, 49548, 53677, 57806, 61935, 4657, 528, 12915, 8786, 21173,
# 17044, 29431, 25302, 37689, 33560, 45947, 41818, 54205, 50076, 62463,
# 58334, 9314, 13379, 1056, 5121, 25830, 29895, 17572, 21637, 42346,
# 46411, 34088, 38153, 58862, 62927, 50604, 54669, 13907, 9842, 5649,
# 1584, 30423, 26358, 22165, 18100, 46939, 42874, 38681, 34616, 63455,
# 59390, 55197, 51132, 18628, 22757, 26758, 30887, 2112, 6241, 10242,
# 14371, 51660, 55789, 59790, 63919, 35144, 39273, 43274, 47403, 23285,
# 19156, 31415, 27286, 6769, 2640, 14899, 10770, 56317, 52188, 64447,
# 60318, 39801, 35672, 47931, 43802, 27814, 31879, 19684, 23749, 11298,
# 15363, 3168, 7233, 60846, 64911, 52716, 56781, 44330, 48395, 36200,
# 40265, 32407, 28342, 24277, 20212, 15891, 11826, 7761, 3696, 65439,
# 61374, 57309, 53244, 48923, 44858, 40793, 36728, 37256, 33193, 45514,
# 41451, 53516, 49453, 61774, 57711, 4224, 161, 12482, 8419, 20484,
# 16421, 28742, 24679, 33721, 37784, 41979, 46042, 49981, 54044, 58239,
# 62302, 689, 4752, 8947, 13010, 16949, 21012, 25207, 29270, 46570,
# 42443, 38312, 34185, 62830, 58703, 54572, 50445, 13538, 9411, 5280,
# 1153, 29798, 25671, 21540, 17413, 42971, 47098, 34713, 38840, 59231,
# 63358, 50973, 55100, 9939, 14066, 1681, 5808, 26199, 30326, 17941,
# 22068, 55628, 51565, 63758, 59695, 39368, 35305, 47498, 43435, 22596,
# 18533, 30726, 26663, 6336, 2273, 14466, 10403, 52093, 56156, 60223,
# 64286, 35833, 39896, 43963, 48026, 19061, 23124, 27191, 31254, 2801,
# 6864, 10931, 14994, 64814, 60687, 56684, 52557, 48554, 44427, 40424,
# 36297, 31782, 27655, 23652, 19525, 15522, 11395, 7392, 3265, 61215,
# 65342, 53085, 57212, 44955, 49082, 36825, 40952, 28183, 32310, 20053,
# 24180, 11923, 16050, 3793, 7920};
#
# /***----------Table-driven crc function----------***/
# /* Inputs: - size of the character array, the CRC */
# /* of which is being computed */
# /* - the initial value of the register to */
# /* be used in the calculation */
# /* - a pointer to the first element of */
# /* said character array */
# /* Outputs: the crc as an unsigned short int */
# unsigned short int crc16(int size, int init_val, char *data)
# {
# unsigned short int crc = (unsigned short int) init_val;
# while(size--) {
# crc = (crc << 8) ^ table[((crc >> 8) ^ *data++) & 0xFFFF];
# }
# return crc;
# }
# @endcode
## CCITT CRC-16 lookup table (polynomial 0x1021), built at import time.
#
# The generated values are identical to the traditional hand-written
# 256-entry table for the MSB-first CRC with polynomial
# x^16 + x^12 + x^5 + 1.
def _build_ccitt_table(poly=0x1021):
    """Return the 256-entry MSB-first CRC-16 lookup table for *poly*."""
    entries = []
    for top_byte in range(256):
        reg = top_byte << 8
        for _ in range(8):
            if reg & 0x8000:
                reg = ((reg << 1) ^ poly) & 0xFFFF
            else:
                reg = (reg << 1) & 0xFFFF
        entries.append(reg)
    return tuple(entries)

CCIT_CRC_TABLE = _build_ccitt_table()

## Perform a 16-bit CRC with CCITT Polynomial 0x1021
#
# @param data A Byte List to checksum
# @param init_val The initial value to calculate the checksum with.
#                 The default value of 0xffff performs a proper checksum.
# @return Resultant Checksum (16-bits)
#
# @pydoc
def ccitt_checksum(data, init_val=0xFFFF):
    """Perform a 16-bit CRC with CCITT Polynomial 0x1021"""
    register = init_val
    for value in data:
        index = ((register >> 8) ^ value) & 0xFF
        register = ((register << 8) & 0xFF00) ^ CCIT_CRC_TABLE[index]
    return register
| en | 0.442825 | ################################################################################ # Checksum ## @defgroup crc CRC Generation # @ingroup doc # # A 16-bit CRC using one of the CCITT polynomials is used to confirm message # integrity. \n\n # # <i>Polynomial:</i> x16+x12+x5+1 (0x1021) \n # # <i>Initial value:</i> 0xFFFF \n # # <i>Check constant:</i> 0x1D0F \n\n # # The calculated CRC of the string '123456789' should be 0x29B1 \n # # To confirm CRC implementation, the following process can be used: # -# Calculate the CRC of any message # -# XOR it with 0xFFFF (bitwise inversion) # -# Append it to the original message # -# Perform the CRC calculation on the extended message # -# Confirm that the new CRC is equal to the check constant (0x1D0F) # # \b Sample \b C \b Code \b for \b table-driven \b CRC \b computation: \n\n # @code # /* # * crc.h # */ # # #ifndef __CRC16_H # #define __CRC16_H # # /***----------Table-driven crc function----------***/ # /* Inputs: -size of the character array, */ # /* the CRC of which is being computed */ # /* - the initial value of the register to */ # /* be used in the calculation */ # /* - a pointer to the first element of */ # /* said character array */ # /* Outputs: the crc as an unsigned short int */ # unsigned short int crc16(int size, int init_val, char *data); # # #endif # # /* # * crc.c # */ # # #include "crc.h" # # //CRC lookup table for polynomial 0x1021 # const unsigned short int table[256] = # {0, 4129, 8258, 12387, 16516, 20645, 24774, 28903, 33032, 37161, 41290, # 45419, 49548, 53677, 57806, 61935, 4657, 528, 12915, 8786, 21173, # 17044, 29431, 25302, 37689, 33560, 45947, 41818, 54205, 50076, 62463, # 58334, 9314, 13379, 1056, 5121, 25830, 29895, 17572, 21637, 42346, # 46411, 34088, 38153, 58862, 62927, 50604, 54669, 13907, 9842, 5649, # 1584, 30423, 26358, 22165, 18100, 46939, 42874, 38681, 34616, 63455, # 59390, 55197, 51132, 18628, 22757, 26758, 30887, 2112, 6241, 10242, # 14371, 51660, 55789, 59790, 63919, 
35144, 39273, 43274, 47403, 23285, # 19156, 31415, 27286, 6769, 2640, 14899, 10770, 56317, 52188, 64447, # 60318, 39801, 35672, 47931, 43802, 27814, 31879, 19684, 23749, 11298, # 15363, 3168, 7233, 60846, 64911, 52716, 56781, 44330, 48395, 36200, # 40265, 32407, 28342, 24277, 20212, 15891, 11826, 7761, 3696, 65439, # 61374, 57309, 53244, 48923, 44858, 40793, 36728, 37256, 33193, 45514, # 41451, 53516, 49453, 61774, 57711, 4224, 161, 12482, 8419, 20484, # 16421, 28742, 24679, 33721, 37784, 41979, 46042, 49981, 54044, 58239, # 62302, 689, 4752, 8947, 13010, 16949, 21012, 25207, 29270, 46570, # 42443, 38312, 34185, 62830, 58703, 54572, 50445, 13538, 9411, 5280, # 1153, 29798, 25671, 21540, 17413, 42971, 47098, 34713, 38840, 59231, # 63358, 50973, 55100, 9939, 14066, 1681, 5808, 26199, 30326, 17941, # 22068, 55628, 51565, 63758, 59695, 39368, 35305, 47498, 43435, 22596, # 18533, 30726, 26663, 6336, 2273, 14466, 10403, 52093, 56156, 60223, # 64286, 35833, 39896, 43963, 48026, 19061, 23124, 27191, 31254, 2801, # 6864, 10931, 14994, 64814, 60687, 56684, 52557, 48554, 44427, 40424, # 36297, 31782, 27655, 23652, 19525, 15522, 11395, 7392, 3265, 61215, # 65342, 53085, 57212, 44955, 49082, 36825, 40952, 28183, 32310, 20053, # 24180, 11923, 16050, 3793, 7920}; # # /***----------Table-driven crc function----------***/ # /* Inputs: - size of the character array, the CRC */ # /* of which is being computed */ # /* - the initial value of the register to */ # /* be used in the calculation */ # /* - a pointer to the first element of */ # /* said character array */ # /* Outputs: the crc as an unsigned short int */ # unsigned short int crc16(int size, int init_val, char *data) # { # unsigned short int crc = (unsigned short int) init_val; # while(size--) { # crc = (crc << 8) ^ table[((crc >> 8) ^ *data++) & 0xFFFF]; # } # return crc; # } # @endcode ## Precomputed checksum table. Polynomial 0x1021. 
# # Used for performing a 16-bit CRC with polynomial 0x1021 ## Perform a 16-bit CRC with CCITT Polynomial 0x1021 # # @param data A Byte List to checksum # @param init_val The initial value to calculate the checksum with. # The default value of 0xffff performs a proper checksum. # @return Resultant Checksum (16-bits) # # @pydoc Perform a 16-bit CRC with CCITT Polynomial 0x1021 | 2.107113 | 2 |
gists/.ipynb_checkpoints/change_to_project_root-checkpoint.py | safurrier/data-science-utils | 3 | 6612754 | import ruamel.yaml as yaml
import os
import sys
# Change target_fname to a file in the root dir
# If default target_fname='README.md' works, then simply:
import warnings
import os
def get_file_absolute_path(target_fname: str='README.md', levels_to_check: int=10, verbose=0):
    """Walk up from the current directory looking for *target_fname*.

    Searches the current working directory and up to *levels_to_check*
    directories in total.  (The original bailed out one iteration early,
    so only ``levels_to_check - 1`` directories were ever examined.)

    Side effect: on success the process working directory is left at the
    directory containing the file, and that path is returned.  If the
    file is not found, the working directory is restored and None is
    returned.
    """
    original_wd = os.getcwd()
    for _ in range(levels_to_check):
        # Check the current directory for the target file.
        if os.path.isfile(target_fname):
            target_dir = os.getcwd()
            if verbose:
                print(f'Found target file in {target_dir}')
            return target_dir
        # Not here -- step up one directory and try again.
        os.chdir('../')
    # Exhausted all levels: restore the original working directory.
    os.chdir(original_wd)
    if verbose:
        warnings.warn(f"""\n\nUnable to find directory with file {target_fname} within {levels_to_check} parent directories""")
    return None
# Change to the project root (the directory containing the target file).
# NOTE(review): the original called `get_fpath_absolute_path()`, a
# misspelled, undefined name (NameError at import); the function defined
# above is `get_file_absolute_path`.  Also guard against it returning
# None, which would make os.chdir() raise.
_project_root = get_file_absolute_path()
if _project_root is not None:
    os.chdir(_project_root)
# Add the project root directory to sys.path so project modules import.
path = os.getcwd()
if path not in sys.path:
    sys.path.append(path)
| import ruamel.yaml as yaml
import os
import sys
# Change target_fname to a file in the root dir
# If default target_fname='README.md' works, then simply:
import warnings
import os
def get_file_absolute_path(target_fname: str='README.md', levels_to_check: int=10, verbose=0):
    """Walk up from the current directory looking for *target_fname*.

    Searches the current working directory and up to *levels_to_check*
    directories in total.  (The original bailed out one iteration early,
    so only ``levels_to_check - 1`` directories were ever examined.)

    Side effect: on success the process working directory is left at the
    directory containing the file, and that path is returned.  If the
    file is not found, the working directory is restored and None is
    returned.
    """
    original_wd = os.getcwd()
    for _ in range(levels_to_check):
        # Check the current directory for the target file.
        if os.path.isfile(target_fname):
            target_dir = os.getcwd()
            if verbose:
                print(f'Found target file in {target_dir}')
            return target_dir
        # Not here -- step up one directory and try again.
        os.chdir('../')
    # Exhausted all levels: restore the original working directory.
    os.chdir(original_wd)
    if verbose:
        warnings.warn(f"""\n\nUnable to find directory with file {target_fname} within {levels_to_check} parent directories""")
    return None
# Change to the project root (the directory containing the target file).
# NOTE(review): the original called `get_fpath_absolute_path()`, a
# misspelled, undefined name (NameError at import); the function defined
# above is `get_file_absolute_path`.  Also guard against it returning
# None, which would make os.chdir() raise.
_project_root = get_file_absolute_path()
if _project_root is not None:
    os.chdir(_project_root)
# Add the project root directory to sys.path so project modules import.
path = os.getcwd()
if path not in sys.path:
    sys.path.append(path)
| en | 0.675551 | # Change target_fname to a file in the root dir # If default target_fname='README.md' works, then simply: Pass a filename that exists in a directory an unknown number of levels higher # If reached the max number of directory levels change to original wd and print message \n\nUnable to find directory with file {target_fname} within {levels_to_check} parent directories # Check if README exists #cwd_files = # If not found move back one directory level # Add directory to PATH | 2.641958 | 3 |
views/__init__.py | maipatana/flask-restful-basic | 0 | 6612755 | <reponame>maipatana/flask-restful-basic
from flask import request, jsonify, g
from flask_restful import Resource
from models import User
from permissions.auth import auth
from .port_view import PortViewSet, PortsViewSet
from .user_view import UserViewSet, UsersViewSet
## ------------------------ Authentication and Token ------------------------ ##
class RefreshToken(Resource):
    """Exchange a valid refresh token for a fresh access/refresh token pair."""
    def post(self):
        """Issue new tokens from an ``Authorization: Bearer <token>`` header.

        Returns the new token pair on success, or a 401 error when the
        header is missing, malformed, or the token does not verify.
        (The original returned None in those cases, which Flask rejects
        as an invalid view return value, and ``split(' ')[1]`` raised
        IndexError on a header without a space.)
        """
        auth_header = request.headers.get('Authorization')
        if auth_header:
            parts = auth_header.split(' ')
            if len(parts) == 2:  # guard against a malformed header
                user = User.verify_refresh_token(parts[1])
                if user:
                    g.user = user
                    access_token = g.user.generate_auth_token()
                    refresh_token = g.user.generate_refresh_token()
                    return jsonify({ 'access_token': access_token.decode('ascii'),
                                     'refresh_token': refresh_token.decode('ascii'),
                                     'username': g.user.username })
        return {'message': 'Invalid or missing refresh token'}, 401
class AuthforToken(Resource):
    """Issue an access/refresh token pair for a credential-authenticated user."""
    @auth.login_required
    def post(self):
        """Return fresh tokens for the user the auth layer placed on ``g``."""
        user = g.user
        payload = {
            'access_token': user.generate_auth_token().decode('ascii'),
            'refresh_token': user.generate_refresh_token().decode('ascii'),
            'username': user.username,
        }
        return jsonify(payload)
| from flask import request, jsonify, g
from flask_restful import Resource
from models import User
from permissions.auth import auth
from .port_view import PortViewSet, PortsViewSet
from .user_view import UserViewSet, UsersViewSet
## ------------------------ Authentication and Token ------------------------ ##
class RefreshToken(Resource):
    """Exchange a valid refresh token for a fresh access/refresh token pair."""
    def post(self):
        """Issue new tokens from an ``Authorization: Bearer <token>`` header.

        Returns the new token pair on success, or a 401 error when the
        header is missing, malformed, or the token does not verify.
        (The original returned None in those cases, which Flask rejects
        as an invalid view return value, and ``split(' ')[1]`` raised
        IndexError on a header without a space.)
        """
        auth_header = request.headers.get('Authorization')
        if auth_header:
            parts = auth_header.split(' ')
            if len(parts) == 2:  # guard against a malformed header
                user = User.verify_refresh_token(parts[1])
                if user:
                    g.user = user
                    access_token = g.user.generate_auth_token()
                    refresh_token = g.user.generate_refresh_token()
                    return jsonify({ 'access_token': access_token.decode('ascii'),
                                     'refresh_token': refresh_token.decode('ascii'),
                                     'username': g.user.username })
        return {'message': 'Invalid or missing refresh token'}, 401
class AuthforToken(Resource):
@auth.login_required
def post(self):
access_token = g.user.generate_auth_token()
refresh_token = g.user.generate_refresh_token()
return jsonify({ 'access_token': access_token.decode('ascii'),
'refresh_token': refresh_token.decode('ascii'),
'username': g.user.username }) | en | 0.230191 | ## ------------------------ Authentication and Token ------------------------ ## | 2.387087 | 2 |
octopus/api/disassembler.py | ZarvisD/octopus | 2 | 6612756 | import binascii
class Disassembler(object):
    """Generic bytecode disassembler.

    Subclasses must implement :meth:`disassemble_opcode`, which decodes a
    single instruction at a given offset and returns an object exposing at
    least a ``size`` attribute.
    """
    def __init__(self, bytecode, asm):
        # Raw bytecode to disassemble: bytes, or a hex string with or
        # without a leading '0x'.
        self.bytecode = bytecode
        # Decoded instructions, in stream order (filled by disassemble()).
        self.instructions = list()
        # Mapping of instruction index -> instruction (filled by disassemble()).
        self.reverse_instructions = dict()
        self.asm = asm
    def disassemble_opcode(self, bytecode, offset=0):
        '''
        Decode one instruction starting at *offset* of *bytecode*.

        Must be implemented by subclasses.
        '''
        raise NotImplementedError
    def disassemble(self, bytecode=None, offset=0, r_format='list'):
        '''
        Disassemble *bytecode* (or the stored bytecode) starting at *offset*.

        *r_format* selects the return value: 'list' for the instruction
        list, 'text' for one instruction per line, or 'reverse' for the
        index -> instruction mapping.
        '''
        self.bytecode = bytecode if bytecode else self.bytecode
        # Normalize hex input to raw bytes.
        if str(self.bytecode).startswith('0x'):
            self.bytecode = str(self.bytecode)[2:]
        if isinstance(self.bytecode, str):
            self.bytecode = binascii.unhexlify(self.bytecode)
        while offset < len(self.bytecode):
            instr = self.disassemble_opcode(self.bytecode[offset:], offset)
            offset += instr.size
            self.instructions.append(instr)
        # Index the decoded instructions for reverse lookup.
        self.reverse_instructions = dict(enumerate(self.instructions))
        # Return instructions in the requested format.
        if r_format == 'list':
            return self.instructions
        elif r_format == 'text':
            return '\n'.join(map(str, self.instructions))
        elif r_format == 'reverse':
            return self.reverse_instructions
    def disassemble_contract(self, contract):
        '''
        Reset state and disassemble *contract*'s bytecode.
        '''
        self.instructions = list()
        self.reverse_instructions = dict()
        self.bytecode = contract.bytecode
        self.disassemble(self.bytecode)
| import binascii
class Disassembler(object):
    """Generic bytecode disassembler.

    Subclasses must implement :meth:`disassemble_opcode`, which decodes a
    single instruction at a given offset and returns an object exposing at
    least a ``size`` attribute.
    """
    def __init__(self, bytecode, asm):
        # Raw bytecode to disassemble: bytes, or a hex string with or
        # without a leading '0x'.
        self.bytecode = bytecode
        # Decoded instructions, in stream order (filled by disassemble()).
        self.instructions = list()
        # Mapping of instruction index -> instruction (filled by disassemble()).
        self.reverse_instructions = dict()
        self.asm = asm
    def disassemble_opcode(self, bytecode, offset=0):
        '''
        Decode one instruction starting at *offset* of *bytecode*.

        Must be implemented by subclasses.
        '''
        raise NotImplementedError
    def disassemble(self, bytecode=None, offset=0, r_format='list'):
        '''
        Disassemble *bytecode* (or the stored bytecode) starting at *offset*.

        *r_format* selects the return value: 'list' for the instruction
        list, 'text' for one instruction per line, or 'reverse' for the
        index -> instruction mapping.
        '''
        self.bytecode = bytecode if bytecode else self.bytecode
        # Normalize hex input to raw bytes.
        if str(self.bytecode).startswith('0x'):
            self.bytecode = str(self.bytecode)[2:]
        if isinstance(self.bytecode, str):
            self.bytecode = binascii.unhexlify(self.bytecode)
        while offset < len(self.bytecode):
            instr = self.disassemble_opcode(self.bytecode[offset:], offset)
            offset += instr.size
            self.instructions.append(instr)
        # Index the decoded instructions for reverse lookup.
        self.reverse_instructions = dict(enumerate(self.instructions))
        # Return instructions in the requested format.
        if r_format == 'list':
            return self.instructions
        elif r_format == 'text':
            return '\n'.join(map(str, self.instructions))
        elif r_format == 'reverse':
            return self.reverse_instructions
    def disassemble_contract(self, contract):
        '''
        Reset state and disassemble *contract*'s bytecode.
        '''
        self.instructions = list()
        self.reverse_instructions = dict()
        self.bytecode = contract.bytecode
        self.disassemble(self.bytecode)
| en | 0.344509 | TODO TODO # convert hex to bytes # fill reverse instructions # return instructions TODO | 3.553218 | 4 |
syfertext/encoders/bert_encoder.py | Dat-Boi-Arjun/SyferText | 0 | 6612757 | from typing import Dict, List
from transformers import BertTokenizer
class BERTEncoder:
def __init__(self):
self.tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
def __call__(self, text:List) -> Dict:
inputs = self.tokenizer(text)
return {"token_ids": inputs["input_ids"]} | from typing import Dict, List
from transformers import BertTokenizer
class BERTEncoder:
def __init__(self):
self.tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
def __call__(self, text:List) -> Dict:
inputs = self.tokenizer(text)
return {"token_ids": inputs["input_ids"]} | none | 1 | 2.79092 | 3 | |
calib/__init__.py | Algomorph/LevelSetFusion-Python | 8 | 6612758 | __all__ = ["utils", "geom", "io", "video", "camera", "app", "app_synced", "app_unsynced"]
| __all__ = ["utils", "geom", "io", "video", "camera", "app", "app_synced", "app_unsynced"]
| none | 1 | 1.027045 | 1 | |
stats/pitching.py | KevinElevin/Python-Baseball | 0 | 6612759 | <gh_stars>0
from data import games
import pandas as pd
import matplotlib.pyplot as plt
import sys
import os
# Make the local stats package importable regardless of the caller's cwd.
# NOTE(review): hard-coded absolute path -- only valid on the original
# author's machine.
sys.path.append(os.path.abspath("/d/git/Python-Baseball/stats"))
# Keep only the play-by-play event rows.
plays = games[games['type'] == 'play']
# Rows whose event string contains 'K' are strike outs
# (assumes Retrosheet-style event codes -- TODO confirm against data source).
strike_outs = plays[plays['event'].str.contains('K')]
# Count strike outs per (year, game) and flatten back to columns.
strike_outs = strike_outs.groupby(['year', 'game_id']).size()
strike_outs = strike_outs.reset_index(name='strike_outs')
# Coerce both plotted columns to numeric dtypes.
strike_outs = strike_outs.loc[:, [
    'year', 'strike_outs']].apply(pd.to_numeric)
# Scatter plot of per-game strike outs by year.
strike_outs.plot(kind='scatter', x='year',
                 y='strike_outs').legend('Strike Outs')
plt.show()
| from data import games
import pandas as pd
import matplotlib.pyplot as plt
import sys
import os
# Make the local stats package importable regardless of the caller's cwd.
# NOTE(review): hard-coded absolute path -- only valid on the original
# author's machine.
sys.path.append(os.path.abspath("/d/git/Python-Baseball/stats"))
# Keep only the play-by-play event rows.
plays = games[games['type'] == 'play']
# Rows whose event string contains 'K' are strike outs
# (assumes Retrosheet-style event codes -- TODO confirm against data source).
strike_outs = plays[plays['event'].str.contains('K')]
# Count strike outs per (year, game) and flatten back to columns.
strike_outs = strike_outs.groupby(['year', 'game_id']).size()
strike_outs = strike_outs.reset_index(name='strike_outs')
# Coerce both plotted columns to numeric dtypes.
strike_outs = strike_outs.loc[:, [
    'year', 'strike_outs']].apply(pd.to_numeric)
# Scatter plot of per-game strike outs by year.
strike_outs.plot(kind='scatter', x='year',
                 y='strike_outs').legend('Strike Outs')
plt.show() | none | 1 | 3.016946 | 3 | |
work/Clashes/Old work/wtest/run1 (1).py | youdar/work | 0 | 6612760 | from libtbx import easy_pickle
from mmtbx import monomer_library
import mmtbx.monomer_library.server
import mmtbx.monomer_library.pdb_interpretation
from scitbx.array_family import flex
pdb_str0="""\n
CRYST1 10.000 10.000 10.000 90.00 90.00 90.00 P 21 21 21 4
ATOM 1 N LYS 1 5.000 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 2 6.000 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 4 5.000 5.500 5.500 1.00 20.00 N
TER
END
"""
pdb_str1="""\n
CRYST1 10.000 10.000 10.000 90.00 90.00 90.00 P 21 21 21 4
ATOM 1 N LYS 1 5.000 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 2 5.100 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 4 5.000 5.500 5.500 1.00 20.00 N
TER
END
"""
def exercise(pdb_str, i):
  """Write *pdb_str* to m<i>.pdb, build geometry restraints for it, and
  print nonbonded clashscores — first for the full 3-atom model, then
  after selecting only the first two atoms.

  Python 2 syntax (print statements); requires cctbx/mmtbx.
  """
  print "-"*79
  # Dump the model so the run leaves an inspectable PDB file behind.
  of = open("m%s.pdb"%str(i), "w")
  print >> of, pdb_str
  of.close()
  # Build monomer-library servers and interpret the PDB records into a
  # processed model with symmetry applied.
  mon_lib_srv = monomer_library.server.server()
  ener_lib = monomer_library.server.ener_lib()
  processed_pdb_file = monomer_library.pdb_interpretation.process(
    mon_lib_srv = mon_lib_srv,
    ener_lib = ener_lib,
    raw_records = pdb_str,
    force_symmetry = True)
  geometry = processed_pdb_file.geometry_restraints_manager(
    show_energies = False,
    plain_pairs_radius = 5.0)
  xrs = processed_pdb_file.xray_structure()
  assert xrs is not None
  # Test 1: clashscore on the complete model (all / symmetry-related /
  # non-symmetry contributions printed separately).
  obj = geometry.get_nonbonded_clashscore(
    sites_cart = xrs.sites_cart(),
    site_labels = xrs.scatterers().extract_labels(),
    hd_sel = xrs.hd_selection())
  print obj.nb_clashscore_all_clashes
  print obj.nb_clashscore_due_to_sym_op
  print obj.nb_clashscore_without_sym_op
  print
  # Test 2: drop the third atom and verify selection keeps the restraints
  # manager and xray structure consistent.
  sel = flex.bool([True, True, False])
  xrs = xrs.select(sel)
  geometry = geometry.select(selection=sel)
  obj = geometry.get_nonbonded_clashscore(
    sites_cart = xrs.sites_cart(),
    site_labels = xrs.scatterers().extract_labels(),
    hd_sel = xrs.hd_selection())
  print obj.nb_clashscore_all_clashes
  print obj.nb_clashscore_due_to_sym_op
  print obj.nb_clashscore_without_sym_op
if (__name__ == "__main__"):
for i, pdb_str in enumerate([pdb_str0, pdb_str1]):
exercise(pdb_str=pdb_str, i=i)
| from libtbx import easy_pickle
from mmtbx import monomer_library
import mmtbx.monomer_library.server
import mmtbx.monomer_library.pdb_interpretation
from scitbx.array_family import flex
pdb_str0="""\n
CRYST1 10.000 10.000 10.000 90.00 90.00 90.00 P 21 21 21 4
ATOM 1 N LYS 1 5.000 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 2 6.000 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 4 5.000 5.500 5.500 1.00 20.00 N
TER
END
"""
pdb_str1="""\n
CRYST1 10.000 10.000 10.000 90.00 90.00 90.00 P 21 21 21 4
ATOM 1 N LYS 1 5.000 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 2 5.100 5.000 5.000 1.00 20.00 N
ATOM 1 N LYS 4 5.000 5.500 5.500 1.00 20.00 N
TER
END
"""
def exercise(pdb_str, i):
print "-"*79
of = open("m%s.pdb"%str(i), "w")
print >> of, pdb_str
of.close()
mon_lib_srv = monomer_library.server.server()
ener_lib = monomer_library.server.ener_lib()
processed_pdb_file = monomer_library.pdb_interpretation.process(
mon_lib_srv = mon_lib_srv,
ener_lib = ener_lib,
raw_records = pdb_str,
force_symmetry = True)
geometry = processed_pdb_file.geometry_restraints_manager(
show_energies = False,
plain_pairs_radius = 5.0)
xrs = processed_pdb_file.xray_structure()
assert xrs is not None
# tst 1
obj = geometry.get_nonbonded_clashscore(
sites_cart = xrs.sites_cart(),
site_labels = xrs.scatterers().extract_labels(),
hd_sel = xrs.hd_selection())
print obj.nb_clashscore_all_clashes
print obj.nb_clashscore_due_to_sym_op
print obj.nb_clashscore_without_sym_op
print
# tst 2
sel = flex.bool([True, True, False])
xrs = xrs.select(sel)
geometry = geometry.select(selection=sel)
obj = geometry.get_nonbonded_clashscore(
sites_cart = xrs.sites_cart(),
site_labels = xrs.scatterers().extract_labels(),
hd_sel = xrs.hd_selection())
print obj.nb_clashscore_all_clashes
print obj.nb_clashscore_due_to_sym_op
print obj.nb_clashscore_without_sym_op
if (__name__ == "__main__"):
for i, pdb_str in enumerate([pdb_str0, pdb_str1]):
exercise(pdb_str=pdb_str, i=i)
| en | 0.078334 | \n CRYST1 10.000 10.000 10.000 90.00 90.00 90.00 P 21 21 21 4 ATOM 1 N LYS 1 5.000 5.000 5.000 1.00 20.00 N ATOM 1 N LYS 2 6.000 5.000 5.000 1.00 20.00 N ATOM 1 N LYS 4 5.000 5.500 5.500 1.00 20.00 N TER END \n CRYST1 10.000 10.000 10.000 90.00 90.00 90.00 P 21 21 21 4 ATOM 1 N LYS 1 5.000 5.000 5.000 1.00 20.00 N ATOM 1 N LYS 2 5.100 5.000 5.000 1.00 20.00 N ATOM 1 N LYS 4 5.000 5.500 5.500 1.00 20.00 N TER END # tst 1 # tst 2 | 2.221914 | 2 |
tools/Vitis-AI-Optimizer/vai_p_tensorflow/fashion_mnist/export_inference_graph.py | hito0512/Vitis-AI | 1 | 6612761 | # Copyright 2021 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.python.framework import graph_util
from net import build_model
tf.app.flags.DEFINE_string('output_nodes', '', 'Output nodes of the given graph.')
tf.app.flags.DEFINE_string('graph_filename', '', 'Filename of the graph are saved to.')
tf.app.flags.DEFINE_string('save_dir', './', 'Directory where the graph file are saved to.')
FLAGS = tf.app.flags.FLAGS
def main(_):
  """Export the Keras model as a text-format inference GraphDef.

  Builds the model with learning phase 0 (freezes dropout/batch-norm into
  inference behaviour), extracts the subgraph feeding ``--output_nodes``
  and writes it to ``--save_dir/--graph_filename``.
  """
  tf.logging.set_verbosity(tf.logging.INFO)
  tf.keras.backend.set_learning_phase(0)
  model = build_model()
  model.compile(loss=tf.keras.losses.SparseCategoricalCrossentropy())
  graph_def = K.get_session().graph.as_graph_def()
  # Accept a comma-separated list of node names; previously the whole flag
  # string was wrapped in a one-element list, so "a,b" was treated as a
  # single (nonexistent) node. A single name without commas is unchanged.
  output_nodes = [n.strip() for n in FLAGS.output_nodes.split(',') if n.strip()]
  graph_def = graph_util.extract_sub_graph(graph_def, output_nodes)
  tf.train.write_graph(graph_def,
                       FLAGS.save_dir,
                       FLAGS.graph_filename,
                       as_text=True)
  print("Finish export inference graph: {}".format(FLAGS.save_dir))
if __name__ == '__main__':
tf.app.run()
| # Copyright 2021 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.python.framework import graph_util
from net import build_model
tf.app.flags.DEFINE_string('output_nodes', '', 'Output nodes of the given graph.')
tf.app.flags.DEFINE_string('graph_filename', '', 'Filename of the graph are saved to.')
tf.app.flags.DEFINE_string('save_dir', './', 'Directory where the graph file are saved to.')
FLAGS = tf.app.flags.FLAGS
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
tf.keras.backend.set_learning_phase(0)
model = build_model()
model.compile(loss=tf.keras.losses.SparseCategoricalCrossentropy())
graph_def = K.get_session().graph.as_graph_def()
graph_def = graph_util.extract_sub_graph(graph_def, [FLAGS.output_nodes])
tf.train.write_graph(graph_def,
FLAGS.save_dir,
FLAGS.graph_filename,
as_text=True)
print("Finish export inference graph: {}".format(FLAGS.save_dir))
if __name__ == '__main__':
tf.app.run()
| en | 0.847874 | # Copyright 2021 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. | 2.232044 | 2 |
package/PartSegCore/segmentation/noise_filtering.py | monotropauniflora/PartSeg | 0 | 6612762 | import typing
from abc import ABC
from enum import Enum
import numpy as np
from ..algorithm_describe_base import AlgorithmDescribeBase, AlgorithmProperty, Register
from ..class_generator import enum_register
from ..image_operations import gaussian, median
from .algorithm_base import calculate_operation_radius as _calculate_operation_radius
class DimensionType(Enum):
    """Whether an operation acts on each layer (2D) or the whole stack (3D)."""

    Layer = 1
    Stack = 2

    def __str__(self):
        # Show underscores as spaces in the human-readable form.
        return " ".join(self.name.split("_"))
# Register the enum under its legacy name only on the very first import:
# on a module reload ``reloading`` already exists, so registration (which
# would raise on a duplicate) is skipped.
try:
    # noinspection PyUnresolvedReferences,PyUnboundLocalVariable
    reloading
except NameError:
    reloading = False  # means the module is being imported for the first time
    enum_register.register_class(DimensionType, old_name="GaussType")
class NoiseFilteringBase(AlgorithmDescribeBase, ABC):
    """Common interface for every noise-removal backend."""

    @classmethod
    def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict) -> np.ndarray:
        """Return *channel* with noise removed. Subclasses must override.

        :param channel: single channel as a 2d or 3d array
        :param spacing: image voxel spacing
        :param arguments: extra parameters described by :py:meth:`get_fields`
        :return: denoised channel array
        """
        raise NotImplementedError()
class NoneNoiseFiltering(NoiseFilteringBase):
    """Pass-through backend: returns the channel untouched."""

    @classmethod
    def get_name(cls):
        return "None"

    @classmethod
    def get_fields(cls):
        # No tunable parameters.
        return []

    @classmethod
    def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict):
        return channel
class GaussNoiseFiltering(NoiseFilteringBase):
    """Gaussian-blur based noise removal."""

    @classmethod
    def get_name(cls):
        return "Gauss"

    @classmethod
    def get_fields(cls):
        return [
            AlgorithmProperty("dimension_type", "Gauss type", DimensionType.Layer),
            AlgorithmProperty("radius", "Gauss radius", 1.0, property_type=float),
        ]

    @classmethod
    def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict):
        # Resolve the physical radius into per-axis radii.
        radius = calculate_operation_radius(arguments["radius"], spacing, arguments["dimension_type"])
        per_layer = arguments["dimension_type"] == DimensionType.Layer
        return gaussian(channel, radius, layer=per_layer)
def calculate_operation_radius(radius, spacing, gauss_type):
    """Resolve *radius* to per-axis radii via the shared helper; if the
    helper returns the scalar unchanged, broadcast it over every axis."""
    resolved = _calculate_operation_radius(radius, spacing, gauss_type)
    if resolved != radius:
        return resolved
    return [radius for _ in spacing]
class MedianNoiseFiltering(NoiseFilteringBase):
    """Median-filter based noise removal."""

    @classmethod
    def get_name(cls):
        return "Median"

    @classmethod
    def get_fields(cls):
        return [
            AlgorithmProperty("dimension_type", "Median type", DimensionType.Layer),
            AlgorithmProperty("radius", "Median radius", 1, property_type=int),
        ]

    @classmethod
    def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict):
        radius = calculate_operation_radius(arguments["radius"], spacing, arguments["dimension_type"])
        per_layer = arguments["dimension_type"] == DimensionType.Layer
        # Median filtering needs integral per-axis radii.
        radius = [int(x) for x in radius]
        return median(channel, radius, layer=per_layer)
# Registry of all available noise-filtering backends, keyed by get_name();
# ``noise_filter`` is exposed as a registered class method.
noise_filtering_dict = Register(
    NoneNoiseFiltering, GaussNoiseFiltering, MedianNoiseFiltering, class_methods=["noise_filter"]
)
| import typing
from abc import ABC
from enum import Enum
import numpy as np
from ..algorithm_describe_base import AlgorithmDescribeBase, AlgorithmProperty, Register
from ..class_generator import enum_register
from ..image_operations import gaussian, median
from .algorithm_base import calculate_operation_radius as _calculate_operation_radius
class DimensionType(Enum):
Layer = 1
Stack = 2
def __str__(self):
return self.name.replace("_", " ")
try:
# noinspection PyUnresolvedReferences,PyUnboundLocalVariable
reloading
except NameError:
reloading = False # means the module is being imported
enum_register.register_class(DimensionType, old_name="GaussType")
class NoiseFilteringBase(AlgorithmDescribeBase, ABC):
"""Base class for noise filtering operations"""
@classmethod
def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict) -> np.ndarray:
"""
This function need be overloaded in implementation
:param channel: single channel ad 2d or 3d array
:param spacing: image spacing
:param arguments: additional arguments defined by :py:meth:`get_fields`
:return: channel array with removed noise
"""
raise NotImplementedError()
class NoneNoiseFiltering(NoiseFilteringBase):
@classmethod
def get_name(cls):
return "None"
@classmethod
def get_fields(cls):
return []
@classmethod
def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict):
return channel
class GaussNoiseFiltering(NoiseFilteringBase):
@classmethod
def get_name(cls):
return "Gauss"
@classmethod
def get_fields(cls):
return [
AlgorithmProperty("dimension_type", "Gauss type", DimensionType.Layer),
AlgorithmProperty("radius", "Gauss radius", 1.0, property_type=float),
]
@classmethod
def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict):
gauss_radius = calculate_operation_radius(arguments["radius"], spacing, arguments["dimension_type"])
layer = arguments["dimension_type"] == DimensionType.Layer
return gaussian(channel, gauss_radius, layer=layer)
def calculate_operation_radius(radius, spacing, gauss_type):
res = _calculate_operation_radius(radius, spacing, gauss_type)
if res == radius:
return [radius for _ in spacing]
return res
class MedianNoiseFiltering(NoiseFilteringBase):
@classmethod
def get_name(cls):
return "Median"
@classmethod
def get_fields(cls):
return [
AlgorithmProperty("dimension_type", "Median type", DimensionType.Layer),
AlgorithmProperty("radius", "Median radius", 1, property_type=int),
]
@classmethod
def noise_filter(cls, channel: np.ndarray, spacing: typing.Iterable[float], arguments: dict):
gauss_radius = calculate_operation_radius(arguments["radius"], spacing, arguments["dimension_type"])
layer = arguments["dimension_type"] == DimensionType.Layer
gauss_radius = [int(x) for x in gauss_radius]
return median(channel, gauss_radius, layer=layer)
noise_filtering_dict = Register(
NoneNoiseFiltering, GaussNoiseFiltering, MedianNoiseFiltering, class_methods=["noise_filter"]
)
| en | 0.701274 | # noinspection PyUnresolvedReferences,PyUnboundLocalVariable # means the module is being imported Base class for noise filtering operations This function need be overloaded in implementation :param channel: single channel ad 2d or 3d array :param spacing: image spacing :param arguments: additional arguments defined by :py:meth:`get_fields` :return: channel array with removed noise | 2.43749 | 2 |
src/ocgis/test/fragments/test_interpreter.py | doutriaux1/ocgis | 1 | 6612763 | <reponame>doutriaux1/ocgis<gh_stars>1-10
import unittest
from ocg.api.interp.interpreter import Interpreter
from ocg.test.misc import gen_descriptor_classes, pause_test
class TestInterpreter(unittest.TestCase):
    """Smoke tests for Interpreter dispatch over generated descriptors."""

    @pause_test  # currently skipped; remove the decorator to re-enable
    def test_check(self):
        # Every generated descriptor should pass the interpreter's validation.
        for desc in gen_descriptor_classes():
            interp = Interpreter.get_interpreter(desc)
            interp.check()

    def test_execute(self):
        # A single iteration is enough for an execution smoke test.
        for desc in gen_descriptor_classes(niter=1):
            interp = Interpreter.get_interpreter(desc)
            interp.execute()
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | import unittest
from ocg.api.interp.interpreter import Interpreter
from ocg.test.misc import gen_descriptor_classes, pause_test
class TestInterpreter(unittest.TestCase):
@pause_test
def test_check(self):
for desc in gen_descriptor_classes():
interp = Interpreter.get_interpreter(desc)
interp.check()
def test_execute(self):
for desc in gen_descriptor_classes(niter=1):
interp = Interpreter.get_interpreter(desc)
interp.execute()
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | en | 0.163976 | #import sys;sys.argv = ['', 'Test.testName'] | 2.419735 | 2 |
bin/install.py | elihuvillarausvov4y/a9tjeremyb | 2 | 6612764 | <gh_stars>1-10
# coding:utf-8
"""Build wetool and its plugins locally.

Pulls the latest sources, installs wetool-plugin-support, builds wetool with
its dependency-bundling maven plugin temporarily commented out of pom.xml
(restoring the file afterwards), then builds wetool-plugin-test.
"""
import os

os.chdir('..')
if not os.path.exists('../wetool'):
    print('wetool repository not exists, please clone it from gitee or github')
    exit()
# Pull the latest code.
print(os.popen('git pull').read())
# Install the support module.
os.chdir('./wetool-plugin-support')
print('install wetool-plugin-support...\r\n')
print(os.popen('mvn clean install').read())
# Switch to wetool, pull, and comment out the first <plugin>...</plugin>
# pair in pom.xml (the plugin that bundles all dependencies).
os.chdir('../../wetool')
print(os.popen('git pull').read())
# ``with`` guarantees the handles are closed even if a write fails,
# unlike the original open()/close() pairs.
with open('./pom.xml', 'r', encoding='utf-8') as fr:
    content = fr.read()
with open('./pom.xml', 'w', encoding='utf-8') as fw:
    fw.write(content.replace('<plugin>', '<!--', 1).replace('</plugin>', '-->', 1))
# Build wetool, then restore the original pom file.
print('install wetool...\r\n')
print(os.popen('mvn clean install').read())
with open('./pom.xml', 'w', encoding='utf-8') as fw:
    fw.write(content)
# Build the test plugin.
print('install wetool-plugin-test...\r\n')
os.chdir('../wetool-plugin/wetool-plugin-test')
print(os.popen('mvn clean install').read())
| # coding:utf-8
import os
os.chdir('..')
if not os.path.exists('../wetool'):
print('wetool repository not exists, please clone it from gitee or github')
exit()
# 拉取代码
print(os.popen('git pull').read())
# 安装support
os.chdir('./wetool-plugin-support')
print('install wetool-plugin-support...\r\n')
print(os.popen('mvn clean install').read())
# 切至wetool拉取代码,注释掉maven打包所有依赖的插件
os.chdir('../../wetool')
print(os.popen('git pull').read())
fr = open('./pom.xml', 'r', encoding='utf-8')
content = fr.read()
fr.close()
fw = open('./pom.xml', 'w', encoding='utf-8')
fw.write(content.replace('<plugin>', '<!--', 1).replace('</plugin>', '-->', 1))
fw.close()
# 打包wetool,恢复pom文件
print('install wetool...\r\n')
print(os.popen('mvn clean install').read())
fw = open('./pom.xml', 'w', encoding='utf-8')
fw.write(content)
fw.close()
# 打包test
print('install wetool-plugin-test...\r\n')
os.chdir('../wetool-plugin/wetool-plugin-test')
print(os.popen('mvn clean install').read()) | zh | 0.846337 | # coding:utf-8 # 拉取代码 # 安装support # 切至wetool拉取代码,注释掉maven打包所有依赖的插件 # 打包wetool,恢复pom文件 # 打包test | 2.242348 | 2 |
Chapter04/defaultdict_age_groups.py | PacktPublishing/Secret-Recipes-of-the-Python-Ninja | 13 | 6612765 | In [8]: from collections import defaultdict
In [9]: age_groups = defaultdict(list)
In [10]: for person in people:
...: age_groups[person.age].append(person)
...:
In [11]: for k in age_groups:
...: print(k, age_groups[k])
...:
40 [40, 40]
18 [18, 18, 18]
42 [42]
25 [25]
23 [23]
80 [80]
67 [67]
| In [8]: from collections import defaultdict
In [9]: age_groups = defaultdict(list)
In [10]: for person in people:
...: age_groups[person.age].append(person)
...:
In [11]: for k in age_groups:
...: print(k, age_groups[k])
...:
40 [40, 40]
18 [18, 18, 18]
42 [42]
25 [25]
23 [23]
80 [80]
67 [67]
| none | 1 | 3.411335 | 3 | |
torchreid/models/resnet.py | fremigereau/MTDA_KD_REID | 0 | 6612766 | <filename>torchreid/models/resnet.py
"""
Code source: https://github.com/pytorch/vision
"""
from __future__ import division, absolute_import
import copy
import torch
import torch.utils.model_zoo as model_zoo
import torch.nn.functional as F
from torch.nn import init
from torch import nn
import math
from scipy.stats import norm
# Names exported by ``from resnet import *``.
__all__ = ['resnet18','resnet34','resnet50','resnet101','resnet152']

# Torchvision checkpoint URLs for ImageNet-pretrained weights.
model_urls = {'resnet18':'https://download.pytorch.org/models/resnet18-5c106cde.pth',
              'resnet34':'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
              'resnet50':'https://download.pytorch.org/models/resnet50-19c8e357.pth',
              'resnet101':'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
              'resnet152':'https://download.pytorch.org/models/resnet152-b121ed2d.pth'
              }
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """3x3 convolution without bias; padding equals the dilation so the
    spatial size is preserved at stride 1."""
    return nn.Conv2d(
        in_planes, out_planes, 3,
        stride=stride, padding=dilation, dilation=dilation,
        groups=groups, bias=False,
    )
def conv1x1(in_planes, out_planes, stride=1):
    """1x1 (pointwise) convolution without bias."""
    return nn.Conv2d(in_planes, out_planes, 1, stride=stride, bias=False)
class BasicBlock(nn.Module):
    # Two-conv residual block, modified for feature distillation: the input
    # is ReLU'd on entry and the final ReLU is omitted, so the block emits
    # *pre-activation* outputs (the KD losses apply a margin ReLU externally;
    # see ResNet.forward / get_margin_from_bn).
    expansion = 1  # output channels = planes * expansion

    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None
    ):
        super(BasicBlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if groups != 1 or base_width != 64:
            raise ValueError(
                'BasicBlock only supports groups=1 and base_width=64'
            )
        if dilation > 1:
            raise NotImplementedError(
                "Dilation > 1 not supported in BasicBlock"
            )
        # Both self.conv1 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Activate the previous block's pre-activation output on entry.
        x = F.relu(x)
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        # Deliberately no ReLU here: the raw residual sum is returned so the
        # distillation losses can see pre-activation features.
        # out = self.relu(out)
        return out
class Bottleneck(nn.Module):
    # 1x1 -> 3x3 -> 1x1 bottleneck residual block, modified like BasicBlock:
    # input is ReLU'd on entry and the final ReLU is omitted so the block
    # emits pre-activation outputs for the distillation losses.
    expansion = 4  # output channels = planes * expansion

    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None
    ):
        super(Bottleneck, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        # ResNeXt-style width scaling (plain ResNet: width == planes).
        width = int(planes * (base_width/64.)) * groups
        # Both self.conv2 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.conv2 = conv3x3(width, width, stride, groups, dilation)
        self.bn2 = norm_layer(width)
        self.conv3 = conv1x1(width, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Activate the previous block's pre-activation output on entry.
        x = F.relu(x)
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        # Deliberately no final ReLU (pre-activation output for KD).
        # out = self.relu(out)
        return out
class ResNet(nn.Module):
    """Residual network backbone for re-id with knowledge-distillation hooks.

    Reference:
        - He et al. Deep Residual Learning for Image Recognition. CVPR 2016.
        - Xie et al. Aggregated Residual Transformations for Deep Neural Networks. CVPR 2017.

    Public keys:
        - ``resnet18``: ResNet18.
        - ``resnet34``: ResNet34.
        - ``resnet50``: ResNet50.
        - ``resnet101``: ResNet101.
        - ``resnet152``: ResNet152.
        - ``resnext50_32x4d``: ResNeXt50.
        - ``resnext101_32x8d``: ResNeXt101.
        - ``resnet50_fc512``: ResNet50 + FC.

    Besides the usual embedding/logits outputs, :meth:`forward` can return
    the four stage feature maps for feature-distillation losses
    ('kd_reid', 'feat_kd', 'adv_feat_kd').

    Only change vs. the original: ``is``/``is not`` identity comparisons
    against ``None`` instead of ``==``/``!=`` (PEP 8; avoids ``__eq__``).
    """

    def __init__(
        self,
        num_classes,
        loss,
        block,
        layers,
        zero_init_residual=False,
        groups=1,
        fc_dim=2048,
        width_per_group=64,
        replace_stride_with_dilation=None,
        norm_layer=None,
        last_stride=2,  # stride of layer4; was 2 initially (re-id often uses 1)
        dropout_p=None,
        teacher_arch=None,
        **kwargs
    ):
        super(ResNet, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer
        self.loss = loss
        # Name of the teacher backbone when this model is a KD student;
        # None means this model acts as the teacher (or no distillation).
        self.teacher_arch = teacher_arch
        self.margins = None  # filled by get_margin_from_bn()
        self.out_dim = 512 * block.expansion
        self.feature_dim = self.out_dim
        self.fc_dim = fc_dim
        self.inplanes = 64
        self.dilation = 1
        self.expansion = block.expansion
        self.multi_head = False  # enabled later by convert_2_multi_head()
        if replace_stride_with_dilation is None:
            # each element in the tuple indicates if we should replace
            # the 2x2 stride with a dilated convolution instead
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError(
                "replace_stride_with_dilation should be None "
                "or a 3-element tuple, got {}".
                format(replace_stride_with_dilation)
            )
        self.groups = groups
        self.base_width = width_per_group
        # Stem: 7x7/2 conv + BN + ReLU + 3x3/2 max-pool.
        self.conv1 = nn.Conv2d(
            3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False
        )
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(
            block,
            128,
            layers[1],
            stride=2,
            dilate=replace_stride_with_dilation[0]
        )
        self.layer3 = self._make_layer(
            block,
            256,
            layers[2],
            stride=2,
            dilate=replace_stride_with_dilation[1]
        )
        self.layer4 = self._make_layer(
            block,
            512,
            layers[3],
            stride=last_stride,
            dilate=replace_stride_with_dilation[2]
        )
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        if fc_dim > 0:
            # Embedding head: linear projection + 1d batch-norm.
            self.feat = nn.Linear(self.out_dim, self.feature_dim)
            self.feat_bn = nn.BatchNorm1d(self.feature_dim)
            init.kaiming_normal_(self.feat.weight, mode='fan_out')
            init.constant_(self.feat.bias, 0)
            self.feature_dim = fc_dim
        self.classifier = nn.Linear(self.feature_dim, num_classes)
        self._init_params()
        if self.teacher_arch is not None:  # fixed: was `!= None`
            if self.teacher_arch == "resnet50" or self.teacher_arch == "resnet101" or self.teacher_arch == "resnet152":
                teacher_feat_dims = [256, 512, 1024, 2048]
            else:
                teacher_feat_dims = [64, 128, 256, 512]
            student_feat_dims = [64 * self.expansion, 128 * self.expansion, 256 * self.expansion,
                                 512 * self.expansion]
            # 1x1 conv to match smaller resnet feature dimension with larger models
            if self.loss == 'kd_reid':
                self.feat_matcher_list = nn.ModuleList([self._construct_feat_matchers(s, t) for s, t in zip(student_feat_dims, teacher_feat_dims)])
        # Zero-initialize the last BN in each residual branch,
        # so that the residual branch starts with zeros, and each residual block behaves like an identity.
        # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        """Stack *blocks* residual blocks; the first may stride/downsample."""
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            # Trade stride for dilation to keep spatial resolution.
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Projection shortcut to match shape of the residual branch.
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []
        layers.append(
            block(
                self.inplanes, planes, stride, downsample, self.groups,
                self.base_width, previous_dilation, norm_layer
            )
        )
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(
                    self.inplanes,
                    planes,
                    groups=self.groups,
                    base_width=self.base_width,
                    dilation=self.dilation,
                    norm_layer=norm_layer
                )
            )
        return nn.Sequential(*layers)

    def _construct_fc_layer(self, fc_dims, input_dim, dropout_p=None):
        """Constructs fully connected layer

        Args:
            fc_dims (list or tuple): dimensions of fc layers, if None, no fc layers are constructed
            input_dim (int): input dimension
            dropout_p (float): dropout probability, if None, dropout is unused
        """
        if fc_dims is None:
            self.feature_dim = input_dim
            return None
        assert isinstance(
            fc_dims, (list, tuple)
        ), 'fc_dims must be either list or tuple, but got {}'.format(
            type(fc_dims)
        )
        layers = []
        for dim in fc_dims:
            layers.append(nn.Linear(input_dim, dim))
            layers.append(nn.BatchNorm1d(dim))
            layers.append(nn.ReLU(inplace=True))
            if dropout_p is not None:
                layers.append(nn.Dropout(p=dropout_p))
            input_dim = dim
        self.feature_dim = fc_dims[-1]
        return nn.Sequential(*layers)

    def _construct_feat_matchers(self, dim_in, dim_out):
        """1x1 conv + BN mapping student stage features (dim_in channels)
        to the teacher's channel count (dim_out); He-initialised."""
        C = [nn.Conv2d(dim_in, dim_out, kernel_size=1, stride=1, padding=0, bias=False),
             nn.BatchNorm2d(dim_out)]
        for m in C:
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
        return nn.Sequential(*C)

    def _init_params(self):
        """Kaiming init for convs, unit/zero for norms, N(0, 0.01) for linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(
                    m.weight, mode='fan_out', nonlinearity='relu'
                )
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm1d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)

    def get_margin_from_bn(self):
        """Derive per-channel margins for the margin-ReLU distillation loss
        from the last BN of each stage (expected value of the negative part
        of a N(mean, std) activation), and register them as buffers
        ``margin1`` .. ``margin4`` shaped (1, C, 1, 1).
        """
        if isinstance(self.layer1[0], Bottleneck):
            bn1 = self.layer1[-1].bn3
            bn2 = self.layer2[-1].bn3
            bn3 = self.layer3[-1].bn3
            bn4 = self.layer4[-1].bn3
        elif isinstance(self.layer1[0], BasicBlock):
            bn1 = self.layer1[-1].bn2
            bn2 = self.layer2[-1].bn2
            bn3 = self.layer3[-1].bn2
            bn4 = self.layer4[-1].bn2
        else:
            raise KeyError('ResNet unknown block error !!!')
        bns = [bn1, bn2, bn3, bn4]
        for i, bn in enumerate(bns):
            margin = []
            std = bn.weight.data
            mean = bn.bias.data
            for (s, m) in zip(std, mean):
                s = abs(s.item())
                m = m.item()
                if norm.cdf(-m / s) > 0.001:
                    # E[x | x < 0] for x ~ N(m, s): truncated-normal mean.
                    margin.append(
                        - s * math.exp(- (m / s) ** 2 / 2) / math.sqrt(2 * math.pi) / norm.cdf(-m / s) + m)
                else:
                    # Negative tail is negligible; fall back to -3 sigma.
                    margin.append(-3 * s)
            margin = torch.FloatTensor(margin).to(std.device)
            self.register_buffer('margin%d' % (i+1), margin.unsqueeze(1).unsqueeze(2).unsqueeze(0).detach())
        return margin

    def get_channel_num(self):
        """Channel counts of the four stage outputs (f1..f4)."""
        return [64 * self.expansion, 128 * self.expansion, 256 * self.expansion, 512 * self.expansion]

    def forward(self, input, target=None):
        """Run the backbone.

        Eval mode returns the L2-normalised embedding only. In training the
        return depends on ``self.loss``:

        - 'softmax'                        -> logits
        - 'kd_mmd' / 'mmd' / 'triplet'     -> (logits, embedding)
        - 'kd_reid' / 'feat_kd' /
          'adv_feat_kd'                    -> ([f1..f4], embedding, logits)

        ``target`` selects the embedding head when ``multi_head`` is set.
        """
        x = self.conv1(input)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        # Stage outputs are pre-activation (blocks skip their final ReLU).
        f1 = self.layer1(x)
        f2 = self.layer2(f1)
        f3 = self.layer3(f2)
        f4 = self.layer4(f3)
        f = F.relu(f4)
        v = self.global_avgpool(f)
        v = v.view(v.size(0), -1)
        if self.fc_dim > 0:
            if self.multi_head:
                v = self.feat_fc_multi[target](v)
            else:
                v = self.feat_bn(self.feat(v))
        if not self.training:
            v = F.normalize(v)
            return v
        y = self.classifier(v)
        if self.loss == 'softmax':
            return y
        elif self.loss == 'kd_mmd' or self.loss == 'mmd' or self.loss == 'triplet':
            return y, v
        elif self.loss == 'kd_reid':
            # Margin ReLU if teacher, 1x1 Conv for student
            if self.teacher_arch is None:  # fixed: was `== None`
                f1 = torch.max(f1, getattr(self, 'margin%d' % (1)))
                f1 = f1.view(f1.size(0), -1)
                f2 = torch.max(f2, getattr(self, 'margin%d' % (2)))
                f2 = f2.view(f2.size(0), -1)
                f3 = torch.max(f3, getattr(self, 'margin%d' % (3)))
                f3 = f3.view(f3.size(0), -1)
                f4 = torch.max(f4, getattr(self, 'margin%d' % (4)))
                f4 = f4.view(f4.size(0), -1)
            else:
                f1 = self.feat_matcher_list[0](f1)
                f1 = f1.view(f1.size(0), -1)
                f2 = self.feat_matcher_list[1](f2)
                f2 = f2.view(f2.size(0), -1)
                f3 = self.feat_matcher_list[2](f3)
                f3 = f3.view(f3.size(0), -1)
                f4 = self.feat_matcher_list[3](f4)
                f4 = f4.view(f4.size(0), -1)
            return [f1, f2, f3, f4], v, y
        elif self.loss == 'feat_kd':
            f1 = F.relu(f1)
            f1 = f1.view(f1.size(0), -1)
            f2 = F.relu(f2)
            f2 = f2.view(f2.size(0), -1)
            f3 = F.relu(f3)
            f3 = f3.view(f3.size(0), -1)
            f4 = F.relu(f4)
            f4 = f4.view(f4.size(0), -1)
            return [f1, f2, f3, f4], v, y
        elif self.loss == 'adv_feat_kd':
            # Keep spatial maps (no flatten) for the adversarial variant.
            f1 = F.relu(f1)
            f2 = F.relu(f2)
            f3 = F.relu(f3)
            f4 = F.relu(f4)
            return [f1, f2, f3, f4], v, y
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
def convert_2_multi_head(model, multi_head):
    """Give *model* one (feat, feat_bn) embedding head per target domain.

    Deep-copies the existing single head ``multi_head`` times into
    ``model.feat_fc_multi``; ``ResNet.forward`` then selects a head via its
    ``target`` argument once ``model.multi_head`` is True.
    """
    model.multi_head = True
    model.feat_fc_multi = nn.ModuleList()
    for t in range(multi_head):
        feat_tmp = copy.deepcopy(model.feat)
        feat_bn_tmp = copy.deepcopy(model.feat_bn)
        C = [feat_tmp, feat_bn_tmp]
        model.feat_fc_multi.append(nn.Sequential(*C))
def init_pretrained_weights(model, model_url):
    """Load pretrained weights from *model_url* into *model*.

    Layers whose name or tensor size does not match the checkpoint are left
    at their current (randomly initialised) values.
    """
    pretrain_dict = model_zoo.load_url(model_url)
    model_dict = model.state_dict()
    compatible = {}
    for key, weight in pretrain_dict.items():
        if key in model_dict and model_dict[key].size() == weight.size():
            compatible[key] = weight
    model_dict.update(compatible)
    model.load_state_dict(model_dict)
def resnet18(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-18 (BasicBlock, layers 2-2-2-2), optionally ImageNet-pretrained."""
    model = ResNet(
        num_classes=num_classes,
        loss=loss,
        block=BasicBlock,
        layers=[2, 2, 2, 2],
        last_stride=2,
        fc_dim=fc_dim,
        dropout_p=None,
        teacher_arch=teacher_arch,
        **kwargs
    )
    if pretrained:
        init_pretrained_weights(model, model_urls['resnet18'])
        # Distillation margins are derived from the pretrained BN statistics.
        model.margins = model.get_margin_from_bn()
    return model
def resnet34(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-34 (BasicBlock, layers 3-4-6-3), optionally ImageNet-pretrained."""
    model = ResNet(
        num_classes=num_classes,
        loss=loss,
        block=BasicBlock,
        layers=[3, 4, 6, 3],
        last_stride=2,
        fc_dim=fc_dim,
        dropout_p=None,
        teacher_arch=teacher_arch,
        **kwargs
    )
    if pretrained:
        init_pretrained_weights(model, model_urls['resnet34'])
        # Distillation margins are derived from the pretrained BN statistics.
        model.margins = model.get_margin_from_bn()
    return model
def resnet50(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-50 (Bottleneck, layers 3-4-6-3), optionally ImageNet-pretrained."""
    model = ResNet(
        num_classes=num_classes,
        loss=loss,
        block=Bottleneck,
        layers=[3, 4, 6, 3],
        last_stride=2,
        fc_dim=fc_dim,
        dropout_p=None,
        teacher_arch=teacher_arch,
        **kwargs
    )
    if pretrained:
        init_pretrained_weights(model, model_urls['resnet50'])
        # Distillation margins are derived from the pretrained BN statistics.
        model.margins = model.get_margin_from_bn()
    return model
def resnet101(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """Build a ResNet-101 (Bottleneck, stage depths 3-4-23-3).

    When ``pretrained`` is True, matching torchvision weights are loaded and
    the per-stage distillation margins are derived from the batch norms.
    """
    model = ResNet(
        num_classes=num_classes, loss=loss, block=Bottleneck,
        layers=[3, 4, 23, 3], last_stride=2, fc_dim=fc_dim,
        dropout_p=None, teacher_arch=teacher_arch, **kwargs
    )
    if pretrained:
        init_pretrained_weights(model, model_urls['resnet101'])
        model.margins = model.get_margin_from_bn()
    return model
def resnet152(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """Build a ResNet-152 (Bottleneck, stage depths 3-8-36-3).

    When ``pretrained`` is True, matching torchvision weights are loaded and
    the per-stage distillation margins are derived from the batch norms.
    """
    model = ResNet(
        num_classes=num_classes, loss=loss, block=Bottleneck,
        layers=[3, 8, 36, 3], last_stride=2, fc_dim=fc_dim,
        dropout_p=None, teacher_arch=teacher_arch, **kwargs
    )
    if pretrained:
        init_pretrained_weights(model, model_urls['resnet152'])
        model.margins = model.get_margin_from_bn()
    return model
| <filename>torchreid/models/resnet.py
"""
Code source: https://github.com/pytorch/vision
"""
from __future__ import division, absolute_import
import copy
import torch
import torch.utils.model_zoo as model_zoo
import torch.nn.functional as F
from torch.nn import init
from torch import nn
import math
from scipy.stats import norm
# Public factory functions exported by this module.
__all__ = ['resnet18','resnet34','resnet50','resnet101','resnet152']
# Official torchvision checkpoint URLs used to warm-start each depth.
model_urls = {'resnet18':'https://download.pytorch.org/models/resnet18-5c106cde.pth',
              'resnet34':'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
              'resnet50':'https://download.pytorch.org/models/resnet50-19c8e357.pth',
              'resnet101':'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
              'resnet152':'https://download.pytorch.org/models/resnet152-b121ed2d.pth'
              }
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    """Bias-free 3x3 convolution; padding equals dilation so the spatial
    size is preserved whenever stride is 1."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=dilation, groups=groups, bias=False,
                     dilation=dilation)
def conv1x1(in_planes, out_planes, stride=1):
    """Bias-free 1x1 (pointwise) convolution."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride,
                     bias=False)
class BasicBlock(nn.Module):
    """Pre-activation style basic residual block (two 3x3 convs).

    Unlike torchvision's BasicBlock, ``forward`` applies ReLU to the block
    *input* and omits the trailing ReLU (left commented out), so the raw
    pre-activation feature maps stay available to the distillation losses
    in ``ResNet.forward``.
    """
    expansion = 1  # output channels == planes * expansion
    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None
    ):
        super(BasicBlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if groups != 1 or base_width != 64:
            raise ValueError(
                'BasicBlock only supports groups=1 and base_width=64'
            )
        if dilation > 1:
            raise NotImplementedError(
                "Dilation > 1 not supported in BasicBlock"
            )
        # Both self.conv1 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride
    def forward(self, x):
        # ReLU the incoming (pre-activation) features before either branch.
        x = F.relu(x)
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        # out = self.relu(out)  -- final ReLU deliberately omitted; the next
        # block (or ResNet.forward's F.relu) applies it instead.
        return out
class Bottleneck(nn.Module):
    """Pre-activation style bottleneck residual block (1x1 -> 3x3 -> 1x1).

    Like ``BasicBlock`` above, ``forward`` ReLUs the block *input* and omits
    the trailing ReLU (commented out) so raw pre-activation maps remain
    available for the distillation losses in ``ResNet.forward``.
    """
    expansion = 4  # output channels == planes * expansion
    def __init__(
        self,
        inplanes,
        planes,
        stride=1,
        downsample=None,
        groups=1,
        base_width=64,
        dilation=1,
        norm_layer=None
    ):
        super(Bottleneck, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        # Inner width scales with base_width and groups (ResNeXt-style).
        width = int(planes * (base_width/64.)) * groups
        # Both self.conv2 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.conv2 = conv3x3(width, width, stride, groups, dilation)
        self.bn2 = norm_layer(width)
        self.conv3 = conv1x1(width, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
    def forward(self, x):
        # ReLU the incoming (pre-activation) features before either branch.
        x = F.relu(x)
        identity = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        # out = self.relu(out)  -- final ReLU deliberately omitted; the next
        # block (or ResNet.forward's F.relu) applies it instead.
        return out
class ResNet(nn.Module):
    """Residual network backbone with re-ID / knowledge-distillation hooks.

    Reference:
        - He et al. Deep Residual Learning for Image Recognition. CVPR 2016.
        - Xie et al. Aggregated Residual Transformations for Deep Neural Networks. CVPR 2017.

    Factory entry points defined in this file:
        ``resnet18``, ``resnet34``, ``resnet50``, ``resnet101``, ``resnet152``.

    Beyond the plain torchvision backbone this variant adds:
        - pre-activation residual blocks (see ``BasicBlock``/``Bottleneck``);
        - an optional embedding head (``feat`` + ``feat_bn``) when ``fc_dim > 0``;
        - optional per-target heads (``feat_fc_multi``; see ``convert_2_multi_head``);
        - distillation plumbing selected by ``self.loss``: per-stage margin
          buffers for a teacher (``get_margin_from_bn``) and 1x1 feature
          matchers for a student (``_construct_feat_matchers``).
    """
    def __init__(
        self,
        num_classes,
        loss,
        block,
        layers,
        zero_init_residual=False,
        groups=1,
        fc_dim=2048,
        width_per_group=64,
        replace_stride_with_dilation=None,
        norm_layer=None,
        last_stride=2, # was 2 initially
        dropout_p=None,
        teacher_arch=None,
        **kwargs
    ):
        super(ResNet, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer
        # String key selecting the return signature of forward() below.
        self.loss = loss
        # Architecture name of the teacher when this model is the *student*;
        # None means this model acts as the teacher (or no distillation).
        self.teacher_arch = teacher_arch
        self.margins = None
        # Channel count after layer4 (512 for BasicBlock, 2048 for Bottleneck).
        self.out_dim = 512 * block.expansion
        self.feature_dim = self.out_dim
        self.fc_dim = fc_dim
        self.inplanes = 64
        self.dilation = 1
        self.expansion = block.expansion
        self.multi_head = False
        if replace_stride_with_dilation is None:
            # each element in the tuple indicates if we should replace
            # the 2x2 stride with a dilated convolution instead
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError(
                "replace_stride_with_dilation should be None "
                "or a 3-element tuple, got {}".
                format(replace_stride_with_dilation)
            )
        self.groups = groups
        self.base_width = width_per_group
        self.conv1 = nn.Conv2d(
            3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False
        )
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(
            block,
            128,
            layers[1],
            stride=2,
            dilate=replace_stride_with_dilation[0]
        )
        self.layer3 = self._make_layer(
            block,
            256,
            layers[2],
            stride=2,
            dilate=replace_stride_with_dilation[1]
        )
        self.layer4 = self._make_layer(
            block,
            512,
            layers[3],
            stride=last_stride,
            dilate=replace_stride_with_dilation[2]
        )
        self.global_avgpool = nn.AdaptiveAvgPool2d((1, 1))
        if fc_dim > 0:
            # Embedding head on top of the pooled features.
            # NOTE(review): self.feature_dim still equals out_dim at this
            # point, so `feat` is Linear(out_dim, out_dim) regardless of
            # fc_dim, while the classifier below is sized with fc_dim; the
            # shapes only agree when fc_dim == 512 * block.expansion
            # (e.g. 2048 with Bottleneck) -- TODO confirm intended.
            self.feat = nn.Linear(self.out_dim, self.feature_dim)
            self.feat_bn = nn.BatchNorm1d(self.feature_dim)
            init.kaiming_normal_(self.feat.weight, mode='fan_out')
            init.constant_(self.feat.bias, 0)
            self.feature_dim = fc_dim
        self.classifier = nn.Linear(self.feature_dim, num_classes)
        self._init_params()
        if self.teacher_arch != None:
            if self.teacher_arch == "resnet50" or self.teacher_arch == "resnet101" or self.teacher_arch == "resnet152":
                teacher_feat_dims = [256, 512, 1024, 2048]
            else:
                teacher_feat_dims = [64, 128, 256, 512]
            student_feat_dims = [64 * self.expansion, 128 * self.expansion, 256 * self.expansion,
                                 512 * self.expansion]
            # 1x1 conv to match smaller resnet feature dimension with larger models
            if self.loss == 'kd_reid':
                self.feat_matcher_list = nn.ModuleList([self._construct_feat_matchers(s, t) for s, t in zip(student_feat_dims, teacher_feat_dims)])
        # Zero-initialize the last BN in each residual branch,
        # so that the residual branch starts with zeros, and each residual block behaves like an identity.
        # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, Bottleneck):
                    nn.init.constant_(m.bn3.weight, 0)
                elif isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)
    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        """Stack ``blocks`` residual blocks; only the first may downsample."""
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            # Trade stride for dilation to keep spatial resolution.
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Projection shortcut when shape or channel count changes.
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []
        layers.append(
            block(
                self.inplanes, planes, stride, downsample, self.groups,
                self.base_width, previous_dilation, norm_layer
            )
        )
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(
                    self.inplanes,
                    planes,
                    groups=self.groups,
                    base_width=self.base_width,
                    dilation=self.dilation,
                    norm_layer=norm_layer
                )
            )
        return nn.Sequential(*layers)
    def _construct_fc_layer(self, fc_dims, input_dim, dropout_p=None):
        """Constructs fully connected layer
        Args:
            fc_dims (list or tuple): dimensions of fc layers, if None, no fc layers are constructed
            input_dim (int): input dimension
            dropout_p (float): dropout probability, if None, dropout is unused

        NOTE(review): currently unused by __init__ (the feat/feat_bn head is
        built inline instead) -- kept for compatibility.
        """
        if fc_dims is None:
            self.feature_dim = input_dim
            return None
        assert isinstance(
            fc_dims, (list, tuple)
        ), 'fc_dims must be either list or tuple, but got {}'.format(
            type(fc_dims)
        )
        layers = []
        for dim in fc_dims:
            layers.append(nn.Linear(input_dim, dim))
            layers.append(nn.BatchNorm1d(dim))
            layers.append(nn.ReLU(inplace=True))
            if dropout_p is not None:
                layers.append(nn.Dropout(p=dropout_p))
            input_dim = dim
        self.feature_dim = fc_dims[-1]
        return nn.Sequential(*layers)
    def _construct_feat_matchers(self, dim_in, dim_out):
        """1x1 conv + BN projecting a student stage's channels (dim_in) to
        the teacher's channel count (dim_out), with manual He/unit init."""
        C = [nn.Conv2d(dim_in, dim_out, kernel_size=1, stride=1, padding=0, bias=False),
             nn.BatchNorm2d(dim_out)]
        for m in C:
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
        return nn.Sequential(*C)
    def _init_params(self):
        """Kaiming init for convs, unit-weight/zero-bias for batch norms,
        small normal init for linear layers."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(
                    m.weight, mode='fan_out', nonlinearity='relu'
                )
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm1d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
    def get_margin_from_bn(self):
        """Derive per-channel distillation margins from each stage's last BN.

        Treating each channel's pre-activation as N(bias, |weight|) from the
        BN affine parameters, the margin is the conditional expectation of
        the negative part (margin-ReLU scheme, cf. Heo et al., "A
        Comprehensive Overhaul of Feature Distillation"); falls back to
        -3*std when the negative tail mass is negligible.  Registers buffers
        ``margin1``..``margin4`` shaped (1, C, 1, 1) for use in forward(),
        and returns only the *last* (stage-4) margin tensor.
        """
        if isinstance(self.layer1[0], Bottleneck):
            bn1 = self.layer1[-1].bn3
            bn2 = self.layer2[-1].bn3
            bn3 = self.layer3[-1].bn3
            bn4 = self.layer4[-1].bn3
        elif isinstance(self.layer1[0], BasicBlock):
            bn1 = self.layer1[-1].bn2
            bn2 = self.layer2[-1].bn2
            bn3 = self.layer3[-1].bn2
            bn4 = self.layer4[-1].bn2
        else:
            raise KeyError('ResNet unknown block error !!!')
        bns = [bn1, bn2, bn3, bn4]
        for i, bn in enumerate(bns):
            margin = []
            std = bn.weight.data
            mean = bn.bias.data
            for (s, m) in zip(std, mean):
                s = abs(s.item())
                m = m.item()
                if norm.cdf(-m / s) > 0.001:
                    # E[x | x < 0] for x ~ N(m, s): truncated-normal mean.
                    margin.append(
                        - s * math.exp(- (m / s) ** 2 / 2) / math.sqrt(2 * math.pi) / norm.cdf(-m / s) + m)
                else:
                    margin.append(-3 * s)
            margin = torch.FloatTensor(margin).to(std.device)
            self.register_buffer('margin%d' % (i+1), margin.unsqueeze(1).unsqueeze(2).unsqueeze(0).detach())
        return margin
    def get_channel_num(self):
        """Per-stage output channel counts (used by distillation code)."""
        return [64 * self.expansion, 128 * self.expansion, 256 * self.expansion, 512 * self.expansion]
    def forward(self, input, target=None):
        """Run the backbone; the return signature depends on ``self.loss``.

        ``target`` indexes the per-target embedding head when ``multi_head``
        is enabled (see ``convert_2_multi_head``).  In eval mode the
        L2-normalised embedding is returned directly.
        """
        x = self.conv1(input)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        # f1..f4 are *pre-activation* stage outputs (blocks skip the final ReLU).
        f1 = self.layer1(x)
        f2 = self.layer2(f1)
        f3 = self.layer3(f2)
        f4 = self.layer4(f3)
        f = F.relu(f4)
        v = self.global_avgpool(f)
        v = v.view(v.size(0), -1)
        if self.fc_dim > 0:
            if self.multi_head:
                v = self.feat_fc_multi[target](v)
            else:
                v = self.feat_bn(self.feat(v))
        if not self.training:
            # Inference: return the L2-normalised embedding only.
            v = F.normalize(v)
            return v
        y = self.classifier(v)
        if self.loss == 'softmax':
            return y
        elif self.loss == 'kd_mmd' or self.loss == 'mmd' or self.loss == 'triplet':
            return y, v
        elif self.loss == 'kd_reid':
            # Margin ReLU if teacher, 1x1 Conv for student
            if self.teacher_arch == None:
                # Teacher: clamp each stage's features from below with the
                # per-channel margins registered by get_margin_from_bn().
                f1 = torch.max(f1, getattr(self, 'margin%d' % (1)))
                f1 = f1.view(f1.size(0), -1)
                f2 = torch.max(f2, getattr(self, 'margin%d' % (2)))
                f2 = f2.view(f2.size(0), -1)
                f3 = torch.max(f3, getattr(self, 'margin%d' % (3)))
                f3 = f3.view(f3.size(0), -1)
                f4 = torch.max(f4, getattr(self, 'margin%d' % (4)))
                f4 = f4.view(f4.size(0), -1)
            else:
                # Student: project each stage to the teacher's channel count.
                f1 = self.feat_matcher_list[0](f1)
                f1 = f1.view(f1.size(0), -1)
                f2 = self.feat_matcher_list[1](f2)
                f2 = f2.view(f2.size(0), -1)
                f3 = self.feat_matcher_list[2](f3)
                f3 = f3.view(f3.size(0), -1)
                f4 = self.feat_matcher_list[3](f4)
                f4 = f4.view(f4.size(0), -1)
            return [f1, f2, f3, f4], v, y
        elif self.loss == 'feat_kd':
            # Post-activation, flattened stage features.
            f1 = F.relu(f1)
            f1 = f1.view(f1.size(0), -1)
            f2 = F.relu(f2)
            f2 = f2.view(f2.size(0), -1)
            f3 = F.relu(f3)
            f3 = f3.view(f3.size(0), -1)
            f4 = F.relu(f4)
            f4 = f4.view(f4.size(0), -1)
            return [f1, f2, f3, f4], v, y
        elif self.loss == 'adv_feat_kd':
            # Post-activation stage features, spatial shape preserved.
            f1 = F.relu(f1)
            f2 = F.relu(f2)
            f3 = F.relu(f3)
            f4 = F.relu(f4)
            return [f1, f2, f3, f4], v, y
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
def convert_2_multi_head(model, multi_head):
    """Clone the model's feat/feat_bn embedding pair into per-target heads
    stored in ``model.feat_fc_multi`` and enable head dispatch in forward."""
    model.multi_head = True
    heads = nn.ModuleList()
    for t in range(multi_head):
        head_layers = [copy.deepcopy(model.feat), copy.deepcopy(model.feat_bn)]
        heads.append(nn.Sequential(*head_layers))
    model.feat_fc_multi = heads
def init_pretrained_weights(model, model_url):
    """Initializes model with pretrained weights.
    Layers that don't match with pretrained layers in name or size are kept unchanged.
    """
    current_state = model.state_dict()
    downloaded = model_zoo.load_url(model_url)
    # Only entries matching the current model in both name and shape survive.
    compatible = {
        name: tensor
        for name, tensor in downloaded.items()
        if name in current_state and current_state[name].size() == tensor.size()
    }
    current_state.update(compatible)
    model.load_state_dict(current_state)
def resnet18(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-18 factory: BasicBlock with stage depths 2-2-2-2."""
    config = dict(num_classes=num_classes, loss=loss, block=BasicBlock,
                  layers=[2, 2, 2, 2], last_stride=2, fc_dim=fc_dim,
                  dropout_p=None, teacher_arch=teacher_arch)
    model = ResNet(**config, **kwargs)
    if pretrained:
        # Warm-start from torchvision weights, then derive KD margins.
        init_pretrained_weights(model, model_urls['resnet18'])
        model.margins = model.get_margin_from_bn()
    return model
def resnet34(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-34 factory: BasicBlock with stage depths 3-4-6-3."""
    config = dict(num_classes=num_classes, loss=loss, block=BasicBlock,
                  layers=[3, 4, 6, 3], last_stride=2, fc_dim=fc_dim,
                  dropout_p=None, teacher_arch=teacher_arch)
    model = ResNet(**config, **kwargs)
    if pretrained:
        # Warm-start from torchvision weights, then derive KD margins.
        init_pretrained_weights(model, model_urls['resnet34'])
        model.margins = model.get_margin_from_bn()
    return model
def resnet50(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-50 factory: Bottleneck with stage depths 3-4-6-3."""
    config = dict(num_classes=num_classes, loss=loss, block=Bottleneck,
                  layers=[3, 4, 6, 3], last_stride=2, fc_dim=fc_dim,
                  dropout_p=None, teacher_arch=teacher_arch)
    model = ResNet(**config, **kwargs)
    if pretrained:
        # Warm-start from torchvision weights, then derive KD margins.
        init_pretrained_weights(model, model_urls['resnet50'])
        model.margins = model.get_margin_from_bn()
    return model
def resnet101(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-101 factory: Bottleneck with stage depths 3-4-23-3."""
    config = dict(num_classes=num_classes, loss=loss, block=Bottleneck,
                  layers=[3, 4, 23, 3], last_stride=2, fc_dim=fc_dim,
                  dropout_p=None, teacher_arch=teacher_arch)
    model = ResNet(**config, **kwargs)
    if pretrained:
        # Warm-start from torchvision weights, then derive KD margins.
        init_pretrained_weights(model, model_urls['resnet101'])
        model.margins = model.get_margin_from_bn()
    return model
def resnet152(num_classes, loss='softmax', pretrained=True, teacher_arch=None, fc_dim=2048, **kwargs):
    """ResNet-152 factory: Bottleneck with stage depths 3-8-36-3."""
    config = dict(num_classes=num_classes, loss=loss, block=Bottleneck,
                  layers=[3, 8, 36, 3], last_stride=2, fc_dim=fc_dim,
                  dropout_p=None, teacher_arch=teacher_arch)
    model = ResNet(**config, **kwargs)
    if pretrained:
        # Warm-start from torchvision weights, then derive KD margins.
        init_pretrained_weights(model, model_urls['resnet152'])
        model.margins = model.get_margin_from_bn()
    return model
| en | 0.805691 | Code source: https://github.com/pytorch/vision 3x3 convolution with padding 1x1 convolution # Both self.conv1 and self.downsample layers downsample the input when stride != 1 # out = self.relu(out) # Both self.conv2 and self.downsample layers downsample the input when stride != 1 # out = self.relu(out) Residual network. Reference: - He et al. Deep Residual Learning for Image Recognition. CVPR 2016. - Xie et al. Aggregated Residual Transformations for Deep Neural Networks. CVPR 2017. Public keys: - ``resnet18``: ResNet18. - ``resnet34``: ResNet34. - ``resnet50``: ResNet50. - ``resnet101``: ResNet101. - ``resnet152``: ResNet152. - ``resnext50_32x4d``: ResNeXt50. - ``resnext101_32x8d``: ResNeXt101. - ``resnet50_fc512``: ResNet50 + FC. # was 2 initially # each element in the tuple indicates if we should replace # the 2x2 stride with a dilated convolution instead # 1x1 conv to match smaller resnet feature dimension with larger models # Zero-initialize the last BN in each residual branch, # so that the residual branch starts with zeros, and each residual block behaves like an identity. # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677 Constructs fully connected layer Args: fc_dims (list or tuple): dimensions of fc layers, if None, no fc layers are constructed input_dim (int): input dimension dropout_p (float): dropout probability, if None, dropout is unused # Margin ReLU if teacher, 1x1 Conv for student Initializes model with pretrained weights. Layers that don't match with pretrained layers in name or size are kept unchanged. | 2.243026 | 2 |
tests/test_transform.py | BastienCagna/deep_folding | 1 | 6612767 | <filename>tests/test_transform.py
from soma import aims
import os
import glob
from deep_folding.anatomist_tools import transform
def test_transform():
    """Tests if the transformation of one subject gives the expected result
    The source and the reference are in the data subdirectory
    """
    # Resolve the data directories relative to the current working directory.
    src_dir = os.path.abspath(os.path.join(os.getcwd(), 'data/source/unsupervised'))
    ref_dir = os.path.abspath(os.path.join(os.getcwd(), 'data/reference/transform'))
    print("ref_dir = " + ref_dir)
    tgt_dir = os.path.abspath(os.path.join(os.getcwd(), 'data/target/transform'))
    # Performs the actual transform for all subjects.
    transform.transform_to_spm(src_dir=src_dir, tgt_dir=tgt_dir,
                               number_subjects=transform._ALL_SUBJECTS)
    # glob with an absolute pattern already yields absolute paths, so the
    # original os.path.join(dir, <absolute glob result>) was a no-op and has
    # been removed.
    # takes and reads first target file
    tgt_file = glob.glob(os.path.join(tgt_dir, 'natif_to_template*.trm'))[0]
    tgt_transfo = aims.read(tgt_file)
    print(tgt_transfo)
    # takes and reads first reference file
    ref_file = glob.glob(os.path.join(ref_dir, 'natif_to_template*.trm'))[0]
    ref_transfo = aims.read(ref_file)
    print(type(ref_transfo))
    print(ref_transfo)
    assert tgt_transfo == ref_transfo
| <filename>tests/test_transform.py
from soma import aims
import os
import glob
from deep_folding.anatomist_tools import transform
def test_transform():
    """Tests if the transformation of one subject gives the expected result
    The source and the reference are in the data subdirectory
    """
    def data_dir(relative):
        # Absolute path of a data subdirectory under the current working dir.
        return os.path.abspath(os.path.join(os.getcwd(), relative))

    src_dir = data_dir('data/source/unsupervised')
    ref_dir = data_dir('data/reference/transform')
    print("ref_dir = " + ref_dir)
    tgt_dir = data_dir('data/target/transform')
    # Run the transform over every subject.
    transform.transform_to_spm(src_dir=src_dir, tgt_dir=tgt_dir,
                               number_subjects=transform._ALL_SUBJECTS)
    # Read the first produced transform file.
    tgt_matches = glob.glob(tgt_dir + '/natif_to_template*.trm')
    tgt_file = os.path.join(tgt_dir, tgt_matches[0])
    tgt_transfo = aims.read(tgt_file)
    print(tgt_transfo)
    # Read the first reference transform file.
    ref_matches = glob.glob(ref_dir + '/natif_to_template*.trm')
    ref_file = os.path.join(ref_dir, ref_matches[0])
    ref_transfo = aims.read(ref_file)
    print(type(ref_transfo))
    print(ref_transfo)
    assert tgt_transfo == ref_transfo
| en | 0.752561 | Tests if the transformation of one subject gives the expected result The source and the reference are in the data subdirectory # Gets the source directory # Gets the reference directory # Defines the target directory # Performs the actual transform # takes and reads first target file # takes and read first reference file | 2.380456 | 2 |
build/GodotBuilder.py | Cat-Lips/GodotSharp.BuildEngine.WSL2 | 3 | 6612768 | <gh_stars>1-10
import os
import utils
import shutil
from constants import *
from decorators import perf, title, scoped_env
from export_targets import get_current_target, yield_export_targets
class GodotBuilder:
    """Orchestrates fetching, patching and building Godot (optionally with
    Mono/C# support) plus its toolchain dependencies (godot-build-tools,
    mono, emscripten, mxe).

    Repository URLs, local paths and tool locations (``godot_remote``,
    ``mono_local``, ``mxe_root``, ...) come from ``constants`` via the
    module's star import -- see constants.py for their values.
    """
    @perf('Total Runtime')
    def run(self, args):
        """Fetch/update all required sources, then build the requested targets."""
        self.init(args)
        self.build(args)
    @perf('Total Init Time')
    def init(self, args):
        """Clone or update each source tree needed by the requested build.

        Each nested function handles one dependency; at the bottom, flags
        derived from ``args`` decide which fetchers actually run.
        """
        # Godot engine sources; local patch applied on first clone, stale
        # glue generator removed after updates so it gets rebuilt.
        @title(f'Retrieving {utils.repo_name(godot_remote, godot_version)}')
        def get_godot():
            update_state = utils.get_git(godot_remote, godot_local, godot_version)
            if update_state == 'cloned':
                utils.apply_patch(f'{patch_path}/godot.diff', godot_local)
            elif update_state == 'updated':
                mono_glue_generator = utils.find_path(f'{godot_local}/bin/glue.*')
                if mono_glue_generator: os.remove(mono_glue_generator)
        # Godot's mono build scripts (patched on first clone only).
        @title(f'Retrieving {utils.repo_name(godot_build_tools_remote)}')
        def get_godot_build_tools():
            update_state = utils.get_git(godot_build_tools_remote, godot_build_tools_local)
            if update_state == 'cloned':
                utils.apply_patch(f'{patch_path}/godot-build-tools.diff', godot_build_tools_local)
            elif update_state == 'updated':
                print(f'WARNING: {godot_build_tools_remote} has been modified')
                print(' - Check if changes require rebuild or config.py version update')
                print('Meanwhile, continuing with current build...')
        # Mono runtime sources (with submodules), patched for Godot on clone.
        @title(f'Retrieving {utils.repo_name(mono_remote, mono_version)}')
        def get_and_patch_mono():
            update_state = utils.get_git(mono_remote, mono_local, mono_version, submodules=True)
            if update_state == 'cloned':
                print('\nApplying Godot Patches...')
                utils.execute(f'{godot_build_tools_local}/patch_mono.py --mono-sources {mono_local}')
        # Emscripten SDK for web exports; install/activate runs every time.
        @title(f'Retrieving {utils.repo_name(emscripten_remote, emscripten_version)}')
        def get_and_patch_emscripten():
            update_state = utils.get_git(emscripten_remote, emscripten_local)
            if update_state == 'cloned':
                print('\nApplying Godot Patches...')
                utils.execute(f'{godot_build_tools_local}/patch_emscripten.py --mono-sources {mono_local}')
            utils.execute(f'./emsdk install {emscripten_version}', emscripten_local)
            utils.execute(f'./emsdk activate {emscripten_version}', emscripten_local)
        # MXE cross-compile environment for Windows targets (built from source).
        def get_and_build_mxe():
            @title(f'Retrieving {utils.repo_name(mxe_remote)}')
            def get_mxe():
                utils.get_git(mxe_remote, mxe_local)
            @title(f'Building mxe')
            def build_mxe():
                mxe_plugins = 'MXE_PLUGIN_DIRS="plugins/gcc10"'
                mxe_targets = 'MXE_TARGETS="x86_64-w64-mingw32 i686-w64-mingw32"'
                utils.execute(f'make -j{utils.cores} --jobs={utils.cores} cc {mxe_targets} {mxe_plugins}', mxe_local)
            get_mxe()
            build_mxe()
        # Decide which fetchers to run: Godot is always needed, the rest
        # depend on the mono/export flags and on what is already on disk.
        mono_required = args.mono
        emscripten_required = args.export
        godot_build_tools_required = mono_required or emscripten_required
        if args.update or not os.path.exists(godot_local): get_godot()
        if godot_build_tools_required and (args.update or not os.path.exists(godot_build_tools_local)): get_godot_build_tools()
        if emscripten_required and not os.path.exists(emscripten_local): get_and_patch_emscripten()
        if mono_required and not os.path.exists(mono_local): get_and_patch_mono()
        if mxe_required and not os.path.exists(mxe_root): get_and_build_mxe()
    @perf('Total Build Time')
    def build(self, args):
        """Build the editor for the current host target and/or the export
        templates for every configured export target."""
        # Builds one target; ``export`` selects template vs. editor output.
        def build(target, *, export=False):
            mono_arch = target.arch.mono
            godot_arch = target.arch.godot
            mono_platform = target.platform.mono
            godot_platform = target.platform.godot
            # Per-target path helpers.
            def mono_configure_dir():
                return f'{mono_cfg}/{mono_platform}.{mono_arch}'
            def mono_install_dir():
                return f'{mono_bin}/{mono_platform}.{mono_arch}'
            def mono_prefix_path():
                return utils.find_path(f'{mono_install_dir()}/*-{mono_arch}-*')
            def mono_glue_generator():
                return utils.find_path(f'{godot_local}/bin/glue.{godot_platform}.*')
            def godot_build_target():
                return utils.find_path(f'{godot_local}/bin/godot.{godot_platform}.*')
            # Configure/make/install the Mono runtime + BCL for this target
            # (skipped entirely if an install dir already exists).
            @perf(f'Total Mono Build Time ({mono_platform}.{mono_arch})')
            def build_mono():
                install_dir = mono_install_dir()
                if os.path.exists(install_dir): return
                configure_dir = mono_configure_dir()
                product = 'desktop' if mono_platform in ['windows', 'linux', 'osx'] else mono_platform
                if mono_platform == 'windows' and mono_arch == 'x86': product += '-win32'
                bcl_cmd = f'python3 {godot_build_tools_local}/bcl.py'
                mono_cmd = f'python3 {godot_build_tools_local}/{mono_platform}.py'
                install_cmd = f'python3 {godot_build_tools_local}/reference_assemblies.py'
                base_args = f'-j{utils.cores} --configure-dir {configure_dir} --install-dir {install_dir} --mono-sources {mono_local}'
                if mxe_required: base_args += f' --mxe-prefix {mxe_root}'
                mono_args = f'{base_args} --target={mono_arch}'
                bcl_args = f'{base_args} --product={product}'
                @title(f'Configuring Mono ({mono_platform}.{mono_arch})')
                def configure_mono():
                    utils.execute(f'{mono_cmd} configure {mono_args}')
                @title(f'Compiling Mono ({mono_platform}.{mono_arch})')
                def compile_mono():
                    utils.execute(f'{mono_cmd} make {mono_args}')
                @title(f'Compiling BCL ({mono_platform}.{mono_arch})')
                def compile_bcl():
                    utils.execute(f'{bcl_cmd} make {bcl_args}')
                @title(f'Copying BCL ({mono_platform}.{mono_arch})')
                def copy_bcl():
                    utils.execute(f'{mono_cmd} copy-bcl {mono_args}')
                @title(f'Installing Mono ({mono_platform}.{mono_arch})')
                def install_mono():
                    utils.execute(f'{install_cmd} install {base_args}')
                configure_mono()
                compile_mono()
                compile_bcl()
                copy_bcl()
                install_mono()
            # Build and run the glue generator once to produce the C# bindings.
            @perf(f'Total Mono Glue Generation Time')
            def generate_mono_glue():
                @title('Generating Mono Glue')
                def run_mono_glue_generator():
                    utils.execute(f'{mono_glue_generator()} --generate-mono-glue {godot_local}/modules/mono/glue --no-window')
                    os.remove('logs/godot.log')
                    os.rmdir('logs')
                if not mono_glue_generator():
                    build_godot(mono_glue=True)
                    run_mono_glue_generator()
            # Invoke scons; post_build relocates/renames the produced binary.
            @perf(f'Total Godot Build Time ({godot_platform}.{mono_arch})')
            def build_godot(*, mono_glue=False):
                build_type = 'Mono Glue Generator' if mono_glue else 'Godot'
                def platform_args():
                    return f'platform={godot_platform} {godot_arch}'
                def profile_args():
                    profile_py = export_profile if export else editor_profile
                    return f' profile={profile_py}' if os.path.exists(profile_py) else ''
                def mono_args():
                    mono_glue_arg = 'mono_glue=no' if mono_glue else 'copy_mono_root=yes'
                    return f' module_mono_enabled=yes mono_static=yes {mono_glue_arg} mono_prefix={os.path.abspath(mono_prefix_path())}' if args.mono else ''
                def export_args():
                    return ' tools=no target=release debug_symbols=no use_lto=yes' if export else ' target=release_debug'
                scons_args = f'{platform_args()}{profile_args()}{mono_args()}{export_args()}'
                @scoped_env('PATH', mxe_path if mxe_required else None)
                @title(f'Building {build_type} ({godot_platform}.{mono_arch})')
                def build():
                    utils.execute(f'scons -j{utils.cores} --directory {godot_local} {scons_args}')
                @perf('(post-build)')
                def post_build():
                    godot_exe = godot_build_target()
                    if mono_glue:
                        # Rename the binary so later builds find the generator.
                        mono_glue_exe = godot_exe.replace('godot.', 'glue.', 1)
                        utils.execute(f'mv {godot_exe} {mono_glue_exe}')
                    elif export:
                        os.makedirs(export_bin, exist_ok=True)
                        utils.execute(f'mv {godot_exe} {export_bin}')
                    else:
                        # Editor build: keep one rollback of the previous
                        # install and carry over its editor_data settings.
                        target_dir = f'{editor_bin}/{godot_platform}.{mono_arch}'
                        rollback_dir = f'{target_dir}.rollback'
                        if os.path.exists(rollback_dir):
                            print('Removing rollback...')
                            utils.execute(f'rm -rf {rollback_dir}')
                        if os.path.exists(target_dir):
                            print('Creating rollback...')
                            utils.execute(f'mv {target_dir} {rollback_dir}')
                        print('Moving build to bin...')
                        os.makedirs(target_dir)
                        utils.execute(f'mv {godot_exe} {target_dir}')
                        if args.mono: utils.execute(f'mv {godot_local}/bin/GodotSharp {target_dir}')
                        # '_sc_' marker makes the editor run self-contained.
                        open(f'{target_dir}/_sc_', 'a').close()
                        editor_data = f'{rollback_dir}/editor_data'
                        if os.path.exists(editor_data): shutil.copytree(editor_data, f'{target_dir}/editor_data')
                build()
                post_build()
            if args.mono:
                build_mono()
                generate_mono_glue()
            build_godot()
        if args.editor:
            build(get_current_target())
        if args.export:
            # NOTE(review): list comprehension used purely for side effects.
            [build(export_target, export=True) for export_target in yield_export_targets()]
| import os
import utils
import shutil
from constants import *
from decorators import perf, title, scoped_env
from export_targets import get_current_target, yield_export_targets
class GodotBuilder:
    @perf('Total Runtime')
    def run(self, args):
        """Fetch/update all required sources, then build the requested targets."""
        self.init(args)
        self.build(args)
    @perf('Total Init Time')
    def init(self, args):
        """Clone or update each source tree needed by the requested build.

        Each nested function fetches one dependency (Godot, build tools,
        mono, emscripten, mxe); the flags at the bottom, derived from
        ``args``, decide which ones actually run.  Paths and remotes come
        from ``constants`` via the module's star import.
        """
        @title(f'Retrieving {utils.repo_name(godot_remote, godot_version)}')
        def get_godot():
            update_state = utils.get_git(godot_remote, godot_local, godot_version)
            if update_state == 'cloned':
                utils.apply_patch(f'{patch_path}/godot.diff', godot_local)
            elif update_state == 'updated':
                # Remove the stale glue generator so it gets rebuilt.
                mono_glue_generator = utils.find_path(f'{godot_local}/bin/glue.*')
                if mono_glue_generator: os.remove(mono_glue_generator)
        @title(f'Retrieving {utils.repo_name(godot_build_tools_remote)}')
        def get_godot_build_tools():
            update_state = utils.get_git(godot_build_tools_remote, godot_build_tools_local)
            if update_state == 'cloned':
                utils.apply_patch(f'{patch_path}/godot-build-tools.diff', godot_build_tools_local)
            elif update_state == 'updated':
                print(f'WARNING: {godot_build_tools_remote} has been modified')
                print(' - Check if changes require rebuild or config.py version update')
                print('Meanwhile, continuing with current build...')
        @title(f'Retrieving {utils.repo_name(mono_remote, mono_version)}')
        def get_and_patch_mono():
            update_state = utils.get_git(mono_remote, mono_local, mono_version, submodules=True)
            if update_state == 'cloned':
                print('\nApplying Godot Patches...')
                utils.execute(f'{godot_build_tools_local}/patch_mono.py --mono-sources {mono_local}')
        @title(f'Retrieving {utils.repo_name(emscripten_remote, emscripten_version)}')
        def get_and_patch_emscripten():
            update_state = utils.get_git(emscripten_remote, emscripten_local)
            if update_state == 'cloned':
                print('\nApplying Godot Patches...')
                utils.execute(f'{godot_build_tools_local}/patch_emscripten.py --mono-sources {mono_local}')
            # install/activate run on every invocation, not just on clone.
            utils.execute(f'./emsdk install {emscripten_version}', emscripten_local)
            utils.execute(f'./emsdk activate {emscripten_version}', emscripten_local)
        def get_and_build_mxe():
            @title(f'Retrieving {utils.repo_name(mxe_remote)}')
            def get_mxe():
                utils.get_git(mxe_remote, mxe_local)
            @title(f'Building mxe')
            def build_mxe():
                mxe_plugins = 'MXE_PLUGIN_DIRS="plugins/gcc10"'
                mxe_targets = 'MXE_TARGETS="x86_64-w64-mingw32 i686-w64-mingw32"'
                utils.execute(f'make -j{utils.cores} --jobs={utils.cores} cc {mxe_targets} {mxe_plugins}', mxe_local)
            get_mxe()
            build_mxe()
        # Decide what to fetch based on the requested build.
        mono_required = args.mono
        emscripten_required = args.export
        godot_build_tools_required = mono_required or emscripten_required
        if args.update or not os.path.exists(godot_local): get_godot()
        if godot_build_tools_required and (args.update or not os.path.exists(godot_build_tools_local)): get_godot_build_tools()
        if emscripten_required and not os.path.exists(emscripten_local): get_and_patch_emscripten()
        if mono_required and not os.path.exists(mono_local): get_and_patch_mono()
        if mxe_required and not os.path.exists(mxe_root): get_and_build_mxe()
@perf('Total Build Time')
def build(self, args):
    """Build Godot (editor binary and/or export templates), optionally with Mono (C#).

    Per target, runs up to three stages: (1) build the Mono runtime + BCL,
    (2) generate the Mono glue sources/binaries, (3) build Godot via scons.
    Which targets run is selected by ``args.editor`` / ``args.export``;
    ``args.mono`` enables the Mono stages.

    NOTE(review): relies on names defined elsewhere in this file
    (``mono_cfg``, ``mono_bin``, ``godot_local``, ``godot_build_tools_local``,
    ``mxe_required``, ``mxe_root``, ``mxe_path``, ``editor_bin``, ``export_bin``,
    ``editor_profile``, ``export_profile``, ``utils``, ``shutil``,
    ``get_current_target``, ``yield_export_targets`` and the ``perf`` /
    ``title`` / ``scoped_env`` decorators) -- confirm against the full module.
    """
    def build(target, *, export=False):
        # Per-target build driver; ``target`` carries arch/platform descriptors
        # under both Mono and Godot naming conventions.
        mono_arch = target.arch.mono
        godot_arch = target.arch.godot
        mono_platform = target.platform.mono
        godot_platform = target.platform.godot

        # --- path helpers (lazy, so they always reflect the current target) ---
        def mono_configure_dir():
            # Where Mono is configured/built for this platform+arch.
            return f'{mono_cfg}/{mono_platform}.{mono_arch}'
        def mono_install_dir():
            # Where the finished Mono runtime is installed.
            return f'{mono_bin}/{mono_platform}.{mono_arch}'
        def mono_prefix_path():
            # Installed Mono prefix (glob match), passed to scons as mono_prefix.
            return utils.find_path(f'{mono_install_dir()}/*-{mono_arch}-*')
        def mono_glue_generator():
            # Previously built glue-generator binary, if any (falsy when absent).
            return utils.find_path(f'{godot_local}/bin/glue.{godot_platform}.*')
        def godot_build_target():
            # The Godot binary scons just produced.
            return utils.find_path(f'{godot_local}/bin/godot.{godot_platform}.*')

        @perf(f'Total Mono Build Time ({mono_platform}.{mono_arch})')
        def build_mono():
            """Configure, compile and install Mono + BCL for this target (skipped if already installed)."""
            install_dir = mono_install_dir()
            if os.path.exists(install_dir): return
            configure_dir = mono_configure_dir()
            # Desktop platforms share one BCL "product"; others use their own name.
            product = 'desktop' if mono_platform in ['windows', 'linux', 'osx'] else mono_platform
            if mono_platform == 'windows' and mono_arch == 'x86': product += '-win32'
            # Godot's mono build-tools scripts drive the actual work.
            bcl_cmd = f'python3 {godot_build_tools_local}/bcl.py'
            mono_cmd = f'python3 {godot_build_tools_local}/{mono_platform}.py'
            install_cmd = f'python3 {godot_build_tools_local}/reference_assemblies.py'
            base_args = f'-j{utils.cores} --configure-dir {configure_dir} --install-dir {install_dir} --mono-sources {mono_local}'
            if mxe_required: base_args += f' --mxe-prefix {mxe_root}'  # Windows cross-compile via MXE
            mono_args = f'{base_args} --target={mono_arch}'
            bcl_args = f'{base_args} --product={product}'
            @title(f'Configuring Mono ({mono_platform}.{mono_arch})')
            def configure_mono():
                utils.execute(f'{mono_cmd} configure {mono_args}')
            @title(f'Compiling Mono ({mono_platform}.{mono_arch})')
            def compile_mono():
                utils.execute(f'{mono_cmd} make {mono_args}')
            @title(f'Compiling BCL ({mono_platform}.{mono_arch})')
            def compile_bcl():
                utils.execute(f'{bcl_cmd} make {bcl_args}')
            @title(f'Copying BCL ({mono_platform}.{mono_arch})')
            def copy_bcl():
                utils.execute(f'{mono_cmd} copy-bcl {mono_args}')
            @title(f'Installing Mono ({mono_platform}.{mono_arch})')
            def install_mono():
                utils.execute(f'{install_cmd} install {base_args}')
            configure_mono()
            compile_mono()
            compile_bcl()
            copy_bcl()
            install_mono()

        @perf(f'Total Mono Glue Generation Time')
        def generate_mono_glue():
            """Run (building first if needed) the glue generator to emit Mono glue sources."""
            @title('Generating Mono Glue')
            def run_mono_glue_generator():
                utils.execute(f'{mono_glue_generator()} --generate-mono-glue {godot_local}/modules/mono/glue --no-window')
                # The generator leaves a log dir behind; clean it up.
                os.remove('logs/godot.log')
                os.rmdir('logs')
            # Build the generator binary itself if it doesn't exist yet.
            if not mono_glue_generator():
                build_godot(mono_glue=True)
            run_mono_glue_generator()

        @perf(f'Total Godot Build Time ({godot_platform}.{mono_arch})')
        def build_godot(*, mono_glue=False):
            """Invoke scons for Godot; with mono_glue=True, build the glue generator instead."""
            build_type = 'Mono Glue Generator' if mono_glue else 'Godot'
            # --- scons argument builders ---
            def platform_args():
                return f'platform={godot_platform} {godot_arch}'
            def profile_args():
                # Optional SCons build profile, different for editor vs export builds.
                profile_py = export_profile if export else editor_profile
                return f' profile={profile_py}' if os.path.exists(profile_py) else ''
            def mono_args():
                # mono_glue=no while generating glue (chicken-and-egg), else bundle the Mono root.
                mono_glue_arg = 'mono_glue=no' if mono_glue else 'copy_mono_root=yes'
                return f' module_mono_enabled=yes mono_static=yes {mono_glue_arg} mono_prefix={os.path.abspath(mono_prefix_path())}' if args.mono else ''
            def export_args():
                # Export templates: stripped release; editor: release_debug.
                return ' tools=no target=release debug_symbols=no use_lto=yes' if export else ' target=release_debug'
            scons_args = f'{platform_args()}{profile_args()}{mono_args()}{export_args()}'
            @scoped_env('PATH', mxe_path if mxe_required else None)
            @title(f'Building {build_type} ({godot_platform}.{mono_arch})')
            def build():
                utils.execute(f'scons -j{utils.cores} --directory {godot_local} {scons_args}')
            @perf('(post-build)')
            def post_build():
                # Move the produced binary to its final location, keeping a rollback copy.
                godot_exe = godot_build_target()
                if mono_glue:
                    # Rename godot.* -> glue.* so mono_glue_generator() can find it.
                    mono_glue_exe = godot_exe.replace('godot.', 'glue.', 1)
                    utils.execute(f'mv {godot_exe} {mono_glue_exe}')
                elif export:
                    os.makedirs(export_bin, exist_ok=True)
                    utils.execute(f'mv {godot_exe} {export_bin}')
                else:
                    target_dir = f'{editor_bin}/{godot_platform}.{mono_arch}'
                    rollback_dir = f'{target_dir}.rollback'
                    if os.path.exists(rollback_dir):
                        print('Removing rollback...')
                        utils.execute(f'rm -rf {rollback_dir}')
                    if os.path.exists(target_dir):
                        print('Creating rollback...')
                        utils.execute(f'mv {target_dir} {rollback_dir}')
                    print('Moving build to bin...')
                    os.makedirs(target_dir)
                    utils.execute(f'mv {godot_exe} {target_dir}')
                    if args.mono: utils.execute(f'mv {godot_local}/bin/GodotSharp {target_dir}')
                    # _sc_ marker makes Godot run in self-contained mode.
                    open(f'{target_dir}/_sc_', 'a').close()
                    # Carry editor settings over from the previous (rollback) install.
                    editor_data = f'{rollback_dir}/editor_data'
                    if os.path.exists(editor_data): shutil.copytree(editor_data, f'{target_dir}/editor_data')
            build()
            post_build()

        # Stage ordering for one target: Mono first (if requested), then Godot.
        if args.mono:
            build_mono()
            generate_mono_glue()
        build_godot()

    if args.editor:
        build(get_current_target())
    if args.export:
        [build(export_target, export=True) for export_target in yield_export_targets()]
src/allennlp_utils/data/dataset_reader.py | wj-Mcat/allennlp-utils | 0 | 6612769 | <filename>src/allennlp_utils/data/dataset_reader.py<gh_stars>0
from typing import Iterable
from allennlp.data import DatasetReader, Instance
@DatasetReader.register("utils-atis")
class ATISDatasetReader(DatasetReader):
    """
    Read the ATIS dataset for the slot-filling and intent-detection task.

    NOTE: this reader is currently a stub -- ``text_to_instance`` and
    ``_read`` still need real implementations.
    """

    def __init__(self):
        # Zero-argument super() is the idiomatic Python 3 form; the stray
        # no-op ``pass`` that followed the super call was dead code.
        super().__init__()

    def text_to_instance(self, *inputs) -> Instance:
        # TODO: convert raw fields into an allennlp Instance.
        pass

    def _read(self, file_path: str) -> Iterable[Instance]:
        # TODO: yield one Instance per ATIS example in ``file_path``.
        pass
from typing import Iterable
from allennlp.data import DatasetReader, Instance
@DatasetReader.register("utils-atis")
class ATISDatasetReader(DatasetReader):
"""
read the ATIS dataset for slot filling and intent detection task
"""
def __init__(self):
super(ATISDatasetReader, self).__init__()
pass
def text_to_instance(self, *inputs) -> Instance:
pass
def _read(self, file_path: str) -> Iterable[Instance]:
pass | en | 0.726469 | read the ATIS dataset for slot filling and intent detection task | 2.517064 | 3 |
Arrays/bmodexecution.py | rando3/leetcode-python | 0 | 6612770 | <reponame>rando3/leetcode-python<filename>Arrays/bmodexecution.py
# Python program to print topological sorting of a DAG
from collections import defaultdict
class Graph:
    """Directed graph over vertices ``0..V-1`` with a DFS topological sort."""

    def __init__(self, vertices):
        self.graph = defaultdict(list)  # adjacency list: vertex -> successors
        self.V = vertices               # number of vertices

    def addEdge(self, u, v):
        '''Add the directed edge u -> v.'''
        self.graph[u].append(v)

    def topologicalSortUtil(self, v, visited, stack):
        '''DFS from ``v``; prepend ``v`` to ``stack`` once all its descendants are placed.'''
        visited[v] = True
        for successor in self.graph[v]:
            if not visited[successor]:
                self.topologicalSortUtil(successor, visited, stack)
        stack.insert(0, v)

    def topologicalSort(self):
        '''Print one topological ordering covering every vertex.'''
        visited = [False] * self.V
        ordering = []
        for vertex in range(self.V):
            if not visited[vertex]:
                self.topologicalSortUtil(vertex, visited, ordering)
        print(ordering)
if __name__ == "__main__":
    # Demo: 4 vertices (0..3) with edges 1->3 and 1->2.
    # BUGFIX: the original used Graph(3), but vertex 3 is out of range for a
    # 3-vertex graph -- topologicalSortUtil would raise IndexError on
    # visited[3].  The graph needs 4 vertices to contain vertex 3.
    g = Graph(4)
    g.addEdge(1, 3)
    g.addEdge(1, 2)
    print("Following is a Topological Sort of the given graph")
    g.topologicalSort()
| # Python program to print topological sorting of a DAG
from collections import defaultdict
class Graph:
def __init__(self, vertices):
self.graph = defaultdict(list) # dictionary containing adjacency List
self.V = vertices # No. of vertices
def addEdge(self, u, v):
''' Function to add an edge to graph '''
self.graph[u].append(v)
def topologicalSortUtil(self, v, visited, stack):
''' A recursive function used by topologicalSort '''
visited[v] = True # curr node visited
for i in self.graph[v]: # Recur for all the vertices adjacent to this vertex
if not visited[i]:
self.topologicalSortUtil(i, visited, stack)
stack.insert(0, v) # Push current vertex to stack which stores result
def topologicalSort(self):
'''
The function to do Topological Sort.
It uses recursive topologicalSortUtil()
'''
visited = [False] * self.V # base case not visited
stack = []
# Call the recursive helper function to store Topological
# Sort starting from all vertices one by one
for i in range(self.V):
if not visited[i]:
self.topologicalSortUtil(i, visited, stack)
print(stack) # Print contents of the stack
if __name__ == "__main__":
g = Graph(3)
g.addEdge(1, 3)
g.addEdge(1, 2)
print("Following is a Topological Sort of the given graph")
g.topologicalSort() | en | 0.776061 | # Python program to print topological sorting of a DAG # dictionary containing adjacency List # No. of vertices Function to add an edge to graph A recursive function used by topologicalSort # curr node visited # Recur for all the vertices adjacent to this vertex # Push current vertex to stack which stores result The function to do Topological Sort. It uses recursive topologicalSortUtil() # base case not visited # Call the recursive helper function to store Topological # Sort starting from all vertices one by one # Print contents of the stack | 4.324502 | 4 |
ex019.py | Rhodytesla/PythonMundo01 | 0 | 6612771 | <reponame>Rhodytesla/PythonMundo01
import random

# Collect the names of four students, then draw one at random.
a = []
for y in range(0, 4):
    x = input('insira o nome do aluno {}\n'.format(y + 1))
    a.append(x)
    # BUGFIX: removed the original stray expression statement ``y+1``,
    # which evaluated a value and discarded it (dead code).
# random.choice avoids hard-coding the index bound (was randint(0, 3)).
print('o aluno sorteado é {}'.format(random.choice(a)))
| import random
a = []
for y in range(0,4):
x=input('insira o nome do aluno {}\n'.format(y+1))
a.append(x)
y+1
print('o aluno sorteado é {}'.format(a[random.randint(0,3)])) | none | 1 | 3.62679 | 4 | |
main/reconstruct-itinerary/reconstruct-itinerary-fast.py | EliahKagan/old-practice-snapshot | 0 | 6612772 | <reponame>EliahKagan/old-practice-snapshot<gh_stars>0
class Solution:
    def findItinerary(self, tickets):
        """
        :type tickets: List[List[str]]
        :rtype: List[str]
        """
        # Adjacency list: origin airport -> list of destinations.
        graph = collections.defaultdict(list)
        for origin, destination in tickets:
            graph[origin].append(destination)

        # Reverse-sort every row so pop() yields the lexicographically
        # smallest remaining destination.
        for destinations in graph.values():
            destinations.sort(reverse=True)

        # Hierholzer's algorithm builds the Euler path in reverse.
        itinerary = []

        def visit(airport):
            destinations = graph[airport]
            while destinations:
                visit(destinations.pop())
            itinerary.append(airport)

        visit('JFK')
        return itinerary[::-1]
| class Solution:
def findItinerary(self, tickets):
"""
:type tickets: List[List[str]]
:rtype: List[str]
"""
# Create the adjacency list.
adj = collections.defaultdict(list)
for src, dest in tickets:
adj[src].append(dest)
# Sort each row so items can be popped in lexicographic order.
for row in adj.values():
row.sort(reverse=True)
# We will build the path backwards.
path = []
# Recursive Hierholzer's algorithm.
def dfs(src):
row = adj[src]
while row:
dfs(row.pop())
path.append(src)
# Build the path and return it forwards.
dfs('JFK')
path.reverse()
return path | en | 0.749591 | :type tickets: List[List[str]] :rtype: List[str] # Create the adjacency list. # Sort each row so items can be popped in lexicographic order. # We will build the path backwards. # Recursive Hierholzer's algorithm. # Build the path and return it forwards. | 3.748258 | 4 |
src/dataload/datasets.py | ReinholdM/OpenASR | 0 | 6612773 | <reponame>ReinholdM/OpenASR
import json
import os
import torch.utils.data as data
class TextLineByLineDataset(data.Dataset):
    """Map-style dataset where each element is one line of a text file."""

    def __init__(self, fn):
        super().__init__()
        with open(fn, 'r') as handle:
            contents = handle.read()
        # One entry per line; surrounding whitespace/blank lines stripped first.
        self.data = contents.strip().split('\n')

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.data[index]
class SpeechDataset(data.Dataset):
    """Map-style dataset of utterance dicts loaded from a JSON manifest,
    ordered by ``duration`` (ascending, or descending with ``reverse=True``)."""

    def __init__(self, data_json_path, reverse=False):
        super().__init__()
        with open(data_json_path, 'rb') as manifest:
            samples = json.load(manifest)
        samples.sort(key=lambda sample: float(sample["duration"]))
        if reverse:
            samples.reverse()
        self.data = samples

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.data[index]
class ArkDataset(SpeechDataset):
    """Dataset built from one JSON manifest (or every ``*.json`` under a
    directory), filtered by length constraints and sorted by ``feat_length``.

    Parameters:
        json_path: path to a manifest file, or a directory searched
            recursively for ``*.json`` manifests.
        reverse: sort descending instead of ascending by ``feat_length``.
        feat_range: inclusive (min, max) bounds on ``feat_length``.
        label_range: inclusive (min, max) bounds on ``token_length``.
        rate_in_out: inclusive bounds on the feat/token length ratio.
    """

    def __init__(self, json_path, reverse=False,
                 feat_range=(1, 99999), label_range=(1, 100), rate_in_out=(4, 999)):
        try:
            # json_path is a single manifest file
            with open(json_path) as f:
                data = json.load(f)
        except (OSError, ValueError):
            # json_path is a directory: gather every *.json under it.
            # BUGFIX: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit.  OSError covers "path is a
            # directory"/missing file; ValueError covers invalid JSON
            # (json.JSONDecodeError is a ValueError subclass).
            data = []
            for dirpath, _, filenames in os.walk(json_path):
                for name in filenames:
                    if name.endswith('.json'):
                        filename = os.path.join(dirpath, name)
                        print('loading json file :', filename)
                        with open(filename) as f:
                            add = json.load(f)
                        data.extend(add)
                        print('loaded {} samples'.format(len(add)))
        # Drop samples whose feature/token lengths (or their ratio) fall
        # outside the configured bounds.
        list_to_pop = []
        for i, sample in enumerate(data):
            len_x = sample['feat_length']
            len_y = sample['token_length']
            if not (feat_range[0] <= len_x <= feat_range[1]) or \
                    not (label_range[0] <= len_y <= label_range[1]) or \
                    not (rate_in_out[0] <= (len_x / len_y) <= rate_in_out[1]):
                list_to_pop.append(i)
        print('filtered {}/{} samples\n'.format(len(list_to_pop), len(data)))
        # Pop from the end so earlier indices stay valid.  (Was a
        # side-effect-only list comprehension -- replaced with a plain loop.)
        for i in reversed(list_to_pop):
            data.pop(i)
        self.data = sorted(data, key=lambda x: float(x["feat_length"]))
        if reverse:
            self.data.reverse()
| import json
import os
import torch.utils.data as data
class TextLineByLineDataset(data.Dataset):
def __init__(self, fn):
super(TextLineByLineDataset, self).__init__()
with open(fn, 'r') as f:
self.data = f.read().strip().split('\n')
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return len(self.data)
class SpeechDataset(data.Dataset):
def __init__(self, data_json_path, reverse=False):
super().__init__()
with open(data_json_path, 'rb') as f:
data = json.load(f)
self.data = sorted(data, key=lambda x: float(x["duration"]))
if reverse:
self.data.reverse()
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return len(self.data)
class ArkDataset(SpeechDataset):
def __init__(self, json_path, reverse=False,
feat_range=(1, 99999), label_range=(1, 100), rate_in_out=(4,999)):
try:
# json_path is a single file
with open(json_path) as f:
data = json.load(f)
except:
# json_path is a dir where *.json in
data = []
for dir, _, fs in os.walk(json_path): # os.walk获取所有的目录
for f in fs:
if f.endswith('.json'): # 判断是否是".json"结尾
filename = os.path.join(dir, f)
print('loading json file :', filename)
with open(filename) as f:
add = json.load(f)
data.extend(add)
print('loaded {} samples'.format(len(add)))
# filter
list_to_pop = []
for i, sample in enumerate(data):
len_x = sample['feat_length']
len_y = sample['token_length']
if not (feat_range[0] <= len_x <= feat_range[1]) or \
not (label_range[0] <= len_y <= label_range[1]) or \
not (rate_in_out[0] <= (len_x / len_y) <= rate_in_out[1]):
list_to_pop.append(i)
print('filtered {}/{} samples\n'.format(len(list_to_pop), len(data)))
list_to_pop.reverse()
[data.pop(i) for i in list_to_pop]
self.data = sorted(data, key=lambda x: float(x["feat_length"]))
if reverse:
self.data.reverse() | en | 0.525281 | # json_path is a single file # json_path is a dir where *.json in # os.walk获取所有的目录 # 判断是否是".json"结尾 # filter | 2.68185 | 3 |
legacy/artie/apps/octopod/priv/test/pyctotest_pub_one_msg.py | MaxStrange/ArtieInfant | 1 | 6612774 | import pyctopod
def main():
    # Publish a single UTF-8-encoded test message to "test_topic",
    # identifying this script ("pyctotest_pub_one_msg") as the sender.
    pyctopod.publish("test_topic", "This is a Test".encode('utf8'), from_id="pyctotest_pub_one_msg")
def register_handler(pid):
    """Hook this script into pyctopod: register main() as the entry point and
    register the given pid as the message handler.

    NOTE(review): the exact semantics of pyctopod.register_main /
    pyctopod.register_handler are assumed from their names -- confirm
    against the pyctopod module.
    """
    pyctopod.register_main(main)
    pyctopod.register_handler(pid)
| import pyctopod
def main():
pyctopod.publish("test_topic", "This is a Test".encode('utf8'), from_id="pyctotest_pub_one_msg")
def register_handler(pid):
pyctopod.register_main(main)
pyctopod.register_handler(pid)
| none | 1 | 1.496911 | 1 | |
tinycal/tcal.py | pi314/tiny-cal | 5 | 6612775 | """
Core function of `tcal` command
"""
from __future__ import print_function
import re
import sys
from calendar import Calendar, SUNDAY, MONDAY
from datetime import date
from os.path import expanduser
from sys import stdout, stderr
from . import CALRCS
from .cli import parser
from .render import TinyCalRenderer, Cell
from .config import TinyCalConfig, Color
# Config-key fragments for the seven weekdays, indexed by date.weekday()
# (0 = Monday ... 6 = Sunday); used to build names like "color_monday".
weekday_codes = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']

# Localized display strings.
# 'weekday' holds the 7 day-column headers plus the week-number header (last slot).
# 'month' is 1-indexed: slot 0 is a placeholder so month numbers map directly.
# 'month_abbr' is optional; where absent, 'month' is used as the fallback.
LANG = {
    'en': {
        'weekday': ['Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa', 'Su', 'WK'],
        'month': ['<Error>',
                  'January', 'February', 'March',
                  'April', 'May', 'June',
                  'July', 'August', 'September',
                  'October', 'November', 'December'],
        'month_abbr': ['<Error>',
                       'Jan', 'Feb', 'Mar',
                       'Apr', 'May', 'Jun',
                       'Jul', 'Aug', 'Sep',
                       'Oct', 'Nov', 'Dec'],
    },
    'zh': {
        'weekday': ['一', '二', '三', '四', '五', '六', '日', '週'],
        'month': ['<Error>',
                  '1月', '2月', '3月',
                  '4月', '5月', '6月',
                  '7月', '8月', '9月',
                  '10月', '11月', '12月'],
    },
    'jp': {
        'weekday': ['月', '火', '水', '木', '金', '土', '日', '週'],
        'month': ['<Error>',
                  '睦月 (1月)', '如月 (2月)', '彌生 (3月)',
                  '卯月 (4月)', '皐月 (5月)', '水無月 (6月)',
                  '文月 (7月)', '葉月 (8月)', '長月 (9月)',
                  '神無月 (10月)', '霜月 (11月)', '師走 (12月)'],
    },
}

# One mark-file line: "YYYY/MM/DD <color-word>", optionally followed by spaces.
# Group 1 = the date, group 2 = the color spec (word chars and colons).
date_mark_regex = re.compile(r'^(\d\d\d\d/\d\d/\d\d) +([\w:]+) *')
def calculate_month_range(before, after, year, month):
    r"""
    >>> calculate_month_range(1, 1, 2018, 1)
    [datetime.date(2017, 12, 1), datetime.date(2018, 1, 1), datetime.date(2018, 2, 1)]
    """
    # Work with an absolute month index (year * 12 + zero-based month) so
    # year boundaries fall out of the div/mod instead of special cases.
    anchor = year * 12 + (month - 1)
    first_days = []
    for offset in range(-before, after + 1):
        absolute = anchor + offset
        first_days.append(date(absolute // 12, absolute % 12 + 1, 1))
    return first_days
def calculate_week_of_the_year(first_date_of_year, target_date):
    """Return the 1-based week number of ``target_date`` relative to
    ``first_date_of_year`` (days 0-6 are week 1, days 7-13 week 2, ...)."""
    elapsed_days = (target_date - first_date_of_year).days
    return elapsed_days // 7 + 1
def main():
    """Entry point: parse config + CLI args, build the calendar cells, render.

    Pipeline: load ~/.calrc-style config, overlay CLI args, resolve colors and
    date marks, compute the month range to show, fill per-month (or one
    contiguous) Cell objects week by week, then print the rendered result.
    """
    conf = TinyCalConfig.parse_conf(CALRCS)
    args = parser.parse_args()

    # --border accepts a mixed bag of tokens; split them into the three
    # separate settings (mode / line style / weld) before merging into conf.
    border_args = args.border
    args.border = None
    args.border_style = None
    args.border_weld = None
    for i in border_args:
        if i in ('full', 'basic', 'off', 'false'):
            args.border = i
        elif i in ('ascii', 'single', 'bold', 'double'):
            args.border_style = i
        elif i in ('weld', 'noweld'):
            args.border_weld = (i == 'weld')

    # Merge args and conf in-place into conf (CLI wins when provided)
    for k in vars(conf):
        if k in vars(args) and getattr(args, k) is not None:
            setattr(conf, k , getattr(args, k))

    # Normalize legacy boolean spellings of the border mode.
    if conf.border == 'true':
        conf.border = 'full'
    elif conf.border == 'false':
        conf.border = 'off'

    if conf.color_today_wk == TinyCalConfig.color_today_wk.default:
        # If today.wk.color is not configured, and wk.color.fg is configured
        # Re-assign today.wk.color to a brighter version of wk.color
        if conf.color_wk.fg != None and conf.color_wk.bg == None:
            conf.color_today_wk = conf.color_wk.upper()
        else:
            conf.color_today_wk = conf.color_wk

    # date -> Color for dates listed in the user's mark file.
    date_marks = {}
    if (args.color == 'never') or (args.color == 'auto' and not stdout.isatty()):
        # Disable coloring
        for k in vars(conf):
            if k.startswith('color_'):
                setattr(conf, k, Color(''))
    elif conf.marks:
        # Read date marking file
        try:
            with open(expanduser(conf.marks)) as marks_file:
                for line in marks_file:
                    m = date_mark_regex.match(line.strip())
                    if not m:
                        # Silently ignore invalid lines
                        continue
                    mark_date, mark_color = date(*map(int, m.group(1).split('/'))), m.group(2)
                    try:
                        date_marks[mark_date] = Color(mark_color)
                    except ValueError:
                        # Unparseable color spec: skip this mark.
                        pass
        except FileNotFoundError:
            print('Warning: Mark file "{}" does not exist'.format(conf.marks), file=stderr)

    calendar = Calendar(MONDAY if conf.start_monday else SUNDAY)
    monthdates = calendar.monthdatescalendar  # (year, month) -> weeks of dates
    today = args.today if args.today else date.today()
    # NOTE(review): today_wk appears unused below -- candidate for removal.
    today_wk = calculate_week_of_the_year(monthdates(today.year, 1)[0][0], today)

    # Calculate display range (from which month to which month)
    if args.year is not None and args.month is None:
        # Bare year: show all 12 months of that year.
        month_leading_dates = [date(args.year, month, 1) for month in range(1, 13)]
    else:
        year = args.year or today.year
        month = args.month or today.month
        before, after = (1, 1) if args.a1b1 else (conf.before, conf.after)
        month_leading_dates = calculate_month_range(before, after, year, month)

    # Create TinyCalRenderer object for rendering
    renderer = TinyCalRenderer(conf)

    # Colors are calculated *outside* the renderer
    # It's for contiguous mode
    def colorize_weekday(idx):
        # One day-of-week header cell; re-opens the row color after the
        # per-day color so the rest of the row stays styled.
        color_name = 'color_weekday_%s' % weekday_codes[idx]
        color = getattr(conf, color_name)
        string = LANG[conf.lang]['weekday'][idx]
        return color(string) + conf.color_weekday.code if color else string

    weekday_title = conf.color_weekday(' '.join(map(colorize_weekday, calendar.iterweekdays())))

    def colorize_wk(wk, contain_today=False):
        # Week-number column: ints are data rows (highlighted when the row
        # contains today), anything else is the header label.
        if isinstance(wk, int):
            if contain_today:
                c = conf.color_today_wk
            else:
                c = conf.color_wk
            return c('{:>2}'.format(wk))
        return conf.color_wk(wk)

    wk_title = colorize_wk(LANG[conf.lang]['weekday'][-1])

    month_range = [ld.month for ld in month_leading_dates]
    # Per-month queue of abbreviation fragments (padded with blanks) popped
    # one per rendered week in contiguous mode.
    month_abbr = {}
    for m in range(1, 13):
        month_abbr[m] = (LANG[conf.lang].get('month_abbr') or LANG[conf.lang]['month'])[m].split() + [''] * 5

    def colorize_day(day):
        # NOTE: reads ``ld`` from the enclosing scope -- it is the month loop
        # variable below, bound at call time (late binding is intentional).
        if (not args.cont and day.month != ld.month) or (args.cont and day.month not in month_range):
            # Out-of-range day: fill color, or a blank placeholder.
            # NOTE(review): days are rendered 2 chars wide ('{:>2}'); if this
            # filler literal is a single space the columns may misalign --
            # verify the literal against the upstream source.
            c = (conf.color_fill) if (conf.fill) else (lambda s: ' ')
        else:
            if day == today:
                c = conf.color_today
            elif day in date_marks:
                c = date_marks[day]
            else:
                c = getattr(conf, 'color_%s' % weekday_codes[day.weekday()])
        return c('{:>2}'.format(day.day))

    def get_month_abbr(month):
        # Next abbreviation fragment for the side column (contiguous mode).
        if month not in month_range:
            return ''
        else:
            return month_abbr[month].pop(0)

    if args.cont:
        # For contiguous mode, only 1 Cell obj needed
        cells = [Cell(conf)]
        f = month_leading_dates[0]
        t = month_leading_dates[-1]
        if f == t:
            cells[0].title = '{m} {y}'.format(m=LANG[conf.lang]['month'][f.month], y=f.year)
            # Single month: no abbreviation side column needed.
            def get_month_abbr(month):
                return ''
        else:
            cells[0].title = '{}/{:02} ~ {}/{:02}'.format(f.year, f.month, t.year, t.month)
        cells[0].weekday_title = weekday_title
        cells[0].wk_title = wk_title
    else:
        # For non-contiguous mode, every month has its own Cell obj
        cells = []
        for ld in month_leading_dates:
            cell = Cell(conf)
            cell.title = '{m} {y}'.format(m=LANG[conf.lang]['month'][ld.month], y=ld.year)
            cell.weekday_title = weekday_title
            cell.wk_title = wk_title
            cells.append(cell)
        # Per-month cells carry their own titles; no abbreviation column.
        def get_month_abbr(month):
            return ''

    # Put the days into cells, and cells into renderer
    last_cell = None
    last_week_leading_date = None
    for ld in month_leading_dates:
        for week in monthdates(ld.year, ld.month):
            # calculate week number
            if args.cont and ld.month != week[-1].month and ld.year != today.year:
                # Edge case, sometimes wk53 needs to be changed to wk01
                wk = calculate_week_of_the_year(monthdates(week[-1].year, 1)[0][-1], week[-1])
            else:
                # Normal case
                wk = calculate_week_of_the_year(monthdates(ld.year, 1)[0][0], week[0])
            # Highlight current week
            if (not args.cont and today.month != ld.month) or (args.cont and today.month not in month_range):
                wk_contain_today = False
            else:
                wk_contain_today = today in week
            # Dont append days into the same cell twice (ok for different cell)
            if (last_cell, last_week_leading_date) != (cells[0], week[0]):
                cells[0].append(
                    wk=colorize_wk(wk, contain_today=wk_contain_today),
                    days=' '.join([colorize_day(day) for day in week]),
                    month=get_month_abbr(week[-1].month),
                )
            last_week_leading_date = week[0]
            last_cell = cells[0]
        if len(cells) > 1:
            renderer.append(cells.pop(0))

    assert len(cells) == 1
    renderer.append(cells[0])
    print(renderer.render())
| """
Core function of `tcal` command
"""
from __future__ import print_function
import re
import sys
from calendar import Calendar, SUNDAY, MONDAY
from datetime import date
from os.path import expanduser
from sys import stdout, stderr
from . import CALRCS
from .cli import parser
from .render import TinyCalRenderer, Cell
from .config import TinyCalConfig, Color
weekday_codes = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
LANG = {
'en': {
'weekday': ['Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa', 'Su', 'WK'],
'month': ['<Error>',
'January', 'February', 'March',
'April', 'May', 'June',
'July', 'August', 'September',
'October', 'November', 'December'],
'month_abbr': ['<Error>',
'Jan', 'Feb', 'Mar',
'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep',
'Oct', 'Nov', 'Dec'],
},
'zh': {
'weekday': ['一', '二', '三', '四', '五', '六', '日', '週'],
'month': ['<Error>',
'1月', '2月', '3月',
'4月', '5月', '6月',
'7月', '8月', '9月',
'10月', '11月', '12月'],
},
'jp': {
'weekday': ['月', '火', '水', '木', '金', '土', '日', '週'],
'month': ['<Error>',
'睦月 (1月)', '如月 (2月)', '彌生 (3月)',
'卯月 (4月)', '皐月 (5月)', '水無月 (6月)',
'文月 (7月)', '葉月 (8月)', '長月 (9月)',
'神無月 (10月)', '霜月 (11月)', '師走 (12月)'],
},
}
date_mark_regex = re.compile(r'^(\d\d\d\d/\d\d/\d\d) +([\w:]+) *')
def calculate_month_range(before, after, year, month):
r"""
>>> calculate_month_range(1, 1, 2018, 1)
[datetime.date(2017, 12, 1), datetime.date(2018, 1, 1), datetime.date(2018, 2, 1)]
"""
return [date(year - (month <= i), (month - 1 - i) % 12 + 1, 1) for i in range(before, 0, -1)] + \
[date(year, month, 1)] + \
[date(year + (month + i > 12), (month - 1 + i) % 12 + 1, 1) for i in range(1, after+1)]
def calculate_week_of_the_year(first_date_of_year, target_date):
return (target_date - first_date_of_year).days // 7 + 1
def main():
conf = TinyCalConfig.parse_conf(CALRCS)
args = parser.parse_args()
border_args = args.border
args.border = None
args.border_style = None
args.border_weld = None
for i in border_args:
if i in ('full', 'basic', 'off', 'false'):
args.border = i
elif i in ('ascii', 'single', 'bold', 'double'):
args.border_style = i
elif i in ('weld', 'noweld'):
args.border_weld = (i == 'weld')
# Merge args and conf in-place into conf
for k in vars(conf):
if k in vars(args) and getattr(args, k) is not None:
setattr(conf, k , getattr(args, k))
if conf.border == 'true':
conf.border = 'full'
elif conf.border == 'false':
conf.border = 'off'
if conf.color_today_wk == TinyCalConfig.color_today_wk.default:
# If today.wk.color is not configured, and wk.color.fg is configured
# Re-assign today.wk.color to a brighter version of wk.color
if conf.color_wk.fg != None and conf.color_wk.bg == None:
conf.color_today_wk = conf.color_wk.upper()
else:
conf.color_today_wk = conf.color_wk
date_marks = {}
if (args.color == 'never') or (args.color == 'auto' and not stdout.isatty()):
# Disable coloring
for k in vars(conf):
if k.startswith('color_'):
setattr(conf, k, Color(''))
elif conf.marks:
# Read date marking file
try:
with open(expanduser(conf.marks)) as marks_file:
for line in marks_file:
m = date_mark_regex.match(line.strip())
if not m:
# Silently ignore invalid lines
continue
mark_date, mark_color = date(*map(int, m.group(1).split('/'))), m.group(2)
try:
date_marks[mark_date] = Color(mark_color)
except ValueError:
pass
except FileNotFoundError:
print('Warning: Mark file "{}" does not exist'.format(conf.marks), file=stderr)
calendar = Calendar(MONDAY if conf.start_monday else SUNDAY)
monthdates = calendar.monthdatescalendar
today = args.today if args.today else date.today()
today_wk = calculate_week_of_the_year(monthdates(today.year, 1)[0][0], today)
# Calculate display range (from which month to which month)
if args.year is not None and args.month is None:
month_leading_dates = [date(args.year, month, 1) for month in range(1, 13)]
else:
year = args.year or today.year
month = args.month or today.month
before, after = (1, 1) if args.a1b1 else (conf.before, conf.after)
month_leading_dates = calculate_month_range(before, after, year, month)
# Create TinyCalRenderer object for rendering
renderer = TinyCalRenderer(conf)
# Colors are calculated *outside* the renderer
# It's for contiguous mode
def colorize_weekday(idx):
color_name = 'color_weekday_%s' % weekday_codes[idx]
color = getattr(conf, color_name)
string = LANG[conf.lang]['weekday'][idx]
return color(string) + conf.color_weekday.code if color else string
weekday_title = conf.color_weekday(' '.join(map(colorize_weekday, calendar.iterweekdays())))
def colorize_wk(wk, contain_today=False):
if isinstance(wk, int):
if contain_today:
c = conf.color_today_wk
else:
c = conf.color_wk
return c('{:>2}'.format(wk))
return conf.color_wk(wk)
wk_title = colorize_wk(LANG[conf.lang]['weekday'][-1])
month_range = [ld.month for ld in month_leading_dates]
month_abbr = {}
for m in range(1, 13):
month_abbr[m] = (LANG[conf.lang].get('month_abbr') or LANG[conf.lang]['month'])[m].split() + [''] * 5
def colorize_day(day):
if (not args.cont and day.month != ld.month) or (args.cont and day.month not in month_range):
c = (conf.color_fill) if (conf.fill) else (lambda s: ' ')
else:
if day == today:
c = conf.color_today
elif day in date_marks:
c = date_marks[day]
else:
c = getattr(conf, 'color_%s' % weekday_codes[day.weekday()])
return c('{:>2}'.format(day.day))
def get_month_abbr(month):
if month not in month_range:
return ''
else:
return month_abbr[month].pop(0)
if args.cont:
# For contiguous mode, only 1 Cell obj needed
cells = [Cell(conf)]
f = month_leading_dates[0]
t = month_leading_dates[-1]
if f == t:
cells[0].title = '{m} {y}'.format(m=LANG[conf.lang]['month'][f.month], y=f.year)
def get_month_abbr(month):
return ''
else:
cells[0].title = '{}/{:02} ~ {}/{:02}'.format(f.year, f.month, t.year, t.month)
cells[0].weekday_title = weekday_title
cells[0].wk_title = wk_title
else:
# For non-contiguous mode, every month has its own Cell obj
cells = []
for ld in month_leading_dates:
cell = Cell(conf)
cell.title = '{m} {y}'.format(m=LANG[conf.lang]['month'][ld.month], y=ld.year)
cell.weekday_title = weekday_title
cell.wk_title = wk_title
cells.append(cell)
def get_month_abbr(month):
return ''
# Put the days into cells, and cells into renderer
last_cell = None
last_week_leading_date = None
for ld in month_leading_dates:
for week in monthdates(ld.year, ld.month):
# calculate week number
if args.cont and ld.month != week[-1].month and ld.year != today.year:
# Edge case, sometimes wk53 needs to be changed to wk01
wk = calculate_week_of_the_year(monthdates(week[-1].year, 1)[0][-1], week[-1])
else:
# Normal case
wk = calculate_week_of_the_year(monthdates(ld.year, 1)[0][0], week[0])
# Highlight current week
if (not args.cont and today.month != ld.month) or (args.cont and today.month not in month_range):
wk_contain_today = False
else:
wk_contain_today = today in week
# Dont append days into the same cell twice (ok for different cell)
if (last_cell, last_week_leading_date) != (cells[0], week[0]):
cells[0].append(
wk=colorize_wk(wk, contain_today=wk_contain_today),
days=' '.join([colorize_day(day) for day in week]),
month=get_month_abbr(week[-1].month),
)
last_week_leading_date = week[0]
last_cell = cells[0]
if len(cells) > 1:
renderer.append(cells.pop(0))
assert len(cells) == 1
renderer.append(cells[0])
print(renderer.render())
| en | 0.79819 | Core function of `tcal` command >>> calculate_month_range(1, 1, 2018, 1) [datetime.date(2017, 12, 1), datetime.date(2018, 1, 1), datetime.date(2018, 2, 1)] # Merge args and conf in-place into conf # If today.wk.color is not configured, and wk.color.fg is configured # Re-assign today.wk.color to a brighter version of wk.color # Disable coloring # Read date marking file # Silently ignore invalid lines # Calculate display range (from which month to which month) # Create TinyCalRenderer object for rendering # Colors are calculated *outside* the renderer # It's for contiguous mode # For contiguous mode, only 1 Cell obj needed # For non-contiguous mode, every month has its own Cell obj # Put the days into cells, and cells into renderer # calculate week number # Edge case, sometimes wk53 needs to be changed to wk01 # Normal case # Highlight current week # Dont append days into the same cell twice (ok for different cell) | 1.988184 | 2 |
farm-manager/status-report/chkrate.py | binsgit/avalon-extras | 0 | 6612776 | <reponame>binsgit/avalon-extras
#!/usr/bin/env python
from __future__ import print_function
import datetime
import sys
from poolrate import poolrate
def chkrate(data,data0,cfg,time,time0):
    """Build the hashrate history series and append the current sample.

    Parameters (semantics inferred from usage -- confirm against callers):
      data / data0 : current / previous per-miner status snapshots; each row
                     is [header, miner, miner, ...] and each miner entry has
                     [?, status, uptime_seconds, hashes] at indices 0-3.
                     None means "no snapshot available".
      cfg          : parsed config; uses cfg['General']['hashrate_log'],
                     cfg['HSplot']['delay_time'], cfg['miner_list'],
                     cfg['port_list'], plus whatever poolrate() reads.
      time / time0 : datetime of the current / previous poll.

    Returns [t, v1, v2, vp, vps]: time offsets (seconds relative to `time`)
    and the matching local hashrate (two estimates), pool rate and pool sum
    rate series.  Returns None when ``data`` is None.
    """
    if data !=None:
        print('Calculating hashrate ... ',end="")
        sys.stdout.flush()
        # Only replay log entries newer than 26 hours before `time`.
        deltaT = datetime.timedelta(hours=26)
        t = []    # seconds relative to `time` (negative = past)
        v1 = []   # hashrate estimate 1 (per-miner uptime based)
        v2 = []   # hashrate estimate 2 (wall-clock interval based)
        vp = []   # pool-reported rate
        vps = []  # pool-reported cumulative/sum rate
        try:
            # Log line format: "Y_m_d_H_M;v1;v2;pool_rate;sum_pool_rate"
            logfile = open(cfg['General']['hashrate_log'], 'r')
            time_flag = False  # True once entries fall inside the 26h window
            for line in logfile:
                tmp = line.split(';')
                if time_flag:
                    t.append((datetime.datetime.strptime(tmp[0],"%Y_%m_%d_%H_%M")-time).total_seconds())
                    v1.append(float(tmp[1]))
                    v2.append(float(tmp[2]))
                    vp.append(float(tmp[3]))
                    vps.append(float(tmp[4]))
                    continue
                if datetime.datetime.strptime(tmp[0],"%Y_%m_%d_%H_%M") + deltaT > time:
                    time_flag = True
            logfile.close()
        except IOError:
            # Log file missing: create an empty one and start fresh.
            logfile = open(cfg['General']['hashrate_log'], 'w')
            logfile.close()
            pass
        if data0 != None:
            # Current sample sits at offset 0.
            t.append(0)
            dt = (time - time0).total_seconds()
            # Per-miner/per-port accumulators, shaped like cfg's miner/port lists:
            # v1u/v2u = rate estimates, h/h0 = hash counts now/before,
            # tt/tt0 = uptimes now/before.
            v1u = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            v2u = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            h = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            h0 = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            tt = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            tt0 = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            i = 0
            v1n = 0  # fleet-wide total for estimate 1
            v2n = 0  # fleet-wide total for estimate 2
            for mminer in data:
                j = 0
                # NOTE(review): the bare try/except:pass blocks below are a
                # deliberate best-effort pattern (missing/short previous
                # snapshots leave the 0 defaults in place), but they would
                # also hide real errors -- consider narrowing the exceptions.
                try:mminer0 = data0[i]
                except:pass
                for miner in mminer[1:]:
                    try:miner0 = mminer0[j+1]
                    except:pass
                    if miner[1] != "Dead":
                        h[i][j] = float(miner[3])
                        try:h0[i][j] = float(miner0[3])
                        except:pass
                        tt[i][j] = float(miner[2])
                        try:tt0[i][j] = float(miner0[2])
                        except:pass
                        if tt[i][j] - tt0[i][j] > dt - int(cfg['HSplot']['delay_time']):
                            # Miner ran through (almost) the whole interval:
                            # use the delta between the two snapshots.
                            v1u[i][j] = (h[i][j]-h0[i][j])/(tt[i][j]-tt0[i][j])
                            v2u[i][j] = (h[i][j]-h0[i][j])/(tt[i][j]-tt0[i][j])
                        elif miner[2] != '0':
                            # Miner (re)started mid-interval: average its
                            # current counters over uptime / wall clock.
                            v1u[i][j] = h[i][j]/tt[i][j]
                            v2u[i][j] = h[i][j]/dt
                        else:
                            # Zero uptime: leave the 0 defaults.
                            pass
                        v1n += v1u[i][j]
                        v2n += v2u[i][j]
                    j += 1
                i += 1
            v1.append(v1n)
            v2.append(v2n)
            print('Done.')
            print('Fetching pool hashrate data ... ',end="")
            sys.stdout.flush()
            sum_pool_rate, pool_rate = poolrate(cfg)
            print('Done.')
            vp.append(float(pool_rate))
            vps.append(float(sum_pool_rate))
            # Persist the new sample for future runs.
            logfile = open(cfg['General']['hashrate_log'], 'a')
            logfile.write(time.strftime('%Y_%m_%d_%H_%M')+';'+str(v1n)+';'+str(v2n)+';'+str(pool_rate)+';'+str(sum_pool_rate)+'\n')
            logfile.close()
        return [t,v1,v2,vp,vps]
| #!/usr/bin/env python
from __future__ import print_function
import datetime
import sys
from poolrate import poolrate
def chkrate(data,data0,cfg,time,time0):
    """Build miner and pool hashrate history series for plotting.

    :param data: current miner status snapshot (per machine, per port);
        when None the whole body is skipped.
    :param data0: previous snapshot, used to difference share counters.
    :param cfg: config mapping; reads cfg['General']['hashrate_log'],
        cfg['port_list'], cfg['miner_list'], cfg['HSplot']['delay_time'].
    :param time: datetime of the current sample (shadows the ``time``
        module inside this function on purpose).
    :param time0: datetime of the previous sample.
    :returns: [t, v1, v2, vp, vps] time offsets (seconds) and rate series.
    """
    if data !=None:
        print('Calculating hashrate ... ',end="")
        sys.stdout.flush()
        # Only replay logged samples from the last 26 hours.
        deltaT = datetime.timedelta(hours=26)
        t = []
        v1 = []   # rate derived from miner-reported counters/uptime
        v2 = []   # rate derived from the wall-clock sample interval
        vp = []   # pool-reported rate
        vps = []  # pool-reported sum rate
        try:
            # Replay historic samples from the semicolon-separated log.
            logfile = open(cfg['General']['hashrate_log'], 'r')
            time_flag = False
            for line in logfile:
                tmp = line.split(';')
                if time_flag:
                    t.append((datetime.datetime.strptime(tmp[0],"%Y_%m_%d_%H_%M")-time).total_seconds())
                    v1.append(float(tmp[1]))
                    v2.append(float(tmp[2]))
                    vp.append(float(tmp[3]))
                    vps.append(float(tmp[4]))
                    continue
                # Skip leading lines until the 26h window is reached.
                if datetime.datetime.strptime(tmp[0],"%Y_%m_%d_%H_%M") + deltaT > time:
                    time_flag = True
            logfile.close()
        except IOError:
            # No log yet: create an empty one and continue.
            logfile = open(cfg['General']['hashrate_log'], 'w')
            logfile.close()
            pass
        if data0 != None:
            t.append(0)
            dt = (time - time0).total_seconds()
            # Per-machine / per-port working matrices, shaped after the
            # configured miner and port lists.
            v1u = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            v2u = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            h = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            h0 = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            tt = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            tt0 = [[0 for i2 in range(0,len(cfg['port_list'][i1]))]for i1 in range(0,len(cfg['miner_list']))]
            i = 0
            v1n = 0
            v2n = 0
            for mminer in data:
                j = 0
                # NOTE(review): bare except silently tolerates a missing
                # previous snapshot entry; h0/tt0 then stay 0.
                try:mminer0 = data0[i]
                except:pass
                for miner in mminer[1:]:
                    try:miner0 = mminer0[j+1]
                    except:pass
                    if miner[1] != "Dead":
                        h[i][j] = float(miner[3])    # counter value -- presumably shares/hashes; confirm
                        try:h0[i][j] = float(miner0[3])
                        except:pass
                        tt[i][j] = float(miner[2])   # presumably miner uptime in seconds; confirm
                        try:tt0[i][j] = float(miner0[2])
                        except:pass
                        if tt[i][j] - tt0[i][j] > dt - int(cfg['HSplot']['delay_time']):
                            # Counter advanced along with the sample window:
                            # use the delta between snapshots.
                            v1u[i][j] = (h[i][j]-h0[i][j])/(tt[i][j]-tt0[i][j])
                            v2u[i][j] = (h[i][j]-h0[i][j])/(tt[i][j]-tt0[i][j])
                        elif miner[2] != '0':
                            # Delta unusable (presumably after a miner
                            # restart): fall back to absolute totals.
                            v1u[i][j] = h[i][j]/tt[i][j]
                            v2u[i][j] = h[i][j]/dt
                        else:
                            pass
                    v1n += v1u[i][j]
                    v2n += v2u[i][j]
                    j += 1
                i += 1
            v1.append(v1n)
            v2.append(v2n)
        print('Done.')
        print('Fetching pool hashrate data ... ',end="")
        sys.stdout.flush()
        sum_pool_rate, pool_rate = poolrate(cfg)
        print('Done.')
        vp.append(float(pool_rate))
        vps.append(float(sum_pool_rate))
        # NOTE(review): v1n/v2n are undefined here when data0 is None --
        # this write would raise NameError on the very first sample.
        logfile = open(cfg['General']['hashrate_log'], 'a')
        logfile.write(time.strftime('%Y_%m_%d_%H_%M')+';'+str(v1n)+';'+str(v2n)+';'+str(pool_rate)+';'+str(sum_pool_rate)+'\n')
        logfile.close()
return [t,v1,v2,vp,vps] | ru | 0.26433 | #!/usr/bin/env python | 2.417656 | 2 |
magnebot/wheel.py | neuroailab/magnebot | 3 | 6612777 | from enum import Enum
# Enumerate the Magnebot's wheels; the values double as stable indices.
Wheel = Enum("Wheel", [("wheel_left_front", 0),
                       ("wheel_left_back", 1),
                       ("wheel_right_front", 2),
                       ("wheel_right_back", 3)])
Wheel.__doc__ = """
    The name of each wheel on the Magnebot.
    """
| from enum import Enum
class Wheel(Enum):
    """
    The name of each wheel on the Magnebot.
    """

    # Values are stable indices used to address each wheel.
    wheel_left_front = 0
    wheel_left_back = 1
    wheel_right_front = 2
    wheel_right_back = 3
| en | 0.743271 | The name of each wheel on the Magnebot. | 3.246895 | 3 |
src/widgets.py | ENDERZOMBI102/BEEPackageMaker | 0 | 6612778 | <reponame>ENDERZOMBI102/BEEPackageMaker
import wx
class TextButton(wx.Window):
    """A composite widget: a text entry with a button beside it."""

    txt: wx.TextCtrl
    btn: wx.Button

    def __init__(self, parent: wx.Window, label='', value='', name=''):
        super().__init__(parent=parent, name=name)
        # Horizontal layout: text field on the left, button on the right.
        layout = wx.BoxSizer()
        self.txt = wx.TextCtrl(parent=self, value=value)
        layout.Add(self.txt)
        self.btn = wx.Button(parent=self, label=label)
        layout.Add(self.btn)
        self.SetSizer(layout)
        self.Show()
| import wx
class TextButton(wx.Window):
    """A composite wx widget: a text field followed by a button."""

    # populated in __init__
    txt: wx.TextCtrl
    btn: wx.Button

    def __init__( self, parent: wx.Window, label='', value='', name='' ):
        # initialize the underlying window under *parent*
        super( TextButton, self ).__init__(
            parent=parent,
            name=name
        )
        # horizontal sizer: text control first (left), then the button
        sizer = wx.BoxSizer()
        self.txt = wx.TextCtrl(
            parent=self,
            value=value
        )
        sizer.Add( self.txt )
        self.btn = wx.Button(
            parent=self,
            label=label
        )
        sizer.Add( self.btn )
        self.SetSizer( sizer )
self.Show() | none | 1 | 3.009019 | 3 | |
app.py | Lioncat2002/SubjectAPI.py | 0 | 6612779 | from typing import Optional
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
# CORS: allow requests from any origin (development-friendly default).
origins=["*"]
app=FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Known subjects per faculty; used by read_subject() for classification.
science_subs=['physics','chemistry','maths','biology','computer']
arts_subs=['history','geography']
@app.get('/')
def read_root():
    """Root endpoint: return a fixed greeting payload."""
    payload = dict(Home='World')
    return payload
@app.get('/subjects/{subjects}')
def read_subject(subjects:str):
    """Classify a comma-separated subject list as Science or Arts.

    Counts how many known science/arts subjects appear in the list;
    ties go to Science.
    """
    chosen = subjects.split(',')
    science_hits = sum(1 for s in science_subs if s in chosen)
    arts_hits = sum(1 for s in arts_subs if s in chosen)
    if science_hits >= arts_hits:
        return {'sub':'Science'}
    return {'sub':'Arts'}
| from typing import Optional
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
# CORS: allow requests from any origin (development-friendly default).
origins=["*"]
app=FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Known subjects per faculty; used by read_subject() for classification.
science_subs=['physics','chemistry','maths','biology','computer']
arts_subs=['history','geography']
@app.get('/')
def read_root():
    """Root endpoint: returns a static greeting payload."""
    return {'Home':'World'}
@app.get('/subjects/{subjects}')
def read_subject(subjects:str):
    """Classify a comma-separated subject list as Science or Arts.

    Counts how many known science/arts subjects appear in the list;
    ties go to Science (>=).
    """
    subs=subjects.split(',')
    # sum() over booleans counts how many known subjects are present
    science_nums=sum(i in subs for i in science_subs)
    arts_nums=sum(i in subs for i in arts_subs)
    if science_nums>=arts_nums:
        return {'sub':'Science'}
    else:
        return {'sub':'Arts'}
| none | 1 | 3.001164 | 3 | |
SH-Plotting.py | aspilotros/YouTube_views_forecasting | 0 | 6612780 | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 24 14:51:27 2017
@author: Alessandro
"""
import pandas as pd
import numpy as np
import plotly
import plotly.plotly as py
from plotly.graph_objs import *
#%% Importing the data
# Load per-video daily view counts (one row per video, one column per day).
df = pd.read_csv("C:/Users/Alessandro/windows-share/Portfolio/DATA/df_views_30d.csv")
#%%
# Cumulative views per video over time (running sum across day columns).
df_cum = df.cumsum(axis=1)
#%% Plotting views on logarithmic scale
patterns = df_cum
# Sample every 10th video: column 7 (~1 week) vs column 29 (~1 month).
X=patterns.iloc[0:500000:10,7].values
Y=patterns.iloc[0:500000:10,29].values
# Identity reference line (equal popularity after 7 and 30 days).
line_x = np.linspace(0.5, 50000000, 10000)
line_y = line_x
#np.corrcoef()
trace=list()
# Scatter of week-1 vs month-1 popularity.
trace.append(
    Scatter(
        x=X,
        y=Y,
        mode="markers"
    )
)
# Dashed identity line as a visual reference.
trace.append(
    Scatter(
        x=line_x,
        y=line_y,
        # mode="lines",
        line = dict(width = 2, dash= 'dash')
    )
)
data = Data(trace)
layout = plotly.graph_objs.Layout(
    title = '1st week vs. 1st month',
    showlegend=False,
    font=dict(family='Cambria', size=18, color='#7f7f7f'),
    # Annotate the correlation coefficient (value presumably computed
    # separately, e.g. via np.corrcoef -- see commented call above).
    annotations=[
        dict(
            x=6,
            y=7.5,
            xref='x',
            yref='y',
            text='r = 0.96',
            showarrow=True,
            arrowhead=7,
            ax=0,
            ay=0
        )
    ],
    # Log-log axes: popularity spans several orders of magnitude.
    xaxis=dict(
        type='log',
        autorange=True,
        title = "Popularity after 7 days",
    ),
    yaxis=dict(
        type='log',
        autorange=True,
        title = "Popularity after 30 days"
    )
)
fig = plotly.graph_objs.Figure(data=data, layout=layout)
# Render to an offline HTML file named after 'SH-line'.
plotly.offline.plot(fig, filename = 'SH-line')
| # -*- coding: utf-8 -*-
"""
Created on Sat Jun 24 14:51:27 2017
@author: Alessandro
"""
import pandas as pd
import numpy as np
import plotly
import plotly.plotly as py
from plotly.graph_objs import *
#%% Importing the data
df = pd.read_csv("C:/Users/Alessandro/windows-share/Portfolio/DATA/df_views_30d.csv")
#%%
df_cum = df.cumsum(axis=1)
#%% Plotting views on logarithmic scale
patterns = df_cum
X=patterns.iloc[0:500000:10,7].values
Y=patterns.iloc[0:500000:10,29].values
line_x = np.linspace(0.5, 50000000, 10000)
line_y = line_x
#np.corrcoef()
trace=list()
trace.append(
Scatter(
x=X,
y=Y,
mode="markers"
)
)
trace.append(
Scatter(
x=line_x,
y=line_y,
# mode="lines",
line = dict(width = 2, dash= 'dash')
)
)
data = Data(trace)
layout = plotly.graph_objs.Layout(
title = '1st week vs. 1st month',
showlegend=False,
font=dict(family='Cambria', size=18, color='#7f7f7f'),
annotations=[
dict(
x=6,
y=7.5,
xref='x',
yref='y',
text='r = 0.96',
showarrow=True,
arrowhead=7,
ax=0,
ay=0
)
],
xaxis=dict(
type='log',
autorange=True,
title = "Popularity after 7 days",
),
yaxis=dict(
type='log',
autorange=True,
title = "Popularity after 30 days"
)
)
fig = plotly.graph_objs.Figure(data=data, layout=layout)
plotly.offline.plot(fig, filename = 'SH-line')
| en | 0.569092 | # -*- coding: utf-8 -*- Created on Sat Jun 24 14:51:27 2017
@author: Alessandro #%% Importing the data #%% #%% Plotting views on logarithmic scale #np.corrcoef() # mode="lines", | 2.680531 | 3 |
day1/data_type.py | Flevian/andelabootcamp16 | 0 | 6612781 | def data_types(variablee):
if isinstance(variable, str):
return len(variable)
elif isinstance(variable, bool):
return ("Boolean")
elif isinstance(variable, int):
if variable < 100:
return "Less than 100"
elif variable > 100:
return "Greater than 100"
else:
return "Equal to 100"
elif isinstance(variable, list):
return (variable[3])
elif isnotinstance(variable, str):
return "None"
else:
return "invalid input"
variable = 59
print(data_types(variable))
| def data_types(variablee):
    # NOTE(review): the body reads the module-level global `variable`
    # instead of the parameter `variablee`; it only works because the
    # script defines `variable` below before calling.
    if isinstance(variable, str):
        return len(variable)          # strings: report their length
    elif isinstance(variable, bool):  # bool before int: bool subclasses int
        return ("Boolean")
    elif isinstance(variable, int):
        if variable < 100:
            return "Less than 100"
        elif variable > 100:
            return "Greater than 100"
        else:
            return "Equal to 100"
    elif isinstance(variable, list):
        return (variable[3])          # 4th element; IndexError on shorter lists
    elif isnotinstance(variable, str):
        # BUG: `isnotinstance` is undefined -- this branch raises NameError
        # for any value not handled above (e.g. float, dict, None).
        return "None"
    else:
        return "invalid input"        # unreachable given the branch above
variable = 59
print(data_types(variable))
| none | 1 | 3.740429 | 4 | |
led-ring/spin-spi.py | lyneca/microbit-demos | 0 | 6612782 | <reponame>lyneca/microbit-demos<filename>led-ring/spin-spi.py
from microbit import *
import time
import machine
b = bytearray(13*2)
def set_level(n, l):
    """Store brightness *l* for ring position *n* into the SPI buffer ``b``.

    The odd-numbered bits of *l* (bits 1, 3, 5, 7) are packed into a 4-bit
    value.  Buffer slot 12 is skipped in the wire layout, so positions at
    or beyond 12 shift up by one extra byte.
    """
    nibble = ((l >> 1) & 1) | ((l >> 2) & 2) | ((l >> 3) & 4) | ((l >> 4) & 8)
    slot = n + 1 if n < 12 else n + 2
    b[slot] = nibble
def update():
    """Transmit the LED buffer ``b`` over SPI, then pulse the latch line.

    The raw memory pokes below hit nRF51 peripheral registers directly
    (presumably SPI ENABLE and GPIO set/clear byte lanes -- confirm
    against the nRF51 reference manual before changing).
    """
    spi.init(baudrate=100000, bits=8, mode=0, sclk=pin0, mosi=pin13, miso=None)
    spi.write(b)
    # spi1->enable = 0
    machine.mem8[0x40004500] = 0
    # pin 13 = 0
    machine.mem8[0x5000050e] |= 0b10000000
    for i in range(4):
        # pin13 = 1
        machine.mem8[0x5000050a] |= 0b10000000
        # pin13 = 0
        machine.mem8[0x5000050e] |= 0b10000000
# Animate a "comet": a bright head at position i with a symmetric dim
# halo, rotating around the 24-LED ring one step per iteration.
while True:
    for i in range(24):
        set_level((i + 21) % 24, 0)    # clear the trailing edge
        set_level((i + 22) % 24, 2)
        set_level((i + 23) % 24, 10)
        set_level((i + 24) % 24, 255)  # bright head at position i
        set_level((i + 25) % 24, 10)
        set_level((i + 26) % 24, 2)
        set_level((i + 27) % 24, 0)    # clear the leading edge
        update()
        sleep(10)                      # micro:bit sleep is in milliseconds
| from microbit import *
import time
import machine
b = bytearray(13*2)
def set_level(n, l):
l = ((l >> 1) & 1) | ((l >> 2) & 2) | ((l >> 3) & 4) | ((l >> 4) & 8)
if n >= 12:
n += 1
b[(n + 1)] = l
def update():
spi.init(baudrate=100000, bits=8, mode=0, sclk=pin0, mosi=pin13, miso=None)
spi.write(b)
# spi1->enable = 0
machine.mem8[0x40004500] = 0
# pin 13 = 0
machine.mem8[0x5000050e] |= 0b10000000
for i in range(4):
# pin13 = 1
machine.mem8[0x5000050a] |= 0b10000000
# pin13 = 0
machine.mem8[0x5000050e] |= 0b10000000
while True:
for i in range(24):
set_level((i + 21) % 24, 0)
set_level((i + 22) % 24, 2)
set_level((i + 23) % 24, 10)
set_level((i + 24) % 24, 255)
set_level((i + 25) % 24, 10)
set_level((i + 26) % 24, 2)
set_level((i + 27) % 24, 0)
update()
sleep(10) | en | 0.542721 | # spi1->enable = 0 # pin 13 = 0 # pin13 = 1 # pin13 = 0 | 2.819776 | 3 |
jsonschema/tests/test_disallow.py | michilu/lightweight-schema | 3 | 6612783 | <reponame>michilu/lightweight-schema
#!/usr/bin/env python
#:coding=utf-8:
#:tabSize=2:indentSize=2:noTabs=true:
#:folding=explicit:collapseFolds=1:
from unittest import TestCase
import jsonschema
class TestDisallow(TestCase):
def test_integer(self):
for x in [1, 89, 48, 32, 49, 42]:
try:
jsonschema.validate(x, {"disallow":"integer"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, "bad", {"test":"blah"}, [32, 49], None, True]:
try:
jsonschema.validate(x, {"disallow":"integer"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_string(self):
for x in ["surrender?", "nuts!", "ok", "@hsuha", "\'ok?\'", "blah"]:
try:
jsonschema.validate(x, {"disallow":"string"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, 1, {"test":"blah"}, [32, 49], None, True]:
try:
jsonschema.validate(x, {"disallow":"string"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_number(self):
for x in [1.2, 89.42, 48.5224242, 32, 49, 42.24324]:
try:
jsonschema.validate(x, {"disallow":"number"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in ["bad", {"test":"blah"}, [32.42, 494242], None, True]:
try:
jsonschema.validate(x, {"disallow":"number"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_boolean(self):
for x in [True, False]:
try:
jsonschema.validate(x, {"disallow":"boolean"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, "False", {"test":"blah"}, [32, 49], None, 1, 0]:
try:
jsonschema.validate(x, {"disallow":"boolean"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_object(self):
for x in [{"blah": "test"}, {"this":{"blah":"test"}}, {1:2, 10:20}]:
try:
jsonschema.validate(x, {"disallow":"object"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, "bad", 123, [32, 49], None, True]:
try:
jsonschema.validate(x, {"disallow":"object"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_array(self):
for x in [[1, 89], [48, {"test":"blah"}, "49", 42]]:
try:
jsonschema.validate(x, {"disallow":"array"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, "bad", {"test":"blah"}, 1234, None, True]:
try:
jsonschema.validate(x, {"disallow":"array"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_null(self):
try:
jsonschema.validate(None, {"disallow":"null"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, "bad", {"test":"blah"}, [32, 49], 1284, True]:
try:
jsonschema.validate(x, {"disallow":"null"})
except ValueError:
self.fail("Unexpected failure: %s" % e)
def test_any(self):
#test "any" and default value
for x in [1.2, "bad", {"test":"blah"}, [32, 49], None, 1284, True]:
try:
jsonschema.validate(x, {"disallow":"any"})
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
def test_multi(self):
schema = {"disallow":["null", "integer", "string"]}
try:
jsonschema.validate(None, schema)
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
try:
jsonschema.validate(183, schema)
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
try:
jsonschema.validate("mystring", schema)
except ValueError:
pass
else:
self.fail("Expected failure for %s" % repr(None))
#failures
for x in [1.2, {"test":"blah"}, [32, 49], True]:
try:
jsonschema.validate(x, schema)
except ValueError:
self.fail("Unexpected failure: %s" % e) | #!/usr/bin/env python
#:coding=utf-8:
#:tabSize=2:indentSize=2:noTabs=true:
#:folding=explicit:collapseFolds=1:
from unittest import TestCase
import jsonschema
class TestDisallow(TestCase):
    """Tests for the jsonschema "disallow" keyword.

    Each test checks both directions: values matching the disallowed type
    must be rejected with ValueError, and all other values must validate.

    Fixes over the original:

    * the "unexpected failure" handlers used ``except ValueError:`` without
      ``as e`` and then referenced ``e`` -- a NameError instead of a useful
      failure message;
    * the "expected failure" message reported ``repr(None)`` instead of the
      offending value.
    """

    def _assert_all_rejected(self, values, schema):
        """Every value in *values* must fail validation against *schema*."""
        for x in values:
            try:
                jsonschema.validate(x, schema)
            except ValueError:
                pass
            else:
                self.fail("Expected failure for %s" % repr(x))

    def _assert_all_accepted(self, values, schema):
        """Every value in *values* must validate against *schema*."""
        for x in values:
            try:
                jsonschema.validate(x, schema)
            except ValueError as e:
                self.fail("Unexpected failure: %s" % e)

    def test_integer(self):
        self._assert_all_rejected([1, 89, 48, 32, 49, 42],
                                  {"disallow": "integer"})
        self._assert_all_accepted([1.2, "bad", {"test": "blah"}, [32, 49], None, True],
                                  {"disallow": "integer"})

    def test_string(self):
        self._assert_all_rejected(["surrender?", "nuts!", "ok", "@hsuha", "\'ok?\'", "blah"],
                                  {"disallow": "string"})
        self._assert_all_accepted([1.2, 1, {"test": "blah"}, [32, 49], None, True],
                                  {"disallow": "string"})

    def test_number(self):
        self._assert_all_rejected([1.2, 89.42, 48.5224242, 32, 49, 42.24324],
                                  {"disallow": "number"})
        self._assert_all_accepted(["bad", {"test": "blah"}, [32.42, 494242], None, True],
                                  {"disallow": "number"})

    def test_boolean(self):
        self._assert_all_rejected([True, False], {"disallow": "boolean"})
        self._assert_all_accepted([1.2, "False", {"test": "blah"}, [32, 49], None, 1, 0],
                                  {"disallow": "boolean"})

    def test_object(self):
        self._assert_all_rejected([{"blah": "test"}, {"this": {"blah": "test"}}, {1: 2, 10: 20}],
                                  {"disallow": "object"})
        self._assert_all_accepted([1.2, "bad", 123, [32, 49], None, True],
                                  {"disallow": "object"})

    def test_array(self):
        self._assert_all_rejected([[1, 89], [48, {"test": "blah"}, "49", 42]],
                                  {"disallow": "array"})
        self._assert_all_accepted([1.2, "bad", {"test": "blah"}, 1234, None, True],
                                  {"disallow": "array"})

    def test_null(self):
        self._assert_all_rejected([None], {"disallow": "null"})
        self._assert_all_accepted([1.2, "bad", {"test": "blah"}, [32, 49], 1284, True],
                                  {"disallow": "null"})

    def test_any(self):
        # "any" disallows every value (also exercises the default).
        self._assert_all_rejected([1.2, "bad", {"test": "blah"}, [32, 49], None, 1284, True],
                                  {"disallow": "any"})

    def test_multi(self):
        schema = {"disallow": ["null", "integer", "string"]}
        self._assert_all_rejected([None, 183, "mystring"], schema)
        self._assert_all_accepted([1.2, {"test": "blah"}, [32, 49], True], schema)
simtrans/vrml.py | RyodoTanaka/simtrans | 11 | 6612784 | # -*- coding:utf-8 -*-
"""Reader and writer for VRML format
:Organization:
AIST
Requirements
------------
* numpy
* omniorb-python
* jinja2 template engine
Examples
--------
Read vrml model data given the file path
>>> r = VRMLReader()
>>> m = r.read(os.path.expandvars('$OPENHRP_MODEL_PATH/closed-link-sample.wrl'))
Write simulation model in VRML format
>>> import subprocess
>>> subprocess.call('rosrun xacro xacro.py `rospack find atlas_description`/robots/atlas_v3.urdf.xacro > /tmp/atlas.urdf', shell=True)
0
>>> from . import urdf
>>> r = urdf.URDFReader()
>>> m = r.read('/tmp/atlas.urdf')
>>> w = VRMLWriter()
>>> w.write(m, '/tmp/atlas.wrl')
>>> from . import sdf
>>> r = sdf.SDFReader()
>>> m = r.read('model://pr2/model.sdf')
>>> w = VRMLWriter()
>>> w.write(m, '/tmp/pr2.wrl')
"""
from . import model
from . import utils
import os
import sys
import time
import subprocess
import atexit
import logging
import warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore')
from .thirdparty import transformations as tf
import math
import numpy
import copy
import jinja2
import uuid
try:
import CORBA
import CosNaming
import OpenHRP
except ImportError:
print "Unable to find CORBA and OpenHRP library."
print "You can install the library by:"
print "$ sudo add-apt-repository ppa:hrg/daily"
print "$ sudo apt-get update"
print "$ sudo apt-get install openhrp openrtm-aist-python"
pass
plist = []
def terminator():
    """atexit hook: terminate every helper subprocess this module spawned."""
    for proc in plist:
        proc.terminate()
atexit.register(terminator)
class VRMLReader(object):
'''
VRML reader class
'''
    def __init__(self):
        # Initialize the ORB pointed at the local CORBA name service
        # (localhost:2809); the ModelLoader itself is resolved lazily.
        self._orb = CORBA.ORB_init([sys.argv[0],
                                    "-ORBInitRef",
                                    "NameService=corbaloc::localhost:2809/NameService"],
                                   CORBA.ORB_ID)
        self._loader = None      # OpenHRP.ModelLoader proxy (set by resolveModelLoader)
        self._ns = None          # CosNaming.NamingContext (set by resolveModelLoader)
        self._model = None       # BodyInfo of the last loaded model
        self._joints = []
        self._links = []
        self._linknamemap = {}   # OpenHRP link name -> converted link name
        self._linknamemap['world'] = 'world'
        self._materials = []
        self._sensors = []
        self._assethandler = None
    def read(self, f, assethandler=None, options=None):
        '''
        Read vrml model data given the file path.

        :param f: path to the OpenHRP VRML model file
        :param assethandler: optional callable mapping a texture path to a
            new location (applied in readMesh)
        :param options: accepted for interface symmetry; not used here
        :returns: populated model.BodyModel
        '''
        self._assethandler = assethandler
        try:
            self.resolveModelLoader()
            self._loader.clearData()
        except (CosNaming.NamingContext.NotFound, CORBA.TRANSIENT):
            # Loader not reachable: spawn openhrp-model-loader ourselves
            # and retry for up to ~5 seconds.
            logging.info("try running openhrp-model-loader")
            plist.append(subprocess.Popen(["openhrp-model-loader"]))
            for t in range(0, 6):
                if t == 5:
                    logging.error("unable to find openhrp-model-loader")
                    raise CosNaming.NamingContext.NotFound
                try:
                    self.resolveModelLoader()
                    self._loader.clearData()
                except (CosNaming.NamingContext.NotFound, CORBA.TRANSIENT):
                    time.sleep(1)
                else:
                    logging.info("resolved openhrp-model-loader")
                    break
        try:
            self._model = self._loader.loadBodyInfo(f)
        except CORBA.TRANSIENT:
            logging.error('unable to connect to model loader corba service (is "openhrp-model-loader" running?)')
            raise
        bm = model.BodyModel()
        bm.name = self._model._get_name()
        # Reset per-model state before conversion.
        self._joints = []
        self._links = []
        self._materials = []
        self._sensors = []
        self._hrplinks = self._model._get_links()
        self._hrpshapes = self._model._get_shapes()
        self._hrpapperances = self._model._get_appearances()
        self._hrpmaterials = self._model._get_materials()
        self._hrptextures = self._model._get_textures()
        self._hrpextrajoints = self._model._get_extraJoints()
        # Convert the material table first; shapes refer to it by index.
        mid = 0
        for a in self._hrpmaterials:
            m = model.MaterialModel()
            m.name = "material-%i" % mid
            mid = mid + 1
            m.ambient = a.ambientIntensity
            m.diffuse = a.diffuseColor + [1.0]
            m.specular = a.specularColor + [1.0]
            m.emission = a.emissiveColor + [1.0]
            m.shininess = a.shininess
            m.transparency = a.transparency
            self._materials.append(m)
        root = self._hrplinks[0]
        bm.trans = numpy.array(root.translation)
        if root.jointType == 'fixed':
            # Fixed-base model: anchor the tree to a synthetic 'world' joint.
            world = model.JointModel()
            world.name = 'world'
            self.readChild(world, root)
        else:
            # Floating-base model: root becomes the first link.
            lm = self.readLink(root)
            self._links.append(lm)
            jm = model.JointModel()
            jm.name = root.name
            for c in root.childIndices:
                self.readChild(jm, self._hrplinks[c])
        for j in self._hrpextrajoints:
            # extra joint for closed link models
            m = model.JointModel()
            m.jointType = model.JointModel.J_REVOLUTE
            m.parent = j.link[0]
            m.child = j.link[1]
            m.name = j.name
            m.axis = model.AxisData()
            m.axis.axis = numpy.array(j.axis)
            m.trans = numpy.array(j.point[1])
            m.offsetPosition = True
            self._joints.append(m)
        bm.links = self._links
        bm.joints = self._joints
        # Joints reference OpenHRP link names; remap them to the names
        # chosen in readLink (segment names when available).
        for j in bm.joints:
            j.parent = self._linknamemap[j.parent]
            j.child = self._linknamemap[j.child]
        bm.sensors = self._sensors
        return bm
    def readLink(self, m):
        """Convert one OpenHRP LinkInfo into a model.LinkModel.

        Also registers the link's sensors in self._sensors and records the
        OpenHRP-name -> chosen-name mapping used later to remap joints.
        """
        lm = model.LinkModel()
        # Prefer the first segment's name when present.
        if len(m.segments) > 0:
            lm.name = m.segments[0].name
        else:
            lm.name = m.name
        self._linknamemap[m.name] = lm.name
        lm.mass = m.mass
        lm.centerofmass = numpy.array(m.centerOfMass)
        lm.inertia = numpy.array(m.inertia).reshape(3, 3)
        # NOTE(review): visuals is reset here but collisions is not;
        # both are appended to below.
        lm.visuals = []
        for s in m.sensors:
            sm = model.SensorModel()
            sm.name = s.name
            sm.parent = lm.name
            sm.trans = numpy.array(s.translation)
            # sensors in OpenHRP is defined based on Z-axis up. so we
            # will rotate them to X-axis up here.
            # see http://www.openrtp.jp/openhrp3/jp/create_model.html
            sm.rot = tf.quaternion_about_axis(s.rotation[3], s.rotation[0:3])
            if s.type == 'Vision':
                sm.rot = tf.quaternion_multiply(sm.rot, tf.quaternion_about_axis(math.pi, [1, 0, 0]))
                sm.sensorType = model.SensorModel.SS_CAMERA
                sm.data = model.CameraData()
                sm.data.near = s.specValues[0]
                sm.data.far = s.specValues[1]
                sm.data.fov = s.specValues[2]
                # specValues[3] encodes the camera type.
                if s.specValues[3] == 1:
                    sm.data.cameraType = model.CameraData.CS_COLOR
                elif s.specValues[3] == 2:
                    sm.data.cameraType = model.CameraData.CS_MONO
                elif s.specValues[3] == 3:
                    sm.data.cameraType = model.CameraData.CS_DEPTH
                elif s.specValues[3] == 4:
                    sm.data.cameraType = model.CameraData.CS_RGBD
                else:
                    raise Exception('unsupported camera type: %i' % s.specValues[3])
                sm.data.width = s.specValues[4]
                sm.data.height = s.specValues[5]
                sm.rate = s.specValues[6]
            elif s.type == 'Range':
                # Re-orient the scan plane for the ray sensor convention.
                rot = tf.quaternion_multiply(sm.rot, tf.quaternion_about_axis(-math.pi/2, [0, 0, 1]))
                rot = tf.quaternion_multiply(rot, tf.quaternion_about_axis(math.pi/2, [0, 1, 0]))
                sm.rot = tf.quaternion_multiply(rot, tf.quaternion_about_axis(math.pi, [1, 0, 0]))
                sm.sensorType = model.SensorModel.SS_RAY
                sm.data = model.RayData()
                (scanangle, scanstep, scanrate, maxdistance) = s.specValues
                # Scan is centered around zero.
                sm.data.min_angle = - scanangle / 2
                sm.data.max_angle = scanangle / 2
                sm.data.min_range = 0.08
                sm.data.max_range = maxdistance
                sm.rate = scanrate
            self._sensors.append(sm)
        for s in m.shapeIndices:
            sm = model.ShapeModel()
            sm.name = lm.name + "-shape-%i" % s.shapeIndex
            # 3x4 transform plus homogeneous row -> 4x4 matrix.
            sm.matrix = numpy.matrix(s.transformMatrix+[0, 0, 0, 1]).reshape(4, 4)
            sdata = self._hrpshapes[s.shapeIndex]
            if sdata.primitiveType == OpenHRP.SP_MESH:
                sm.shapeType = model.ShapeModel.SP_MESH
                sm.data = self.readMesh(sdata)
            elif sdata.primitiveType == OpenHRP.SP_SPHERE and numpy.allclose(sm.matrix, numpy.identity(4)):
                # Primitives are only kept as primitives when untransformed.
                sm.shapeType = model.ShapeModel.SP_SPHERE
                sm.data = model.SphereData()
                sm.data.radius = sdata.primitiveParameters[0]
                sm.data.material = self._materials[sdata.appearanceIndex]
            elif sdata.primitiveType == OpenHRP.SP_CYLINDER and numpy.allclose(sm.matrix, numpy.identity(4)):
                sm.shapeType = model.ShapeModel.SP_CYLINDER
                sm.data = model.CylinderData()
                sm.data.radius = sdata.primitiveParameters[0]
                sm.data.height = sdata.primitiveParameters[1]
                sm.data.material = self._materials[sdata.appearanceIndex]
            elif sdata.primitiveType == OpenHRP.SP_BOX and numpy.allclose(sm.matrix, numpy.identity(4)):
                sm.shapeType = model.ShapeModel.SP_BOX
                sm.data = model.BoxData()
                sm.data.x = sdata.primitiveParameters[0]
                sm.data.y = sdata.primitiveParameters[1]
                sm.data.z = sdata.primitiveParameters[2]
                sm.data.material = self._materials[sdata.appearanceIndex]
            else:
                # raise Exception('unsupported shape primitive: %s' % sdata.primitiveType)
                # Fall back to mesh conversion for transformed primitives.
                sm.shapeType = model.ShapeModel.SP_MESH
                sm.data = self.readMesh(sdata)
            lm.visuals.append(sm)
            lm.collisions.append(sm)
        return lm
    def readMesh(self, sdata):
        """Convert an OpenHRP ShapeInfo into a model.MeshData.

        Builds vertex/triangle arrays, per-vertex or per-face normals, the
        material, and (for textured materials) the UV map.  Note: the
        ``len(...)/3`` reshapes rely on Python 2 integer division.
        """
        data = model.MeshData()
        data.vertex = numpy.array(sdata.vertices).reshape(len(sdata.vertices)/3, 3)
        data.vertex_index = numpy.array(sdata.triangles).reshape(len(sdata.triangles)/3, 3)
        adata = self._hrpapperances[sdata.appearanceIndex]
        if adata.normalPerVertex is True:
            data.normal = numpy.array(adata.normals).reshape(len(adata.normals)/3, 3)
            if len(adata.normalIndices) > 0:
                data.normal_index = numpy.array(adata.normalIndices).reshape(len(adata.normalIndices)/3, 3)
            else:
                # No explicit normal indices: normals follow vertex order.
                data.normal_index = data.vertex_index
        else:
            # Per-face normals: replicate each index (or face number)
            # three times so every triangle corner reuses its face normal.
            data.normal = numpy.array(adata.normals).reshape(len(adata.normals)/3, 3)
            if len(adata.normalIndices) > 0:
                idx = []
                for i in adata.normalIndices:
                    idx.append(i)
                    idx.append(i)
                    idx.append(i)
                data.normal_index = numpy.array(idx).reshape(len(idx)/3, 3)
            else:
                idx = []
                for i in range(0, len(adata.normals)/3):
                    idx.append(i)
                    idx.append(i)
                    idx.append(i)
                data.normal_index = numpy.array(idx).reshape(len(idx)/3, 3)
        # if len(data.vertex_index) != len(data.normal_index):
        #     raise Exception('vertex length and normal length not match')
        if adata.materialIndex >= 0:
            data.material = self._materials[adata.materialIndex]
        if data.material is not None and adata.textureIndex >= 0:
            fname = self._hrptextures[adata.textureIndex].url
            if self._assethandler:
                # Let the caller relocate/convert the texture asset.
                data.material.texture = self._assethandler(fname)
            else:
                data.material.texture = fname
            data.uvmap = numpy.array(adata.textureCoordinate).reshape(len(adata.textureCoordinate)/2, 2)
            data.uvmap_index = numpy.array(adata.textureCoordIndices).reshape(len(adata.textureCoordIndices)/3, 3)
        return data
    def readChild(self, parent, child):
        """Recursively convert an OpenHRP link subtree into joint/link models.

        Creates the joint connecting *parent* to *child* (converted to an
        absolute transform matrix), converts the child's link data, then
        recurses into the child's children.
        """
        # first, create joint pairs
        jm = model.JointModel()
        jm.parent = parent.name
        jm.child = child.name
        jm.name = child.name
        jm.jointId = child.jointId
        jm.axis = model.AxisData()
        # Limit arrays may be empty; IndexError then leaves the limit unset.
        try:
            jm.axis.limit = [child.ulimit[0], child.llimit[0]]
        except IndexError:
            pass
        try:
            jm.axis.velocitylimit = [child.uvlimit[0], child.lvlimit[0]]
        except IndexError:
            pass
        try:
            # Joint-side torque limit = current limit * gear ratio * torque constant.
            jm.axis.effortlimit = [child.climit[0]*child.gearRatio*child.torqueConst]
        except IndexError:
            pass
        jm.axis.axis = child.jointAxis
        if child.jointType == 'fixed':
            jm.jointType = model.JointModel.J_FIXED
        elif child.jointType == 'rotate':
            # Unlimited rotate joints are continuous, limited ones revolute.
            if jm.axis.limit is None or (jm.axis.limit[0] is None and jm.axis.limit[1] is None):
                jm.jointType = model.JointModel.J_CONTINUOUS
            else:
                jm.jointType = model.JointModel.J_REVOLUTE
        elif child.jointType == 'slide':
            jm.jointType = model.JointModel.J_PRISMATIC
        elif child.jointType == 'crawler':
            jm.jointType = model.JointModel.J_CRAWLER
        elif child.jointType == 'pseudoContinuousTrack':
            jm.jointType = model.JointModel.J_CRAWLER
        else:
            raise Exception('unsupported joint type: %s' % child.jointType)
        jm.trans = numpy.array(child.translation)
        jm.rot = tf.quaternion_about_axis(child.rotation[3], child.rotation[0:3])
        # convert to absolute position
        jm.matrix = numpy.dot(parent.getmatrix(), jm.getmatrix())
        jm.trans = None
        jm.rot = None
        self._joints.append(jm)
        # then, convert link shape information
        lm = self.readLink(child)
        lm.matrix = jm.getmatrix()
        lm.trans = None
        lm.rot = None
        self._links.append(lm)
        for c in child.childIndices:
            self.readChild(jm, self._hrplinks[c])
def resolveModelLoader(self):
nsobj = self._orb.resolve_initial_references("NameService")
self._ns = nsobj._narrow(CosNaming.NamingContext)
try:
obj = self._ns.resolve([CosNaming.NameComponent("ModelLoader", "")])
self._loader = obj._narrow(OpenHRP.ModelLoader)
except CosNaming.NamingContext.NotFound:
logging.error("unable to resolve OpenHRP model loader on CORBA name service")
raise
class VRMLWriter(object):
'''
VRML writer class
'''
def __init__(self):
self._linkmap = {}
self._roots = []
self._ignore = []
self._options = None
    def write(self, mdata, fname, options=None):
        '''
        Write simulation model in VRML format.

        Renders one main .wrl per root (plus one mesh .wrl per shape) into
        the directory of *fname*, and writes OpenHRP / Choreonoid project
        files next to it.  *options* may select collision geometry
        (usecollision/useboth).
        '''
        self._options = options
        fpath, fext = os.path.splitext(fname)
        basename = os.path.basename(fpath)
        dirname = os.path.dirname(fname)
        if mdata.name is None or mdata.name == '':
            mdata.name = basename
        # convert revolute2 joint to two revolute joints (with a link
        # in between)
        for j in mdata.joints:
            if j.jointType == model.JointModel.J_REVOLUTE2:
                logging.info("converting revolute2 joint to two revolute joints")
                nl = model.LinkModel()
                nl.name = j.name + "_REVOLUTE2_LINK"
                nl.matrix = j.getmatrix()
                nl.trans = None
                nl.rot = None
                nl.mass = 0.001  # assign very small mass
                mdata.links.append(nl)
                nj = copy.deepcopy(j)
                nj.name = j.name + "_SECOND"
                nj.jointType = model.JointModel.J_REVOLUTE
                nj.parent = nl.name
                nj.child = j.child
                nj.axis = j.axis2
                mdata.joints.append(nj)
                j.jointType = model.JointModel.J_REVOLUTE
                j.child = nl.name
        # check for same names in visuals or collisions
        usednames = {}
        for l in mdata.links:
            for v in l.visuals:
                if v.name in usednames:
                    v.name = l.name + "-visual"
                    if v.name in usednames:
                        # still colliding: make it unique with a uuid suffix
                        v.name = l.name + "-visual-" + str(uuid.uuid1()).replace('-', '')
                usednames[v.name] = True
            for c in l.collisions:
                if c.name in usednames:
                    c.name = l.name + "-collision"
                    if c.name in usednames:
                        c.name = l.name + "-collision-" + str(uuid.uuid1()).replace('-', '')
                usednames[c.name] = True
        # find root joint (including local peaks)
        self._roots = utils.findroot(mdata)
        # render the data structure using template
        loader = jinja2.PackageLoader(self.__module__, 'template')
        env = jinja2.Environment(loader=loader, extensions=['jinja2.ext.do'])
        self._linkmap['world'] = model.LinkModel()
        for m in mdata.links:
            self._linkmap[m.name] = m
        # render shape vrml file for each links
        shapefilemap = {}
        for l in mdata.links:
            # Select visual and/or collision geometry per options.
            shapes = copy.copy(l.visuals)
            if options is not None and options.usecollision:
                shapes = copy.copy(l.collisions)
            if options is not None and options.useboth:
                shapes.extend(copy.copy(l.collisions))
            for v in shapes:
                logging.info('writing shape of link: %s, type: %s' % (l.name, v.shapeType))
                if v.shapeType == model.ShapeModel.SP_MESH:
                    template = env.get_template('vrml-mesh.wrl')
                    if isinstance(v.data, model.MeshTransformData):
                        v.data.pretranslate()
                    m = {}
                    m['children'] = [v.data]
                    # '::' is not usable in a file name; flatten it.
                    shapefname = (mdata.name + "-" + l.name + "-" + v.name + ".wrl").replace('::', '_')
                    with open(os.path.join(dirname, shapefname), 'w') as ofile:
                        ofile.write(template.render({
                            'name': v.name,
                            'ShapeModel': model.ShapeModel,
                            'mesh': m
                        }))
                    shapefilemap[v.name] = shapefname
        # render main vrml file for each bodies
        template = env.get_template('vrml.wrl')
        roots = []
        modelfiles = {}
        for root in self._roots:
            if root == 'world':
                # Children of 'world' become fixed-base models.
                for r in utils.findchildren(mdata, root):
                    roots.append((r.child, "fixed"))
            else:
                roots.append((root, "free"))
        for r in roots:
            logging.info('writing model for %s' % r[0])
            if len(roots) == 1:
                mfname = fname
            else:
                mfname = (mdata.name + "-" + r[0] + ".wrl").replace('::', '_')
            self.renderchildren(mdata, r[0], r[1], os.path.join(dirname, mfname), shapefilemap, template)
            modelfiles[mfname] = self._linkmap[r[0]]
        # render openhrp project
        template = env.get_template('openhrp-project.xml')
        with open(fname.replace('.wrl', '-project.xml'), 'w') as ofile:
            ofile.write(template.render({
                'models': modelfiles,
            }))
        # render choreonoid project
        template = env.get_template('choreonoid-project.yaml')
        with open(fname.replace('.wrl', '-project.cnoid'), 'w') as ofile:
            ofile.write(template.render({
                'models': modelfiles,
            }))
def convertchildren(self, mdata, pjoint, joints, links):
children = []
plink = self._linkmap[pjoint.child]
for cjoint in utils.findchildren(mdata, pjoint.child):
nmodel = {}
try:
clink = self._linkmap[cjoint.child]
except KeyError:
logging.warning("unable to find child link %s" % cjoint.child)
(cchildren, joints, links) = self.convertchildren(mdata, cjoint, joints, links)
pjointinv = numpy.linalg.pinv(pjoint.getmatrix())
cjointinv = numpy.linalg.pinv(cjoint.getmatrix())
cjoint2 = copy.deepcopy(cjoint)
cjoint2.matrix = numpy.dot(pjointinv, cjoint.getmatrix())
cjoint2.trans = None
cjoint2.rot = None
clink2 = copy.deepcopy(clink)
clink2.matrix = numpy.dot(cjointinv, clink.getmatrix())
clink2.trans = None
clink2.rot = None
if clink2.mass == 0:
logging.warning("detect link with mass zero, assigning small (0.001) mass.")
clink2.mass = 0.001
if not numpy.allclose(clink2.getmatrix(), numpy.identity(4)):
clink2.translate(clink2.getmatrix())
nmodel['joint'] = cjoint2
nmodel['jointtype'] = self.convertjointtype(cjoint.jointType)
nmodel['link'] = clink2
nmodel['children'] = cchildren
children.append(nmodel)
joints.append(cjoint.name)
links.append(cjoint.child)
return (children, joints, links)
def renderchildren(self, mdata, root, jointtype, fname, shapefilemap, template):
nmodel = {}
rootlink = self._linkmap[root]
rootjoint = model.JointModel()
rootjoint.name = root
rootjoint.jointType = jointtype
rootjoint.matrix = rootlink.getmatrix()
rootjoint.trans = None
rootjoint.rot = None
rootjoint.child = root
(children, joints, links) = self.convertchildren(mdata, rootjoint, [], [])
nmodel['link'] = rootlink
nmodel['joint'] = rootjoint
nmodel['jointtype'] = rootjoint.jointType
nmodel['children'] = children
# assign jointId
if jointtype in ['free', 'fixed']:
jointmap = {}
jointcount = 0
else:
jointmap = {root: 0}
jointcount = 1
for j in joints:
jointmap[j] = 0
for j in joints:
jointmap[j] = jointcount
jointcount = jointcount + 1
with open(fname, 'w') as ofile:
ofile.write(template.render({
'model': {'name':rootlink.name, 'children':[nmodel]},
'body': mdata,
'links': links,
'joints': joints,
'jointmap': jointmap,
'ShapeModel': model.ShapeModel,
'shapefilemap': shapefilemap,
'options': self._options
}))
def convertjointtype(self, t):
if t == model.JointModel.J_FIXED:
return "fixed"
elif t == model.JointModel.J_REVOLUTE:
return "rotate"
elif t == model.JointModel.J_PRISMATIC:
return "slide"
elif t == model.JointModel.J_SCREW:
return "rotate"
elif t == model.JointModel.J_CONTINUOUS:
return "rotate"
else:
raise Exception('unsupported joint type: %s' % t)
class VRMLMeshWriter(object):
'''
VRML mesh writer class
'''
def __init__(self):
self._linkmap = {}
self._roots = []
self._ignore = []
def write(self, m, fname, options=None):
'''
Write mesh in VRML format
'''
fpath, fext = os.path.splitext(fname)
basename = os.path.basename(fpath)
dirname = os.path.dirname(fname)
# render the data structure using template
loader = jinja2.PackageLoader(self.__module__, 'template')
env = jinja2.Environment(loader=loader, extensions=['jinja2.ext.do'])
template = env.get_template('vrml-mesh.wrl')
if m.shapeType == model.ShapeModel.SP_MESH:
if isinstance(m.data, model.MeshTransformData):
m.data.pretranslate()
nm = {}
nm['children'] = [m.data]
with open(fname, 'w') as ofile:
ofile.write(template.render({
'name': basename,
'ShapeModel': model.ShapeModel,
'mesh': nm
}))
| # -*- coding:utf-8 -*-
"""Reader and writer for VRML format
:Organization:
AIST
Requirements
------------
* numpy
* omniorb-python
* jinja2 template engine
Examples
--------
Read vrml model data given the file path
>>> r = VRMLReader()
>>> m = r.read(os.path.expandvars('$OPENHRP_MODEL_PATH/closed-link-sample.wrl'))
Write simulation model in VRML format
>>> import subprocess
>>> subprocess.call('rosrun xacro xacro.py `rospack find atlas_description`/robots/atlas_v3.urdf.xacro > /tmp/atlas.urdf', shell=True)
0
>>> from . import urdf
>>> r = urdf.URDFReader()
>>> m = r.read('/tmp/atlas.urdf')
>>> w = VRMLWriter()
>>> w.write(m, '/tmp/atlas.wrl')
>>> from . import sdf
>>> r = sdf.SDFReader()
>>> m = r.read('model://pr2/model.sdf')
>>> w = VRMLWriter()
>>> w.write(m, '/tmp/pr2.wrl')
"""
from . import model
from . import utils
import os
import sys
import time
import subprocess
import atexit
import logging
import warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore')
from .thirdparty import transformations as tf
import math
import numpy
import copy
import jinja2
import uuid
try:
import CORBA
import CosNaming
import OpenHRP
except ImportError:
print "Unable to find CORBA and OpenHRP library."
print "You can install the library by:"
print "$ sudo add-apt-repository ppa:hrg/daily"
print "$ sudo apt-get update"
print "$ sudo apt-get install openhrp openrtm-aist-python"
pass
plist = []
def terminator():
global plist
for p in plist:
p.terminate()
atexit.register(terminator)
class VRMLReader(object):
'''
VRML reader class
'''
def __init__(self):
self._orb = CORBA.ORB_init([sys.argv[0],
"-ORBInitRef",
"NameService=corbaloc::localhost:2809/NameService"],
CORBA.ORB_ID)
self._loader = None
self._ns = None
self._model = None
self._joints = []
self._links = []
self._linknamemap = {}
self._linknamemap['world'] = 'world'
self._materials = []
self._sensors = []
self._assethandler = None
def read(self, f, assethandler=None, options=None):
'''
Read vrml model data given the file path
'''
self._assethandler = assethandler
try:
self.resolveModelLoader()
self._loader.clearData()
except (CosNaming.NamingContext.NotFound, CORBA.TRANSIENT):
logging.info("try running openhrp-model-loader")
plist.append(subprocess.Popen(["openhrp-model-loader"]))
for t in range(0, 6):
if t == 5:
logging.error("unable to find openhrp-model-loader")
raise CosNaming.NamingContext.NotFound
try:
self.resolveModelLoader()
self._loader.clearData()
except (CosNaming.NamingContext.NotFound, CORBA.TRANSIENT):
time.sleep(1)
else:
logging.info("resolved openhrp-model-loader")
break
try:
self._model = self._loader.loadBodyInfo(f)
except CORBA.TRANSIENT:
logging.error('unable to connect to model loader corba service (is "openhrp-model-loader" running?)')
raise
bm = model.BodyModel()
bm.name = self._model._get_name()
self._joints = []
self._links = []
self._materials = []
self._sensors = []
self._hrplinks = self._model._get_links()
self._hrpshapes = self._model._get_shapes()
self._hrpapperances = self._model._get_appearances()
self._hrpmaterials = self._model._get_materials()
self._hrptextures = self._model._get_textures()
self._hrpextrajoints = self._model._get_extraJoints()
mid = 0
for a in self._hrpmaterials:
m = model.MaterialModel()
m.name = "material-%i" % mid
mid = mid + 1
m.ambient = a.ambientIntensity
m.diffuse = a.diffuseColor + [1.0]
m.specular = a.specularColor + [1.0]
m.emission = a.emissiveColor + [1.0]
m.shininess = a.shininess
m.transparency = a.transparency
self._materials.append(m)
root = self._hrplinks[0]
bm.trans = numpy.array(root.translation)
if root.jointType == 'fixed':
world = model.JointModel()
world.name = 'world'
self.readChild(world, root)
else:
lm = self.readLink(root)
self._links.append(lm)
jm = model.JointModel()
jm.name = root.name
for c in root.childIndices:
self.readChild(jm, self._hrplinks[c])
for j in self._hrpextrajoints:
# extra joint for closed link models
m = model.JointModel()
m.jointType = model.JointModel.J_REVOLUTE
m.parent = j.link[0]
m.child = j.link[1]
m.name = j.name
m.axis = model.AxisData()
m.axis.axis = numpy.array(j.axis)
m.trans = numpy.array(j.point[1])
m.offsetPosition = True
self._joints.append(m)
bm.links = self._links
bm.joints = self._joints
for j in bm.joints:
j.parent = self._linknamemap[j.parent]
j.child = self._linknamemap[j.child]
bm.sensors = self._sensors
return bm
def readLink(self, m):
lm = model.LinkModel()
if len(m.segments) > 0:
lm.name = m.segments[0].name
else:
lm.name = m.name
self._linknamemap[m.name] = lm.name
lm.mass = m.mass
lm.centerofmass = numpy.array(m.centerOfMass)
lm.inertia = numpy.array(m.inertia).reshape(3, 3)
lm.visuals = []
for s in m.sensors:
sm = model.SensorModel()
sm.name = s.name
sm.parent = lm.name
sm.trans = numpy.array(s.translation)
# sensors in OpenHRP is defined based on Z-axis up. so we
# will rotate them to X-axis up here.
# see http://www.openrtp.jp/openhrp3/jp/create_model.html
sm.rot = tf.quaternion_about_axis(s.rotation[3], s.rotation[0:3])
if s.type == 'Vision':
sm.rot = tf.quaternion_multiply(sm.rot, tf.quaternion_about_axis(math.pi, [1, 0, 0]))
sm.sensorType = model.SensorModel.SS_CAMERA
sm.data = model.CameraData()
sm.data.near = s.specValues[0]
sm.data.far = s.specValues[1]
sm.data.fov = s.specValues[2]
if s.specValues[3] == 1:
sm.data.cameraType = model.CameraData.CS_COLOR
elif s.specValues[3] == 2:
sm.data.cameraType = model.CameraData.CS_MONO
elif s.specValues[3] == 3:
sm.data.cameraType = model.CameraData.CS_DEPTH
elif s.specValues[3] == 4:
sm.data.cameraType = model.CameraData.CS_RGBD
else:
raise Exception('unsupported camera type: %i' % s.specValues[3])
sm.data.width = s.specValues[4]
sm.data.height = s.specValues[5]
sm.rate = s.specValues[6]
elif s.type == 'Range':
rot = tf.quaternion_multiply(sm.rot, tf.quaternion_about_axis(-math.pi/2, [0, 0, 1]))
rot = tf.quaternion_multiply(rot, tf.quaternion_about_axis(math.pi/2, [0, 1, 0]))
sm.rot = tf.quaternion_multiply(rot, tf.quaternion_about_axis(math.pi, [1, 0, 0]))
sm.sensorType = model.SensorModel.SS_RAY
sm.data = model.RayData()
(scanangle, scanstep, scanrate, maxdistance) = s.specValues
sm.data.min_angle = - scanangle / 2
sm.data.max_angle = scanangle / 2
sm.data.min_range = 0.08
sm.data.max_range = maxdistance
sm.rate = scanrate
self._sensors.append(sm)
for s in m.shapeIndices:
sm = model.ShapeModel()
sm.name = lm.name + "-shape-%i" % s.shapeIndex
sm.matrix = numpy.matrix(s.transformMatrix+[0, 0, 0, 1]).reshape(4, 4)
sdata = self._hrpshapes[s.shapeIndex]
if sdata.primitiveType == OpenHRP.SP_MESH:
sm.shapeType = model.ShapeModel.SP_MESH
sm.data = self.readMesh(sdata)
elif sdata.primitiveType == OpenHRP.SP_SPHERE and numpy.allclose(sm.matrix, numpy.identity(4)):
sm.shapeType = model.ShapeModel.SP_SPHERE
sm.data = model.SphereData()
sm.data.radius = sdata.primitiveParameters[0]
sm.data.material = self._materials[sdata.appearanceIndex]
elif sdata.primitiveType == OpenHRP.SP_CYLINDER and numpy.allclose(sm.matrix, numpy.identity(4)):
sm.shapeType = model.ShapeModel.SP_CYLINDER
sm.data = model.CylinderData()
sm.data.radius = sdata.primitiveParameters[0]
sm.data.height = sdata.primitiveParameters[1]
sm.data.material = self._materials[sdata.appearanceIndex]
elif sdata.primitiveType == OpenHRP.SP_BOX and numpy.allclose(sm.matrix, numpy.identity(4)):
sm.shapeType = model.ShapeModel.SP_BOX
sm.data = model.BoxData()
sm.data.x = sdata.primitiveParameters[0]
sm.data.y = sdata.primitiveParameters[1]
sm.data.z = sdata.primitiveParameters[2]
sm.data.material = self._materials[sdata.appearanceIndex]
else:
# raise Exception('unsupported shape primitive: %s' % sdata.primitiveType)
sm.shapeType = model.ShapeModel.SP_MESH
sm.data = self.readMesh(sdata)
lm.visuals.append(sm)
lm.collisions.append(sm)
return lm
def readMesh(self, sdata):
data = model.MeshData()
data.vertex = numpy.array(sdata.vertices).reshape(len(sdata.vertices)/3, 3)
data.vertex_index = numpy.array(sdata.triangles).reshape(len(sdata.triangles)/3, 3)
adata = self._hrpapperances[sdata.appearanceIndex]
if adata.normalPerVertex is True:
data.normal = numpy.array(adata.normals).reshape(len(adata.normals)/3, 3)
if len(adata.normalIndices) > 0:
data.normal_index = numpy.array(adata.normalIndices).reshape(len(adata.normalIndices)/3, 3)
else:
data.normal_index = data.vertex_index
else:
data.normal = numpy.array(adata.normals).reshape(len(adata.normals)/3, 3)
if len(adata.normalIndices) > 0:
idx = []
for i in adata.normalIndices:
idx.append(i)
idx.append(i)
idx.append(i)
data.normal_index = numpy.array(idx).reshape(len(idx)/3, 3)
else:
idx = []
for i in range(0, len(adata.normals)/3):
idx.append(i)
idx.append(i)
idx.append(i)
data.normal_index = numpy.array(idx).reshape(len(idx)/3, 3)
# if len(data.vertex_index) != len(data.normal_index):
# raise Exception('vertex length and normal length not match')
if adata.materialIndex >= 0:
data.material = self._materials[adata.materialIndex]
if data.material is not None and adata.textureIndex >= 0:
fname = self._hrptextures[adata.textureIndex].url
if self._assethandler:
data.material.texture = self._assethandler(fname)
else:
data.material.texture = fname
data.uvmap = numpy.array(adata.textureCoordinate).reshape(len(adata.textureCoordinate)/2, 2)
data.uvmap_index = numpy.array(adata.textureCoordIndices).reshape(len(adata.textureCoordIndices)/3, 3)
return data
def readChild(self, parent, child):
# first, create joint pairs
jm = model.JointModel()
jm.parent = parent.name
jm.child = child.name
jm.name = child.name
jm.jointId = child.jointId
jm.axis = model.AxisData()
try:
jm.axis.limit = [child.ulimit[0], child.llimit[0]]
except IndexError:
pass
try:
jm.axis.velocitylimit = [child.uvlimit[0], child.lvlimit[0]]
except IndexError:
pass
try:
jm.axis.effortlimit = [child.climit[0]*child.gearRatio*child.torqueConst]
except IndexError:
pass
jm.axis.axis = child.jointAxis
if child.jointType == 'fixed':
jm.jointType = model.JointModel.J_FIXED
elif child.jointType == 'rotate':
if jm.axis.limit is None or (jm.axis.limit[0] is None and jm.axis.limit[1] is None):
jm.jointType = model.JointModel.J_CONTINUOUS
else:
jm.jointType = model.JointModel.J_REVOLUTE
elif child.jointType == 'slide':
jm.jointType = model.JointModel.J_PRISMATIC
elif child.jointType == 'crawler':
jm.jointType = model.JointModel.J_CRAWLER
elif child.jointType == 'pseudoContinuousTrack':
jm.jointType = model.JointModel.J_CRAWLER
else:
raise Exception('unsupported joint type: %s' % child.jointType)
jm.trans = numpy.array(child.translation)
jm.rot = tf.quaternion_about_axis(child.rotation[3], child.rotation[0:3])
# convert to absolute position
jm.matrix = numpy.dot(parent.getmatrix(), jm.getmatrix())
jm.trans = None
jm.rot = None
self._joints.append(jm)
# then, convert link shape information
lm = self.readLink(child)
lm.matrix = jm.getmatrix()
lm.trans = None
lm.rot = None
self._links.append(lm)
for c in child.childIndices:
self.readChild(jm, self._hrplinks[c])
def resolveModelLoader(self):
nsobj = self._orb.resolve_initial_references("NameService")
self._ns = nsobj._narrow(CosNaming.NamingContext)
try:
obj = self._ns.resolve([CosNaming.NameComponent("ModelLoader", "")])
self._loader = obj._narrow(OpenHRP.ModelLoader)
except CosNaming.NamingContext.NotFound:
logging.error("unable to resolve OpenHRP model loader on CORBA name service")
raise
class VRMLWriter(object):
'''
VRML writer class
'''
def __init__(self):
self._linkmap = {}
self._roots = []
self._ignore = []
self._options = None
def write(self, mdata, fname, options=None):
'''
Write simulation model in VRML format
'''
self._options = options
fpath, fext = os.path.splitext(fname)
basename = os.path.basename(fpath)
dirname = os.path.dirname(fname)
if mdata.name is None or mdata.name == '':
mdata.name = basename
# convert revolute2 joint to two revolute joints (with a link
# in between)
for j in mdata.joints:
if j.jointType == model.JointModel.J_REVOLUTE2:
logging.info("converting revolute2 joint to two revolute joints")
nl = model.LinkModel()
nl.name = j.name + "_REVOLUTE2_LINK"
nl.matrix = j.getmatrix()
nl.trans = None
nl.rot = None
nl.mass = 0.001 # assign very small mass
mdata.links.append(nl)
nj = copy.deepcopy(j)
nj.name = j.name + "_SECOND"
nj.jointType = model.JointModel.J_REVOLUTE
nj.parent = nl.name
nj.child = j.child
nj.axis = j.axis2
mdata.joints.append(nj)
j.jointType = model.JointModel.J_REVOLUTE
j.child = nl.name
# check for same names in visuals or collisions
usednames = {}
for l in mdata.links:
for v in l.visuals:
if v.name in usednames:
v.name = l.name + "-visual"
if v.name in usednames:
v.name = l.name + "-visual-" + str(uuid.uuid1()).replace('-', '')
usednames[v.name] = True
for c in l.collisions:
if c.name in usednames:
c.name = l.name + "-collision"
if c.name in usednames:
c.name = l.name + "-collision-" + str(uuid.uuid1()).replace('-', '')
usednames[c.name] = True
# find root joint (including local peaks)
self._roots = utils.findroot(mdata)
# render the data structure using template
loader = jinja2.PackageLoader(self.__module__, 'template')
env = jinja2.Environment(loader=loader, extensions=['jinja2.ext.do'])
self._linkmap['world'] = model.LinkModel()
for m in mdata.links:
self._linkmap[m.name] = m
# render shape vrml file for each links
shapefilemap = {}
for l in mdata.links:
shapes = copy.copy(l.visuals)
if options is not None and options.usecollision:
shapes = copy.copy(l.collisions)
if options is not None and options.useboth:
shapes.extend(copy.copy(l.collisions))
for v in shapes:
logging.info('writing shape of link: %s, type: %s' % (l.name, v.shapeType))
if v.shapeType == model.ShapeModel.SP_MESH:
template = env.get_template('vrml-mesh.wrl')
if isinstance(v.data, model.MeshTransformData):
v.data.pretranslate()
m = {}
m['children'] = [v.data]
shapefname = (mdata.name + "-" + l.name + "-" + v.name + ".wrl").replace('::', '_')
with open(os.path.join(dirname, shapefname), 'w') as ofile:
ofile.write(template.render({
'name': v.name,
'ShapeModel': model.ShapeModel,
'mesh': m
}))
shapefilemap[v.name] = shapefname
# render main vrml file for each bodies
template = env.get_template('vrml.wrl')
roots = []
modelfiles = {}
for root in self._roots:
if root == 'world':
for r in utils.findchildren(mdata, root):
roots.append((r.child, "fixed"))
else:
roots.append((root, "free"))
for r in roots:
logging.info('writing model for %s' % r[0])
if len(roots) == 1:
mfname = fname
else:
mfname = (mdata.name + "-" + r[0] + ".wrl").replace('::', '_')
self.renderchildren(mdata, r[0], r[1], os.path.join(dirname, mfname), shapefilemap, template)
modelfiles[mfname] = self._linkmap[r[0]]
# render openhrp project
template = env.get_template('openhrp-project.xml')
with open(fname.replace('.wrl', '-project.xml'), 'w') as ofile:
ofile.write(template.render({
'models': modelfiles,
}))
# render choreonoid project
template = env.get_template('choreonoid-project.yaml')
with open(fname.replace('.wrl', '-project.cnoid'), 'w') as ofile:
ofile.write(template.render({
'models': modelfiles,
}))
def convertchildren(self, mdata, pjoint, joints, links):
children = []
plink = self._linkmap[pjoint.child]
for cjoint in utils.findchildren(mdata, pjoint.child):
nmodel = {}
try:
clink = self._linkmap[cjoint.child]
except KeyError:
logging.warning("unable to find child link %s" % cjoint.child)
(cchildren, joints, links) = self.convertchildren(mdata, cjoint, joints, links)
pjointinv = numpy.linalg.pinv(pjoint.getmatrix())
cjointinv = numpy.linalg.pinv(cjoint.getmatrix())
cjoint2 = copy.deepcopy(cjoint)
cjoint2.matrix = numpy.dot(pjointinv, cjoint.getmatrix())
cjoint2.trans = None
cjoint2.rot = None
clink2 = copy.deepcopy(clink)
clink2.matrix = numpy.dot(cjointinv, clink.getmatrix())
clink2.trans = None
clink2.rot = None
if clink2.mass == 0:
logging.warning("detect link with mass zero, assigning small (0.001) mass.")
clink2.mass = 0.001
if not numpy.allclose(clink2.getmatrix(), numpy.identity(4)):
clink2.translate(clink2.getmatrix())
nmodel['joint'] = cjoint2
nmodel['jointtype'] = self.convertjointtype(cjoint.jointType)
nmodel['link'] = clink2
nmodel['children'] = cchildren
children.append(nmodel)
joints.append(cjoint.name)
links.append(cjoint.child)
return (children, joints, links)
def renderchildren(self, mdata, root, jointtype, fname, shapefilemap, template):
nmodel = {}
rootlink = self._linkmap[root]
rootjoint = model.JointModel()
rootjoint.name = root
rootjoint.jointType = jointtype
rootjoint.matrix = rootlink.getmatrix()
rootjoint.trans = None
rootjoint.rot = None
rootjoint.child = root
(children, joints, links) = self.convertchildren(mdata, rootjoint, [], [])
nmodel['link'] = rootlink
nmodel['joint'] = rootjoint
nmodel['jointtype'] = rootjoint.jointType
nmodel['children'] = children
# assign jointId
if jointtype in ['free', 'fixed']:
jointmap = {}
jointcount = 0
else:
jointmap = {root: 0}
jointcount = 1
for j in joints:
jointmap[j] = 0
for j in joints:
jointmap[j] = jointcount
jointcount = jointcount + 1
with open(fname, 'w') as ofile:
ofile.write(template.render({
'model': {'name':rootlink.name, 'children':[nmodel]},
'body': mdata,
'links': links,
'joints': joints,
'jointmap': jointmap,
'ShapeModel': model.ShapeModel,
'shapefilemap': shapefilemap,
'options': self._options
}))
def convertjointtype(self, t):
if t == model.JointModel.J_FIXED:
return "fixed"
elif t == model.JointModel.J_REVOLUTE:
return "rotate"
elif t == model.JointModel.J_PRISMATIC:
return "slide"
elif t == model.JointModel.J_SCREW:
return "rotate"
elif t == model.JointModel.J_CONTINUOUS:
return "rotate"
else:
raise Exception('unsupported joint type: %s' % t)
class VRMLMeshWriter(object):
'''
VRML mesh writer class
'''
def __init__(self):
self._linkmap = {}
self._roots = []
self._ignore = []
def write(self, m, fname, options=None):
'''
Write mesh in VRML format
'''
fpath, fext = os.path.splitext(fname)
basename = os.path.basename(fpath)
dirname = os.path.dirname(fname)
# render the data structure using template
loader = jinja2.PackageLoader(self.__module__, 'template')
env = jinja2.Environment(loader=loader, extensions=['jinja2.ext.do'])
template = env.get_template('vrml-mesh.wrl')
if m.shapeType == model.ShapeModel.SP_MESH:
if isinstance(m.data, model.MeshTransformData):
m.data.pretranslate()
nm = {}
nm['children'] = [m.data]
with open(fname, 'w') as ofile:
ofile.write(template.render({
'name': basename,
'ShapeModel': model.ShapeModel,
'mesh': nm
}))
| en | 0.655723 | # -*- coding:utf-8 -*- Reader and writer for VRML format :Organization: AIST Requirements ------------ * numpy * omniorb-python * jinja2 template engine Examples -------- Read vrml model data given the file path >>> r = VRMLReader() >>> m = r.read(os.path.expandvars('$OPENHRP_MODEL_PATH/closed-link-sample.wrl')) Write simulation model in VRML format >>> import subprocess >>> subprocess.call('rosrun xacro xacro.py `rospack find atlas_description`/robots/atlas_v3.urdf.xacro > /tmp/atlas.urdf', shell=True) 0 >>> from . import urdf >>> r = urdf.URDFReader() >>> m = r.read('/tmp/atlas.urdf') >>> w = VRMLWriter() >>> w.write(m, '/tmp/atlas.wrl') >>> from . import sdf >>> r = sdf.SDFReader() >>> m = r.read('model://pr2/model.sdf') >>> w = VRMLWriter() >>> w.write(m, '/tmp/pr2.wrl') VRML reader class Read vrml model data given the file path # extra joint for closed link models # sensors in OpenHRP is defined based on Z-axis up. so we # will rotate them to X-axis up here. # see http://www.openrtp.jp/openhrp3/jp/create_model.html # raise Exception('unsupported shape primitive: %s' % sdata.primitiveType) # if len(data.vertex_index) != len(data.normal_index): # raise Exception('vertex length and normal length not match') # first, create joint pairs # convert to absolute position # then, convert link shape information VRML writer class Write simulation model in VRML format # convert revolute2 joint to two revolute joints (with a link # in between) # assign very small mass # check for same names in visuals or collisions # find root joint (including local peaks) # render the data structure using template # render shape vrml file for each links # render main vrml file for each bodies # render openhrp project # render choreonoid project # assign jointId VRML mesh writer class Write mesh in VRML format # render the data structure using template | 2.317942 | 2 |
bot_logging_server/api/create_user_post.py | WesBAn/LoggerBotLoggingServer | 0 | 6612785 | <gh_stars>0
import asyncio
import logging
import typing
import quart
from bot_logging_server.models.http import requests, headers
from bot_logging_server.models.http import utils as http_utils
from bot_logging_server.storage import create_user
from bot_logging_server.models.mysql import db_connection
logger = logging.getLogger("quart.serving") # pylint: disable=C0103
async def handle(
quart_request: quart.request, mysql_user: str, mysql_password: str, api_key: str
) -> typing.Tuple[typing.Dict, int, typing.Dict]:
"""
Base function to handle /create_user POST request
Checks if user is authorized and write logs to mysql db
Throws:
quart.exceptions.Forbidden
quart.exceptions.BadRequest
:param mysql_user:
:param mysql_password:
:param quart_request:
:param api_key:
:return: 200 response
"""
try:
request = await requests.CreateUserPostRequest.build(quart_request)
if request.headers.x_api_key != api_key:
raise quart.exceptions.Forbidden
mysql_worker = db_connection.MysqlWorker(
asyncio.get_event_loop(), user=mysql_user, password=<PASSWORD>
)
await create_user.create_user(request=request, mysql_worker=mysql_worker)
return (
{"code": 200, "message": "User added"},
200,
{"Content-Type": headers.JSON_CONTENT_TYPE},
)
except http_utils.RequestParsingFailedError as parsed_err:
logger.error("Request is incorrect")
raise quart.exceptions.BadRequest from parsed_err
| import asyncio
import logging
import typing
import quart
from bot_logging_server.models.http import requests, headers
from bot_logging_server.models.http import utils as http_utils
from bot_logging_server.storage import create_user
from bot_logging_server.models.mysql import db_connection
logger = logging.getLogger("quart.serving") # pylint: disable=C0103
async def handle(
quart_request: quart.request, mysql_user: str, mysql_password: str, api_key: str
) -> typing.Tuple[typing.Dict, int, typing.Dict]:
"""
Base function to handle /create_user POST request
Checks if user is authorized and write logs to mysql db
Throws:
quart.exceptions.Forbidden
quart.exceptions.BadRequest
:param mysql_user:
:param mysql_password:
:param quart_request:
:param api_key:
:return: 200 response
"""
try:
request = await requests.CreateUserPostRequest.build(quart_request)
if request.headers.x_api_key != api_key:
raise quart.exceptions.Forbidden
mysql_worker = db_connection.MysqlWorker(
asyncio.get_event_loop(), user=mysql_user, password=<PASSWORD>
)
await create_user.create_user(request=request, mysql_worker=mysql_worker)
return (
{"code": 200, "message": "User added"},
200,
{"Content-Type": headers.JSON_CONTENT_TYPE},
)
except http_utils.RequestParsingFailedError as parsed_err:
logger.error("Request is incorrect")
raise quart.exceptions.BadRequest from parsed_err | en | 0.609019 | # pylint: disable=C0103 Base function to handle /create_user POST request Checks if user is authorized and write logs to mysql db Throws: quart.exceptions.Forbidden quart.exceptions.BadRequest :param mysql_user: :param mysql_password: :param quart_request: :param api_key: :return: 200 response | 2.383046 | 2 |
setup.py | Contextualist/shx | 7 | 6612786 | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="shx",
version="0.3.0",
author="Contextualist",
description="For writing async script with Python",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Contextualist/shx",
packages=setuptools.find_packages(),
python_requires=">=3.6",
install_requires=[
"aiocontextvars>=0.2.2; python_version < '3.7'",
],
tests_require = [
"pytest",
"pytest-asyncio",
],
entry_points = {
"console_scripts": ["shx=shx.shx:main"],
},
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: AsyncIO",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Topic :: System :: Shells",
"Topic :: Utilities",
],
)
| import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="shx",
version="0.3.0",
author="Contextualist",
description="For writing async script with Python",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Contextualist/shx",
packages=setuptools.find_packages(),
python_requires=">=3.6",
install_requires=[
"aiocontextvars>=0.2.2; python_version < '3.7'",
],
tests_require = [
"pytest",
"pytest-asyncio",
],
entry_points = {
"console_scripts": ["shx=shx.shx:main"],
},
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: AsyncIO",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Topic :: System :: Shells",
"Topic :: Utilities",
],
)
| none | 1 | 1.414821 | 1 | |
tests/test_c.py | TexZK/cbytesparse | 1 | 6612787 | <reponame>TexZK/cbytesparse
# Copyright (c) 2020-2022, <NAME>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import importlib
import inspect
import sys
from typing import Type
import pytest
from _common import *
# noinspection PyUnresolvedReferences
from cbytesparse.c import Memory as _Memory
from cbytesparse.c import bytesparse as _bytesparse
# Patch inspect.isfunction() to allow Cython functions to be discovered
@pytest.mark.skip
def _patch_inspect_isfunction():
isfunction_ = inspect.isfunction
def isfunction(obj):
return (isfunction_(obj)
or type(obj).__name__ == 'cython_function_or_method')
isfunction.isfunction_ = isfunction_
inspect.isfunction = isfunction
_patch_inspect_isfunction()
def _load_cython_tests():
# List of Cython modules containing tests
cython_test_modules = ['_test_c']
for mod in cython_test_modules:
try:
# For each callable in `mod` with name `test_*`,
# set the result as an attribute of this module.
mod = importlib.import_module(mod)
for name in dir(mod):
item = getattr(mod, name)
if callable(item) and name.startswith('test_'):
setattr(sys.modules[__name__], name, item)
except ImportError:
pass
_load_cython_tests()
class TestMemory(BaseMemorySuite):
    """Run the shared memory test-suite against the Cython ``Memory`` type."""
    # Concrete implementation exercised by the inherited suite.
    Memory: type = _Memory
    # The C implementation does not support negative addresses.
    ADDR_NEG: bool = False
class TestBytesparse(BaseBytearraySuite, BaseMemorySuite):
    """Run the bytearray and memory suites against Cython ``bytesparse``."""
    # Concrete implementation exercised by BaseBytearraySuite.
    bytesparse: Type['_bytesparse'] = _bytesparse
    # Reuse some of BaseMemorySuite methods
    Memory: Type['_Memory'] = _bytesparse
    # The C implementation does not support negative addresses.
    ADDR_NEG: bool = False
| # Copyright (c) 2020-2022, <NAME>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import importlib
import inspect
import sys
from typing import Type
import pytest
from _common import *
# noinspection PyUnresolvedReferences
from cbytesparse.c import Memory as _Memory
from cbytesparse.c import bytesparse as _bytesparse
# Patch inspect.isfunction() to allow Cython functions to be discovered
@pytest.mark.skip
def _patch_inspect_isfunction():
isfunction_ = inspect.isfunction
def isfunction(obj):
return (isfunction_(obj)
or type(obj).__name__ == 'cython_function_or_method')
isfunction.isfunction_ = isfunction_
inspect.isfunction = isfunction
_patch_inspect_isfunction()
def _load_cython_tests():
# List of Cython modules containing tests
cython_test_modules = ['_test_c']
for mod in cython_test_modules:
try:
# For each callable in `mod` with name `test_*`,
# set the result as an attribute of this module.
mod = importlib.import_module(mod)
for name in dir(mod):
item = getattr(mod, name)
if callable(item) and name.startswith('test_'):
setattr(sys.modules[__name__], name, item)
except ImportError:
pass
_load_cython_tests()
class TestMemory(BaseMemorySuite):
Memory: type = _Memory
ADDR_NEG: bool = False
class TestBytesparse(BaseBytearraySuite, BaseMemorySuite):
bytesparse: Type['_bytesparse'] = _bytesparse
# Reuse some of BaseMemorySuite methods
Memory: Type['_Memory'] = _bytesparse
ADDR_NEG: bool = False | en | 0.687526 | # Copyright (c) 2020-2022, <NAME>. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # noinspection PyUnresolvedReferences # Patch inspect.isfunction() to allow Cython functions to be discovered # List of Cython modules containing tests # For each callable in `mod` with name `test_*`, # set the result as an attribute of this module. # Reuse some of BaseMemorySuite methods | 1.4849 | 1 |
describe/pdutils.py | xxxspy/describe | 0 | 6612788 | import pandas as pd
def loc(df, s1, s2=None):
    """Label-based lookup that works for both Series and DataFrame.

    For a Series, or when no column label *s2* is given, a single-label
    ``.loc`` lookup on *s1* is performed; otherwise row *s1*, column *s2*.
    """
    wants_single_label = isinstance(df, pd.Series) or s2 is None
    return df.loc[s1] if wants_single_label else df.loc[s1, s2]
| import pandas as pd
def loc(df, s1, s2=None):
if isinstance(df, pd.Series):
return df.loc[s1]
if s2 is None:
return df.loc[s1]
return df.loc[s1, s2]
| none | 1 | 2.963003 | 3 | |
mandelbrot.py | foxfluff/mandelbrot-py | 0 | 6612789 | <reponame>foxfluff/mandelbrot-py<gh_stars>0
class mandelbrot(object):
    """Escape-time membership test for points of the Mandelbrot set."""

    def __init__(self, iterations=50, cache=False):
        # Setters below validate the types of both attributes.
        self.iterations = iterations
        self.cache = cache

    def calc_point(self, coordinate):
        """Iterate z -> z**2 + coordinate and report set membership.

        Returns:
            (in_set, iteration): ``in_set`` is True when the orbit stayed
            bounded for every configured iteration; ``iteration`` is the
            last loop index reached (escape index, or iterations - 1).
        """
        z, iterinit = self.cache_retr(coordinate)
        # Guard against an empty loop (cached depth >= total iterations),
        # which previously left ``iteration`` undefined.
        iteration = iterinit
        # range() replaces Python-2-only xrange().
        for iteration in range(iterinit, self.iterations):
            z = z ** 2 + coordinate
            # |z| > 2 guarantees divergence.  The original tested only
            # abs(z.real), which misclassifies orbits that escape through
            # the imaginary axis.
            if abs(z) > 2:
                break
            self.cache_point(coordinate, iteration, z)
        in_set = iteration == self.iterations - 1 and abs(z) <= 2
        return in_set, iteration

    def cache_point(self, coordinate, iterations, value):
        """Store an intermediate orbit value; no-op while caching is off."""
        if not self.cache:
            return False
        # actual caching implementation goes here

    def cache_retr(self, coordinate):
        """Return a cached (z, iteration) pair; nothing is cached yet."""
        return complex(0, 0), 0

    # Properties
    @property
    def iterations(self):
        # Maximum number of escape-time iterations per point.
        return self._iterations

    @iterations.setter
    def iterations(self, other):
        if not isinstance(other, int):
            raise TypeError
        self._iterations = other

    @property
    def cache(self):
        # Whether intermediate orbit values should be cached.
        return self._cache

    @cache.setter
    def cache(self, other):
        if not isinstance(other, bool):
            raise TypeError
        self._cache = other
def __init__(self, iterations=50, cache=False):
self.iterations = iterations
self.cache = cache
def calc_point(self, coordinate):
z, iterinit = self.cache_retr(coordinate)
# I sense that my future is full of grey hair and edge cases
for iteration in xrange(iterinit, self.iterations):
z = z**2 + coordinate
if abs(z.real) > 2: break
self.cache_point(coordinate, iteration, z) #inb4 edge cases
if iteration == self.iterations - 1 and z.real < 2:
result = True
else:
result = False
#print 'false, %i' %iteration
return result, iteration
def cache_point(self, coordinate, iterations, value):
if not self.cache:
return False
# actual caching implementation goes here :V
def cache_retr(self, coordinate):
# no cache exists yet sooooooooooo gonna just return as if nothing is
# cached
return complex(0, 0), 0
# Properties
@property
def iterations(self):
return self._iterations
@iterations.setter
def iterations(self, other):
if not isinstance(other, int):
raise TypeError
self._iterations = other
@property
def cache(self):
return self._cache
@cache.setter
def cache(self, other):
if not isinstance(other, bool):
raise TypeError
self._cache = other | en | 0.614878 | # I sense that my future is full of grey hair and edge cases #inb4 edge cases #print 'false, %i' %iteration # actual caching implementation goes here :V # no cache exists yet sooooooooooo gonna just return as if nothing is # cached # Properties | 3.147624 | 3 |
hypothesis_generator/pra/io_util/evaluate.py | bigghost2054/KIDS | 1 | 6612790 | <filename>hypothesis_generator/pra/io_util/evaluate.py
"""
Filename: evaluate.py
Authors:
<NAME> - <EMAIL>
Description:
Perform evaluation.
To-do:
"""
# standard imports
import argparse
import logging as log
import os
import sys
DIRECTORY = os.path.dirname(__file__)
ABS_PATH_METRICS = os.path.join(DIRECTORY, '../../utils')
sys.path.insert(0, ABS_PATH_METRICS)
# third party imports
import numpy as np
from sklearn.metrics import roc_curve, auc, average_precision_score, accuracy_score
from sklearn.metrics import f1_score, confusion_matrix, precision_score, recall_score
# local imports
from kids_log import set_logging
from metrics import plot_roc, plot_pr, roc_auc_stats, pr_stats
from utils import save_results
def parse_argument():
    """Build the CLI parser and return the parsed arguments.

    Returns:
        argparse.Namespace with ``dir`` (base directory, default './')
        and ``final_model`` (boolean flag, default False).
    """
    arg_parser = argparse.ArgumentParser(description='evaluate the results')
    arg_parser.add_argument('--dir', metavar='dir', nargs='?', default='./',
                            help='base directory')
    arg_parser.add_argument('--final_model', default=False,
                            action='store_true',
                            help='Set when training the final model')
    return arg_parser.parse_args()
def main():
    """
    Main function.

    Reads per-relation score / label (and, unless --final_model is set,
    classification) files from args.dir, combines them across relations,
    then logs and saves ranking metrics (MAP, ROC-AUC) plus — in the
    non-final-model case — classification metrics and PR/ROC plots.
    """
    args = parse_argument()
    set_logging()
    # One relation name per line; line order defines the predicate ids.
    with open('./selected_relations') as _file:
        relations = _file.readlines()
    relations = [x.strip() for x in relations]
    index = 0
    predicates_dic = {}
    for relation in relations:
        predicates_dic[relation] = index
        index += 1
    combined_scores_array = None
    combined_predicates_array = None
    combined_labels_array = None
    combined_classifications_array = None
    start = 0
    # Gather scores, labels and (optionally) classifications per relation.
    for k, v in predicates_dic.items():
        _file = open(args.dir + '/scores/' + k, "r")
        l_file = open(args.dir + '/queriesR_labels/' + k, "r")
        if not args.final_model:
            c_file = open(args.dir + '/classifications/' + k, "r")
        scores = _file.readlines()
        _file.close()
        # First tab-separated column holds the score.
        scores = [x.strip().split('\t')[0] for x in scores]
        labels = l_file.readlines()
        l_file.close()
        # Third tab-separated column holds the gold label.
        labels = [x.strip().split('\t')[2] for x in labels]
        if not args.final_model:
            classifications = c_file.readlines()
            c_file.close()
            classifications = [x.strip().split('\t')[0] for x in classifications]
        # Repeat this relation's predicate id once per score row.
        predicates = [v for x in scores]
        predicates_array = np.array(predicates)
        scores_array = np.array(scores)
        labels_array = np.array(labels)
        if not args.final_model:
            classifications_array = np.array(classifications)
        if start == 0:
            combined_scores_array = scores_array
            combined_predicates_array = predicates_array
            combined_labels_array = labels_array
            if not args.final_model:
                combined_classifications_array = classifications_array
            start += 1
        else:
            combined_scores_array = np.append(combined_scores_array, scores_array)
            combined_predicates_array = np.append(combined_predicates_array, predicates_array)
            combined_labels_array = np.append(combined_labels_array, labels_array)
            if not args.final_model:
                combined_classifications_array = np.append(
                    combined_classifications_array, classifications_array)
    combined_scores_array = np.transpose(combined_scores_array).astype(float)
    combined_predicates_array = np.transpose(combined_predicates_array).astype(int)
    combined_labels_array = np.transpose(combined_labels_array).astype(int)
    # Remap negative labels (-1) to 0 for the sklearn metrics.
    combined_labels_array[:][combined_labels_array[:] == -1] = 0
    if not args.final_model:
        combined_classifications_array = np.transpose(combined_classifications_array).astype(int)
    results = {}
    results['predicate'] = {}
    # Per-predicate metrics.
    for i in range(len(predicates_dic)):
        # Reverse lookup: find the relation name for predicate id i.
        for key, value in predicates_dic.items():
            if value == i:
                pred_name = key
        indices, = np.where(combined_predicates_array == i)
        labels_predicate = combined_labels_array[indices]
        predicate_predictions = combined_scores_array[indices]
        if not args.final_model:
            classifications_predicate = combined_classifications_array[indices]
            classifications_predicate[:][classifications_predicate[:] == -1] = 0
            f1_measure_predicate = f1_score(labels_predicate, classifications_predicate)
            accuracy_predicate = accuracy_score(labels_predicate, classifications_predicate)
            recall_predicate = recall_score(labels_predicate, classifications_predicate)
            precision_predicate = precision_score(labels_predicate, classifications_predicate)
            confusion_predicate = confusion_matrix(labels_predicate, classifications_predicate)
            log.debug(' - test f1 measure for %s: %f', pred_name, f1_measure_predicate)
            log.debug(' - test accuracy for %s: %f', pred_name, accuracy_predicate)
            log.debug(' - test precision for %s: %f', pred_name, precision_predicate)
            log.debug(' - test recall for %s: %f', pred_name, recall_predicate)
            log.debug(' - test confusion matrix for %s:', pred_name)
            log.debug(str(confusion_predicate))
        fpr_pred, tpr_pred, _ = roc_curve(labels_predicate.ravel(), predicate_predictions.ravel())
        roc_auc_pred = auc(fpr_pred, tpr_pred)
        ap_pred = average_precision_score(labels_predicate.ravel(), predicate_predictions.ravel())
        results['predicate'][pred_name] = {
            'map': ap_pred,
            'roc_auc': roc_auc_pred,
        }
        if not args.final_model:
            results['predicate'][pred_name]['f1'] = f1_measure_predicate
            results['predicate'][pred_name]['accuracy'] = accuracy_predicate
            results['predicate'][pred_name]['cm'] = confusion_predicate
            results['predicate'][pred_name]['precision'] = precision_predicate
            results['predicate'][pred_name]['recall'] = recall_predicate
    # Overall metrics across all predicates.
    mean_average_precision_test = pr_stats(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic)
    roc_auc_test = roc_auc_stats(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic)
    if not args.final_model:
        f1_measure_test = f1_score(combined_labels_array, combined_classifications_array)
        accuracy_test = accuracy_score(combined_labels_array, combined_classifications_array)
        recall_test = recall_score(combined_labels_array, combined_classifications_array)
        precision_test = precision_score(combined_labels_array, combined_classifications_array)
        confusion_test = confusion_matrix(combined_labels_array, combined_classifications_array)
    plot_pr(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic, args.dir, name_of_file='pra_not_calibrated')
    plot_roc(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic, args.dir, name_of_file='pra_not_calibrated')
    results['overall'] = {
        'map': mean_average_precision_test,
        'roc_auc': roc_auc_test,
    }
    if not args.final_model:
        results['overall']['f1'] = f1_measure_test
        results['overall']['accuracy'] = accuracy_test
        results['overall']['cm'] = confusion_test
        results['overall']['precision'] = precision_test
        results['overall']['recall'] = recall_test
    log.debug('test mean average precision: %f', mean_average_precision_test)
    log.debug('test roc auc: %f', roc_auc_test)
    if not args.final_model:
        log.debug('test f1 measure: %f', f1_measure_test)
        log.debug('test accuracy: %f', accuracy_test)
        log.debug('test precision: %f', precision_test)
        log.debug('test recall: %f', recall_test)
        log.debug('test confusion matrix:')
        log.debug(str(confusion_test))
    save_results(results, args.dir)
    if not args.final_model:
        # NOTE(review): 'classifications' is the list left over from the
        # *last* relation processed in the loop above, not the combined
        # array — presumably a latent bug; verify intended behavior.
        with open(args.dir + "/classifications_pra.txt", 'w') as t_f:
            for row in classifications:
                t_f.write(str(row) + '\n')
# Script entry point: run the evaluation when executed directly.
if __name__ == "__main__":
    main()
| <filename>hypothesis_generator/pra/io_util/evaluate.py
"""
Filename: evaluate.py
Authors:
<NAME> - <EMAIL>
Description:
Perform evaluation.
To-do:
"""
# standard imports
import argparse
import logging as log
import os
import sys
DIRECTORY = os.path.dirname(__file__)
ABS_PATH_METRICS = os.path.join(DIRECTORY, '../../utils')
sys.path.insert(0, ABS_PATH_METRICS)
# third party imports
import numpy as np
from sklearn.metrics import roc_curve, auc, average_precision_score, accuracy_score
from sklearn.metrics import f1_score, confusion_matrix, precision_score, recall_score
# local imports
from kids_log import set_logging
from metrics import plot_roc, plot_pr, roc_auc_stats, pr_stats
from utils import save_results
def parse_argument():
"""
Parse input arguments.
Returns:
- parsed arguments
"""
parser = argparse.ArgumentParser(description='evaluate the results')
parser.add_argument(
'--dir',
metavar='dir',
nargs='?',
default='./',
help='base directory')
parser.add_argument(
'--final_model',
default=False,
action='store_true',
help='Set when training the final model')
return parser.parse_args()
def main():
"""
Main function.
"""
args = parse_argument()
set_logging()
with open('./selected_relations') as _file:
relations = _file.readlines()
relations = [x.strip() for x in relations]
index = 0
predicates_dic = {}
for relation in relations:
predicates_dic[relation] = index
index += 1
combined_scores_array = None
combined_predicates_array = None
combined_labels_array = None
combined_classifications_array = None
start = 0
for k, v in predicates_dic.items():
_file = open(args.dir + '/scores/' + k, "r")
l_file = open(args.dir + '/queriesR_labels/' + k, "r")
if not args.final_model:
c_file = open(args.dir + '/classifications/' + k, "r")
scores = _file.readlines()
_file.close()
scores = [x.strip().split('\t')[0] for x in scores]
labels = l_file.readlines()
l_file.close()
labels = [x.strip().split('\t')[2] for x in labels]
if not args.final_model:
classifications = c_file.readlines()
c_file.close()
classifications = [x.strip().split('\t')[0] for x in classifications]
predicates = [v for x in scores]
predicates_array = np.array(predicates)
scores_array = np.array(scores)
labels_array = np.array(labels)
if not args.final_model:
classifications_array = np.array(classifications)
if start == 0:
combined_scores_array = scores_array
combined_predicates_array = predicates_array
combined_labels_array = labels_array
if not args.final_model:
combined_classifications_array = classifications_array
start += 1
else:
combined_scores_array = np.append(combined_scores_array, scores_array)
combined_predicates_array = np.append(combined_predicates_array, predicates_array)
combined_labels_array = np.append(combined_labels_array, labels_array)
if not args.final_model:
combined_classifications_array = np.append(
combined_classifications_array, classifications_array)
combined_scores_array = np.transpose(combined_scores_array).astype(float)
combined_predicates_array = np.transpose(combined_predicates_array).astype(int)
combined_labels_array = np.transpose(combined_labels_array).astype(int)
combined_labels_array[:][combined_labels_array[:] == -1] = 0
if not args.final_model:
combined_classifications_array = np.transpose(combined_classifications_array).astype(int)
results = {}
results['predicate'] = {}
for i in range(len(predicates_dic)):
for key, value in predicates_dic.items():
if value == i:
pred_name = key
indices, = np.where(combined_predicates_array == i)
labels_predicate = combined_labels_array[indices]
predicate_predictions = combined_scores_array[indices]
if not args.final_model:
classifications_predicate = combined_classifications_array[indices]
classifications_predicate[:][classifications_predicate[:] == -1] = 0
f1_measure_predicate = f1_score(labels_predicate, classifications_predicate)
accuracy_predicate = accuracy_score(labels_predicate, classifications_predicate)
recall_predicate = recall_score(labels_predicate, classifications_predicate)
precision_predicate = precision_score(labels_predicate, classifications_predicate)
confusion_predicate = confusion_matrix(labels_predicate, classifications_predicate)
log.debug(' - test f1 measure for %s: %f', pred_name, f1_measure_predicate)
log.debug(' - test accuracy for %s: %f', pred_name, accuracy_predicate)
log.debug(' - test precision for %s: %f', pred_name, precision_predicate)
log.debug(' - test recall for %s: %f', pred_name, recall_predicate)
log.debug(' - test confusion matrix for %s:', pred_name)
log.debug(str(confusion_predicate))
fpr_pred, tpr_pred, _ = roc_curve(labels_predicate.ravel(), predicate_predictions.ravel())
roc_auc_pred = auc(fpr_pred, tpr_pred)
ap_pred = average_precision_score(labels_predicate.ravel(), predicate_predictions.ravel())
results['predicate'][pred_name] = {
'map': ap_pred,
'roc_auc': roc_auc_pred,
}
if not args.final_model:
results['predicate'][pred_name]['f1'] = f1_measure_predicate
results['predicate'][pred_name]['accuracy'] = accuracy_predicate
results['predicate'][pred_name]['cm'] = confusion_predicate
results['predicate'][pred_name]['precision'] = precision_predicate
results['predicate'][pred_name]['recall'] = recall_predicate
mean_average_precision_test = pr_stats(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic)
roc_auc_test = roc_auc_stats(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic)
if not args.final_model:
f1_measure_test = f1_score(combined_labels_array, combined_classifications_array)
accuracy_test = accuracy_score(combined_labels_array, combined_classifications_array)
recall_test = recall_score(combined_labels_array, combined_classifications_array)
precision_test = precision_score(combined_labels_array, combined_classifications_array)
confusion_test = confusion_matrix(combined_labels_array, combined_classifications_array)
plot_pr(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic, args.dir, name_of_file='pra_not_calibrated')
plot_roc(len(relations), combined_labels_array, combined_scores_array, combined_predicates_array, predicates_dic, args.dir, name_of_file='pra_not_calibrated')
results['overall'] = {
'map': mean_average_precision_test,
'roc_auc': roc_auc_test,
}
if not args.final_model:
results['overall']['f1'] = f1_measure_test
results['overall']['accuracy'] = accuracy_test
results['overall']['cm'] = confusion_test
results['overall']['precision'] = precision_test
results['overall']['recall'] = recall_test
log.debug('test mean average precision: %f', mean_average_precision_test)
log.debug('test roc auc: %f', roc_auc_test)
if not args.final_model:
log.debug('test f1 measure: %f', f1_measure_test)
log.debug('test accuracy: %f', accuracy_test)
log.debug('test precision: %f', precision_test)
log.debug('test recall: %f', recall_test)
log.debug('test confusion matrix:')
log.debug(str(confusion_test))
save_results(results, args.dir)
if not args.final_model:
with open(args.dir + "/classifications_pra.txt", 'w') as t_f:
for row in classifications:
t_f.write(str(row) + '\n')
if __name__ == "__main__":
main()
| en | 0.416992 | Filename: evaluate.py Authors: <NAME> - <EMAIL> Description: Perform evaluation. To-do: # standard imports # third party imports # local imports Parse input arguments. Returns: - parsed arguments Main function. | 2.482211 | 2 |
PythonExercicios/ex008.py | cedricgenaro/Python | 0 | 6612791 | print('\n {:=^100}'.format('Convertendo'))
mt = float(input('\n Digite o valor de metros a ser convertido: '))
cm = mt * 100
mm = mt * 1000
km = mt / 1000
hm = mt / 100
dam = mt / 10
dm = mt * 10
print('-'*50)
print('A medida de {}m corresponde a \n {:.3f}km \n {:.2f}hm \n {:.1f}dam \n {:.0f}dm \n {:.0f}cm \n {:.0f}mm '.format(mt, km, hm, dam, dm, cm, mm))
| print('\n {:=^100}'.format('Convertendo'))
mt = float(input('\n Digite o valor de metros a ser convertido: '))
cm = mt * 100
mm = mt * 1000
km = mt / 1000
hm = mt / 100
dam = mt / 10
dm = mt * 10
print('-'*50)
print('A medida de {}m corresponde a \n {:.3f}km \n {:.2f}hm \n {:.1f}dam \n {:.0f}dm \n {:.0f}cm \n {:.0f}mm '.format(mt, km, hm, dam, dm, cm, mm))
| none | 1 | 3.843109 | 4 | |
test_jfb.py | Typal-Research/jacobian_free_backprop | 7 | 6612792 | <reponame>Typal-Research/jacobian_free_backprop<filename>test_jfb.py
import torch
import torch.nn as nn
from utils import mnist_loaders, compute_fixed_point
import copy
import numpy as np
from BatchCG import cg_batch
# ------------------------------------------------
# small test network
# ------------------------------------------------
classification = torch.tensor
latent_variable = torch.tensor
image = torch.tensor
class test_net(nn.Module):
    """Small fixed-point network (FPN) used to exercise the JFB machinery.

    Architecture: a data-space encoder Q(d), a contractive latent map
    R(u, Qd) iterated to a fixed point, and a final linear classifier.
    """

    def __init__(self, latent_features):
        super().__init__()
        self.fc_d = nn.Linear(28*28, latent_features)
        self.fc_latent = nn.Linear(latent_features, latent_features)
        self.fc_y = nn.Linear(latent_features, 10)
        self.leaky_relu = nn.LeakyReLU(0.1)

    def forward(self, d: torch.Tensor, eps=1.0e-6, max_depth=1000):
        """Iterate u <- R(u, Q(d)) to a fixed point, then classify.

        Stops when the largest per-sample update norm drops below ``eps``
        or ``max_depth`` iterations are reached; ``self.depth`` records
        the number of iterations actually performed.
        """
        self.depth = 0.0
        Qd = self.data_space_forward(d)
        u = torch.zeros(Qd.shape, device=self.device())
        # float('inf') sentinel; np.Inf was removed in NumPy 2.0.
        u_prev = float('inf') * torch.ones(u.shape, device=self.device())
        all_samp_conv = False
        while not all_samp_conv and self.depth < max_depth:
            u_prev = u.clone()
            u = self.latent_space_forward(u, Qd)
            res_norm = torch.max(torch.norm(u - u_prev, dim=1))
            self.depth += 1.0
            all_samp_conv = res_norm <= eps
        return self.map_latent_to_inference(u)

    def device(self):
        """Device of the model parameters (inputs must live there too)."""
        return next(self.parameters()).data.device

    def data_space_forward(self, d: torch.Tensor) -> torch.Tensor:
        ''' Transform images into feature vectors in latent space

        The data space operator does *not* need to be 1-Lipschitz;
        however, bounding the singular values can improve generalization.
        '''
        n_samples = d.shape[0]
        # Flatten each sample to a 28*28 vector before the linear layer.
        d = d.view(n_samples, -1)
        Qd = self.leaky_relu(self.fc_d(d))
        return Qd

    def latent_space_forward(self, u: torch.Tensor,
                             v: torch.Tensor) -> torch.Tensor:
        ''' Fixed point operator on latent space (when v is fixed)

        R(u, v) is used in the fixed point iteration of FPN to find u*
        satisfying u* = R(u*, v).
        '''
        uv = u + v
        uv = self.leaky_relu(self.fc_latent(uv))
        # The 0.5 factor keeps R a contraction in u.
        R_uv = 0.5*uv
        return R_uv

    def map_latent_to_inference(self, u: torch.Tensor) -> torch.Tensor:
        ''' Transform latent feature vectors into a classification

        Final step of FPN; does *not* need to be 1-Lipschitz.
        '''
        y = self.fc_y(u)
        return y

    def normalize_lip_const(self, u, Qd):
        # Lipschitz normalization is a no-op for this toy network.
        return
# ------------------------------------------------
# test JJT symmetry
# ------------------------------------------------
def v_JJT_matvec(v, u, Ru):
    """Apply J J^T to v, where J = I - dR/du evaluated at the fixed point.

    ``v`` arrives as (n_samples, n_dim, 1) — a single right-hand side.
    ``u`` and ``Ru = R(u)`` must still be attached to the autograd graph
    so vector-Jacobian products can be formed.
    """
    # Drop the right-hand-side axis and match the latent layout of Ru.
    vec = v.squeeze(2).view(Ru.shape)
    vec.requires_grad = True
    # First VJP: vec * dR/du, so vec * J = vec - vec * dR/du.
    vec_dRdu = torch.autograd.grad(outputs=Ru, inputs=u,
                                   grad_outputs=vec,
                                   retain_graph=True,
                                   create_graph=True,
                                   only_inputs=True)[0]
    vec_J = vec - vec_dRdu
    # Second VJP, differentiating vec_J w.r.t. vec with grad_outputs
    # vec_J, yields vec * J * J^T.
    vec_JJT = torch.autograd.grad(outputs=vec_J, inputs=vec,
                                  grad_outputs=vec_J,
                                  retain_graph=True,
                                  create_graph=True,
                                  only_inputs=True)[0]
    # Detach from the graph and restore the (n_samples, n_dim, 1) shape.
    out = vec_JJT.detach().view(Ru.shape[0], -1)
    return out.unsqueeze(2)
def test_symmetry_of_Jacobians():
    """Assemble J J^T column by column and check that it is symmetric."""
    n_features = 10
    u = torch.randn(1, n_features)
    u.requires_grad = True
    fc = torch.nn.Linear(n_features, n_features)
    relu = torch.nn.ReLU()
    Ru = relu(fc(u))
    JJT_mat = torch.zeros(n_features, n_features)
    for i in range(n_features):
        basis = torch.zeros(n_features)
        basis[i] = 1.0
        # v_JJT_matvec expects shape (n_samples, n_dim, n_rhs).
        row = v_JJT_matvec(basis.view(1, n_features, 1), u, Ru)
        JJT_mat[i, :] = row.view(n_features)
    assert(torch.norm(JJT_mat - JJT_mat.transpose(1, 0)) < 1e-6)
    print('--------- symmetry test passed! ---------')
def test_Neumann_approximation():
    """Verify a truncated Neumann series matches dldu @ inv(I - A)."""
    n_features = 3
    # Small random map: spectral norm ~0.2, so the series converges fast.
    A = torch.randn(n_features, n_features)/10
    Id = torch.eye(n_features, n_features)
    J = Id - A
    x = torch.randn(3)
    x.requires_grad = True
    y = A.matmul(x)
    dldu = torch.randn(3)
    # Ground truth via an explicit matrix inverse.
    true_sol = dldu.matmul(torch.inverse(J))
    approx = dldu
    term = dldu.clone().detach()
    neumann_order = 50
    # Accumulate dldu * A^k for k = 1..neumann_order-1 via repeated VJPs.
    for _ in range(1, neumann_order):
        term.requires_grad = True
        next_term = torch.autograd.grad(outputs=y,
                                        inputs=x,
                                        grad_outputs=term,
                                        retain_graph=True,
                                        create_graph=True,
                                        only_inputs=True)[0]
        approx = approx + next_term.detach()
        term = next_term.detach()
    assert(torch.norm(approx - true_sol) < 1e-6)
    print('---- Neumann test passed! ----')
| import torch
import torch.nn as nn
from utils import mnist_loaders, compute_fixed_point
import copy
import numpy as np
from BatchCG import cg_batch
# ------------------------------------------------
# small test network
# ------------------------------------------------
classification = torch.tensor
latent_variable = torch.tensor
image = torch.tensor
class test_net(nn.Module):
def __init__(self, latent_features):
super().__init__()
self.fc_d = nn.Linear(28*28, latent_features)
self.fc_latent = nn.Linear(latent_features, latent_features)
self.fc_y = nn.Linear(latent_features, 10)
self.leaky_relu = nn.LeakyReLU(0.1)
def forward(self, d: image, eps=1.0e-6, max_depth=1000):
self.depth = 0.0
Qd = self.data_space_forward(d)
u = torch.zeros(Qd.shape, device=self.device())
u_prev = np.Inf*torch.ones(u.shape, device=self.device())
all_samp_conv = False
while not all_samp_conv and self.depth < max_depth:
u_prev = u.clone()
u = self.latent_space_forward(u, Qd)
res_norm = torch.max(torch.norm(u - u_prev, dim=1))
self.depth += 1.0
all_samp_conv = res_norm <= eps
return self.map_latent_to_inference(u)
def device(self):
return next(self.parameters()).data.device
def data_space_forward(self, d: image) -> latent_variable:
''' Transform images into feature vectors in latent space
The data space operator does *not* need to be 1-Lipschitz; however,
bounding the singular values can improve generalization. A
multiplicative factor is added in each update to control the
Lipschitz constant.
'''
n_samples = d.shape[0]
d = d.view(n_samples, -1)
Qd = self.leaky_relu(self.fc_d(d))
return Qd
def latent_space_forward(self, u: latent_variable,
v: latent_variable) -> latent_variable:
''' Fixed point operator on latent space (when v is fixed)
R(u,v) is used in fixed point iteration of FPN to
find u* satisfying u* = R(u*, v).
To make R be a contraction in u, we estimate a
Lipschitz constant and normalize updates using this.
'''
uv = u + v
uv = self.leaky_relu(self.fc_latent(uv))
R_uv = 0.5*uv
return R_uv
def map_latent_to_inference(self, u: latent_variable) -> classification:
''' Transform feature vectors into a classification
This is the final step of FPN, which flattens and
then applies affine mappings to input. Operations do *not* need to
be 1-Lipschitz.
'''
y = self.fc_y(u)
return y
def normalize_lip_const(self, u, Qd):
return
# ------------------------------------------------
# test JJT symmetry
# ------------------------------------------------
def v_JJT_matvec(v, u, Ru):
# inputs:
# v = vector to be multiplied by JJT
# u = fixed point vector u (requires grad)
# Ru = R applied to u (requires grad)
# assumes one rhs: x (n_samples, n_dim, n_rhs) -> (n_samples, n_dim)
v = v.squeeze(2) # squeeze number of rhs
v = v.view(Ru.shape) # reshape to filter space
v.requires_grad = True
# compute v*J = v*(I - dRdu)
v_dRdu = torch.autograd.grad(outputs=Ru, inputs=u,
grad_outputs=v,
retain_graph=True,
create_graph=True,
only_inputs=True)[0]
v_J = v - v_dRdu
# compute v_JJT
v_JJT = torch.autograd.grad(outputs=v_J, inputs=v,
grad_outputs=v_J,
retain_graph=True,
create_graph=True,
only_inputs=True)[0]
v = v.detach()
v_J = v_J.detach()
Amv = v_JJT.detach()
Amv = Amv.view(Ru.shape[0], -1)
Amv = Amv.unsqueeze(2).detach()
return Amv
def test_symmetry_of_Jacobians():
n_features = 10
u = torch.randn(1, n_features)
u.requires_grad = True
fc = torch.nn.Linear(n_features, n_features)
relu = torch.nn.ReLU()
Ru = relu(fc(u))
JJT_mat = torch.zeros(n_features, n_features)
for i in range(n_features):
temp_vec = torch.zeros(n_features)
temp_vec[i] = 1.0
# reshape to match dimensions of v_JJT_matvec function
temp_vec = temp_vec.view(1, n_features, 1)
v_JJT = v_JJT_matvec(temp_vec, u, Ru)
v_JJT = v_JJT.view(n_features)
JJT_mat[i, :] = v_JJT
assert(torch.norm(JJT_mat - JJT_mat.transpose(1, 0)) < 1e-6)
print('--------- symmetry test passed! ---------')
def test_Neumann_approximation():
n_features = 3
A = torch.randn(n_features, n_features)/10
Id = torch.eye(n_features, n_features)
J = Id - A
x = torch.randn(3)
x.requires_grad = True
y = A.matmul(x)
dldu = torch.randn(3)
true_sol = dldu.matmul(torch.inverse(J))
dldu_Jinv_approx = dldu
dldu_dfdx_k = dldu.clone().detach()
neumann_order=50
# Approximate Jacobian inverse with Neumann series
# expansion up to neumann_order terms
for i in range(1, neumann_order):
dldu_dfdx_k.requires_grad = True
# compute dldu_dfdx_k * dfdx = dldu_dfdx_k+1
dfdu_kplus1 = torch.autograd.grad(outputs=y,
inputs = x,
grad_outputs=dldu_dfdx_k,
retain_graph=True,
create_graph = True,
only_inputs=True)[0]
dldu_Jinv_approx = dldu_Jinv_approx + dfdu_kplus1.detach()
dldu_dfdx_k = dfdu_kplus1.detach()
assert(torch.norm(dldu_Jinv_approx - true_sol) < 1e-6)
print('---- Neumann test passed! ----') | en | 0.707366 | # ------------------------------------------------ # small test network # ------------------------------------------------ Transform images into feature vectors in latent space The data space operator does *not* need to be 1-Lipschitz; however, bounding the singular values can improve generalization. A multiplicative factor is added in each update to control the Lipschitz constant. Fixed point operator on latent space (when v is fixed) R(u,v) is used in fixed point iteration of FPN to find u* satisfying u* = R(u*, v). To make R be a contraction in u, we estimate a Lipschitz constant and normalize updates using this. Transform feature vectors into a classification This is the final step of FPN, which flattens and then applies affine mappings to input. Operations do *not* need to be 1-Lipschitz. # ------------------------------------------------ # test JJT symmetry # ------------------------------------------------ # inputs: # v = vector to be multiplied by JJT # u = fixed point vector u (requires grad) # Ru = R applied to u (requires grad) # assumes one rhs: x (n_samples, n_dim, n_rhs) -> (n_samples, n_dim) # squeeze number of rhs # reshape to filter space # compute v*J = v*(I - dRdu) # compute v_JJT # reshape to match dimensions of v_JJT_matvec function # Approximate Jacobian inverse with Neumann series # expansion up to neumann_order terms # compute dldu_dfdx_k * dfdx = dldu_dfdx_k+1 | 2.273331 | 2 |
DM900-USB-ArduinoNano/DM900-Enigmalight-Service/usr/lib/enigma2/python/Plugins/Extensions/EnigmaLight/remote/models/info.py | NeedfulThings/hyperionconfigs | 0 | 6612793 | # -*- coding: utf-8 -*-
##############################################################################
# 2011 E2OpenPlugins #
# #
# This file is open source software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
##############################################################################
from Plugins.Extensions.EnigmaLight.__init__ import _
from Components.config import config
from Tools.Directories import fileExists, pathExists
from time import time, localtime, strftime
from twisted.web import version
import os
import sys
import time
import string
def formatIp(ip):
if ip is None or len(ip) != 4:
return "0.0.0.0"
return "%d.%d.%d.%d" % (ip[0], ip[1], ip[2], ip[3])
def getBasePath():
path = os.path.dirname(sys.modules[__name__].__file__)
chunks = path.split("/")
chunks.pop()
chunks.pop()
return "/".join(chunks)
def getPublicPath(file = ""):
return getBasePath() + "/remote/public/" + file
def getViewsPath(file = ""):
return getBasePath() + "/remote/views/" + file
def getCurrentTime():
t = time.localtime()
return {
"status": True,
"time": "%2d:%02d:%02d" % (t.tm_hour, t.tm_min, t.tm_sec)
}
def getFrontendStatus():
return {}
def getStatusInfo(self, controller):
statusinfo = {}
statusinfo['lights_onoff'] = controller.getOptionValue("lights_onoff")
statusinfo['current_mode'] = controller.getOptionValue("mode")
#Options
statusinfo['option_brightness'] = controller.getOptionValue("brightness")
statusinfo['option_brightnessmin'] = controller.getOptionValue("brightnessmin")
statusinfo['option_brightnessmax'] = controller.getOptionValue("brightnessmax")
statusinfo['option_saturation'] = controller.getOptionValue("saturation")
statusinfo['option_saturationmin'] = controller.getOptionValue("saturationmin")
statusinfo['option_saturationmax'] = controller.getOptionValue("saturationmax")
statusinfo['option_speed'] = controller.getOptionValue("speed")
statusinfo['option_gamma'] = controller.getOptionValue("gamma")
return statusinfo
| # -*- coding: utf-8 -*-
##############################################################################
# 2011 E2OpenPlugins #
# #
# This file is open source software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
##############################################################################
from Plugins.Extensions.EnigmaLight.__init__ import _
from Components.config import config
from Tools.Directories import fileExists, pathExists
from time import time, localtime, strftime
from twisted.web import version
import os
import sys
import time
import string
def formatIp(ip):
if ip is None or len(ip) != 4:
return "0.0.0.0"
return "%d.%d.%d.%d" % (ip[0], ip[1], ip[2], ip[3])
def getBasePath():
path = os.path.dirname(sys.modules[__name__].__file__)
chunks = path.split("/")
chunks.pop()
chunks.pop()
return "/".join(chunks)
def getPublicPath(file = ""):
return getBasePath() + "/remote/public/" + file
def getViewsPath(file = ""):
return getBasePath() + "/remote/views/" + file
def getCurrentTime():
t = time.localtime()
return {
"status": True,
"time": "%2d:%02d:%02d" % (t.tm_hour, t.tm_min, t.tm_sec)
}
def getFrontendStatus():
return {}
def getStatusInfo(self, controller):
statusinfo = {}
statusinfo['lights_onoff'] = controller.getOptionValue("lights_onoff")
statusinfo['current_mode'] = controller.getOptionValue("mode")
#Options
statusinfo['option_brightness'] = controller.getOptionValue("brightness")
statusinfo['option_brightnessmin'] = controller.getOptionValue("brightnessmin")
statusinfo['option_brightnessmax'] = controller.getOptionValue("brightnessmax")
statusinfo['option_saturation'] = controller.getOptionValue("saturation")
statusinfo['option_saturationmin'] = controller.getOptionValue("saturationmin")
statusinfo['option_saturationmax'] = controller.getOptionValue("saturationmax")
statusinfo['option_speed'] = controller.getOptionValue("speed")
statusinfo['option_gamma'] = controller.getOptionValue("gamma")
return statusinfo
| de | 0.324748 | # -*- coding: utf-8 -*- ############################################################################## # 2011 E2OpenPlugins # # # # This file is open source software; you can redistribute it and/or modify # # it under the terms of the GNU General Public License version 2 as # # published by the Free Software Foundation. # # # ############################################################################## #Options | 1.984669 | 2 |
tests/test_convenience_Horizons.py | Smithsonian/cheby_checker | 1 | 6612794 | """
Tests of the "convenience_Horizons" routines that are used for testing.
Some of these tests really function as demos/documentation to
remind myself/ourselves of how these Horizons functions are
intended to work
"""
# Import standard packages
# --------------------------------------------------------------
import numpy as np
# Import the module to be tested
# --------------------------------------------------------------
import convenience_Horizons as Horizons
def test_read_Horizons_state_from_text():
"""
This is NOT testing a built-in Horizons function
This is just testing a little convenience routine created by MJP
This convenience routine is ONLY used as part of the testing code for Cheby Checker
"""
# input text
lines = """
X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01
VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03
""".split('\n')[1:-1]
# use the target function to extract the coordinaets
result = Horizons.read_Horizons_state_from_text( lines )
# check that the results are as expected
expected_array = np.array([ float('-2.590350154796811E+00'), float('-7.949342693459856E-02'), float('1.245107691757731E-01'),
float('-1.454708370733871E-03'), float('-9.503445860627428E-03'), float('-3.846514535533382E-03') ] )
assert np.allclose(expected_array, result, rtol=1e-08, atol=1e-08)
def test_extract_first_state_from_text():
"""
This is NOT testing a built-in Horizons function
This is just testing a little convenience routine created by MJP
This convenience routine is ONLY used as part of the testing code for Cheby Checker
"""
# input text
lines = """
*******************************************************************************
JPL/HORIZONS 12345 (1993 FT8) 2022-Jan-28 14:39:42
Rec #: 12345 (+COV) Soln.date: 2021-Nov-10_08:38:58 # obs: 1959 (1993-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
A= 2.448750742541829 MA= 14.99600220651154 ADIST= 2.841293789229623
PER= 3.832 N= .25720961 ANGMOM= .02657056
DAN= 2.14602 DDN= 2.68596 L= 60.6968709
B= -2.401858 MOID= 1.06974006 TP= 2015-Jan-27.6973502915
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#32, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 14:39:42 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 12345 (1993 FT8) {source: JPL#32}
Center body name: Sun (10) {source: DE441}
Center-site name: BODY CENTER
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)}
Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
Reference frame : ICRF
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 3.047919278950221E-01 Y= 1.902892265722551E+00 Z= 7.692605770652556E-01
VX=-1.255238959074424E-02 VY= 2.052146789677108E-03 VZ= 1.612315394505861E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s]
X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01
VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03
LT= 1.498492268422344E-02 RG= 2.594558933811760E+00 RR= 1.558928955626413E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
#print(lines)
# use the target function to extract the coordinaets
result = Horizons.extract_first_state_from_text( lines )
# check that the results are as expected
expected_array = np.array([ float('-2.590350154796811E+00'), float('-7.949342693459856E-02'), float('1.245107691757731E-01'),
float('-1.454708370733871E-03'), float('-9.503445860627428E-03'), float('-3.846514535533382E-03') ] )
assert np.allclose(expected_array, result, rtol=1e-08, atol=1e-08)
def test_nice_Horizons_A():
"""
Testing Mike A's convenience wrapper around Horizon query functionality
- Much of this test is being done to provide some reminder
to myself/ourselves as to how to use the Horizons tool
Deliberately *not* using all of the functionalities of pytest here.
Just want to keep it simple and keep it obvious what everything is supposed to be doing.
Here we extract the
HELIOCENTRIC state
for
Asteroid number 12345 (== 1993 FT8)
in an
EQUATORIAL FRAME (refplane='earth')
"""
# Define the variables that will be used in the query
target = '12345' # <<-- Asteroid number 12345 == 1993 FT8
centre = '500@10'
epochs = '2458850.0'
id_type = 'smallbody'
refplane= 'earth'
# Hardpaste the expected results from a by-hand query of horizons
hardpasted_results = """
*******************************************************************************
JPL/HORIZONS 12345 (1993 FT8) 2022-Jan-28 14:39:42
Rec #: 12345 (+COV) Soln.date: 2021-Nov-10_08:38:58 # obs: 1959 (1993-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
A= 2.448750742541829 MA= 14.99600220651154 ADIST= 2.841293789229623
PER= 3.832 N= .25720961 ANGMOM= .02657056
DAN= 2.14602 DDN= 2.68596 L= 60.6968709
B= -2.401858 MOID= 1.06974006 TP= 2015-Jan-27.6973502915
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#32, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 14:39:42 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 12345 (1993 FT8) {source: JPL#32}
Center body name: Sun (10) {source: DE441}
Center-site name: BODY CENTER
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)}
Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
Reference frame : ICRF
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 3.047919278950221E-01 Y= 1.902892265722551E+00 Z= 7.692605770652556E-01
VX=-1.255238959074424E-02 VY= 2.052146789677108E-03 VZ= 1.612315394505861E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s]
X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01
VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03
LT= 1.498492268422344E-02 RG= 2.594558933811760E+00 RR= 1.558928955626413E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
# Extract the hardpasted results into an array
# (using convenience func, "extract_first_state_from_text", tested above)
expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
# Call the nice_Horizons function (i.e. the focus of the test)
result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane )
# Check that the results are as expected
# - Lowered the accuracy from 1e-11 to 1e-8 as the discrepany grows when JPL re-fits the orbit,
# and I don't want to keep hand-pasting different sets of results
assert np.allclose(expected_array, result, rtol=1e-8, atol=1e-8)
def test_nice_Horizons_B():
"""
Testing Mike A's convenience wrapper around Horizon query functionality
- Much of this test is being done to provide some reminder
to myself/ourselves as to how to use the Horizons tool
Deliberately *not* using all of the functionalities of pytest here.
Just want to keep it simple and keep it obvious what everything is supposed to be doing.
Here we extract the
HELIOCENTRIC state
for
GEOCENTER
in an
EQUATORIAL FRAME (refplane='earth')
"""
# Define the variables that will be used in the query
target = '399' # Earth
centre = '500@10'
epochs = '2458850.0'
id_type = 'majorbody'
refplane= 'earth'
# Hardpaste the expected results from a by-hand query of horizons
hardpasted_results = """
*******************************************************************************
Revised: April 12, 2021 Earth 399
GEOPHYSICAL PROPERTIES (revised Aug 15, 2018):
Vol. Mean Radius (km) = 6371.01+-0.02 Mass x10^24 (kg)= 5.97219+-0.0006
Equ. radius, km = 6378.137 Mass layers:
Polar axis, km = 6356.752 Atmos = 5.1 x 10^18 kg
Flattening = 1/298.257223563 oceans = 1.4 x 10^21 kg
Density, g/cm^3 = 5.51 crust = 2.6 x 10^22 kg
J2 (IERS 2010) = 0.00108262545 mantle = 4.043 x 10^24 kg
g_p, m/s^2 (polar) = 9.8321863685 outer core = 1.835 x 10^24 kg
g_e, m/s^2 (equatorial) = 9.7803267715 inner core = 9.675 x 10^22 kg
g_o, m/s^2 = 9.82022 Fluid core rad = 3480 km
GM, km^3/s^2 = 398600.435436 Inner core rad = 1215 km
GM 1-sigma, km^3/s^2 = 0.0014 Escape velocity = 11.186 km/s
Rot. Rate (rad/s) = 0.00007292115 Surface area:
Mean sidereal day, hr = 23.9344695944 land = 1.48 x 10^8 km
Mean solar day 2000.0, s = 86400.002 sea = 3.62 x 10^8 km
Mean solar day 1820.0, s = 86400.0 Love no., k2 = 0.299
Moment of inertia = 0.3308 Atm. pressure = 1.0 bar
Mean temperature, K = 270 Volume, km^3 = 1.08321 x 10^12
Mean effect. IR temp, K = 255 Magnetic moment = 0.61 gauss Rp^3
Geometric albedo = 0.367 Vis. mag. V(1,0)= -3.86
Solar Constant (W/m^2) = 1367.6 (mean), 1414 (perihelion), 1322 (aphelion)
HELIOCENTRIC ORBIT CHARACTERISTICS:
Obliquity to orbit, deg = 23.4392911 Sidereal orb period = 1.0000174 y
Orbital speed, km/s = 29.79 Sidereal orb period = 365.25636 d
Mean daily motion, deg/d = 0.9856474 Hill's sphere radius = 234.9
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:02:02 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: Earth (399) {source: DE441}
Center body name: Sun (10) {source: DE441}
Center-site name: BODY CENTER
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)}
Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
Reference frame : ICRF
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s]
X =-1.749585912701602E-01 Y = 8.877645495087018E-01 Z = 3.848482875671789E-01
VX=-1.721190438300784E-02 VY=-2.874039035670773E-03 VZ=-1.245648654352060E-03
LT= 5.678966496273616E-03 RG= 9.832825679666131E-01 RR=-1.981645766688001E-05
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>.Giorg<EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
# Extract the hardpasted results into an array
# (using convenience func, "extract_first_state_from_text", tested above)
expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
# Call the nice_Horizons function (i.e. the focus of the test)
result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane)
print('result=\n' , result)
# Check that the results are as expected
assert np.allclose(expected_array, result, rtol=1e-10, atol=1e-10)
def test_nice_Horizons_C():
    """
    Testing Mike A's convenience wrapper around Horizon query functionality
    - Much of this test is being done to provide some reminder
    to myself/ourselves as to how to use the Horizons tool
    Deliberately *not* using all of the functionalities of pytest here.
    Just want to keep it simple and keep it obvious what everything is supposed to be doing.
    Here we extract the
    TOPOCENTRIC (F51) state
    for
    Asteroid number 54321 (== 2000 JA81)
    in an
    EQUATORIAL FRAME (refplane='earth')

    NOTE(review): this test performs a LIVE network query to JPL Horizons via
    Horizons.nice_Horizons, so it requires internet access to run.
    """
    # Define the variables that will be used in the query
    target = '54321' # <<-- Asteroid number 54321 == 2000 JA81
    centre = 'F51'
    epochs = '2458850.0'
    id_type = 'smallbody'
    refplane= 'earth'
    # Hardpaste the expected results from a by-hand query of horizons
    # NOTE(review): the "<EMAIL>" token below is a redaction artifact in the
    # pasted text; presumably ignored by extract_first_state_from_text (which
    # pulls the state after $$SOE) -- verify if the parser ever changes.
    hardpasted_results = """
*******************************************************************************
JPL/HORIZONS 54321 (2000 JA81) 2022-Jan-28 16:08:57
Rec #: 54321 (+COV) Soln.date: 2021-Oct-08_04:39:24 # obs: 1315 (1979-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
A= 2.58760024089671 MA= 143.3507250314229 ADIST= 3.236789307546094
PER= 4.1625 N= .236787236 ANGMOM= .026786318
DAN= 2.43937 DDN= 2.41026 L= 182.707997
B= 6.7671807 MOID= .95572901 TP= 2012-Jun-14.6011463138
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#33, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:08:57 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 54321 (2000 JA81) {source: JPL#33}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : ICRF
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 2.934573285149345E+00 Y=-8.702901499041770E-01 Z=-7.535748078855007E-01
VX= 3.948600090813408E-03 VY= 7.155151609877323E-03 VZ= 2.568700850735469E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 3.140272938432556E-01 Y = 1.401450872643150E+00 Z = 5.824305212783573E-01
VX= 6.595793009138184E-03 VY= 5.366428257622971E-04 VZ= 1.577496239642071E-03
LT= 8.950941094980980E-03 RG= 1.549807407979245E+00 RR= 2.414570690380698E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Extract the hardpasted results into an array
    # (using convenience func, "extract_first_state_from_text", tested above)
    expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
    # Call the nice_Horizons function (i.e. the focus of the test)
    result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane )
    print('result=\n' , result)
    # Check that the results are as expected
    assert np.allclose(expected_array, result, rtol=1e-11, atol=1e-11)
def test_nice_Horizons_D():
    """
    Similar to test_nice_Horizons_C, but ECLIPTIC instead of equatorial
    Here we extract the
    TOPOCENTRIC (F51) state
    for
    Asteroid number 54321 (== 2000 JA81)
    in an
    ECLIPTIC FRAME (refplane='ecliptic')

    NOTE(review): this test performs a LIVE network query to JPL Horizons via
    Horizons.nice_Horizons, so it requires internet access to run.
    """
    # Define the variables that will be used in the query
    target = '54321' # <<-- Asteroid number 54321 == 2000 JA81
    centre = 'F51'
    epochs = '2458850.0'
    id_type = 'smallbody'
    refplane= 'ecliptic'
    # Hardpaste the expected results from a by-hand query of horizons
    # NOTE(review): "<EMAIL>" below is a redaction artifact in the pasted text;
    # the parser only needs the $$SOE state lines.
    hardpasted_results = """
*******************************************************************************
JPL/HORIZONS 54321 (2000 JA81) 2022-Jan-28 16:19:21
Rec #: 54321 (+COV) Soln.date: 2021-Oct-08_04:39:24 # obs: 1315 (1979-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
A= 2.58760024089671 MA= 143.3507250314229 ADIST= 3.236789307546094
PER= 4.1625 N= .236787236 ANGMOM= .026786318
DAN= 2.43937 DDN= 2.41026 L= 182.707997
B= 6.7671807 MOID= .95572901 TP= 2012-Jun-14.6011463138
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#33, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:19:22 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 54321 (2000 JA81) {source: JPL#33}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : Ecliptic of J2000.0
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 2.934573285149345E+00 Y=-8.702901499041770E-01 Z=-7.535748078855007E-01
VX= 3.948600090813408E-03 VY= 7.155151609877323E-03 VZ= 2.568700850735469E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 3.140272938432556E-01 Y = 1.517483592803339E+00 Z =-2.309558662379520E-02
VX= 6.595793009138184E-03 VY= 1.119852134073137E-03 VZ= 1.233860245870196E-03
LT= 8.950941094980980E-03 RG= 1.549807407979244E+00 RR= 2.414570690380698E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
Ecliptic at the standard reference epoch
Reference epoch: J2000.0
X-Y plane: adopted Earth orbital plane at the reference epoch
Note: IAU76 obliquity of 84381.448 arcseconds wrt ICRF X-Y plane
X-axis : ICRF
Z-axis : perpendicular to the X-Y plane in the directional (+ or -) sense
of Earth's north pole at the reference epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : Jon.D.Giorg<EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Extract the hardpasted results into an array
    # (using convenience func, "extract_first_state_from_text", tested above)
    expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
    # Call the nice_Horizons function (i.e. the focus of the test)
    result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane)
    print('result=\n' , result)
    # Check that the results are as expected
    assert np.allclose(expected_array, result, rtol=1e-11, atol=1e-11)
def test_nice_Horizons_E():
    """
    Here we use Horizons to get the Heliocentric EQUATORIAL position of the Observatory
    (NB we use a hack, setting the target as the Sun, and the center as the observatory)
    Here we extract the
    TOPOCENTRIC (F51) state
    for
    The Sun
    in an
    EQUATORIAL FRAME (refplane='earth')

    NOTE(review): this test performs a LIVE network query to JPL Horizons via
    Horizons.nice_Horizons, so it requires internet access to run.
    """
    # Define the variables that will be used in the query
    target = '10'
    centre = 'F51'
    epochs = '2458850.0'
    id_type = 'majorbody'
    refplane='earth'
    # Hardpaste the expected results from a by-hand query of horizons
    # NOTE(review): "<EMAIL>" below is a redaction artifact in the pasted text;
    # the parser only needs the $$SOE state lines.
    hardpasted_results = """
*******************************************************************************
Revised: July 31, 2013 Sun 10
PHYSICAL PROPERTIES (updated 2018-Aug-15):
GM, km^3/s^2 = 132712440041.93938 Mass, 10^24 kg = ~1988500
Vol. mean radius, km = 695700 Volume, 10^12 km^3 = 1412000
Solar radius (IAU) = 696000 km Mean density, g/cm^3 = 1.408
Radius (photosphere) = 696500 km Angular diam at 1 AU = 1919.3"
Photosphere temp., K = 6600 (bottom) Photosphere temp., K = 4400(top)
Photospheric depth = ~500 km Chromospheric depth = ~2500 km
Flatness, f = 0.00005 Adopted sid. rot. per.= 25.38 d
Surface gravity = 274.0 m/s^2 Escape speed, km/s = 617.7
Pole (RA,DEC), deg. = (286.13, 63.87) Obliquity to ecliptic = 7.25 deg.
Solar constant (1 AU) = 1367.6 W/m^2 Luminosity, 10^24 J/s = 382.8
Mass-energy conv rate = 4.260 x 10^9 kg/s Effective temp, K = 5772
Sunspot cycle = 11.4 yr Cycle 24 sunspot min. = 2008 A.D.
Motion relative to nearby stars = apex : R.A.= 271 deg.; DEC.= +30 deg.
speed: 19.4 km/s (0.0112 au/day)
Motion relative to 2.73K BB/CBR = apex : l= 264.7 +- 0.8; b= 48.2 +- 0.5 deg.
speed: 369 +-11 km/s
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:31:17 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: Sun (10) {source: DE441}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : ICRF
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 1.749807755042866E-01 Y =-8.877977147373203E-01 Z =-3.848633185157228E-01
VX= 1.742085896638510E-02 VY= 3.013989047237847E-03 VZ= 1.245251026128347E-03
LT= 5.679196211202660E-03 RG= 9.833223418736220E-01 RR=-1.085591308641175E-04
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Extract the hardpasted results into an array
    # (using convenience func, "extract_first_state_from_text", tested above)
    expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
    # Call the nice_Horizons function (i.e. the focus of the test)
    result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane)
    print('result=\n' , result)
    # Check that the results are as expected
    assert np.allclose(expected_array, result, rtol=1e-11, atol=1e-11)
def test_nice_Horizons_F():
    """
    Similar to test_nice_Horizons_E, but ECLIPTIC instead of equatorial
    Here we use Horizons to get the Heliocentric ECLIPTIC position of the Observatory
    (NB we use a hack, setting the target as the Sun, and the center as the observatory)
    Here we extract the
    TOPOCENTRIC (F51) state
    for
    The Sun
    in an
    ECLIPTIC FRAME (refplane='ecliptic')

    NOTE(review): this test performs a LIVE network query to JPL Horizons via
    Horizons.nice_Horizons, so it requires internet access to run.
    """
    # Define the variables that will be used in the query
    target = '10'
    centre = 'F51'
    epochs = '2458850.0'
    id_type = 'majorbody'
    refplane='ecliptic'
    # Hardpaste the expected results from a by-hand query of horizons
    # NOTE(review): "<EMAIL>" below is a redaction artifact in the pasted text;
    # the parser only needs the $$SOE state lines.
    hardpasted_results = """
*******************************************************************************
Revised: July 31, 2013 Sun 10
PHYSICAL PROPERTIES (updated 2018-Aug-15):
GM, km^3/s^2 = 132712440041.93938 Mass, 10^24 kg = ~1988500
Vol. mean radius, km = 695700 Volume, 10^12 km^3 = 1412000
Solar radius (IAU) = 696000 km Mean density, g/cm^3 = 1.408
Radius (photosphere) = 696500 km Angular diam at 1 AU = 1919.3"
Photosphere temp., K = 6600 (bottom) Photosphere temp., K = 4400(top)
Photospheric depth = ~500 km Chromospheric depth = ~2500 km
Flatness, f = 0.00005 Adopted sid. rot. per.= 25.38 d
Surface gravity = 274.0 m/s^2 Escape speed, km/s = 617.7
Pole (RA,DEC), deg. = (286.13, 63.87) Obliquity to ecliptic = 7.25 deg.
Solar constant (1 AU) = 1367.6 W/m^2 Luminosity, 10^24 J/s = 382.8
Mass-energy conv rate = 4.260 x 10^9 kg/s Effective temp, K = 5772
Sunspot cycle = 11.4 yr Cycle 24 sunspot min. = 2008 A.D.
Motion relative to nearby stars = apex : R.A.= 271 deg.; DEC.= +30 deg.
speed: 19.4 km/s (0.0112 au/day)
Motion relative to 2.73K BB/CBR = apex : l= 264.7 +- 0.8; b= 48.2 +- 0.5 deg.
speed: 369 +-11 km/s
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:43:25 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: Sun (10) {source: DE441}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : Ecliptic of J2000.0
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 1.749807755042866E-01 Y =-9.676283142792063E-01 Z = 4.045892447000447E-05
VX= 1.742085896638510E-02 VY= 3.260613297708340E-03 VZ=-5.640051197420877E-05
LT= 5.679196211202660E-03 RG= 9.833223418736221E-01 RR=-1.085591308641179E-04
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
Ecliptic at the standard reference epoch
Reference epoch: J2000.0
X-Y plane: adopted Earth orbital plane at the reference epoch
Note: IAU76 obliquity of 84381.448 arcseconds wrt ICRF X-Y plane
X-axis : ICRF
Z-axis : perpendicular to the X-Y plane in the directional (+ or -) sense
of Earth's north pole at the reference epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : Jon.D.Giorgini<EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Extract the hardpasted results into an array
    # (using convenience func, "extract_first_state_from_text", tested above)
    expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
    # Call the nice_Horizons function (i.e. the focus of the test)
    result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane)
    print('result=\n' , result)
    # Check that the results are as expected
    assert np.allclose(expected_array, result, rtol=1e-11, atol=1e-11)
"""
Tests of the "convenience_Horizons" routines that are used for testing.
Some of these tests really function as demos/documentation to
remind myself/ourselves of how these Horizons functions are
intended to work
"""
# Import standard packages
# --------------------------------------------------------------
import numpy as np
# Import the module to be tested
# --------------------------------------------------------------
import convenience_Horizons as Horizons
def test_read_Horizons_state_from_text():
    """
    Exercise the small MJP-written convenience routine (NOT a built-in
    Horizons function) that parses a cartesian state out of a pair of
    Horizons-style "X/Y/Z" and "VX/VY/VZ" text lines.
    This helper is used only by the Cheby Checker test suite.
    """
    # Two lines of Horizons-formatted state text to be parsed
    state_text = """
X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01
VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03
""".split('\n')[1:-1]
    # The six components we expect back: position (au) then velocity (au/day)
    anticipated = np.array([-2.590350154796811E+00, -7.949342693459856E-02, 1.245107691757731E-01,
                            -1.454708370733871E-03, -9.503445860627428E-03, -3.846514535533382E-03])
    # Run the routine under test, then compare component-by-component
    parsed = Horizons.read_Horizons_state_from_text(state_text)
    assert np.allclose(anticipated, parsed, rtol=1e-08, atol=1e-08)
def test_extract_first_state_from_text():
    """
    This is NOT testing a built-in Horizons function
    This is just testing a little convenience routine created by MJP
    This convenience routine is ONLY used as part of the testing code for Cheby Checker

    The expected output is the first cartesian state after the $$SOE marker
    in the pasted Horizons output below (the X/Y/Z and VX/VY/VZ lines).
    """
    # input text
    # NOTE(review): "<EMAIL>" below is a redaction artifact in the pasted text;
    # only the $$SOE state lines matter to the parser.
    lines = """
*******************************************************************************
JPL/HORIZONS 12345 (1993 FT8) 2022-Jan-28 14:39:42
Rec #: 12345 (+COV) Soln.date: 2021-Nov-10_08:38:58 # obs: 1959 (1993-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
A= 2.448750742541829 MA= 14.99600220651154 ADIST= 2.841293789229623
PER= 3.832 N= .25720961 ANGMOM= .02657056
DAN= 2.14602 DDN= 2.68596 L= 60.6968709
B= -2.401858 MOID= 1.06974006 TP= 2015-Jan-27.6973502915
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#32, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 14:39:42 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 12345 (1993 FT8) {source: JPL#32}
Center body name: Sun (10) {source: DE441}
Center-site name: BODY CENTER
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)}
Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
Reference frame : ICRF
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 3.047919278950221E-01 Y= 1.902892265722551E+00 Z= 7.692605770652556E-01
VX=-1.255238959074424E-02 VY= 2.052146789677108E-03 VZ= 1.612315394505861E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s]
X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01
VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03
LT= 1.498492268422344E-02 RG= 2.594558933811760E+00 RR= 1.558928955626413E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    #print(lines)
    # use the target function to extract the coordinates
    result = Horizons.extract_first_state_from_text( lines )
    # check that the results are as expected
    expected_array = np.array([ float('-2.590350154796811E+00'), float('-7.949342693459856E-02'), float('1.245107691757731E-01'),
    float('-1.454708370733871E-03'), float('-9.503445860627428E-03'), float('-3.846514535533382E-03') ] )
    assert np.allclose(expected_array, result, rtol=1e-08, atol=1e-08)
def test_nice_Horizons_A():
    """
    Exercise Horizons.nice_Horizons, the convenience wrapper around the
    JPL-Horizons query machinery.

    Queried state: HELIOCENTRIC position/velocity of asteroid number
    12345 (== 1993 FT8) in an EQUATORIAL frame (refplane='earth') at
    epoch JD 2458850.0 TDB.

    Deliberately kept plain (no pytest parametrization) so the example
    doubles as a how-to-use-Horizons reminder.
    """
    # Reference output, hand-pasted from an interactive Horizons session.
    pasted_text = """
*******************************************************************************
JPL/HORIZONS 12345 (1993 FT8) 2022-Jan-28 14:39:42
Rec #: 12345 (+COV) Soln.date: 2021-Nov-10_08:38:58 # obs: 1959 (1993-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
A= 2.448750742541829 MA= 14.99600220651154 ADIST= 2.841293789229623
PER= 3.832 N= .25720961 ANGMOM= .02657056
DAN= 2.14602 DDN= 2.68596 L= 60.6968709
B= -2.401858 MOID= 1.06974006 TP= 2015-Jan-27.6973502915
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#32, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 14:39:42 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 12345 (1993 FT8) {source: JPL#32}
Center body name: Sun (10) {source: DE441}
Center-site name: BODY CENTER
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)}
Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
Reference frame : ICRF
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812
EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915
OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 3.047919278950221E-01 Y= 1.902892265722551E+00 Z= 7.692605770652556E-01
VX=-1.255238959074424E-02 VY= 2.052146789677108E-03 VZ= 1.612315394505861E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= 1.506 ROTPER= n.a.
H= 14.52 G= .150 B-V= n.a.
ALBEDO= .407 STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s]
X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01
VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03
LT= 1.498492268422344E-02 RG= 2.594558933811760E+00 RR= 1.558928955626413E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Parse the pasted block into a 6-component state vector
    # (extract_first_state_from_text is itself tested above).
    expected = Horizons.extract_first_state_from_text(pasted_text)
    # The call under test: heliocentric centre '500@10', equatorial frame.
    computed = Horizons.nice_Horizons('12345', '500@10', '2458850.0',
                                      'smallbody', refplane='earth')
    # Tolerance deliberately relaxed to 1e-8 (not 1e-11): JPL periodically
    # re-fits the orbit and the hand-pasted reference drifts slightly.
    assert np.allclose(expected, computed, rtol=1e-8, atol=1e-8)
def test_nice_Horizons_B():
    """
    Exercise Horizons.nice_Horizons, the convenience wrapper around the
    JPL-Horizons query machinery, for a major body.

    Queried state: HELIOCENTRIC position/velocity of the GEOCENTER
    (target '399') in an EQUATORIAL frame (refplane='earth') at epoch
    JD 2458850.0 TDB.
    """
    # Reference output, hand-pasted from an interactive Horizons session.
    pasted_text = """
*******************************************************************************
Revised: April 12, 2021 Earth 399
GEOPHYSICAL PROPERTIES (revised Aug 15, 2018):
Vol. Mean Radius (km) = 6371.01+-0.02 Mass x10^24 (kg)= 5.97219+-0.0006
Equ. radius, km = 6378.137 Mass layers:
Polar axis, km = 6356.752 Atmos = 5.1 x 10^18 kg
Flattening = 1/298.257223563 oceans = 1.4 x 10^21 kg
Density, g/cm^3 = 5.51 crust = 2.6 x 10^22 kg
J2 (IERS 2010) = 0.00108262545 mantle = 4.043 x 10^24 kg
g_p, m/s^2 (polar) = 9.8321863685 outer core = 1.835 x 10^24 kg
g_e, m/s^2 (equatorial) = 9.7803267715 inner core = 9.675 x 10^22 kg
g_o, m/s^2 = 9.82022 Fluid core rad = 3480 km
GM, km^3/s^2 = 398600.435436 Inner core rad = 1215 km
GM 1-sigma, km^3/s^2 = 0.0014 Escape velocity = 11.186 km/s
Rot. Rate (rad/s) = 0.00007292115 Surface area:
Mean sidereal day, hr = 23.9344695944 land = 1.48 x 10^8 km
Mean solar day 2000.0, s = 86400.002 sea = 3.62 x 10^8 km
Mean solar day 1820.0, s = 86400.0 Love no., k2 = 0.299
Moment of inertia = 0.3308 Atm. pressure = 1.0 bar
Mean temperature, K = 270 Volume, km^3 = 1.08321 x 10^12
Mean effect. IR temp, K = 255 Magnetic moment = 0.61 gauss Rp^3
Geometric albedo = 0.367 Vis. mag. V(1,0)= -3.86
Solar Constant (W/m^2) = 1367.6 (mean), 1414 (perihelion), 1322 (aphelion)
HELIOCENTRIC ORBIT CHARACTERISTICS:
Obliquity to orbit, deg = 23.4392911 Sidereal orb period = 1.0000174 y
Orbital speed, km/s = 29.79 Sidereal orb period = 365.25636 d
Mean daily motion, deg/d = 0.9856474 Hill's sphere radius = 234.9
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:02:02 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: Earth (399) {source: DE441}
Center body name: Sun (10) {source: DE441}
Center-site name: BODY CENTER
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)}
Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
Reference frame : ICRF
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s]
X =-1.749585912701602E-01 Y = 8.877645495087018E-01 Z = 3.848482875671789E-01
VX=-1.721190438300784E-02 VY=-2.874039035670773E-03 VZ=-1.245648654352060E-03
LT= 5.678966496273616E-03 RG= 9.832825679666131E-01 RR=-1.981645766688001E-05
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>.Giorg<EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Parse the pasted block into a 6-component state vector
    # (extract_first_state_from_text is itself tested above).
    expected = Horizons.extract_first_state_from_text(pasted_text)
    # The call under test: Earth wrt heliocentre '500@10', equatorial frame.
    computed = Horizons.nice_Horizons('399', '500@10', '2458850.0',
                                      'majorbody', refplane='earth')
    print('result=\n', computed)
    assert np.allclose(expected, computed, rtol=1e-10, atol=1e-10)
def test_nice_Horizons_C():
    """
    Exercise Horizons.nice_Horizons, the convenience wrapper around the
    JPL-Horizons query machinery, for a topocentric observer.

    Queried state: TOPOCENTRIC (observatory code F51, Pan-STARRS 1)
    position/velocity of asteroid number 54321 (== 2000 JA81) in an
    EQUATORIAL frame (refplane='earth') at epoch JD 2458850.0 TDB.
    """
    # Reference output, hand-pasted from an interactive Horizons session.
    pasted_text = """
*******************************************************************************
JPL/HORIZONS 54321 (2000 JA81) 2022-Jan-28 16:08:57
Rec #: 54321 (+COV) Soln.date: 2021-Oct-08_04:39:24 # obs: 1315 (1979-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
A= 2.58760024089671 MA= 143.3507250314229 ADIST= 3.236789307546094
PER= 4.1625 N= .236787236 ANGMOM= .026786318
DAN= 2.43937 DDN= 2.41026 L= 182.707997
B= 6.7671807 MOID= .95572901 TP= 2012-Jun-14.6011463138
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#33, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:08:57 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 54321 (2000 JA81) {source: JPL#33}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : ICRF
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 2.934573285149345E+00 Y=-8.702901499041770E-01 Z=-7.535748078855007E-01
VX= 3.948600090813408E-03 VY= 7.155151609877323E-03 VZ= 2.568700850735469E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 3.140272938432556E-01 Y = 1.401450872643150E+00 Z = 5.824305212783573E-01
VX= 6.595793009138184E-03 VY= 5.366428257622971E-04 VZ= 1.577496239642071E-03
LT= 8.950941094980980E-03 RG= 1.549807407979245E+00 RR= 2.414570690380698E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Parse the pasted block into a 6-component state vector
    # (extract_first_state_from_text is itself tested above).
    expected = Horizons.extract_first_state_from_text(pasted_text)
    # The call under test: asteroid 54321 as seen from F51, equatorial frame.
    computed = Horizons.nice_Horizons('54321', 'F51', '2458850.0',
                                      'smallbody', refplane='earth')
    print('result=\n', computed)
    assert np.allclose(expected, computed, rtol=1e-11, atol=1e-11)
def test_nice_Horizons_D():
    """
    Same query as test_nice_Horizons_C, but ECLIPTIC instead of equatorial.

    Queried state: TOPOCENTRIC (observatory code F51, Pan-STARRS 1)
    position/velocity of asteroid number 54321 (== 2000 JA81) in an
    ECLIPTIC frame (refplane='ecliptic') at epoch JD 2458850.0 TDB.
    """
    # Reference output, hand-pasted from an interactive Horizons session.
    pasted_text = """
*******************************************************************************
JPL/HORIZONS 54321 (2000 JA81) 2022-Jan-28 16:19:21
Rec #: 54321 (+COV) Soln.date: 2021-Oct-08_04:39:24 # obs: 1315 (1979-2021)
IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
A= 2.58760024089671 MA= 143.3507250314229 ADIST= 3.236789307546094
PER= 4.1625 N= .236787236 ANGMOM= .026786318
DAN= 2.43937 DDN= 2.41026 L= 182.707997
B= 6.7671807 MOID= .95572901 TP= 2012-Jun-14.6011463138
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
ASTEROID comments:
1: soln ref.= JPL#33, OCC=0
2: source=ORB
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:19:22 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: 54321 (2000 JA81) {source: JPL#33}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Small perturbers: Yes {source: SB441-N16}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : Ecliptic of J2000.0
*******************************************************************************
Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.):
EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282
EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138
OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867
Equivalent ICRF heliocentric cartesian coordinates (au, au/d):
X= 2.934573285149345E+00 Y=-8.702901499041770E-01 Z=-7.535748078855007E-01
VX= 3.948600090813408E-03 VY= 7.155151609877323E-03 VZ= 2.568700850735469E-03
Asteroid physical parameters (km, seconds, rotational period in hours):
GM= n.a. RAD= n.a. ROTPER= n.a.
H= 14.45 G= .150 B-V= n.a.
ALBEDO= n.a. STYP= n.a.
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 3.140272938432556E-01 Y = 1.517483592803339E+00 Z =-2.309558662379520E-02
VX= 6.595793009138184E-03 VY= 1.119852134073137E-03 VZ= 1.233860245870196E-03
LT= 8.950941094980980E-03 RG= 1.549807407979244E+00 RR= 2.414570690380698E-03
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
Ecliptic at the standard reference epoch
Reference epoch: J2000.0
X-Y plane: adopted Earth orbital plane at the reference epoch
Note: IAU76 obliquity of 84381.448 arcseconds wrt ICRF X-Y plane
X-axis : ICRF
Z-axis : perpendicular to the X-Y plane in the directional (+ or -) sense
of Earth's north pole at the reference epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : Jon.D.Giorg<EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Parse the pasted block into a 6-component state vector
    # (extract_first_state_from_text is itself tested above).
    expected = Horizons.extract_first_state_from_text(pasted_text)
    # The call under test: asteroid 54321 as seen from F51, ecliptic frame.
    computed = Horizons.nice_Horizons('54321', 'F51', '2458850.0',
                                      'smallbody', refplane='ecliptic')
    print('result=\n', computed)
    assert np.allclose(expected, computed, rtol=1e-11, atol=1e-11)
def test_nice_Horizons_E():
    """
    Use Horizons to get the heliocentric EQUATORIAL position of the
    observatory itself, via a hack: the Sun is set as the *target* and
    the observatory (F51, Pan-STARRS 1) as the *center*, so the returned
    vector is the Sun as seen from the observatory.

    Queried state: TOPOCENTRIC (F51) position/velocity of the Sun in an
    EQUATORIAL frame (refplane='earth') at epoch JD 2458850.0 TDB.
    """
    # Reference output, hand-pasted from an interactive Horizons session.
    pasted_text = """
*******************************************************************************
Revised: July 31, 2013 Sun 10
PHYSICAL PROPERTIES (updated 2018-Aug-15):
GM, km^3/s^2 = 132712440041.93938 Mass, 10^24 kg = ~1988500
Vol. mean radius, km = 695700 Volume, 10^12 km^3 = 1412000
Solar radius (IAU) = 696000 km Mean density, g/cm^3 = 1.408
Radius (photosphere) = 696500 km Angular diam at 1 AU = 1919.3"
Photosphere temp., K = 6600 (bottom) Photosphere temp., K = 4400(top)
Photospheric depth = ~500 km Chromospheric depth = ~2500 km
Flatness, f = 0.00005 Adopted sid. rot. per.= 25.38 d
Surface gravity = 274.0 m/s^2 Escape speed, km/s = 617.7
Pole (RA,DEC), deg. = (286.13, 63.87) Obliquity to ecliptic = 7.25 deg.
Solar constant (1 AU) = 1367.6 W/m^2 Luminosity, 10^24 J/s = 382.8
Mass-energy conv rate = 4.260 x 10^9 kg/s Effective temp, K = 5772
Sunspot cycle = 11.4 yr Cycle 24 sunspot min. = 2008 A.D.
Motion relative to nearby stars = apex : R.A.= 271 deg.; DEC.= +30 deg.
speed: 19.4 km/s (0.0112 au/day)
Motion relative to 2.73K BB/CBR = apex : l= 264.7 +- 0.8; b= 48.2 +- 0.5 deg.
speed: 369 +-11 km/s
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:31:17 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: Sun (10) {source: DE441}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : ICRF
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 1.749807755042866E-01 Y =-8.877977147373203E-01 Z =-3.848633185157228E-01
VX= 1.742085896638510E-02 VY= 3.013989047237847E-03 VZ= 1.245251026128347E-03
LT= 5.679196211202660E-03 RG= 9.833223418736220E-01 RR=-1.085591308641175E-04
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
International Celestial Reference Frame (ICRF)
The ICRF is an adopted reference frame whose axes are defined relative to
fixed extragalactic radio sources distributed across the sky.
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02
arcsecond level but is not identical and has no associated standard epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : <EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Parse the pasted block into a 6-component state vector
    # (extract_first_state_from_text is itself tested above).
    expected = Horizons.extract_first_state_from_text(pasted_text)
    # The call under test: Sun ('10') as seen from F51, equatorial frame.
    computed = Horizons.nice_Horizons('10', 'F51', '2458850.0',
                                      'majorbody', refplane='earth')
    print('result=\n', computed)
    assert np.allclose(expected, computed, rtol=1e-11, atol=1e-11)
def test_nice_Horizons_F() -> None:
    """
    Similar to test_nice_Horizons_E, but ECLIPTIC instead of equatorial
    Here we use Horizons to get the Heliocentric ECLIPTIC position of the Observatory
    (NB we use a hack, setting the target as the Sun, and the center as the observatory)
    Here we extract the
        TOPOCENTRIC (F51) state
    for
        The Sun
    in an
        ECLIPTIC FRAME (refplane='ecliptic')

    The expected values are a hard-pasted copy of a by-hand Horizons web query
    (run 2022-Jan-28); the test asserts that Horizons.nice_Horizons reproduces
    that state vector to ~1e-11 (au, au/day).
    """
    # Define the variables that will be used in the query
    target = '10'            # Horizons major-body ID 10 == the Sun
    centre = 'F51'           # MPC observatory code F51 == Pan-STARRS 1, Haleakala
    epochs = '2458850.0'     # JD(TDB) == 2020-Jan-01 12:00:00 TDB
    id_type = 'majorbody'
    refplane='ecliptic'      # ecliptic-of-J2000 frame (cf. 'earth' in test_nice_Horizons_E)
    # Hardpaste the expected results from a by-hand query of horizons
    # NOTE: this literal must stay byte-identical — extract_first_state_from_text
    # parses the $$SOE/$$EOE section out of it.
    hardpasted_results = """
*******************************************************************************
Revised: July 31, 2013 Sun 10
PHYSICAL PROPERTIES (updated 2018-Aug-15):
GM, km^3/s^2 = 132712440041.93938 Mass, 10^24 kg = ~1988500
Vol. mean radius, km = 695700 Volume, 10^12 km^3 = 1412000
Solar radius (IAU) = 696000 km Mean density, g/cm^3 = 1.408
Radius (photosphere) = 696500 km Angular diam at 1 AU = 1919.3"
Photosphere temp., K = 6600 (bottom) Photosphere temp., K = 4400(top)
Photospheric depth = ~500 km Chromospheric depth = ~2500 km
Flatness, f = 0.00005 Adopted sid. rot. per.= 25.38 d
Surface gravity = 274.0 m/s^2 Escape speed, km/s = 617.7
Pole (RA,DEC), deg. = (286.13, 63.87) Obliquity to ecliptic = 7.25 deg.
Solar constant (1 AU) = 1367.6 W/m^2 Luminosity, 10^24 J/s = 382.8
Mass-energy conv rate = 4.260 x 10^9 kg/s Effective temp, K = 5772
Sunspot cycle = 11.4 yr Cycle 24 sunspot min. = 2008 A.D.
Motion relative to nearby stars = apex : R.A.= 271 deg.; DEC.= +30 deg.
speed: 19.4 km/s (0.0112 au/day)
Motion relative to 2.73K BB/CBR = apex : l= 264.7 +- 0.8; b= 48.2 +- 0.5 deg.
speed: 369 +-11 km/s
*******************************************************************************
*******************************************************************************
Ephemeris / WWW_USER Fri Jan 28 16:43:25 2022 Pasadena, USA / Horizons
*******************************************************************************
Target body name: Sun (10) {source: DE441}
Center body name: Earth (399) {source: DE441}
Center-site name: Pan-STARRS 1, Haleakala
*******************************************************************************
Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB
Step-size : DISCRETE TIME-LIST
*******************************************************************************
Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)}
Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)}
Center pole/equ : ITRF93 {East-longitude positive}
Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole}
Output units : AU-D
Output type : GEOMETRIC cartesian states
Output format : 3 (position, velocity, LT, range, range-rate)
EOP file : eop.220127.p220422
EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21
Reference frame : Ecliptic of J2000.0
*******************************************************************************
JDTDB
X Y Z
VX VY VZ
LT RG RR
*******************************************************************************
$$SOE
2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s]
X = 1.749807755042866E-01 Y =-9.676283142792063E-01 Z = 4.045892447000447E-05
VX= 1.742085896638510E-02 VY= 3.260613297708340E-03 VZ=-5.640051197420877E-05
LT= 5.679196211202660E-03 RG= 9.833223418736221E-01 RR=-1.085591308641179E-04
$$EOE
*******************************************************************************
TIME
Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This
continuous relativistic coordinate time is equivalent to the relativistic
proper time of a clock at rest in a reference frame comoving with the
solar system barycenter but outside the system's gravity well. It is the
independent variable in the solar system relativistic equations of motion.
TDB runs at a uniform rate of one SI second per second and is independent
of irregularities in Earth's rotation.
Calendar dates prior to 1582-Oct-15 are in the Julian calendar system.
Later calendar dates are in the Gregorian system.
REFERENCE FRAME AND COORDINATES
Ecliptic at the standard reference epoch
Reference epoch: J2000.0
X-Y plane: adopted Earth orbital plane at the reference epoch
Note: IAU76 obliquity of 84381.448 arcseconds wrt ICRF X-Y plane
X-axis : ICRF
Z-axis : perpendicular to the X-Y plane in the directional (+ or -) sense
of Earth's north pole at the reference epoch.
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]:
JDTDB Julian Day Number, Barycentric Dynamical Time
del_T Time-scale conversion difference TDB - UT (s)
X X-component of position vector (au)
Y Y-component of position vector (au)
Z Z-component of position vector (au)
VX X-component of velocity vector (au/day)
VY Y-component of velocity vector (au/day)
VZ Z-component of velocity vector (au/day)
LT One-way down-leg Newtonian light-time (day)
RG Range; distance from coordinate center (au)
RR Range-rate; radial velocity wrt coord. center (au/day)
ABERRATIONS AND CORRECTIONS
Geometric state vectors have NO corrections or aberrations applied.
Computations by ...
Solar System Dynamics Group, Horizons On-Line Ephemeris System
4800 Oak Grove Drive, Jet Propulsion Laboratory
Pasadena, CA 91109 USA
General site: https://ssd.jpl.nasa.gov/
Mailing list: https://ssd.jpl.nasa.gov/email_list.html
System news : https://ssd.jpl.nasa.gov/horizons/news.html
User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html
Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x
API https://ssd-api.jpl.nasa.gov/doc/horizons.html
command-line telnet ssd.jpl.nasa.gov 6775
e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt
scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS
Author : Jon.D.Giorgini<EMAIL>
*******************************************************************************
""".split('\n')[1:-1]
    # Extract the hardpasted results into an array
    # (using convenience func, "extract_first_state_from_text", tested above)
    expected_array = Horizons.extract_first_state_from_text( hardpasted_results )
    # Call the nice_Horizons function (i.e. the focus of the test)
    # NOTE(review): this performs a live query of JPL Horizons — requires network access.
    result = Horizons.nice_Horizons(target, centre, epochs, id_type, refplane=refplane)
    print('result=\n' , result)
    # Check that the results are as expected
    # (Sun/Earth states come from the DE441 planetary ephemeris, which — unlike the
    #  asteroid orbits tested earlier in this file — is stable, so a tight 1e-11
    #  tolerance is used here.)
    assert np.allclose(expected_array, result, rtol=1e-11, atol=1e-11)
| en | 0.562596 | Tests of the "convenience_Horizons" routines that are used for testing. Some of these tests really function as demos/documentation to remind myself/ourselves of how these Horizons functions are intended to work # Import standard packages # -------------------------------------------------------------- # Import the module to be tested # -------------------------------------------------------------- This is NOT testing a built-in Horizons function This is just testing a little convenience routine created by MJP This convenience routine is ONLY used as part of the testing code for Cheby Checker # input text X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01 VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03 # use the target function to extract the coordinaets # check that the results are as expected This is NOT testing a built-in Horizons function This is just testing a little convenience routine created by MJP This convenience routine is ONLY used as part of the testing code for Cheby Checker # input text ******************************************************************************* JPL/HORIZONS 12345 (1993 FT8) 2022-Jan-28 14:39:42 Rec #: 12345 (+COV) Soln.date: 2021-Nov-10_08:38:58 # obs: 1959 (1993-2021) IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs): EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812 EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915 OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945 A= 2.448750742541829 MA= 14.99600220651154 ADIST= 2.841293789229623 PER= 3.832 N= .25720961 ANGMOM= .02657056 DAN= 2.14602 DDN= 2.68596 L= 60.6968709 B= -2.401858 MOID= 1.06974006 TP= 2015-Jan-27.6973502915 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= 1.506 ROTPER= n.a. H= 14.52 G= .150 B-V= n.a. ALBEDO= .407 STYP= n.a. 
ASTEROID comments: 1: soln ref.= JPL#32, OCC=0 2: source=ORB ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 14:39:42 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: 12345 (1993 FT8) {source: JPL#32} Center body name: Sun (10) {source: DE441} Center-site name: BODY CENTER ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)} Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole} Small perturbers: Yes {source: SB441-N16} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) Reference frame : ICRF ******************************************************************************* Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.): EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812 EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915 OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945 Equivalent ICRF heliocentric cartesian coordinates (au, au/d): X= 3.047919278950221E-01 Y= 1.902892265722551E+00 Z= 7.692605770652556E-01 VX=-1.255238959074424E-02 VY= 2.052146789677108E-03 VZ= 1.612315394505861E-03 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= 1.506 ROTPER= n.a. H= 14.52 G= .150 B-V= n.a. ALBEDO= .407 STYP= n.a. 
******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s] X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01 VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03 LT= 1.498492268422344E-02 RG= 2.594558933811760E+00 RR= 1.558928955626413E-03 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES International Celestial Reference Frame (ICRF) The ICRF is an adopted reference frame whose axes are defined relative to fixed extragalactic radio sources distributed across the sky. The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02 arcsecond level but is not identical and has no associated standard epoch. 
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. Computations by ... Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : <EMAIL> ******************************************************************************* #print(lines) # use the target function to extract the coordinaets # check that the results are as expected Testing Mike A's convenience wrapper around Horizon query functionality - Much of this test is being done to provide some reminder to myself/ourselves as to how to use the Horizons tool Deliberately *not* using all of the functionalities of pytest here. Just want to keep it simple and keep it obvious what everything is supposed to be doing. 
Here we extract the HELIOCENTRIC state for Asteroid number 12345 (== 1993 FT8) in an EQUATORIAL FRAME (refplane='earth') # Define the variables that will be used in the query # <<-- Asteroid number 12345 == 1993 FT8 # Hardpaste the expected results from a by-hand query of horizons ******************************************************************************* JPL/HORIZONS 12345 (1993 FT8) 2022-Jan-28 14:39:42 Rec #: 12345 (+COV) Soln.date: 2021-Nov-10_08:38:58 # obs: 1959 (1993-2021) IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs): EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812 EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915 OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945 A= 2.448750742541829 MA= 14.99600220651154 ADIST= 2.841293789229623 PER= 3.832 N= .25720961 ANGMOM= .02657056 DAN= 2.14602 DDN= 2.68596 L= 60.6968709 B= -2.401858 MOID= 1.06974006 TP= 2015-Jan-27.6973502915 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= 1.506 ROTPER= n.a. H= 14.52 G= .150 B-V= n.a. ALBEDO= .407 STYP= n.a. ASTEROID comments: 1: soln ref.= JPL#32, OCC=0 2: source=ORB ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 14:39:42 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: 12345 (1993 FT8) {source: JPL#32} Center body name: Sun (10) {source: DE441} Center-site name: BODY CENTER ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 
2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)} Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole} Small perturbers: Yes {source: SB441-N16} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) Reference frame : ICRF ******************************************************************************* Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.): EPOCH= 2457108.5 ! 2015-Mar-27.00 (TDB) Residual RMS= .2812 EC= .1603033905689926 QR= 2.056207695854036 TP= 2457050.1973502915 OM= 106.4549280993016 W= 314.1929318541605 IN= 3.350816780296945 Equivalent ICRF heliocentric cartesian coordinates (au, au/d): X= 3.047919278950221E-01 Y= 1.902892265722551E+00 Z= 7.692605770652556E-01 VX=-1.255238959074424E-02 VY= 2.052146789677108E-03 VZ= 1.612315394505861E-03 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= 1.506 ROTPER= n.a. H= 14.52 G= .150 B-V= n.a. ALBEDO= .407 STYP= n.a. ******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s] X =-2.590350154796811E+00 Y =-7.949342693459856E-02 Z = 1.245107691757731E-01 VX=-1.454708370733871E-03 VY=-9.503445860627428E-03 VZ=-3.846514535533382E-03 LT= 1.498492268422344E-02 RG= 2.594558933811760E+00 RR= 1.558928955626413E-03 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. 
This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES International Celestial Reference Frame (ICRF) The ICRF is an adopted reference frame whose axes are defined relative to fixed extragalactic radio sources distributed across the sky. The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02 arcsecond level but is not identical and has no associated standard epoch. Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. Computations by ... 
Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : <EMAIL> ******************************************************************************* # Extract the hardpasted results into an array # (using convenience func, "extract_first_state_from_text", tested above) # Call the nice_Horizons function (i.e. the focus of the test) # Check that the results are as expected # - Lowered the accuracy from 1e-11 to 1e-8 as the discrepany grows when JPL re-fits the orbit, # and I don't want to keep hand-pasting different sets of results Testing Mike A's convenience wrapper around Horizon query functionality - Much of this test is being done to provide some reminder to myself/ourselves as to how to use the Horizons tool Deliberately *not* using all of the functionalities of pytest here. Just want to keep it simple and keep it obvious what everything is supposed to be doing. Here we extract the HELIOCENTRIC state for GEOCENTER in an EQUATORIAL FRAME (refplane='earth') # Define the variables that will be used in the query # Earth # Hardpaste the expected results from a by-hand query of horizons ******************************************************************************* Revised: April 12, 2021 Earth 399 GEOPHYSICAL PROPERTIES (revised Aug 15, 2018): Vol. Mean Radius (km) = 6371.01+-0.02 Mass x10^24 (kg)= 5.97219+-0.0006 Equ. 
radius, km = 6378.137 Mass layers: Polar axis, km = 6356.752 Atmos = 5.1 x 10^18 kg Flattening = 1/298.257223563 oceans = 1.4 x 10^21 kg Density, g/cm^3 = 5.51 crust = 2.6 x 10^22 kg J2 (IERS 2010) = 0.00108262545 mantle = 4.043 x 10^24 kg g_p, m/s^2 (polar) = 9.8321863685 outer core = 1.835 x 10^24 kg g_e, m/s^2 (equatorial) = 9.7803267715 inner core = 9.675 x 10^22 kg g_o, m/s^2 = 9.82022 Fluid core rad = 3480 km GM, km^3/s^2 = 398600.435436 Inner core rad = 1215 km GM 1-sigma, km^3/s^2 = 0.0014 Escape velocity = 11.186 km/s Rot. Rate (rad/s) = 0.00007292115 Surface area: Mean sidereal day, hr = 23.9344695944 land = 1.48 x 10^8 km Mean solar day 2000.0, s = 86400.002 sea = 3.62 x 10^8 km Mean solar day 1820.0, s = 86400.0 Love no., k2 = 0.299 Moment of inertia = 0.3308 Atm. pressure = 1.0 bar Mean temperature, K = 270 Volume, km^3 = 1.08321 x 10^12 Mean effect. IR temp, K = 255 Magnetic moment = 0.61 gauss Rp^3 Geometric albedo = 0.367 Vis. mag. V(1,0)= -3.86 Solar Constant (W/m^2) = 1367.6 (mean), 1414 (perihelion), 1322 (aphelion) HELIOCENTRIC ORBIT CHARACTERISTICS: Obliquity to orbit, deg = 23.4392911 Sidereal orb period = 1.0000174 y Orbital speed, km/s = 29.79 Sidereal orb period = 365.25636 d Mean daily motion, deg/d = 0.9856474 Hill's sphere radius = 234.9 ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 16:02:02 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: Earth (399) {source: DE441} Center body name: Sun (10) {source: DE441} Center-site name: BODY CENTER ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 
2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 0.00000000,0.00000000,0.0000000 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 0.00000000,0.00000000,0.0000000 {E-lon(deg),Dxy(km),Dz(km)} Center radii : 696000.0 x 696000.0 x 696000.0 k{Equator, meridian, pole} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) Reference frame : ICRF ******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183915 s] X =-1.749585912701602E-01 Y = 8.877645495087018E-01 Z = 3.848482875671789E-01 VX=-1.721190438300784E-02 VY=-2.874039035670773E-03 VZ=-1.245648654352060E-03 LT= 5.678966496273616E-03 RG= 9.832825679666131E-01 RR=-1.981645766688001E-05 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES International Celestial Reference Frame (ICRF) The ICRF is an adopted reference frame whose axes are defined relative to fixed extragalactic radio sources distributed across the sky. 
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02 arcsecond level but is not identical and has no associated standard epoch. Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. Computations by ... Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : <EMAIL>.Giorg<EMAIL> ******************************************************************************* # Extract the hardpasted results into an array # (using convenience func, "extract_first_state_from_text", tested above) # Call the nice_Horizons function (i.e. 
the focus of the test) # Check that the results are as expected Testing Mike A's convenience wrapper around Horizon query functionality - Much of this test is being done to provide some reminder to myself/ourselves as to how to use the Horizons tool Deliberately *not* using all of the functionalities of pytest here. Just want to keep it simple and keep it obvious what everything is supposed to be doing. Here we extract the TOPOCENTRIC (F51) state for Asteroid number 54321 (== 2000 JA81) in an EQUATORIAL FRAME (refplane='earth') # Define the variables that will be used in the query # <<-- Asteroid number 54321 == 2000 JA81 # Hardpaste the expected results from a by-hand query of horizons ******************************************************************************* JPL/HORIZONS 54321 (2000 JA81) 2022-Jan-28 16:08:57 Rec #: 54321 (+COV) Soln.date: 2021-Oct-08_04:39:24 # obs: 1315 (1979-2021) IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs): EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282 EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138 OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867 A= 2.58760024089671 MA= 143.3507250314229 ADIST= 3.236789307546094 PER= 4.1625 N= .236787236 ANGMOM= .026786318 DAN= 2.43937 DDN= 2.41026 L= 182.707997 B= 6.7671807 MOID= .95572901 TP= 2012-Jun-14.6011463138 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= n.a. ROTPER= n.a. H= 14.45 G= .150 B-V= n.a. ALBEDO= n.a. STYP= n.a. 
ASTEROID comments: 1: soln ref.= JPL#33, OCC=0 2: source=ORB ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 16:08:57 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: 54321 (2000 JA81) {source: JPL#33} Center body name: Earth (399) {source: DE441} Center-site name: Pan-STARRS 1, Haleakala ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)} Center pole/equ : ITRF93 {East-longitude positive} Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole} Small perturbers: Yes {source: SB441-N16} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) EOP file : eop.220127.p220422 EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21 Reference frame : ICRF ******************************************************************************* Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.): EPOCH= 2456698.5 ! 
2014-Feb-10.00 (TDB) Residual RMS= .27282 EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138 OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867 Equivalent ICRF heliocentric cartesian coordinates (au, au/d): X= 2.934573285149345E+00 Y=-8.702901499041770E-01 Z=-7.535748078855007E-01 VX= 3.948600090813408E-03 VY= 7.155151609877323E-03 VZ= 2.568700850735469E-03 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= n.a. ROTPER= n.a. H= 14.45 G= .150 B-V= n.a. ALBEDO= n.a. STYP= n.a. ******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s] X = 3.140272938432556E-01 Y = 1.401450872643150E+00 Z = 5.824305212783573E-01 VX= 6.595793009138184E-03 VY= 5.366428257622971E-04 VZ= 1.577496239642071E-03 LT= 8.950941094980980E-03 RG= 1.549807407979245E+00 RR= 2.414570690380698E-03 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES International Celestial Reference Frame (ICRF) The ICRF is an adopted reference frame whose axes are defined relative to fixed extragalactic radio sources distributed across the sky. 
The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02 arcsecond level but is not identical and has no associated standard epoch. Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. Computations by ... Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : <EMAIL> ******************************************************************************* # Extract the hardpasted results into an array # (using convenience func, "extract_first_state_from_text", tested above) # Call the nice_Horizons function (i.e. 
the focus of the test) # Check that the results are as expected Similar to test_nice_Horizons_C, but ECLIPTIC insted of equatorial Here we extract the TOPOCENTRIC (F51) state for Asteroid number 54321 (== 2000 JA81) in an ECLIPTIC FRAME (refplane='ecliptic') # Define the variables that will be used in the query # <<-- Asteroid number 54321 == 2000 JA81 # Hardpaste the expected results from a by-hand query of horizons ******************************************************************************* JPL/HORIZONS 54321 (2000 JA81) 2022-Jan-28 16:19:21 Rec #: 54321 (+COV) Soln.date: 2021-Oct-08_04:39:24 # obs: 1315 (1979-2021) IAU76/J2000 helio. ecliptic osc. elements (au, days, deg., period=Julian yrs): EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282 EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138 OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867 A= 2.58760024089671 MA= 143.3507250314229 ADIST= 3.236789307546094 PER= 4.1625 N= .236787236 ANGMOM= .026786318 DAN= 2.43937 DDN= 2.41026 L= 182.707997 B= 6.7671807 MOID= .95572901 TP= 2012-Jun-14.6011463138 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= n.a. ROTPER= n.a. H= 14.45 G= .150 B-V= n.a. ALBEDO= n.a. STYP= n.a. ASTEROID comments: 1: soln ref.= JPL#33, OCC=0 2: source=ORB ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 16:19:22 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: 54321 (2000 JA81) {source: JPL#33} Center body name: Earth (399) {source: DE441} Center-site name: Pan-STARRS 1, Haleakala ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 
2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)} Center pole/equ : ITRF93 {East-longitude positive} Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole} Small perturbers: Yes {source: SB441-N16} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) EOP file : eop.220127.p220422 EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21 Reference frame : Ecliptic of J2000.0 ******************************************************************************* Initial IAU76/J2000 heliocentric ecliptic osculating elements (au, days, deg.): EPOCH= 2456698.5 ! 2014-Feb-10.00 (TDB) Residual RMS= .27282 EC= .2508846058943067 QR= 1.938411174247326 TP= 2456093.1011463138 OM= 91.32740861093403 W= 91.37096816741918 IN= 6.76912753748867 Equivalent ICRF heliocentric cartesian coordinates (au, au/d): X= 2.934573285149345E+00 Y=-8.702901499041770E-01 Z=-7.535748078855007E-01 VX= 3.948600090813408E-03 VY= 7.155151609877323E-03 VZ= 2.568700850735469E-03 Asteroid physical parameters (km, seconds, rotational period in hours): GM= n.a. RAD= n.a. ROTPER= n.a. H= 14.45 G= .150 B-V= n.a. ALBEDO= n.a. STYP= n.a. ******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 
2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s] X = 3.140272938432556E-01 Y = 1.517483592803339E+00 Z =-2.309558662379520E-02 VX= 6.595793009138184E-03 VY= 1.119852134073137E-03 VZ= 1.233860245870196E-03 LT= 8.950941094980980E-03 RG= 1.549807407979244E+00 RR= 2.414570690380698E-03 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES Ecliptic at the standard reference epoch Reference epoch: J2000.0 X-Y plane: adopted Earth orbital plane at the reference epoch Note: IAU76 obliquity of 84381.448 arcseconds wrt ICRF X-Y plane X-axis : ICRF Z-axis : perpendicular to the X-Y plane in the directional (+ or -) sense of Earth's north pole at the reference epoch. Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. 
Computations by ... Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : Jon.D.Giorg<EMAIL> ******************************************************************************* # Extract the hardpasted results into an array # (using convenience func, "extract_first_state_from_text", tested above) # Call the nice_Horizons function (i.e. the focus of the test) # Check that the results are as expected Here we use Horizons to get the Heliocentric EQUATORIAL position of the Observatory (NB we use a hack, setting the target as the Sun, and the center as the observatory) Here we extract the TOPOCENTRIC (F51) state for The Sun in an EQUATORIAL FRAME (refplane='earth') # Define the variables that will be used in the query # Hardpaste the expected results from a by-hand query of horizons ******************************************************************************* Revised: July 31, 2013 Sun 10 PHYSICAL PROPERTIES (updated 2018-Aug-15): GM, km^3/s^2 = 132712440041.93938 Mass, 10^24 kg = ~1988500 Vol. mean radius, km = 695700 Volume, 10^12 km^3 = 1412000 Solar radius (IAU) = 696000 km Mean density, g/cm^3 = 1.408 Radius (photosphere) = 696500 km Angular diam at 1 AU = 1919.3" Photosphere temp., K = 6600 (bottom) Photosphere temp., K = 4400(top) Photospheric depth = ~500 km Chromospheric depth = ~2500 km Flatness, f = 0.00005 Adopted sid. rot. 
per.= 25.38 d Surface gravity = 274.0 m/s^2 Escape speed, km/s = 617.7 Pole (RA,DEC), deg. = (286.13, 63.87) Obliquity to ecliptic = 7.25 deg. Solar constant (1 AU) = 1367.6 W/m^2 Luminosity, 10^24 J/s = 382.8 Mass-energy conv rate = 4.260 x 10^9 kg/s Effective temp, K = 5772 Sunspot cycle = 11.4 yr Cycle 24 sunspot min. = 2008 A.D. Motion relative to nearby stars = apex : R.A.= 271 deg.; DEC.= +30 deg. speed: 19.4 km/s (0.0112 au/day) Motion relative to 2.73K BB/CBR = apex : l= 264.7 +- 0.8; b= 48.2 +- 0.5 deg. speed: 369 +-11 km/s ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 16:31:17 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: Sun (10) {source: DE441} Center body name: Earth (399) {source: DE441} Center-site name: Pan-STARRS 1, Haleakala ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)} Center pole/equ : ITRF93 {East-longitude positive} Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) EOP file : eop.220127.p220422 EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. 
PREDICTS-> 2022-APR-21 Reference frame : ICRF ******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s] X = 1.749807755042866E-01 Y =-8.877977147373203E-01 Z =-3.848633185157228E-01 VX= 1.742085896638510E-02 VY= 3.013989047237847E-03 VZ= 1.245251026128347E-03 LT= 5.679196211202660E-03 RG= 9.833223418736220E-01 RR=-1.085591308641175E-04 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES International Celestial Reference Frame (ICRF) The ICRF is an adopted reference frame whose axes are defined relative to fixed extragalactic radio sources distributed across the sky. The ICRF was aligned with the prior FK5/J2000 dynamical system at the ~0.02 arcsecond level but is not identical and has no associated standard epoch. 
Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. Computations by ... Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : <EMAIL> ******************************************************************************* # Extract the hardpasted results into an array # (using convenience func, "extract_first_state_from_text", tested above) # Call the nice_Horizons function (i.e. 
the focus of the test) # Check that the results are as expected Similar to test_nice_Horizons_E, but ECLIPTIC instead of equatorial Here we use Horizons to get the Heliocentric ECLIPTIC position of the Observatory (NB we use a hack, setting the target as the Sun, and the center as the observatory) Here we extract the TOPOCENTRIC (F51) state for The Sun in an ECLIPTIC FRAME (refplane='ecliptic') # Define the variables that will be used in the query # Hardpaste the expected results from a by-hand query of horizons ******************************************************************************* Revised: July 31, 2013 Sun 10 PHYSICAL PROPERTIES (updated 2018-Aug-15): GM, km^3/s^2 = 132712440041.93938 Mass, 10^24 kg = ~1988500 Vol. mean radius, km = 695700 Volume, 10^12 km^3 = 1412000 Solar radius (IAU) = 696000 km Mean density, g/cm^3 = 1.408 Radius (photosphere) = 696500 km Angular diam at 1 AU = 1919.3" Photosphere temp., K = 6600 (bottom) Photosphere temp., K = 4400(top) Photospheric depth = ~500 km Chromospheric depth = ~2500 km Flatness, f = 0.00005 Adopted sid. rot. per.= 25.38 d Surface gravity = 274.0 m/s^2 Escape speed, km/s = 617.7 Pole (RA,DEC), deg. = (286.13, 63.87) Obliquity to ecliptic = 7.25 deg. Solar constant (1 AU) = 1367.6 W/m^2 Luminosity, 10^24 J/s = 382.8 Mass-energy conv rate = 4.260 x 10^9 kg/s Effective temp, K = 5772 Sunspot cycle = 11.4 yr Cycle 24 sunspot min. = 2008 A.D. Motion relative to nearby stars = apex : R.A.= 271 deg.; DEC.= +30 deg. speed: 19.4 km/s (0.0112 au/day) Motion relative to 2.73K BB/CBR = apex : l= 264.7 +- 0.8; b= 48.2 +- 0.5 deg. 
speed: 369 +-11 km/s ******************************************************************************* ******************************************************************************* Ephemeris / WWW_USER Fri Jan 28 16:43:25 2022 Pasadena, USA / Horizons ******************************************************************************* Target body name: Sun (10) {source: DE441} Center body name: Earth (399) {source: DE441} Center-site name: Pan-STARRS 1, Haleakala ******************************************************************************* Start time : A.D. 2020-Jan-01 12:00:00.0000 TDB Stop time : A.D. 2020-Jan-01 12:00:00.0000 TDB Step-size : DISCRETE TIME-LIST ******************************************************************************* Center geodetic : 203.744100,20.7071888,3.0763821 {E-lon(deg),Lat(deg),Alt(km)} Center cylindric: 203.744100,5971.48324,2242.1878 {E-lon(deg),Dxy(km),Dz(km)} Center pole/equ : ITRF93 {East-longitude positive} Center radii : 6378.1 x 6378.1 x 6356.8 km {Equator, meridian, pole} Output units : AU-D Output type : GEOMETRIC cartesian states Output format : 3 (position, velocity, LT, range, range-rate) EOP file : eop.220127.p220422 EOP coverage : DATA-BASED 1962-JAN-20 TO 2022-JAN-27. PREDICTS-> 2022-APR-21 Reference frame : Ecliptic of J2000.0 ******************************************************************************* JDTDB X Y Z VX VY VZ LT RG RR ******************************************************************************* $$SOE 2458850.000000000 = A.D. 2020-Jan-01 12:00:00.0000 TDB [del_T= 69.183916 s] X = 1.749807755042866E-01 Y =-9.676283142792063E-01 Z = 4.045892447000447E-05 VX= 1.742085896638510E-02 VY= 3.260613297708340E-03 VZ=-5.640051197420877E-05 LT= 5.679196211202660E-03 RG= 9.833223418736221E-01 RR=-1.085591308641179E-04 $$EOE ******************************************************************************* TIME Barycentric Dynamical Time ("TDB" or T_eph) output was requested. 
This continuous relativistic coordinate time is equivalent to the relativistic proper time of a clock at rest in a reference frame comoving with the solar system barycenter but outside the system's gravity well. It is the independent variable in the solar system relativistic equations of motion. TDB runs at a uniform rate of one SI second per second and is independent of irregularities in Earth's rotation. Calendar dates prior to 1582-Oct-15 are in the Julian calendar system. Later calendar dates are in the Gregorian system. REFERENCE FRAME AND COORDINATES Ecliptic at the standard reference epoch Reference epoch: J2000.0 X-Y plane: adopted Earth orbital plane at the reference epoch Note: IAU76 obliquity of 84381.448 arcseconds wrt ICRF X-Y plane X-axis : ICRF Z-axis : perpendicular to the X-Y plane in the directional (+ or -) sense of Earth's north pole at the reference epoch. Symbol meaning [1 au= 149597870.700 km, 1 day= 86400.0 s]: JDTDB Julian Day Number, Barycentric Dynamical Time del_T Time-scale conversion difference TDB - UT (s) X X-component of position vector (au) Y Y-component of position vector (au) Z Z-component of position vector (au) VX X-component of velocity vector (au/day) VY Y-component of velocity vector (au/day) VZ Z-component of velocity vector (au/day) LT One-way down-leg Newtonian light-time (day) RG Range; distance from coordinate center (au) RR Range-rate; radial velocity wrt coord. center (au/day) ABERRATIONS AND CORRECTIONS Geometric state vectors have NO corrections or aberrations applied. Computations by ... 
Solar System Dynamics Group, Horizons On-Line Ephemeris System 4800 Oak Grove Drive, Jet Propulsion Laboratory Pasadena, CA 91109 USA General site: https://ssd.jpl.nasa.gov/ Mailing list: https://ssd.jpl.nasa.gov/email_list.html System news : https://ssd.jpl.nasa.gov/horizons/news.html User Guide : https://ssd.jpl.nasa.gov/horizons/manual.html Connect : browser https://ssd.jpl.nasa.gov/horizons/app.html#/x API https://ssd-api.jpl.nasa.gov/doc/horizons.html command-line telnet ssd.jpl.nasa.gov 6775 e-mail/batch https://ssd.jpl.nasa.gov/ftp/ssd/hrzn_batch.txt scripts https://ssd.jpl.nasa.gov/ftp/ssd/SCRIPTS Author : Jon.D.Giorgini<EMAIL> ******************************************************************************* # Extract the hardpasted results into an array # (using convenience func, "extract_first_state_from_text", tested above) # Call the nice_Horizons function (i.e. the focus of the test) # Check that the results are as expected | 2.616387 | 3 |
data.py | CherylYueWang/ditou-duplicated | 1 | 6612795 | <filename>data.py
import numpy as np
np.random.seed(0)
def sigmoid(xvec):
    """Compute the element-wise logistic sigmoid 1 / (1 + exp(-x)).

    Inputs below -100 are capped at -100 so that np.exp(100) cannot
    overflow; underflow on the other tail is harmless (it rounds to 0).

    Args:
        xvec: scalar, list, or np.ndarray. ndarrays are capped in place
            (matching the original behavior); lists are converted to a
            new float array first.

    Returns:
        float for scalar input, np.ndarray otherwise.
    """
    # Fix: a plain list used to reach `xvec[xvec < -100]`, which raises a
    # TypeError under Python 3 (list vs. int comparison); promote lists to
    # arrays before capping.
    if isinstance(xvec, list):
        xvec = np.asarray(xvec, dtype=float)
    if isinstance(xvec, np.ndarray):
        xvec[xvec < -100] = -100  # in-place cap, as before
    elif xvec < -100:
        xvec = -100
    return 1.0 / (1.0 + np.exp(np.negative(xvec)))
def gen1(length, feature_num=25, cts_A=True, cts_B=True, cts_C=True):
    """Generate a single synthetic observation [X, t, yf, ycf].

    Draws three Gaussian groups A/B/C, pools them into per-feature
    (mean, variance) pairs, samples a (1, feature_num) covariate row X,
    a Bernoulli treatment t, and Gamma factual/counterfactual outcomes.

    Args:
        length: 3-sequence; draws per feature for groups A, B, C.
        feature_num: number of covariates. Previously 25 was hard-coded in
            the reshapes below; it now uses this parameter consistently
            (default unchanged, so callers are unaffected).
        cts_A, cts_B, cts_C: unused flags, kept for interface compatibility.

    Returns:
        list [X, t, yf, ycf] of numpy arrays.
    """
    mu = [1, 2, 3]
    sigma = [1, 1, 1]
    A = np.random.normal(mu[0], sigma[0], length[0] * feature_num)
    B = np.random.normal(mu[1], sigma[1], length[1] * feature_num)
    C = np.random.normal(mu[2], sigma[2], length[2] * feature_num)
    ############################################## 2
    # weights for X_i
    # NOTE(review): weight_mu / weight_sigma are never used — confirm intent.
    weight_mu = [0.2, 0.4, 0.4]
    weight_sigma = [0.2, 0.4, 0.4]
    sample = []
    # Fix: xrange() does not exist under Python 3. Also renamed the inner
    # comprehension variable, which shadowed the loop index `i`.
    for i in range(feature_num):
        chunks = [A[i * length[0]:(i + 1) * length[0]],
                  B[i * length[1]:(i + 1) * length[1]],
                  C[i * length[2]:(i + 1) * length[2]]]
        sample.append(np.average([(np.average(chunk), np.var(chunk))
                                  for chunk in chunks], axis=0))
    sample = np.reshape(sample, (feature_num, 2))
    # One Gaussian draw per feature, parameterised by the pooled (mean, var).
    X = np.reshape(np.random.normal(sample[:, 0], sample[:, 1], feature_num),
                   (1, feature_num))
    ############################################## 3
    # weights for T: treatment probability from the (fixed) group means.
    weights = [0.3, 0.7]
    p = sigmoid(np.dot(weights, mu[0:2]))
    t = np.random.binomial(1, p, 1)
    ############################################## 4
    # Gamma outcome parameters are identical in both branches; computed once.
    weights_alpha = [0.3, 0.7]
    weights_beta = [0.2, 0.8]
    a = np.dot([np.average(B), np.average(C)], weights_alpha)
    b = np.dot([np.var(B), np.var(C)], weights_beta)
    shape = np.divide(np.square(a), b)
    scale = np.divide(b, a)
    if t == 0:
        yf = np.random.gamma(shape - 1, scale, 1)
        ycf = np.random.gamma(shape, scale, 1)
    else:
        yf = np.random.gamma(shape, scale, 1)
        ycf = np.random.gamma(shape - 1, scale, 1)
    return [X, t, yf, ycf]
def generate_full_data(sample_size):
    """Draw *sample_size* synthetic observations via gen1().

    Returns a list of nine arrays:
        [x, t, yf, ycf, mu0, mu1, ate, yadd, ymul]
    where mu0/mu1 are the control/treated potential outcomes recovered
    from the factual/counterfactual pair, and ate/yadd/ymul are fixed
    scalars (4, 0, 1).
    """
    xs, ts, yfs, ycfs = [], [], [], []
    mu0, mu1 = [], []
    for _ in range(sample_size):
        X, t, yf, ycf = gen1([10, 10, 10])
        xs.append(X)
        ts.append(t)
        yfs.append(yf)
        ycfs.append(ycf)
        # Treated units observe mu1 factually; controls observe mu0.
        if t == 1:
            mu1.append(yf)
            mu0.append(ycf)
        else:
            mu1.append(ycf)
            mu0.append(yf)
    sample_x = np.reshape(xs, (sample_size, 25, 1))
    sample_t = np.reshape(ts, (sample_size, 1))
    sample_yf = np.reshape(yfs, (sample_size, 1))
    sample_ycf = np.reshape(ycfs, (sample_size, 1))
    mu0 = np.reshape(mu0, (sample_size, 1))
    mu1 = np.reshape(mu1, (sample_size, 1))
    ate = np.array(4)
    yadd = np.array(0)
    ymul = np.array(1)
    return [sample_x, sample_t, sample_yf, sample_ycf, mu0, mu1, ate, yadd, ymul]
# q = gen1([10,10,10])
#print q
# Generate and persist the training split (RNG seeded with 0 at import).
q = generate_full_data(1000)
np.savez('./synthetic_train.npz', x=q[0], t= q[1], yf=q[2], ycf=q[3], mu0=q[4],
mu1=q[5], ate=q[6], yadd=q[7], ymul=q[8])
# Re-seed so the test split is reproducible but distinct from the train split.
np.random.seed(1)
q = generate_full_data(1000)
np.savez('./synthetic_test.npz', x=q[0], t= q[1], yf=q[2], ycf=q[3], mu0=q[4],
mu1=q[5], ate=q[6], yadd=q[7], ymul=q[8])
| <filename>data.py
import numpy as np
np.random.seed(0)
def sigmoid(xvec):
    """Compute the element-wise logistic sigmoid 1 / (1 + exp(-x)).

    Inputs below -100 are capped at -100 so that np.exp(100) cannot
    overflow; underflow on the other tail is harmless (it rounds to 0).

    Args:
        xvec: scalar, list, or np.ndarray. ndarrays are capped in place
            (matching the original behavior); lists are converted to a
            new float array first.

    Returns:
        float for scalar input, np.ndarray otherwise.
    """
    # Fix: a plain list used to reach `xvec[xvec < -100]`, which raises a
    # TypeError under Python 3 (list vs. int comparison); promote lists to
    # arrays before capping.
    if isinstance(xvec, list):
        xvec = np.asarray(xvec, dtype=float)
    if isinstance(xvec, np.ndarray):
        xvec[xvec < -100] = -100  # in-place cap, as before
    elif xvec < -100:
        xvec = -100
    return 1.0 / (1.0 + np.exp(np.negative(xvec)))
def gen1(length, feature_num=25, cts_A=True, cts_B=True, cts_C=True):
    """Generate a single synthetic observation [X, t, yf, ycf].

    Draws three Gaussian groups A/B/C, pools them into per-feature
    (mean, variance) pairs, samples a (1, feature_num) covariate row X,
    a Bernoulli treatment t, and Gamma factual/counterfactual outcomes.

    Args:
        length: 3-sequence; draws per feature for groups A, B, C.
        feature_num: number of covariates. Previously 25 was hard-coded in
            the reshapes below; it now uses this parameter consistently
            (default unchanged, so callers are unaffected).
        cts_A, cts_B, cts_C: unused flags, kept for interface compatibility.

    Returns:
        list [X, t, yf, ycf] of numpy arrays.
    """
    mu = [1, 2, 3]
    sigma = [1, 1, 1]
    A = np.random.normal(mu[0], sigma[0], length[0] * feature_num)
    B = np.random.normal(mu[1], sigma[1], length[1] * feature_num)
    C = np.random.normal(mu[2], sigma[2], length[2] * feature_num)
    ############################################## 2
    # weights for X_i
    # NOTE(review): weight_mu / weight_sigma are never used — confirm intent.
    weight_mu = [0.2, 0.4, 0.4]
    weight_sigma = [0.2, 0.4, 0.4]
    sample = []
    # Fix: xrange() does not exist under Python 3. Also renamed the inner
    # comprehension variable, which shadowed the loop index `i`.
    for i in range(feature_num):
        chunks = [A[i * length[0]:(i + 1) * length[0]],
                  B[i * length[1]:(i + 1) * length[1]],
                  C[i * length[2]:(i + 1) * length[2]]]
        sample.append(np.average([(np.average(chunk), np.var(chunk))
                                  for chunk in chunks], axis=0))
    sample = np.reshape(sample, (feature_num, 2))
    # One Gaussian draw per feature, parameterised by the pooled (mean, var).
    X = np.reshape(np.random.normal(sample[:, 0], sample[:, 1], feature_num),
                   (1, feature_num))
    ############################################## 3
    # weights for T: treatment probability from the (fixed) group means.
    weights = [0.3, 0.7]
    p = sigmoid(np.dot(weights, mu[0:2]))
    t = np.random.binomial(1, p, 1)
    ############################################## 4
    # Gamma outcome parameters are identical in both branches; computed once.
    weights_alpha = [0.3, 0.7]
    weights_beta = [0.2, 0.8]
    a = np.dot([np.average(B), np.average(C)], weights_alpha)
    b = np.dot([np.var(B), np.var(C)], weights_beta)
    shape = np.divide(np.square(a), b)
    scale = np.divide(b, a)
    if t == 0:
        yf = np.random.gamma(shape - 1, scale, 1)
        ycf = np.random.gamma(shape, scale, 1)
    else:
        yf = np.random.gamma(shape, scale, 1)
        ycf = np.random.gamma(shape - 1, scale, 1)
    return [X, t, yf, ycf]
def generate_full_data(sample_size):
    """Draw *sample_size* synthetic observations via gen1().

    Returns a list of nine arrays:
        [x, t, yf, ycf, mu0, mu1, ate, yadd, ymul]
    where mu0/mu1 are the control/treated potential outcomes recovered
    from the factual/counterfactual pair, and ate/yadd/ymul are fixed
    scalars (4, 0, 1).
    """
    xs, ts, yfs, ycfs = [], [], [], []
    mu0, mu1 = [], []
    for _ in range(sample_size):
        X, t, yf, ycf = gen1([10, 10, 10])
        xs.append(X)
        ts.append(t)
        yfs.append(yf)
        ycfs.append(ycf)
        # Treated units observe mu1 factually; controls observe mu0.
        if t == 1:
            mu1.append(yf)
            mu0.append(ycf)
        else:
            mu1.append(ycf)
            mu0.append(yf)
    sample_x = np.reshape(xs, (sample_size, 25, 1))
    sample_t = np.reshape(ts, (sample_size, 1))
    sample_yf = np.reshape(yfs, (sample_size, 1))
    sample_ycf = np.reshape(ycfs, (sample_size, 1))
    mu0 = np.reshape(mu0, (sample_size, 1))
    mu1 = np.reshape(mu1, (sample_size, 1))
    ate = np.array(4)
    yadd = np.array(0)
    ymul = np.array(1)
    return [sample_x, sample_t, sample_yf, sample_ycf, mu0, mu1, ate, yadd, ymul]
# q = gen1([10,10,10])
#print q
# Generate and persist the training split (RNG seeded with 0 at import).
q = generate_full_data(1000)
np.savez('./synthetic_train.npz', x=q[0], t= q[1], yf=q[2], ycf=q[3], mu0=q[4],
mu1=q[5], ate=q[6], yadd=q[7], ymul=q[8])
# Re-seed so the test split is reproducible but distinct from the train split.
np.random.seed(1)
q = generate_full_data(1000)
np.savez('./synthetic_test.npz', x=q[0], t= q[1], yf=q[2], ycf=q[3], mu0=q[4],
mu1=q[5], ate=q[6], yadd=q[7], ymul=q[8])
| de | 0.456282 | Compute the sigmoid function # Cap -xvec, to avoid overflow # Undeflow is okay, since it get set to zero ############################################## 2 # weights for X_i ############################################## 3 # weights for T # GAI ############################################## 4 # q = gen1([10,10,10]) #print q | 3.07532 | 3 |
Leetcode/135. Candy/solution2.py | asanoviskhak/Outtalent | 51 | 6612796 | class Solution:
def candy(self, ratings: List[int]) -> int:
if len(ratings) <= 1: return len(ratings)
def count(n: int) -> int:
return (n * (n + 1)) // 2
candies = up = down = old_slope = 0
for i in range(1, len(ratings)):
if ratings[i] > ratings[i - 1]:
new_slope = 1
elif ratings[i] < ratings[i - 1]:
new_slope = -1
else:
new_slope = 0
if (old_slope > 0 and new_slope == 0) or (old_slope < 0 <= new_slope):
candies += count(up) + count(down) + max(up, down)
up = down = 0
if new_slope > 0:
up += 1
elif new_slope < 0:
down += 1
elif new_slope == 0:
candies += 1
old_slope = new_slope
candies += count(up) + count(down) + max(up, down) + 1
return candies
| class Solution:
def candy(self, ratings: List[int]) -> int:
if len(ratings) <= 1: return len(ratings)
def count(n: int) -> int:
return (n * (n + 1)) // 2
candies = up = down = old_slope = 0
for i in range(1, len(ratings)):
if ratings[i] > ratings[i - 1]:
new_slope = 1
elif ratings[i] < ratings[i - 1]:
new_slope = -1
else:
new_slope = 0
if (old_slope > 0 and new_slope == 0) or (old_slope < 0 <= new_slope):
candies += count(up) + count(down) + max(up, down)
up = down = 0
if new_slope > 0:
up += 1
elif new_slope < 0:
down += 1
elif new_slope == 0:
candies += 1
old_slope = new_slope
candies += count(up) + count(down) + max(up, down) + 1
return candies
| none | 1 | 3.222917 | 3 | |
uncertainty/helpers.py | meyersbs/uncertainty | 17 | 6612797 | import collections
import csv
import itertools
import operator
import os
import pickle
import re
# Character classes used by get_charpattern() to bucket single characters.
GREEK_LOWER = re.compile(u'[αβγδεζηθικλμξπρσςτυφψω]')
GREEK_UPPER = re.compile(u'[ΓΔΘΛΞΠΣΦΨΩ]')
ENGLISH_LOWER = re.compile(r'[a-z]')
ENGLISH_UPPER = re.compile(r'[A-Z]')
DIGIT = re.compile(r'[0-9]')
# NOTE(review): every quantifier below is {0,n}, so both Roman patterns also
# match the empty string — .search() is truthy for ANY input. Combined with
# the check order in get_charpattern() (ASCII letters are claimed by the
# ENGLISH patterns first), the Roman branches there appear unreachable for
# ASCII input; confirm whether that is intended (see the TODO there).
ROMAN_LOWER = re.compile(
    'm{0,4}(cm|cd|d?c{0,3})(xc|xl|l?x{0,3})(ix|iv|v?i{0,3})'
)
ROMAN_UPPER = re.compile(
    'M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})'
)
def aggregate(predictions):
    """Collapse a sequence of per-token labels into one label.

    'C' (certain) votes are discarded; if nothing remains the answer is
    'C'. Otherwise the single most frequent remaining label wins; a tie
    for the top frequency yields 'U' (uncertain).
    """
    tally = collections.Counter(predictions)
    del tally['C']  # Counter.__delitem__ is a no-op for missing keys
    if not tally:
        return 'C'
    ranked = tally.most_common()
    top_label, top_count = ranked[0]
    if len(ranked) > 1 and ranked[1][1] == top_count:
        return 'U'  # ambiguous: at least two labels share the max count
    return top_label
def dump(obj, filepath):
    """Serialize *obj* to *filepath* using pickle (binary mode)."""
    with open(filepath, 'wb') as handle:
        pickle.dump(obj, handle)
def get_context(index, elements, size=2):
    """Return an OrderedDict mapping offset -> neighbouring element.

    Offsets run from -size to +size excluding 0; positions that fall
    outside *elements* map to None. Raises if *index* is out of bounds.
    """
    if index < 0 or index >= len(elements):
        raise Exception('index must be positive and within elements\' bounds.')
    offsets = [off for off in range(-size, size + 1) if off != 0]
    window = [
        elements[index + off] if 0 <= index + off < len(elements) else None
        for off in offsets
    ]
    return collections.OrderedDict(zip(offsets, window))
def get_wordpattern(word):
    """Map *word* to its character-class pattern with repeat runs collapsed.

    E.g. "Abc12" -> per-char classes "Aaa00" -> "Aa0".
    """
    raw = ''.join(map(get_charpattern, word))
    return ''.join(symbol for symbol, _ in itertools.groupby(raw))
def get_charpattern(character):
    """Classify a single character into a coarse pattern symbol.

    Returns '!' for non-alphanumerics, 'A'/'a' for ASCII letters, '0'
    for digits, 'G'/'g' for the Greek letters matched by the module
    patterns, 'R'/'r' for Roman-numeral letters, and '?' for any other
    alphanumeric (e.g. accented or CJK characters).
    """
    # TODO: How do we distinguish between a sequence of Roman Numerals and
    # a sequence of letters?  (As ordered here, ASCII letters are claimed
    # by the ENGLISH patterns first, so the Roman branches are unreachable
    # for ASCII input.)
    if not character.isalnum():
        return '!'
    elif ENGLISH_UPPER.search(character):
        return 'A'
    elif ENGLISH_LOWER.search(character):
        return 'a'
    elif DIGIT.search(character):
        return '0'
    elif GREEK_UPPER.search(character):
        return 'G'
    elif GREEK_LOWER.search(character):
        return 'g'
    elif ROMAN_UPPER.search(character):
        return 'R'
    elif ROMAN_LOWER.search(character):
        return 'r'
    # Fix: previously the function fell off the end and returned None for
    # unclassified alphanumerics, which made ''.join() in get_wordpattern()
    # raise TypeError; return an explicit fallback symbol instead.
    return '?'
def get_features(data_file):
    """
    This preprocessing function takes in a data_file containing one sentence
    per line, generates the features for each token, and writes them to a file
    with the same name and '.tsv' appended to the end. See the documents
    /test_data.txt and /test_data.txt.tsv for examples of valid input and
    output, respectively.
    This preprocessing step allows for the features of a corpus to be generated
    only once, greatly speeding up the classification process.
    NOTE: The output file will be much larger in size than the input file.
    """
    with open(data_file, 'r') as f:
        # Output path: input path with '.tsv' appended.
        filename = data_file + '.tsv'
        with open(filename, 'w', newline='') as tsvfile:
            tsv_writer = csv.writer(tsvfile, delimiter='\t', quotechar='|',
quoting=csv.QUOTE_MINIMAL)
            for i, line in enumerate(f.readlines()):
                if line.strip() == '':
                    # Blank input line -> blank row separating sentences.
                    tsv_writer.writerow('')
                else:
                    # NOTE(review): this calls get_features() on a single
                    # sentence string recursively and treats the result as a
                    # dict of per-token features — as written the recursion
                    # never terminates (each call opens the argument as a
                    # file path). Presumably a different feature-extraction
                    # helper was intended here; confirm before use.
                    features = get_features(line)
                    for key, val in sorted(features.items()):
                        # Keys are expected as "<token_index>_<token>".
                        (tok_num, tok) = key.split("_")
                        # NOTE(review): STEMMER and pos_tag are not defined
                        # anywhere in this module — confirm the missing
                        # imports (they look like nltk's stemmer / pos_tag).
                        row = ['sent' + str(i) + "token" + str(tok_num),
str(tok), str(STEMMER.stem(tok)).lower(),
pos_tag(tok)[0][1], 'X', 'X']
                        for k, v in sorted(val.items()):
                            row.append(str(k) + ":" + str(v))
                        tsv_writer.writerow(row)
def get_verbs(filepath):
    """
    Return the contents of the verbs CSV at *filepath* as a dictionary
    mapping each conjugated form (columns 2..n of a row) to its
    uninflected base form (column 1).

    For example, {'scolded': 'scold', 'scolding': 'scold'}

    Adapted from code provided in NodeBox:
    https://www.nodebox.net/code/index.php/Linguistics#verb_conjugation
    """
    verbs = {}
    with open(filepath) as handle:
        for row in csv.reader(handle):
            for conjugate in row[1:]:
                verbs[conjugate] = row[0]
    return verbs
def load(filepath):
    """Unpickle and return the object stored at *filepath*.

    Raises FileNotFoundError with a descriptive message when the path
    does not exist.
    """
    if os.path.exists(filepath):
        with open(filepath, 'rb') as handle:
            return pickle.load(handle)
    raise FileNotFoundError('No such file: {}'.format(filepath))
def read_tsv(path):
    """Read a TSV file into a list of rows (lists of cell strings).

    Blank lines become empty lists; line endings are stripped before
    splitting on tabs.
    """
    rows = []
    with open(path, 'r') as handle:
        for raw in handle:
            stripped = raw.strip(os.linesep)
            rows.append(stripped.split('\t') if stripped else [])
    return rows
def write_tsv(lines, path):
    """Write rows (iterables of cell strings) to *path* as tab-separated
    lines, one row per line, terminated with os.linesep."""
    with open(path, 'w') as handle:
        for row in lines:
            handle.write('\t'.join(row) + os.linesep)
| import collections
import csv
import itertools
import operator
import os
import pickle
import re
# Character classes used by get_charpattern() to bucket single characters.
GREEK_LOWER = re.compile(u'[αβγδεζηθικλμξπρσςτυφψω]')
GREEK_UPPER = re.compile(u'[ΓΔΘΛΞΠΣΦΨΩ]')
ENGLISH_LOWER = re.compile(r'[a-z]')
ENGLISH_UPPER = re.compile(r'[A-Z]')
DIGIT = re.compile(r'[0-9]')
# NOTE(review): every quantifier below is {0,n}, so both Roman patterns also
# match the empty string — .search() is truthy for ANY input. Combined with
# the check order in get_charpattern() (ASCII letters are claimed by the
# ENGLISH patterns first), the Roman branches there appear unreachable for
# ASCII input; confirm whether that is intended (see the TODO there).
ROMAN_LOWER = re.compile(
    'm{0,4}(cm|cd|d?c{0,3})(xc|xl|l?x{0,3})(ix|iv|v?i{0,3})'
)
ROMAN_UPPER = re.compile(
    'M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})'
)
def aggregate(predictions):
    """Collapse a sequence of per-token labels into one label.

    'C' (certain) votes are discarded; if nothing remains the answer is
    'C'. Otherwise the single most frequent remaining label wins; a tie
    for the top frequency yields 'U' (uncertain).
    """
    tally = collections.Counter(predictions)
    del tally['C']  # Counter.__delitem__ is a no-op for missing keys
    if not tally:
        return 'C'
    ranked = tally.most_common()
    top_label, top_count = ranked[0]
    if len(ranked) > 1 and ranked[1][1] == top_count:
        return 'U'  # ambiguous: at least two labels share the max count
    return top_label
def dump(obj, filepath):
    """Serialize *obj* to *filepath* using pickle (binary mode)."""
    with open(filepath, 'wb') as handle:
        pickle.dump(obj, handle)
def get_context(index, elements, size=2):
    """Return the neighbours surrounding ``elements[index]``.

    :param index: position of the focus element; must satisfy
        ``0 <= index < len(elements)``
    :param elements: sequence to draw context from
    :param size: number of neighbours to collect on each side
    :return: an OrderedDict mapping each offset in
        ``[-size, ..., -1, 1, ..., size]`` (0 excluded) to the element at
        ``index + offset``, or ``None`` when that position falls outside
        the sequence
    :raises IndexError: if *index* is out of bounds
    """
    if index < 0 or index >= len(elements):
        # IndexError is more precise than a bare Exception and, being an
        # Exception subclass, keeps existing `except Exception` handlers
        # working.
        raise IndexError('index must be positive and within elements\' bounds.')
    context = collections.OrderedDict()
    for offset in range(-size, size + 1):
        if offset == 0:
            continue
        position = index + offset
        context[offset] = (
            elements[position] if 0 <= position < len(elements) else None
        )
    return context
def get_wordpattern(word):
    """Map *word* to its character-class pattern with repeat runs collapsed.

    Each character is classified by get_charpattern() and consecutive
    duplicate classes are reduced to a single occurrence.
    """
    classes = [get_charpattern(character) for character in word]
    return ''.join(key for key, _group in itertools.groupby(classes))
def get_charpattern(character):
    """Classify a single character into a one-letter pattern code.

    Codes: '!' non-alphanumeric, 'A'/'a' English upper/lower case,
    '0' digit, 'G'/'g' Greek upper/lower case, 'R'/'r' Roman numerals.
    """
    # TODO: How do we distinguish between a sequence of Roman Numerals and
    # a sequence of letters?
    if not character.isalnum():
        return '!'
    elif ENGLISH_UPPER.search(character):
        return 'A'
    elif ENGLISH_LOWER.search(character):
        return 'a'
    elif DIGIT.search(character):
        return '0'
    elif GREEK_UPPER.search(character):
        return 'G'
    elif GREEK_LOWER.search(character):
        return 'g'
    # NOTE(review): ROMAN_UPPER/ROMAN_LOWER consist solely of optional
    # groups, so they also match the empty string; .search() therefore
    # succeeds on ANY character, which makes the 'R' branch absorb every
    # alphanumeric not classified above and leaves the 'r' branch (and the
    # implicit None fall-through) apparently unreachable -- confirm whether
    # this is intended before relying on 'r' ever being produced.
    elif ROMAN_UPPER.search(character):
        return 'R'
    elif ROMAN_LOWER.search(character):
        return 'r'
def get_features(data_file):
    """
    This preprocessing function takes in a data_file containing one sentence
    per line, generates the features for each token, and writes them to a file
    with the same name and '.tsv' appended to the end. See the documents
    /test_data.txt and /test_data.txt.tsv for examples of valid input and
    output, respectively.
    This preprocessing step allows for the features of a corpus to be generated
    only once, greatly speeding up the classification process.
    NOTE: The output file will be much larger in size than the input file.
    """
    # NOTE(review): the body calls get_features(line) on a *sentence*, but
    # this function's parameter is a file path -- the inner call looks like
    # it should invoke a per-sentence/token feature extractor (possibly a
    # same-named function shadowed by this one). As written the recursion
    # would try to open each sentence as a file. Confirm against callers.
    # NOTE(review): STEMMER and pos_tag are not defined in this module --
    # presumably imported elsewhere (e.g. NLTK); verify before running.
    with open(data_file, 'r') as f:
        filename = data_file + '.tsv'
        with open(filename, 'w', newline='') as tsvfile:
            tsv_writer = csv.writer(tsvfile, delimiter='\t', quotechar='|',
                                    quoting=csv.QUOTE_MINIMAL)
            for i, line in enumerate(f.readlines()):
                if line.strip() == '':
                    # Blank input line -> blank separator row in the TSV.
                    tsv_writer.writerow('')
                else:
                    features = get_features(line)
                    for key, val in sorted(features.items()):
                        # Keys are expected to look like "<tok_num>_<tok>".
                        (tok_num, tok) = key.split("_")
                        row = ['sent' + str(i) + "token" + str(tok_num),
                               str(tok), str(STEMMER.stem(tok)).lower(),
                               pos_tag(tok)[0][1], 'X', 'X']
                        for k, v in sorted(val.items()):
                            row.append(str(k) + ":" + str(v))
                        tsv_writer.writerow(row)
def get_verbs(filepath):
    """
    Return the contents of verbs file pointed to by the filepath argument as a
    dictionary in which the key is the conjugate of a verb and the value is
    uninflected verb form of the conjugate verb.
    For example, {'scolded': 'scold', 'scolding': 'scold'}
    Adapted from code provided in NodeBox:
    https://www.nodebox.net/code/index.php/Linguistics#verb_conjugation

    :param filepath: path to a CSV file whose rows start with the lemma
        followed by its conjugated forms
    :return: dict mapping each conjugated form to its lemma
    """
    verbs = dict()
    with open(filepath) as file:
        reader = csv.reader(file)
        for row in reader:
            # Every column after the first is a conjugate of row[0].
            for verb in row[1:]:
                verbs[verb] = row[0]
    return verbs
def load(filepath):
    """Unpickle and return the object stored at *filepath*.

    :raises FileNotFoundError: if the path does not exist
    """
    if not os.path.exists(filepath):
        raise FileNotFoundError('No such file: {}'.format(filepath))
    with open(filepath, 'rb') as file:
        return pickle.load(file)
def read_tsv(path):
    """Read a TSV file into a list of rows; blank lines become empty lists."""
    lines = list()
    with open(path, 'r') as file:
        for line in file:
            # NOTE(review): in text mode every line ends with '\n' regardless
            # of platform, so stripping os.linesep ('\r\n' on Windows, '\n'
            # elsewhere) works for natively-produced files; a CRLF file read
            # on a POSIX host would keep a trailing '\r' -- confirm inputs
            # are always produced by write_tsv on the same platform.
            line = line.strip(os.linesep)
            lines.append(line.split('\t') if line else list())
    return lines
def write_tsv(lines, path):
    """Write rows of string fields to *path* as tab-separated values.

    :param lines: iterable of rows, each an iterable of string fields
    :param path: destination file path
    """
    # newline='' stops the text layer from translating '\n' a second time;
    # without it the explicit os.linesep becomes '\r\r\n' on Windows.
    with open(path, 'w', newline='') as file:
        for line in lines:
            file.write('{}{}'.format('\t'.join(line), os.linesep))
| en | 0.82456 | # TODO: How do we distinguish between a sequence of Roman Numerals and # a sequence of letters? This preprocessing function takes in a data_file containing one sentence per line, generates the features for each token, and writes them to a file with the same name and '.tsv' appended to the end. See the documents /test_data.txt and /test_data.txt.tsv for examples of valid input and output, respectively. This preprocessing step allows for the features of a corpus to be generated only once, greatly speeding up the classification process. NOTE: The output file will be much larger in size than the input file. Return the contents of verbs file pointed to by the filepath argument as a dictionary in which the key is the conjugate of a verb and the value is uninflected verb form of the conjugate verb. For example, {'scolded': 'scold', 'scolding': 'scold'} Adapted from code provided in NodeBox: https://www.nodebox.net/code/index.php/Linguistics#verb_conjugation | 2.95612 | 3 |
tests/test_edit_distance.py | naturalness/sensibility | 17 | 6612798 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests mistakes and edit distance.
"""
from functools import lru_cache
import pytest
from hypothesis import given
from hypothesis.strategies import text
from sensibility.evaluation.mistakes import Mistakes
from sensibility.evaluation.distance import (
tokenwise_distance, determine_edit, determine_fix_event
)
from sensibility.language import language
from sensibility.vocabulary import Vind
from sensibility import Insertion, Substitution, Deletion
def setup_module():
    """Pytest module-level setup: point the sensibility package at Java."""
    language.set('java')
def test_general() -> None:
    """Spot-check tokenwise_distance() on small Java fragments."""
    assert 1 == tokenwise_distance(b'class Hello {', b'class Hello {}')
    assert 1 == tokenwise_distance(b'class Hello {}', b'class Hello }')
    assert 1 == tokenwise_distance(b'enum Hello {}', b'class Hello {}')
    assert 0 == tokenwise_distance(b'class Hello {}', b'class Hello {}')
    # Two independent single-token differences: at most distance 2.
    assert 2 >= tokenwise_distance(b'enum Hello {}', b'class Hello {')
def test_unused_keywords() -> None:
    """
    Regression: Lexer should be able to handle const and goto keywords,
    even though Java does not use them.
    https://docs.oracle.com/javase/tutorial/java/nutsandbolts/_keywords.html
    """
    # 'const' and 'goto' are reserved-but-unused Java keywords; each pair
    # below must still lex cleanly and differ by exactly one token.
    assert 1 == tokenwise_distance(b'const int hello;', b'final int hello;')
    assert 1 == tokenwise_distance(b'goto label;', b'int label;')
def test_unabstracted_edit_distance() -> None:
    """
    Test edit distances when NOT using abstracted tokens.
    """
    file_a = b"int new Value = 42;"
    file_b = b"int newValue = 42;"
    # With open classes abstracted, the identifier spellings presumably
    # collapse to generic tokens so only one edit remains; without
    # abstraction the concrete spellings differ and two edits are needed --
    # confirm against the lexer's abstraction rules.
    assert 1 == tokenwise_distance(file_a, file_b, abstract_open_classes=True)
    assert 2 == tokenwise_distance(file_a, file_b, abstract_open_classes=False)
def test_get_edit(c) -> None:
    """determine_edit() recovers the right Insertion/Deletion/Substitution."""
    # NOTE(review): `c` is presumably a pytest fixture that converts a
    # source token into the project's token representation -- confirm in
    # conftest.py.
    ins = determine_edit(b'class Hello {', b'class Hello {}')
    if isinstance(ins, Insertion):
        assert ins.token == index_of(c('}'))
        assert ins.index == 3
    else:
        pytest.fail(f'Wrong edit: {ins!r}')
    delt = determine_edit(b'class Hello {{}', b'class Hello {}')
    if isinstance(delt, Deletion):
        assert delt.original_token == index_of(c('{'))
        assert delt.index in {2, 3}  # Can be either curly brace
    else:
        pytest.fail(f'Wrong edit: {delt!r}')
    sub = determine_edit(b'goto label;', b'int label;')
    if isinstance(sub, Substitution):
        assert sub.token == index_of(c('int'))
        assert sub.original_token == index_of(c('goto'))
        assert sub.index == 0
    else:
        pytest.fail(f'Wrong edit: {sub!r}')
def test_get_unabstacted_edit(c) -> None:
    """Like test_get_edit(), but with abstract_open_classes=False."""
    ins = determine_edit(b'class Hello {', b'class Hello {}',
                         abstract_open_classes=False)
    if isinstance(ins, Insertion):
        assert ins.token == index_of(c('}'))
        assert ins.index == 3
    else:
        pytest.fail(f'Wrong edit: {ins!r}')
    # '#' is not a Java token, so its deletion is reported as the
    # unknown-token index.
    delt = determine_edit(b'class Hello #{}', b'class Hello {}',
                          abstract_open_classes=False)
    if isinstance(delt, Deletion):
        assert delt.original_token == language.vocabulary.unk_token_index
        assert delt.index == 2
    else:
        pytest.fail(f'Wrong edit: {delt!r}')
    sub = determine_edit(b'goto label;', b'int label;',
                         abstract_open_classes=False)
    if isinstance(sub, Substitution):
        assert sub.token == index_of(c('int'))
        assert sub.original_token == index_of(c('goto'))
        assert sub.index == 0
    else:
        pytest.fail(f'Wrong edit: {sub!r}')
    # This should be a two-token difference.
    with pytest.raises(AssertionError):
        determine_edit(b'goto label;', b'int number;',
                       abstract_open_classes=False)
def test_out_of_vocabulary() -> None:
    """
    Regression: Distances should still be calculated if items are OOV
    ERROR and UNDERSCORE are out-of-vocabulary as well.
    In hindsight, const and goto should be OOV as well... :/
    """
    assert 1 == tokenwise_distance(b'int #label;', b'int label;')
    assert 1 == tokenwise_distance(b'int _;', b'int label;')
    # The OOV '#' deletion must be reported with the unknown-token index.
    edit = determine_edit(b'int #label;', b'int label;')
    if isinstance(edit, Deletion):
        assert edit.original_token == language.vocabulary.unk_token_index
    else:
        pytest.fail(f'Wrong edit: {edit!r}')
def test_edit_line(c) -> None:
    """End-to-end: determine_fix_event() reports fix, line number and tokens."""
    head = [
        'class Hello {',
        'public static void main(String args[]) {'
    ]
    tail = [
        'System.error.println("Not enough args!");',
        'System.exit(1);',
        '}',
        'System.out.println("Hello, World!");',
        '}',
        '}'
    ]
    # Glue together a source file from head, tail, and the provided line.
    def to_source(line: str) -> bytes:
        return '\n'.join(head + [line] + tail).encode('UTF-8')
    before = to_source('if (args.length < 3)')
    after = to_source('if (args.length < 3) {')
    # The line the error happens on the line AFTER head (one-indexed).
    error_line = len(head) + 1
    # Sanity check: before should be invalid; after should be valid.
    assert not language.check_syntax(before)
    assert language.check_syntax(after)
    fix_event = determine_fix_event(before, after)
    # NOTE(review): 22 is presumably the token-stream position of the
    # inserted '{' in `before` -- confirm against the lexer's token count.
    assert fix_event.fix == Insertion(22, index_of(c('{')))
    assert fix_event.line_no == error_line
    assert fix_event.new_token == c('{')
    assert fix_event.old_token is None
@given(text(), text())
def test_dependency(a, b):
    """
    Test the assumption that the number of editops is equivalent to the
    Levenshtein edit distance.
    """
    # Import kept local -- presumably so module collection succeeds without
    # the optional python-Levenshtein dependency; confirm.
    from Levenshtein import distance, editops  # type: ignore
    assert distance(a, b) == len(editops(a, b))
def index_of(token: str) -> Vind:
    """
    Given a token in the vocabulary, returns its vocabulary index.

    :param token: a token string present in the active language's vocabulary
    :return: the token's numeric vocabulary index
    """
    return language.vocabulary.to_index(token)
| #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests mistakes and edit distance.
"""
from functools import lru_cache
import pytest
from hypothesis import given
from hypothesis.strategies import text
from sensibility.evaluation.mistakes import Mistakes
from sensibility.evaluation.distance import (
tokenwise_distance, determine_edit, determine_fix_event
)
from sensibility.language import language
from sensibility.vocabulary import Vind
from sensibility import Insertion, Substitution, Deletion
def setup_module():
language.set('java')
def test_general() -> None:
assert 1 == tokenwise_distance(b'class Hello {', b'class Hello {}')
assert 1 == tokenwise_distance(b'class Hello {}', b'class Hello }')
assert 1 == tokenwise_distance(b'enum Hello {}', b'class Hello {}')
assert 0 == tokenwise_distance(b'class Hello {}', b'class Hello {}')
assert 2 >= tokenwise_distance(b'enum Hello {}', b'class Hello {')
def test_unused_keywords() -> None:
"""
Regression: Lexer should be able to handle const and goto keywords,
even though Java does not use them.
https://docs.oracle.com/javase/tutorial/java/nutsandbolts/_keywords.html
"""
assert 1 == tokenwise_distance(b'const int hello;', b'final int hello;')
assert 1 == tokenwise_distance(b'goto label;', b'int label;')
def test_unabstracted_edit_distance() -> None:
"""
Test edit distances when NOT using abstracted tokens.
"""
file_a = b"int new Value = 42;"
file_b = b"int newValue = 42;"
assert 1 == tokenwise_distance(file_a, file_b, abstract_open_classes=True)
assert 2 == tokenwise_distance(file_a, file_b, abstract_open_classes=False)
def test_get_edit(c) -> None:
ins = determine_edit(b'class Hello {', b'class Hello {}')
if isinstance(ins, Insertion):
assert ins.token == index_of(c('}'))
assert ins.index == 3
else:
pytest.fail(f'Wrong edit: {ins!r}')
delt = determine_edit(b'class Hello {{}', b'class Hello {}')
if isinstance(delt, Deletion):
assert delt.original_token == index_of(c('{'))
assert delt.index in {2, 3} # Can be either curly brace
else:
pytest.fail(f'Wrong edit: {delt!r}')
sub = determine_edit(b'goto label;', b'int label;')
if isinstance(sub, Substitution):
assert sub.token == index_of(c('int'))
assert sub.original_token == index_of(c('goto'))
assert sub.index == 0
else:
pytest.fail(f'Wrong edit: {sub!r}')
def test_get_unabstacted_edit(c) -> None:
ins = determine_edit(b'class Hello {', b'class Hello {}',
abstract_open_classes=False)
if isinstance(ins, Insertion):
assert ins.token == index_of(c('}'))
assert ins.index == 3
else:
pytest.fail(f'Wrong edit: {ins!r}')
delt = determine_edit(b'class Hello #{}', b'class Hello {}',
abstract_open_classes=False)
if isinstance(delt, Deletion):
assert delt.original_token == language.vocabulary.unk_token_index
assert delt.index == 2
else:
pytest.fail(f'Wrong edit: {delt!r}')
sub = determine_edit(b'goto label;', b'int label;',
abstract_open_classes=False)
if isinstance(sub, Substitution):
assert sub.token == index_of(c('int'))
assert sub.original_token == index_of(c('goto'))
assert sub.index == 0
else:
pytest.fail(f'Wrong edit: {sub!r}')
# This should be a two-token difference.
with pytest.raises(AssertionError):
determine_edit(b'goto label;', b'int number;',
abstract_open_classes=False)
def test_out_of_vocabulary() -> None:
"""
Regression: Distances should still be calculated if items are OOV
ERROR and UNDERSCORE are out-of-vocabulary as well.
In hindsight, const and goto should be OOV as well... :/
"""
assert 1 == tokenwise_distance(b'int #label;', b'int label;')
assert 1 == tokenwise_distance(b'int _;', b'int label;')
edit = determine_edit(b'int #label;', b'int label;')
if isinstance(edit, Deletion):
assert edit.original_token == language.vocabulary.unk_token_index
else:
pytest.fail(f'Wrong edit: {edit!r}')
def test_edit_line(c) -> None:
head = [
'class Hello {',
'public static void main(String args[]) {'
]
tail = [
'System.error.println("Not enough args!");',
'System.exit(1);',
'}',
'System.out.println("Hello, World!");',
'}',
'}'
]
# Glue together a source file from head, tail, and the provided line.
def to_source(line: str) -> bytes:
return '\n'.join(head + [line] + tail).encode('UTF-8')
before = to_source('if (args.length < 3)')
after = to_source('if (args.length < 3) {')
# The line the error happens on the line AFTER head (one-indexed).
error_line = len(head) + 1
# Sanity check: before should be invalid; after should be valid.
assert not language.check_syntax(before)
assert language.check_syntax(after)
fix_event = determine_fix_event(before, after)
assert fix_event.fix == Insertion(22, index_of(c('{')))
assert fix_event.line_no == error_line
assert fix_event.new_token == c('{')
assert fix_event.old_token is None
@given(text(), text())
def test_dependency(a, b):
"""
Test the assumption that the number of editops is equivalent to the
Levenshtein edit distance.
"""
from Levenshtein import distance, editops # type: ignore
assert distance(a, b) == len(editops(a, b))
def index_of(token: str) -> Vind:
"""
Given a token in the vocabulary, returns its vocabulary index.
"""
return language.vocabulary.to_index(token)
| en | 0.812499 | #!/usr/bin/env python3 # -*- coding: UTF-8 -*- # Copyright 2017 <NAME> <<EMAIL>> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Tests mistakes and edit distance. Regression: Lexer should be able to handle const and goto keywords, even though Java does not use them. https://docs.oracle.com/javase/tutorial/java/nutsandbolts/_keywords.html Test edit distances when NOT using abstracted tokens. # Can be either curly brace #{}', b'class Hello {}', # This should be a two-token difference. Regression: Distances should still be calculated if items are OOV ERROR and UNDERSCORE are out-of-vocabulary as well. In hindsight, const and goto should be OOV as well... :/ #label;', b'int label;') #label;', b'int label;') # Glue together a source file from head, tail, and the provided line. # The line the error happens on the line AFTER head (one-indexed). # Sanity check: before should be invalid; after should be valid. Test the assumption that the number of editops is equivalent to the Levenshtein edit distance. # type: ignore Given a token in the vocabulary, returns its vocabulary index. | 2.334233 | 2 |
rpyc_mem/service/rpyc_mem_service.py | m0hithreddy/rpyc-mem | 1 | 6612799 | """RPyC Memory Service"""
import threading
from importlib import import_module
import rpyc
from rpyc.utils.server import ThreadedServer
from rpyc_mem.errors import RpycMemSvcError
class RpycMemService(rpyc.Service):
    """
    RPyC memory service provides functionality to create named and unnamed python objects on remote
    hosts (one which runs this service). The remote objects are created using remote modules (see
    ``remote_import``). By default all objects created are unnamed, they can be mapped against
    unique_key to make them named. named objects can be managed using unique_key. This service is
    intended to be run with ``rpyc.utils.server.ThreadingServer`` or variants of it to maintain one
    snapshot of the memory

    :param str hostname: Hostname on which the service is run. Runs on ``0.0.0.0`` by default.
    :param int port: Port on which the service is run. Picks a random by default. Can be queried
        back with ``self.server_obj.port`` (this is available only when the service is ran).
    :param args: Left for ``RPyC`` during ``Service`` initialization
    :param kwargs: Left for ``RPyC`` during ``Service`` initialization
    """

    # Whitelist of names remote clients may access through _rpyc_getattr.
    _ALLOWED_GET_ATTRS = [
        'memoize', 'get', 'update', 'delete', 'is_memoized', 'remote_import',
        'rpyc_version'
    ]
    # Sentinel: distinguishes "argument omitted" from a caller explicitly
    # passing None (None is a legitimate object to memoize).
    _DEFAULT = object()
    # Class-level state: one lock-protected dict shared by every connection,
    # so all clients observe a single snapshot of the memory.
    _shm_lock = threading.Lock()
    _sharedmem = dict()

    def __init__(self, hostname=None, port=None, *args, **kwargs):
        """Initialize Rpyc memory service"""
        super().__init__(*args, **kwargs)
        self.hostname = hostname
        self.port = port
        # Populated by run(); exposes the bound port via server_obj.port.
        self.server_obj = None

    def run(self, server=None, server_kwargs=None):
        """
        Run the RPyC memory service. The ``host`` and ``port`` used are picked from the ``__init__``
        configuration. By default ``ThreadedServer`` is used, however this can be altered by
        passing different ``server`` and associated ``server_kwargs``.

        :param server: The server to use for running the service.
        :param server_kwargs: Update the default server arguments with these.
        :return:
        """
        if not server:
            server = ThreadedServer
        # The *class* (not this instance) is registered, so RPyC creates a
        # fresh service instance per connection; shared state lives in the
        # class attributes above.
        kwargs = {
            'service': self.__class__,
            'protocol_config': {
                'allow_all_attrs': True,
                'allow_setattr': True,
                'allow_delattr': True
            }
        }
        if self.hostname:
            kwargs['hostname'] = self.hostname
        if self.port:
            kwargs['port'] = self.port
        if server_kwargs:
            kwargs.update(server_kwargs)
        self.server_obj = server(**kwargs)
        self.server_obj.start()

    @classmethod
    def memoize(cls, unique_key, robj=_DEFAULT, robj_gen=_DEFAULT):
        """
        Memoize the mapping of remote object or remote object returned by the generator against
        the unique_key. If a mapping already exists it is returned unchanged
        (the generator is not invoked).

        :param unique_key: The unique_key for creating/querying the mapping
        :param typing.Any robj: The remote object for memoization (One among ``robj``, ``robj_gen``
            should be passed)
        :param typing.Callable robj_gen: The remote object generator for memoization (One among ``robj``,
            ``robj_gen`` should be passed)
        :return: The memoized object
        """
        if not cls._validate_obj_sources(robj, robj_gen):
            raise RpycMemSvcError('Either object or object generator should be passed')
        with cls._shm_lock:
            if unique_key not in cls._sharedmem:
                if robj is not cls._DEFAULT:
                    cls._sharedmem[unique_key] = robj
                else:
                    cls._sharedmem[unique_key] = robj_gen()  # noqa
            return cls._sharedmem[unique_key]

    @classmethod
    def get(cls, unique_key):
        """
        Get the remote object against the unique_key. Raise an exception if the mapping is not present

        :param unique_key: The unique_key for querying the mapping
        :return: The memoized object
        :raises RpycMemSvcError: if no mapping exists for ``unique_key``
        """
        with cls._shm_lock:
            if unique_key not in cls._sharedmem:
                raise RpycMemSvcError('No remote object exists against the key')
            return cls._sharedmem[unique_key]

    @classmethod
    def update(cls, unique_key, robj=_DEFAULT, robj_gen=_DEFAULT):
        """
        Update the mapping with the remote object or remote object returned by the generator against
        the unique_key (create new mapping if it doesnt exist)

        :param unique_key: The unique_key for updating the mapping
        :param typing.Any robj: The remote object for update (One among ``robj``, ``robj_gen`` should
            be passed)
        :param typing.Callable robj_gen: The remote object generator for update (One among ``robj``,
            ``robj_gen`` should be passed)
        :return: The updated object
        """
        if not cls._validate_obj_sources(robj, robj_gen):
            raise RpycMemSvcError('Either object or object generator should be passed')
        with cls._shm_lock:
            if robj is not cls._DEFAULT:
                cls._sharedmem[unique_key] = robj
            else:
                cls._sharedmem[unique_key] = robj_gen()  # noqa
            return cls._sharedmem[unique_key]

    @classmethod
    def delete(cls, unique_key):
        """
        Delete the mapping against the unique_key. Raise an exception if the mapping is not present

        :param unique_key: The unique_key for deleting the mapping
        :return:
        :raises RpycMemSvcError: if no mapping exists for ``unique_key``
        """
        with cls._shm_lock:
            if unique_key not in cls._sharedmem:
                raise RpycMemSvcError('No remote object exists against the key')
            del cls._sharedmem[unique_key]

    @classmethod
    def is_memoized(cls, unique_key):
        """
        Return ``True`` if a mapping exists against the unique_key

        :param unique_key: The unique_key for querying the mapping
        :return:
        """
        with cls._shm_lock:
            return unique_key in cls._sharedmem

    @classmethod
    def remote_import(cls, module, package=None):
        """
        Make remote modules available to the clients, primarily for creating remote objects

        :param str module: The module to import in absolute or relative terms (Ex: pkg.mod, ..mod)
        :param str package: The package which acts as a base for resolving the module (should be set
            when relative imports are used)
        :return: Remote module
        """
        return import_module(module, package)

    @classmethod
    def rpyc_version(cls):
        """
        Return ``RPyC`` version of the server

        :return:
        """
        return rpyc.__version__

    @classmethod
    def _validate_obj_sources(cls, robj, robj_gen):
        """
        Validate the object sources. Return False if both robj, robj_gen are set/not-set else True

        :param robj: The remote object
        :param robj_gen: The remote object generator
        :return:
        """
        # Exactly one of the two sources must differ from the sentinel.
        if (robj is cls._DEFAULT and robj_gen is cls._DEFAULT) or \
                (robj is not cls._DEFAULT and robj_gen is not cls._DEFAULT):
            return False
        return True

    def _rpyc_getattr(self, name):
        """RPyC get attribute: only whitelisted names are exposed remotely."""
        if name in self._ALLOWED_GET_ATTRS:
            return getattr(self, name)
        raise AttributeError(
            "'%s' object has no attribute '%s'" % (self.__class__.__name__, name)
        )

    def _rpyc_setattr(self, name, value):
        """RPyC set attribute: always rejected (deny-all write policy)."""
        if name in self._ALLOWED_GET_ATTRS:
            raise AttributeError('access denied')
        raise AttributeError(
            "'%s' object has no attribute '%s'" % (self.__class__.__name__, name)
        )

    def _rpyc_delattr(self, name):
        """RPyC delete attribute: always rejected (deny-all delete policy)."""
        if name in self._ALLOWED_GET_ATTRS:
            raise AttributeError('access denied')
        raise AttributeError(
            "'%s' object has no attribute '%s'" % (self.__class__.__name__, name)
        )
| """RPyC Memory Service"""
import threading
from importlib import import_module
import rpyc
from rpyc.utils.server import ThreadedServer
from rpyc_mem.errors import RpycMemSvcError
class RpycMemService(rpyc.Service):
"""
RPyC memory service provides functionality to create named and unnamed python objects on remote
hosts (one which runs this service). The remote objects are created using remote modules (see
``remote_import``). By default all objects created are unnamed, they can be mapped against
unique_key to make them named. named objects can be managed using unique_key. This service is
intended to be run with ``rpyc.utils.server.ThreadingServer`` or variants of it to maintain one
snapshot of the memory
:param str hostname: Hostname on which the service is run. Runs on ``0.0.0.0`` by default.
:param int port: Port on which the service is run. Picks a random by default. Can be queried
back with ``self.server_obj.port`` (this is available only when the service is ran).
:param args: Left for ``RPyC`` during ``Service`` initialization
:param kwargs: Left for ``RPyC`` during ``Service`` initialization
"""
_ALLOWED_GET_ATTRS = [
'memoize', 'get', 'update', 'delete', 'is_memoized', 'remote_import',
'rpyc_version'
]
_DEFAULT = object()
_shm_lock = threading.Lock()
_sharedmem = dict()
def __init__(self, hostname=None, port=None, *args, **kwargs):
"""Initialize Rpyc memory service"""
super().__init__(*args, **kwargs)
self.hostname = hostname
self.port = port
self.server_obj = None
def run(self, server=None, server_kwargs=None):
"""
Run the RPyC memory service. The ``host`` and ``port`` used are picked from the ``__init__``
configuration. By default ``ThreadingServer`` is used, however this can be altered by
passing different ``server`` and associated ``server_kwargs``.
:param server: The server to use for running the service.
:param server_kwargs: Update the default server arguments with these.
:return:
"""
if not server:
server = ThreadedServer
kwargs = {
'service': self.__class__,
'protocol_config': {
'allow_all_attrs': True,
'allow_setattr': True,
'allow_delattr': True
}
}
if self.hostname:
kwargs['hostname'] = self.hostname
if self.port:
kwargs['port'] = self.port
if server_kwargs:
kwargs.update(server_kwargs)
self.server_obj = server(**kwargs)
self.server_obj.start()
@classmethod
def memoize(cls, unique_key, robj=_DEFAULT, robj_gen=_DEFAULT):
"""
Memoize the mapping of remote object or remote object returned by the generator against
the unique_key
:param unique_key: The unique_key for creating/querying the mapping
:param typing.Any robj: The remote object for memoization (One among ``robj``, ``robj_gen``
should be passed)
:param typing.Callable robj_gen: The remote object generator for memoization (One among ``robj``,
``robj_gen`` should be passed)
:return: The memoized object
"""
if not cls._validate_obj_sources(robj, robj_gen):
raise RpycMemSvcError('Either object or object generator should be passed')
with cls._shm_lock:
if unique_key not in cls._sharedmem:
if robj is not cls._DEFAULT:
cls._sharedmem[unique_key] = robj
else:
cls._sharedmem[unique_key] = robj_gen() # noqa
return cls._sharedmem[unique_key]
@classmethod
def get(cls, unique_key):
"""
Get the remote object against the unique_key. Raise an exception if the mapping is not present
:param unique_key: The unique_key for querying the mapping
:return: The memoized object
"""
with cls._shm_lock:
if unique_key not in cls._sharedmem:
raise RpycMemSvcError('No remote object exists against the key')
return cls._sharedmem[unique_key]
@classmethod
def update(cls, unique_key, robj=_DEFAULT, robj_gen=_DEFAULT):
"""
Update the mapping with the remote object or remote object returned by the generator against
the unique_key (create new mapping if it doesnt exist)
:param unique_key: The unique_key for updating the mapping
:param typing.Any robj: The remote object for update (One among ``robj``, ``robj_gen`` should
be passed)
:param typing.Callable robj_gen: The remote object generator for update (One among ``robj``,
``robj_gen`` should be passed)
:return: The updated object
"""
if not cls._validate_obj_sources(robj, robj_gen):
raise RpycMemSvcError('Either object or object generator should be passed')
with cls._shm_lock:
if robj is not cls._DEFAULT:
cls._sharedmem[unique_key] = robj
else:
cls._sharedmem[unique_key] = robj_gen() # noqa
return cls._sharedmem[unique_key]
@classmethod
def delete(cls, unique_key):
"""
Delete the mapping against the unique_key. Raise an exception if the mapping is not present
:param unique_key: The unique_key for deleting the mapping
:return:
"""
with cls._shm_lock:
if unique_key not in cls._sharedmem:
raise RpycMemSvcError('No remote object exists against the key')
del cls._sharedmem[unique_key]
@classmethod
def is_memoized(cls, unique_key):
"""
Return ``True`` if a mapping exists against the unique_key
:param unique_key: The unique_key for querying the mapping
:return:
"""
with cls._shm_lock:
return unique_key in cls._sharedmem
@classmethod
def remote_import(cls, module, package=None):
"""
Make remote modules available to the clients, primarily for creating remote objects
:param str module: The module to import in absolute or relative terms (Ex: pkg.mod, ..mod)
:param str package: The package which acts as a base for resolving the module (should be set
when relative imports are used)
:return: Remote module
"""
return import_module(module, package)
@classmethod
def rpyc_version(cls):
"""
Return ``RPyC`` version of the server
:return:
"""
return rpyc.__version__
@classmethod
def _validate_obj_sources(cls, robj, robj_gen):
"""
Validate the object sources. Return False if both robj, robj_gen are set/not-set else True
:param robj: The remote object
:param robj_gen: The remote object generator
:return:
"""
if (robj is cls._DEFAULT and robj_gen is cls._DEFAULT) or \
(robj is not cls._DEFAULT and robj_gen is not cls._DEFAULT):
return False
return True
def _rpyc_getattr(self, name):
"""RPyC get attribute"""
if name in self._ALLOWED_GET_ATTRS:
return getattr(self, name)
raise AttributeError(
"'%s' object has no attribute '%s'" % (self.__class__.__name__, name)
)
def _rpyc_setattr(self, name, value):
"""RPyC set attribute"""
if name in self._ALLOWED_GET_ATTRS:
raise AttributeError('access denied')
raise AttributeError(
"'%s' object has no attribute '%s'" % (self.__class__.__name__, name)
)
def _rpyc_delattr(self, name):
"""RPyC delete attribute"""
if name in self._ALLOWED_GET_ATTRS:
raise AttributeError('access denied')
raise AttributeError(
"'%s' object has no attribute '%s'" % (self.__class__.__name__, name)
)
| en | 0.762149 | RPyC Memory Service RPyC memory service provides functionality to create named and unnamed python objects on remote hosts (one which runs this service). The remote objects are created using remote modules (see ``remote_import``). By default all objects created are unnamed, they can be mapped against unique_key to make them named. named objects can be managed using unique_key. This service is intended to be run with ``rpyc.utils.server.ThreadingServer`` or variants of it to maintain one snapshot of the memory :param str hostname: Hostname on which the service is run. Runs on ``0.0.0.0`` by default. :param int port: Port on which the service is run. Picks a random by default. Can be queried back with ``self.server_obj.port`` (this is available only when the service is ran). :param args: Left for ``RPyC`` during ``Service`` initialization :param kwargs: Left for ``RPyC`` during ``Service`` initialization Initialize Rpyc memory service Run the RPyC memory service. The ``host`` and ``port`` used are picked from the ``__init__`` configuration. By default ``ThreadingServer`` is used, however this can be altered by passing different ``server`` and associated ``server_kwargs``. :param server: The server to use for running the service. :param server_kwargs: Update the default server arguments with these. :return: Memoize the mapping of remote object or remote object returned by the generator against the unique_key :param unique_key: The unique_key for creating/querying the mapping :param typing.Any robj: The remote object for memoization (One among ``robj``, ``robj_gen`` should be passed) :param typing.Callable robj_gen: The remote object generator for memoization (One among ``robj``, ``robj_gen`` should be passed) :return: The memoized object # noqa Get the remote object against the unique_key. 
Raise an exception if the mapping is not present :param unique_key: The unique_key for querying the mapping :return: The memoized object Update the mapping with the remote object or remote object returned by the generator against the unique_key (create new mapping if it doesnt exist) :param unique_key: The unique_key for updating the mapping :param typing.Any robj: The remote object for update (One among ``robj``, ``robj_gen`` should be passed) :param typing.Callable robj_gen: The remote object generator for update (One among ``robj``, ``robj_gen`` should be passed) :return: The updated object # noqa Delete the mapping against the unique_key. Raise an exception if the mapping is not present :param unique_key: The unique_key for deleting the mapping :return: Return ``True`` if a mapping exists against the unique_key :param unique_key: The unique_key for querying the mapping :return: Make remote modules available to the clients, primarily for creating remote objects :param str module: The module to import in absolute or relative terms (Ex: pkg.mod, ..mod) :param str package: The package which acts as a base for resolving the module (should be set when relative imports are used) :return: Remote module Return ``RPyC`` version of the server :return: Validate the object sources. Return False if both robj, robj_gen are set/not-set else True :param robj: The remote object :param robj_gen: The remote object generator :return: RPyC get attribute RPyC set attribute RPyC delete attribute | 2.84165 | 3 |
report/loss.py | sweatybridge/text-to-anime | 0 | 6612800 | import numpy as np
import matplotlib.pyplot as plt
def read_loss(path):
    """Parse a training log into per-epoch train losses and validation losses.

    Expected line formats:
      - "Train loss <iter> <loss> ..."      -> iteration loss (4th token)
      - "Epoch ..."                          -> epoch boundary; mean of the
                                               accumulated iteration losses
      - "Validation loss <n>: <loss>"        -> validation loss

    Returns:
        (train, val): lists of floats; one train entry per completed epoch,
        one val entry per validation line.
    """
    train, val = [], []
    with open(path, "r") as f:
        iter_loss = []
        for line in f:
            if line.startswith("Epoch"):
                # Guard against an "Epoch" header appearing before any
                # iteration losses: np.mean([]) would warn and yield nan.
                if iter_loss:
                    train.append(np.mean(iter_loss))
                    iter_loss = []
            elif line.startswith("Validation loss"):
                loss = float(line.split(":")[-1].strip())
                val.append(loss)
            elif line.startswith("Train loss"):
                loss = float(line.split(" ")[3].strip())
                iter_loss.append(loss)
    return train, val
def render(train, val, title):
    """Plot one model's train/val loss curves (val is logged every 5 epochs)."""
    train_epochs = [epoch + 1 for epoch in range(len(train))]
    val_epochs = [(epoch + 1) * 5 for epoch in range(len(val))]
    plt.plot(train_epochs, train)
    plt.plot(val_epochs, val)
    plt.xlabel("Epoch")
    plt.ylabel("Loss")
    plt.xlim(left=0)
    plt.ylim(bottom=0)
    plt.title(title)
    plt.legend(["Train", "Val"])
    plt.tight_layout()
    plt.show()
def compare(train, title, legend):
    """Overlay several loss curves (one per entry of *train*) on one plot."""
    for curve in train:
        epochs = [epoch + 1 for epoch in range(len(curve))]
        plt.plot(epochs, curve)
    plt.xlabel("Epoch")
    plt.ylabel("Loss")
    plt.xlim(left=0)
    plt.ylim(bottom=0)
    plt.title(title)
    plt.legend(legend)
    plt.tight_layout()
    plt.show()
def lips_vs_full():
    """Render loss curves for the full 68-landmark model and the lips-only model."""
    for log_path, title in (
        ("report/loss_all.txt", "Model A (68 landmarks)"),
        ("report/loss_lips.txt", "Model B (lips only)"),
    ):
        train, val = read_loss(log_path)
        render(train, val, title)
def pretrain_vs_no():
    """Compare lips-only training with and without pretraining."""
    no_pre_train, no_pre_val = read_loss("loss_60.txt")
    pre_train, pre_val = read_loss("loss_60_pretrain.txt")
    legend = ["No Pretrain", "Pretrain"]
    compare([no_pre_train, pre_train], "Train loss (lips only)", legend)
    compare([no_pre_val, pre_val], "Val loss (lips only)", legend)
# Script entry point: render the 68-landmark vs lips-only comparison plots.
if __name__ == "__main__":
    lips_vs_full()
| import numpy as np
import matplotlib.pyplot as plt
def read_loss(path):
train, val = [], []
with open(path, "r") as f:
iter_loss = []
for line in f:
if line.startswith("Epoch"):
train.append(np.mean(iter_loss))
iter_loss = []
elif line.startswith("Validation loss"):
loss = float(line.split(":")[-1].strip())
val.append(loss)
elif line.startswith("Train loss"):
loss = float(line.split(" ")[3].strip())
iter_loss.append(loss)
return train, val
def render(train, val, title):
plt.plot([i + 1 for i in range(len(train))], train)
plt.plot([(i + 1) * 5 for i in range(len(val))], val)
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.xlim(left=0)
plt.ylim(bottom=0)
plt.title(title)
plt.legend(["Train", "Val"])
plt.tight_layout()
plt.show()
def compare(train, title, legend):
for data in train:
plt.plot([i + 1 for i in range(len(data))], data)
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.xlim(left=0)
plt.ylim(bottom=0)
plt.title(title)
plt.legend(legend)
plt.tight_layout()
plt.show()
def lips_vs_full():
train, val = read_loss("report/loss_all.txt")
render(train, val, "Model A (68 landmarks)")
train, val = read_loss("report/loss_lips.txt")
render(train, val, "Model B (lips only)")
def pretrain_vs_no():
train, val = read_loss("loss_60.txt")
train_p, val_p = read_loss("loss_60_pretrain.txt")
compare([train, train_p], "Train loss (lips only)", ["No Pretrain", "Pretrain"])
compare([val, val_p], "Val loss (lips only)", ["No Pretrain", "Pretrain"])
if __name__ == "__main__":
lips_vs_full()
| none | 1 | 2.851009 | 3 | |
example.py | Dominique57/ChatBotMaker | 0 | 6612801 | # app imports
from flask import Flask, request
from chatbotmaker import Bot, Dispatcher, Database
from chatbotmaker.defaults.dev import DevMessenger
from chatbotmaker.defaults.facebook import FacebookMessenger, facebook_route
# Two-state demo machine: each action prints a message and toggles the state
# between 'welcome' and 'home'.
dispatcher_config = {
    'actions': {
        'welcome': {
            'func': lambda user, user_input: (
                user.send_message('Im in welcome state'),
                user.change_state('home')
            )
        },
        'home': {
            'func': lambda user, user_input: (
                user.send_message('Im in home state'),
                user.change_state('welcome')
            )
        },
    }
}
# Placeholder credentials; replace with the real Facebook webhook tokens.
FACEBOOK_CHECK_TOKEN = 'VERIFY_TOKEN'
FACEBOOK_AUTH_TOKEN = '<PASSWORD>'
messenger = FacebookMessenger(FACEBOOK_AUTH_TOKEN)
# SINCE token is fake, lets use a dev-messenger (terminal printing)
messenger = DevMessenger()
dispatcher = Dispatcher(dispatcher_config)
# SQLite file database for user state persistence.
database = Database({'sqlalchemy.url': 'sqlite:///foo.db'})
bot = Bot({}, messenger, dispatcher, database)
app = Flask(__name__)
@app.route('/bot', methods=['GET', 'POST'])
def ngn_bot():
    # Delegate both webhook verification (GET) and message delivery (POST)
    # to the chatbotmaker facebook_route helper.
    return facebook_route(request, FACEBOOK_CHECK_TOKEN, bot)
@app.route('/bot_debug', methods=['GET'])
def ngn_bot_debug():
    # Debug endpoint: drive the bot from a browser query string, e.g.
    #   /bot_debug?user=alice&message=hello
    if request.method == 'GET':  # always true: the route only allows GET
        user_id = request.args.get("user")
        user_input = request.args.get("message")
        return bot.user_handle(user_id, user_input)
    return "Message ignored"
app.run()
| # app imports
from flask import Flask, request
from chatbotmaker import Bot, Dispatcher, Database
from chatbotmaker.defaults.dev import DevMessenger
from chatbotmaker.defaults.facebook import FacebookMessenger, facebook_route
dispatcher_config = {
'actions': {
'welcome': {
'func': lambda user, user_input: (
user.send_message('Im in welcome state'),
user.change_state('home')
)
},
'home': {
'func': lambda user, user_input: (
user.send_message('Im in home state'),
user.change_state('welcome')
)
},
}
}
FACEBOOK_CHECK_TOKEN = 'VERIFY_TOKEN'
FACEBOOK_AUTH_TOKEN = '<PASSWORD>'
messenger = FacebookMessenger(FACEBOOK_AUTH_TOKEN)
# SINCE token is fake, lets use a dev-messenger (terminal printing)
messenger = DevMessenger()
dispatcher = Dispatcher(dispatcher_config)
database = Database({'sqlalchemy.url': 'sqlite:///foo.db'})
bot = Bot({}, messenger, dispatcher, database)
app = Flask(__name__)
@app.route('/bot', methods=['GET', 'POST'])
def ngn_bot():
return facebook_route(request, FACEBOOK_CHECK_TOKEN, bot)
@app.route('/bot_debug', methods=['GET'])
def ngn_bot_debug():
if request.method == 'GET':
user_id = request.args.get("user")
user_input = request.args.get("message")
return bot.user_handle(user_id, user_input)
return "Message ignored"
app.run()
| en | 0.553564 | # app imports # SINCE token is fake, lets use a dev-messenger (terminal printing) | 2.244812 | 2 |
borg_find/ui.py | essembeh/borg-find | 1 | 6612802 | """
some utils for fancy stdout
"""
from pathlib import Path
import shlex
from subprocess import CompletedProcess
from colorama import Style, Fore
from .model import BorgArchive, BorgFile, BorgRepository
def label(item):
    """
    return a str with colors of the given object
    """
    # Filesystem path: plain if missing, blue+bold dir, dir/name split for files.
    if isinstance(item, Path):
        if not item.exists():
            return str(item)
        if item.is_dir():
            return f"{Fore.BLUE}{Style.BRIGHT}{item}/{Style.RESET_ALL}"
        return f"{Style.BRIGHT}{Fore.BLUE}{item.parent}/{Fore.MAGENTA}{item.name}{Style.RESET_ALL}"
    # Entry inside a borg archive: color the basename by file kind.
    if isinstance(item, BorgFile):
        if item.is_dir():
            return f"{Fore.BLUE}{Style.BRIGHT}{item.as_path}/{Style.RESET_ALL}"
        color = Fore.MAGENTA  # regular file
        if item.is_link():
            color = Fore.CYAN
        elif item.is_executable():
            color = Fore.GREEN
        return f"{Style.BRIGHT}{Fore.BLUE}{item.as_path.parent}/{color}{item.as_path.name}{Style.RESET_ALL}"
    if isinstance(item, BorgRepository):
        return f"{Fore.CYAN}{item.source}{Style.RESET_ALL}"
    # Archive rendered as "repo::name" (reuses the repository branch above).
    if isinstance(item, BorgArchive):
        return (
            f"{label(item.repo)}::{Style.BRIGHT}{Fore.CYAN}{item.name}{Style.RESET_ALL}"
        )
    # Finished subprocess: show its shell-quoted command line in yellow.
    if isinstance(item, CompletedProcess):
        cmd = item.args
        if isinstance(cmd, list):
            cmd = " ".join(map(shlex.quote, cmd))
        return f"{Fore.YELLOW}{cmd}{Fore.RESET}"
    # NOTE(review): any other type falls through and returns None implicitly.
def dumpproc(stdout, stderr=None):
    """
    print stdout/stderr of a process

    Both arguments are raw ``bytes`` (or None); non-empty streams are printed
    between BEGIN/END banners, stdout dimmed and stderr in red.
    """
    _dump_stream(stdout, "STDOUT", Style.DIM)
    _dump_stream(stderr, "STDERR", Fore.RED)


def _dump_stream(data, stream_name, color):
    """Print one captured stream (bytes) between banners, one colorized line at a time."""
    if data is not None and len(data) > 0:
        print(" ", "=" * 20, f"BEGIN {stream_name}", "=" * 20)
        for line in data.decode().splitlines():
            print(" ", f"{color}{line}{Style.RESET_ALL}")
        print(" ", "=" * 20, f"END {stream_name}", "=" * 20)
| """
some utils for fancy stdout
"""
from pathlib import Path
import shlex
from subprocess import CompletedProcess
from colorama import Style, Fore
from .model import BorgArchive, BorgFile, BorgRepository
def label(item):
"""
return a str with colors of the given object
"""
if isinstance(item, Path):
if not item.exists():
return str(item)
if item.is_dir():
return f"{Fore.BLUE}{Style.BRIGHT}{item}/{Style.RESET_ALL}"
return f"{Style.BRIGHT}{Fore.BLUE}{item.parent}/{Fore.MAGENTA}{item.name}{Style.RESET_ALL}"
if isinstance(item, BorgFile):
if item.is_dir():
return f"{Fore.BLUE}{Style.BRIGHT}{item.as_path}/{Style.RESET_ALL}"
color = Fore.MAGENTA
if item.is_link():
color = Fore.CYAN
elif item.is_executable():
color = Fore.GREEN
return f"{Style.BRIGHT}{Fore.BLUE}{item.as_path.parent}/{color}{item.as_path.name}{Style.RESET_ALL}"
if isinstance(item, BorgRepository):
return f"{Fore.CYAN}{item.source}{Style.RESET_ALL}"
if isinstance(item, BorgArchive):
return (
f"{label(item.repo)}::{Style.BRIGHT}{Fore.CYAN}{item.name}{Style.RESET_ALL}"
)
if isinstance(item, CompletedProcess):
cmd = item.args
if isinstance(cmd, list):
cmd = " ".join(map(shlex.quote, cmd))
return f"{Fore.YELLOW}{cmd}{Fore.RESET}"
def dumpproc(stdout, stderr=None):
"""
print stdout/stderr of a process
"""
if stdout is not None and len(stdout) > 0:
print(" ", "=" * 20, "BEGIN STDOUT", "=" * 20)
for line in stdout.decode().splitlines():
print(" ", f"{Style.DIM}{line}{Style.RESET_ALL}")
print(" ", "=" * 20, "END STDOUT", "=" * 20)
if stderr is not None and len(stderr) > 0:
print(" ", "=" * 20, "BEGIN STDERR", "=" * 20)
for line in stderr.decode().splitlines():
print(" ", f"{Fore.RED}{line}{Style.RESET_ALL}")
print(" ", "=" * 20, "END STDERR", "=" * 20)
| en | 0.680387 | some utils for fancy stdout return a str with colors of the given object print stdout/stderr of a process | 2.448642 | 2 |
tfne/deserialization/codeepneat/__init__.py | githealthy18/Tensorflow-Neuroevolution | 121 | 6612803 | <filename>tfne/deserialization/codeepneat/__init__.py
# Import functions
from tfne.deserialization.codeepneat.codeepneat_deserialization import deserialize_codeepneat_genome
from tfne.deserialization.codeepneat.codeepneat_deserialization import deserialize_codeepneat_encoding
from tfne.deserialization.codeepneat.codeepneat_deserialization import deserialize_codeepneat_population
| <filename>tfne/deserialization/codeepneat/__init__.py
# Import functions
from tfne.deserialization.codeepneat.codeepneat_deserialization import deserialize_codeepneat_genome
from tfne.deserialization.codeepneat.codeepneat_deserialization import deserialize_codeepneat_encoding
from tfne.deserialization.codeepneat.codeepneat_deserialization import deserialize_codeepneat_population
| en | 0.359696 | # Import functions | 1.248025 | 1 |
gesture/PepperGestures/Pepper.py | ErikEkstedt/Project | 3 | 6612804 | <filename>gesture/PepperGestures/Pepper.py
from __future__ import print_function
import gym
from gym import spaces
import numpy as np
import time
import cv2
import qi
import motion
from screen import ObsRGB
#============== Help Functions ======================
def get_random_target(slim_low, slim_high, target='both', idx=None):
    """Sample a uniform random joint target in [-|slim_low|, |slim_high|].

    Args:
        slim_low: torch.Tensor of per-joint lower limits.
        slim_high: torch.Tensor of per-joint upper limits (same shape).
        target, idx: unused; kept for backward-compatible interface.

    Returns:
        torch.Tensor of the same shape as slim_high.
    """
    # Local import: torch is otherwise only imported under __main__, so the
    # original raised NameError when this helper was called from elsewhere.
    import torch
    span = slim_high.abs() + slim_low.abs()
    goal = torch.rand(slim_high.size())
    goal = goal * span - slim_low.abs()
    return goal
def printLimits(motion_service, name="Body"):
    """Print min/max angle, max velocity and max torque for every joint of *name*."""
    limits = motion_service.getLimits(name)
    joint_names = motion_service.getBodyNames(name)
    for joint, lim in zip(joint_names, limits):
        print(joint + ":")
        print("minAngle", lim[0],
              "maxAngle", lim[1],
              "maxVelocity", lim[2],
              "maxTorque", lim[3])
class Pepper_v0(gym.Env):
    ''' Directions:
        Environment - Choregraphe
        Open Choregraphe and connect to a virtual session.
        Copy the PORT-number the simulation runs on.
        Detach the "Robot view" window and have it
        visible on desktop (we get pixel values from the screen).
        Create a qi session with the PORT number.
        Set target before reset (env.set_target() -> env.reset())
    '''
    # NOTE(review): a stray "g " prefixed the docstring in the original,
    # which made the whole file a SyntaxError; removed.
def __init__(self, session,
rgb_shape=(64, 64),
step_time=0.05,
action_coeff=0.05,
use_head=False,
args=None):
'''
Arguments:
session - qi.session
head - boolean, if true actions include head actions.
'''
self.session = session
self.motion_service = session.service("ALMotion")
self.motion_service.setStiffnesses = 1 # movement possible
self.posture_service = session.service("ALRobotPosture")
self.posture_service.goToPosture("StandInit", 0.5)
o_low, o_high, a, b = self.get_limits(use_head) # different sizes w/ or w/out head
self.lim_low = o_low
self.lim_high = o_high
action_max = np.round( (o_high - o_low)*action_coeff, 3)
self.state_space = spaces.Box(o_low, o_high)
self.action_space = spaces.Box(-action_max, action_max)
self.observation_space = spaces.Box(low=0, high=255, shape=rgb_shape)
self.RGB_observer = ObsRGB()
self.rgb_shape = rgb_shape
self.observation_shape = (3, rgb_shape[0], rgb_shape[1])
self.useSensors = False
self.step_time = step_time
self.use_head = use_head
self.n = 0
self.MAX = args.MAX_TIME
if use_head:
self.names = ["LShoulderRoll", "LShoulderPitch",
"LElbowYaw", "LElbowRoll",
"LWristYaw", "LHand",
"RShoulderRoll", "RShoulderPitch",
"RElbowYaw", "RElbowRoll",
"RWristYaw", "RHand",
"HeadYaw", "HeadPitch"]
else:
self.names= ["LShoulderRoll", "LShoulderPitch",
"LElbowYaw", "LElbowRoll",
"LWristYaw", "LHand",
"RShoulderRoll", "RShoulderPitch",
"RElbowYaw", "RElbowRoll",
"RWristYaw", "RHand"]
self.fractionMaxSpeed = 0.1 # movement speed
    def _reset(self):
        '''Called at the start of episodes.

        Returns (state+velocity float32 vector, target, rgb observation),
        or None if no target has been set yet.
        '''
        # NOTE(review): requires set_target()/set_random_target() first;
        # if self.target was never assigned at all this raises AttributeError.
        if self.target is None:
            print('Define a target')
            return
        self.posture_service.goToPosture("StandInit", 0.5)
        time.sleep(0.2)
        self.n = 0
        self._getState() # sets self.state
        # Velocity is zero at episode start.
        vel = np.zeros(len(self.state))
        # Screen-grab RGB observation, resized and converted to CHW layout.
        rgb = self.RGB_observer.get_rgb()
        rgb = cv2.resize(rgb, self.rgb_shape)
        rgb = rgb.transpose((2,0,1))
        # Cache the potential so the first step's reward is a proper delta.
        self.potential = self.calc_potential()
        return np.concatenate((self.state, vel)).astype('float32'), self.target, rgb
    def _step(self, changes):
        ''' Step function that returns the joint and rgb values '''
        self.n += 1
        # NOTE(review): assumes `changes` is a batched (1, n) array/tensor —
        # tolist()[0] unwraps the batch into a flat list; a 1-D action would
        # yield a scalar here. TODO confirm against the caller.
        changes = changes.tolist()[0]
        try:
            self.motion_service.changeAngles(self.names, changes, self.fractionMaxSpeed)
        except RuntimeError:
            print('Angles: ', changes)
        # Let the motion play out before reading the resulting state.
        time.sleep(self.step_time)
        prev_state = self.state
        self._getState() # sets self.state
        # Finite-difference joint velocity over one step.
        vel = self.state - prev_state
        reward = self.calc_reward()
        # Screen-grab RGB observation, resized and converted to CHW layout.
        rgb = self.RGB_observer.get_rgb()
        rgb = cv2.resize(rgb, self.rgb_shape)
        rgb = rgb.transpose((2,0,1))
        s = np.concatenate((self.state, vel)).astype('float32')
        # Episode terminates after MAX steps.
        done = False
        if self.n > self.MAX:
            done = True
        return s, self.target, rgb, reward, done
    def set_angles(self, angles):
        """Debug helper: drive the joints to *angles*, then print state/target
        and block until the user presses Enter."""
        angles = angles.tolist()
        self.motion_service.setAngles(self.names, angles, self.fractionMaxSpeed)
        # Give the robot time to reach the pose before reading it back.
        time.sleep(1)
        self._getState() # sets self.state
        print('state:',self.state)
        print('target:',self.target)
        # NOTE(review): raw_input is Python 2 only (NameError on Python 3).
        raw_input()
def set_target(self, target):
self.target = target
def set_random_target(self):
tmp_high = np.abs(self.lim_high) + np.abs(self.lim_low)
target = np.random.rand(self.lim_high.size)
target = target * tmp_high - np.abs(self.lim_low)
self.target = target
return target
def calc_reward(self):
''' Difference potential as reward '''
potential_old = self.potential
self.potential = self.calc_potential()
return np.array([self.potential - potential_old])
def calc_potential(self):
p = -np.linalg.norm(self.target - self.state)
return np.array(p)
    def _getState(self):
        # Read the current joint angles (commanded values, since
        # self.useSensors is False) and cache them in self.state,
        # ordered left arm first, then right arm, then optionally head.
        R = np.array(self.motion_service.getAngles(
            "RArm", self.useSensors))
        L = np.array(self.motion_service.getAngles(
            "LArm", self.useSensors))
        if self.use_head:
            H = np.array(self.motion_service.getAngles(
                "Head", self.useSensors))
            self.state = np.concatenate((L, R, H))
        else:
            self.state = np.concatenate((L, R))
def _close(self):
self.motion_service.setStiffnesses = 0
def get_limits(self, head):
''' function that return limits of the robot
Arguments:
motion_service - qi.session object
Return:
min_angle
max_angle
max_velocity
max_torque
'''
limL = np.array(self.motion_service.getLimits("LArm"))
limR = np.array(self.motion_service.getLimits("RArm"))
if head:
limHead = np.array(self.motion_service.getLimits("Head"))
o_low = np.concatenate((limL[:, 0], limR[:, 0], limHead[:, 0]))
o_high = np.concatenate((limL[:, 1], limR[:, 1], limHead[:, 1]))
vel_max = np.concatenate((limL[:, 2], limR[:, 2], limHead[:, 2]))
torque_max = np.concatenate((limL[:, 3], limR[:, 3], limHead[:, 3]))
else:
o_low = np.concatenate((limL[:, 0], limR[:, 0]))
o_high = np.concatenate((limL[:, 1], limR[:, 1]))
vel_max = np.concatenate((limL[:, 2], limR[:, 2]))
torque_max = np.concatenate((limL[:, 3], limR[:, 3]))
return o_low, o_high, vel_max, torque_max
if __name__ == '__main__':
    from arguments import get_args
    import torch

    args = get_args()
    session = qi.Session()
    session.connect("{}:{}".format(args.IP, args.PORT))
    # Pass args so the env can read MAX_TIME (the original omitted it and
    # crashed on `args.MAX_TIME` inside __init__).
    env = Pepper_v0(session, args=args)

    # ====== Goal ===============
    # "hurray pose"
    L_arm = [-0.38450, 0.81796, -0.99049, -1.18418, -1.3949, 0.0199]
    R_arm = [-0.90522, -1.03321, -0.05766, 0.84596, 1.39495, 0.01999]
    goal = torch.Tensor(L_arm + R_arm)
    # A target must be set before reset(); the reward math uses numpy.
    env.set_target(goal.numpy())

    # reset()/step() return (state, target, rgb[, reward, done]); the
    # original unpacked only two values and raised a ValueError.
    s, t, rgb = env.reset()
    print(s)
    for i in range(500):
        # _step() expects a batched (1, n) action (it does tolist()[0]).
        action = env.action_space.sample().reshape(1, -1)
        s, t, rgb, reward, done = env.step(action)
        print('State:\nShape: {}\ntype: {}\nData: {} '.format(s.shape, s.dtype, s))
        if done:
            break
| <filename>gesture/PepperGestures/Pepper.py
from __future__ import print_function
import gym
from gym import spaces
import numpy as np
import time
import cv2
import qi
import motion
from screen import ObsRGB
#============== Help Functions ======================
def get_random_target(slim_low, slim_high, target='both', idx=None):
tmp_high = slim_high.abs() + slim_low.abs()
goal = torch.rand(slim_high.size())
goal = goal * tmp_high - slim_low.abs()
return goal
def printLimits(motion_service, name="Body"):
limits = motion_service.getLimits(name)
jointNames = motion_service.getBodyNames(name)
for i in range(0,len(limits)):
print(jointNames[i] + ":")
print("minAngle", limits[i][0],\
"maxAngle", limits[i][1],\
"maxVelocity", limits[i][2],\
"maxTorque", limits[i][3])
class Pepper_v0(gym.Env):
g ''' Directions:
Environment - Choregraphe
Open Choregraphe and connect to a virtual session.
Copy the PORT-number the simulation runs on.
Detach the "Robot view" window and have it
visable on desktop (we get pixel values from the screen).
Create a qi session with the PORT number.
Set target before reset (env.set_target() -> env.reset())
'''
def __init__(self, session,
rgb_shape=(64, 64),
step_time=0.05,
action_coeff=0.05,
use_head=False,
args=None):
'''
Arguments:
session - qi.session
head - boolean, if true actions include head actions.
'''
self.session = session
self.motion_service = session.service("ALMotion")
self.motion_service.setStiffnesses = 1 # movement possible
self.posture_service = session.service("ALRobotPosture")
self.posture_service.goToPosture("StandInit", 0.5)
o_low, o_high, a, b = self.get_limits(use_head) # different sizes w/ or w/out head
self.lim_low = o_low
self.lim_high = o_high
action_max = np.round( (o_high - o_low)*action_coeff, 3)
self.state_space = spaces.Box(o_low, o_high)
self.action_space = spaces.Box(-action_max, action_max)
self.observation_space = spaces.Box(low=0, high=255, shape=rgb_shape)
self.RGB_observer = ObsRGB()
self.rgb_shape = rgb_shape
self.observation_shape = (3, rgb_shape[0], rgb_shape[1])
self.useSensors = False
self.step_time = step_time
self.use_head = use_head
self.n = 0
self.MAX = args.MAX_TIME
if use_head:
self.names = ["LShoulderRoll", "LShoulderPitch",
"LElbowYaw", "LElbowRoll",
"LWristYaw", "LHand",
"RShoulderRoll", "RShoulderPitch",
"RElbowYaw", "RElbowRoll",
"RWristYaw", "RHand",
"HeadYaw", "HeadPitch"]
else:
self.names= ["LShoulderRoll", "LShoulderPitch",
"LElbowYaw", "LElbowRoll",
"LWristYaw", "LHand",
"RShoulderRoll", "RShoulderPitch",
"RElbowYaw", "RElbowRoll",
"RWristYaw", "RHand"]
self.fractionMaxSpeed = 0.1 # movement speed
def _reset(self):
'''Called at the start of episodes'''
if self.target is None:
print('Define a target')
return
self.posture_service.goToPosture("StandInit", 0.5)
time.sleep(0.2)
self.n = 0
self._getState() # sets self.state
vel = np.zeros(len(self.state))
rgb = self.RGB_observer.get_rgb()
rgb = cv2.resize(rgb, self.rgb_shape)
rgb = rgb.transpose((2,0,1))
self.potential = self.calc_potential()
return np.concatenate((self.state, vel)).astype('float32'), self.target, rgb
def _step(self, changes):
''' Step function that returns the joint and rgb values '''
self.n += 1
changes = changes.tolist()[0]
try:
self.motion_service.changeAngles(self.names, changes, self.fractionMaxSpeed)
except RuntimeError:
print('Angles: ', changes)
time.sleep(self.step_time)
prev_state = self.state
self._getState() # sets self.state
vel = self.state - prev_state
reward = self.calc_reward()
rgb = self.RGB_observer.get_rgb()
rgb = cv2.resize(rgb, self.rgb_shape)
rgb = rgb.transpose((2,0,1))
s = np.concatenate((self.state, vel)).astype('float32')
done = False
if self.n > self.MAX:
done = True
return s, self.target, rgb, reward, done
def set_angles(self, angles):
angles = angles.tolist()
self.motion_service.setAngles(self.names, angles, self.fractionMaxSpeed)
time.sleep(1)
self._getState() # sets self.state
print('state:',self.state)
print('target:',self.target)
raw_input()
def set_target(self, target):
self.target = target
def set_random_target(self):
tmp_high = np.abs(self.lim_high) + np.abs(self.lim_low)
target = np.random.rand(self.lim_high.size)
target = target * tmp_high - np.abs(self.lim_low)
self.target = target
return target
def calc_reward(self):
''' Difference potential as reward '''
potential_old = self.potential
self.potential = self.calc_potential()
return np.array([self.potential - potential_old])
def calc_potential(self):
p = -np.linalg.norm(self.target - self.state)
return np.array(p)
def _getState(self):
R = np.array(self.motion_service.getAngles(
"RArm", self.useSensors))
L = np.array(self.motion_service.getAngles(
"LArm", self.useSensors))
if self.use_head:
H = np.array(self.motion_service.getAngles(
"Head", self.useSensors))
self.state = np.concatenate((L, R, H))
else:
self.state = np.concatenate((L, R))
def _close(self):
self.motion_service.setStiffnesses = 0
def get_limits(self, head):
''' function that return limits of the robot
Arguments:
motion_service - qi.session object
Return:
min_angle
max_angle
max_velocity
max_torque
'''
limL = np.array(self.motion_service.getLimits("LArm"))
limR = np.array(self.motion_service.getLimits("RArm"))
if head:
limHead = np.array(self.motion_service.getLimits("Head"))
o_low = np.concatenate((limL[:, 0], limR[:, 0], limHead[:, 0]))
o_high = np.concatenate((limL[:, 1], limR[:, 1], limHead[:, 1]))
vel_max = np.concatenate((limL[:, 2], limR[:, 2], limHead[:, 2]))
torque_max = np.concatenate((limL[:, 3], limR[:, 3], limHead[:, 3]))
else:
o_low = np.concatenate((limL[:, 0], limR[:, 0]))
o_high = np.concatenate((limL[:, 1], limR[:, 1]))
vel_max = np.concatenate((limL[:, 2], limR[:, 2]))
torque_max = np.concatenate((limL[:, 3], limR[:, 3]))
return o_low, o_high, vel_max, torque_max
if __name__ == '__main__':
from arguments import get_args
import torch
args = get_args()
session = qi.Session()
session.connect("{}:{}".format(args.IP, args.PORT))
env = Pepper_v0(session)
# ====== Goal ===============
# "hurray pose"
L_arm = [-0.38450, 0.81796, -0.99049, -1.18418, -1.3949, 0.0199]
R_arm = [-0.90522, -1.03321, -0.05766, 0.84596, 1.39495, 0.01999]
goal = torch.Tensor(L_arm+R_arm)
# mask goal state according to goal_type
# agent.set_goal_state(goal)
s, o = env.reset()
print(s)
for i in range(500):
action = env.action_space.sample()
# print('Action:\nShape: {}\ntype: {}\nData: {} '.format(action.shape, action.dtype, action))
s, o = env.step(action)
print('State:\nShape: {}\ntype: {}\nData: {} '.format(s.shape, s.dtype, s))
| en | 0.713611 | #============== Help Functions ====================== Directions: Environment - Choregraphe Open Choregraphe and connect to a virtual session. Copy the PORT-number the simulation runs on. Detach the "Robot view" window and have it visable on desktop (we get pixel values from the screen). Create a qi session with the PORT number. Set target before reset (env.set_target() -> env.reset()) Arguments: session - qi.session head - boolean, if true actions include head actions. # movement possible # different sizes w/ or w/out head # movement speed Called at the start of episodes # sets self.state Step function that returns the joint and rgb values # sets self.state # sets self.state Difference potential as reward function that return limits of the robot Arguments: motion_service - qi.session object Return: min_angle max_angle max_velocity max_torque # ====== Goal =============== # "hurray pose" # mask goal state according to goal_type # agent.set_goal_state(goal) # print('Action:\nShape: {}\ntype: {}\nData: {} '.format(action.shape, action.dtype, action)) | 2.605455 | 3 |
training/common/config.py | feiyunzhang/person_attribute_mxnet | 0 | 6612805 | import numpy as np
def set_lam_value():
    """Draw a fresh mixing coefficient lambda ~ Beta(1, 1), i.e. uniform on [0, 1]."""
    return np.random.beta(1, 1)
# Sampled exactly once at import time; get_lam_value() returns this snapshot.
a= set_lam_value()
def get_lam_value():
    """Return the lambda value drawn at import time (never re-sampled)."""
    get_value = a
    return get_value
| import numpy as np
def set_lam_value():
set_value =np.random.beta(1,1)
return set_value
a= set_lam_value()
def get_lam_value():
get_value = a
return get_value
| none | 1 | 2.312994 | 2 | |
source/offline_ds_evaluation/latex.py | kschweig/HopfieldOfflineRL | 0 | 6612806 | <gh_stars>0
def create_latex_table(path, arguments):
    """Write a LaTeX table of dataset-evaluation metrics to *path*.

    Args:
        path: output .tex file path.
        arguments: one row per buffer type (Random/Mixed/ER/Noisy/Final);
            arguments[i][0] is the environment id, columns from index 2 on
            are metric values, either floats or (mean, std) tuples.

    Note: invalid escape sequences ("\\h" in "\\hline", "\\_") from the
    original are now written with doubled backslashes — the emitted LaTeX
    bytes are unchanged, but newer Pythons no longer warn.
    """
    environment = {"MiniGrid-LavaGapS7-v0": "lava", "MiniGrid-Dynamic-Obstacles-8x8-v0": "obstacles",
                   "CartPole-v1": "cartpole", "Acrobot-v1":"acrobot", "MountainCar-v0": "mountaincar",
                   "Breakout-MinAtar-v0": "breakout", "Space_invaders-MinAtar-v0": "spaceinvaders"}
    buffer = { "random": "Random Policy", "mixed": "Mixed Policy", "er": "Exp. Replay",
               "noisy": "Noisy Policy", "fully": "Final Policy"}
    results = ["Return", "Unique State-Action Pairs", "Entropy"]

    with open(path, "w") as f:
        f.write("\\begin{table}[h]\n\\centering\n\\begin{tabular}{l" + "c"*len(results) + "}\n \\hline \n")
        f.write("Buffer Type")
        for result in results:
            f.write(" & " + result)
        f.write(" \\\\ \\hline \n")

        for i, buf in enumerate(buffer.values()):
            f.write(buf + " & ")
            # NOTE(review): the column range is derived from len(arguments)
            # (number of rows) — works only when rows and columns line up.
            for j in range(2, len(arguments)):
                if isinstance(arguments[i][j], tuple):
                    # (mean, std) rendered as "mean ± std".
                    f.write(f"${round(arguments[i][j][0], 2):.2f} \\pm {round(arguments[i][j][1], 2):.2f}$")
                else:
                    f.write(f"${round(arguments[i][j], 5)}$")
                if j == len(arguments) - 1:
                    f.write("\\\\ \n")
                else:
                    f.write(" & ")
        f.write("\n \\hline \n")
        f.write("\\end{tabular}\n\\caption{Dataset evaluation metrics for all buffer types of environment '"
                +arguments[0][0].replace("_", "\\_") +"'.}\n")
        f.write("\\label{tab:ds_eval_"+environment[arguments[0][0]]+"}\n\\end{table}")
environment = {"MiniGrid-LavaGapS7-v0": "lava", "MiniGrid-Dynamic-Obstacles-8x8-v0": "obstacles",
"CartPole-v1": "cartpole", "Acrobot-v1":"acrobot", "MountainCar-v0": "mountaincar",
"Breakout-MinAtar-v0": "breakout", "Space_invaders-MinAtar-v0": "spaceinvaders"}
buffer = { "random": "Random Policy", "mixed": "Mixed Policy", "er": "Exp. Replay",
"noisy": "Noisy Policy", "fully": "Final Policy"}
results = ["Return", "Unique State-Action Pairs", "Entropy"]
with open(path, "w") as f:
f.write("\\begin{table}[h]\n\\centering\n\\begin{tabular}{l" + "c"*len(results) + "}\n \hline \n")
f.write("Buffer Type")
for result in results:
f.write(" & " + result)
f.write(" \\\\ \\hline \n")
for i, buf in enumerate(buffer.values()):
f.write(buf + " & ")
for j in range(2, len(arguments)):
if isinstance(arguments[i][j], tuple):
f.write(f"${round(arguments[i][j][0], 2):.2f} \\pm {round(arguments[i][j][1], 2):.2f}$")
else:
f.write(f"${round(arguments[i][j], 5)}$")
if j == len(arguments) - 1:
f.write("\\\\ \n")
else:
f.write(" & ")
f.write("\n \hline \n")
f.write("\\end{tabular}\n\\caption{Dataset evaluation metrics for all buffer types of environment '"
+arguments[0][0].replace("_", "\_") +"'.}\n")
f.write("\\label{tab:ds_eval_"+environment[arguments[0][0]]+"}\n\\end{table}") | none | 1 | 2.900359 | 3 | |
app/google_vision_recognizer.py | Katel212/MyPersonalKitchenBot | 0 | 6612807 | import codecs
import io
import os
import random
import re
import string
import cv2
from google.cloud import vision_v1p4beta1 as vision
from google.cloud.vision_v1p4beta1 import types
SOURCE_PATH = os.environ['SOURCE_PATH']
DICTIONARIES_PATH = os.environ['DICTIONARIES_PATH']
def generate_random_string(length):
    """Return a random lowercase ASCII string of the given length (not crypto-safe)."""
    chars = [random.choice(string.ascii_lowercase) for _ in range(length)]
    return ''.join(chars)
def load_food_names():
    """Load the recognizable food names from DICTIONARIES_PATH/food_recognise.dict.

    Returns:
        list[str]: one name per line, trailing CR/LF stripped (UTF-8,
        BOM-tolerant via the utf_8_sig codec).
    """
    dict_path = os.path.join(DICTIONARIES_PATH, "food_recognise.dict")
    # Context manager closes the handle (the original leaked it).
    with codecs.open(dict_path, 'r', 'utf_8_sig') as f:
        return [line.rstrip('\n\r') for line in f]
def recognize_food(img_path, list_foods):
    """Detect food items in a photo via Google Vision labels and OCR text.

    Args:
        img_path: path to the input image; a resized copy is written to
            SOURCE_PATH/output.jpg and removed before returning.
        list_foods: lowercase food names to match against.

    Returns:
        list[str]: matched food names; when an image label matched, English
        hits are translated to Russian (rec_eng_rus.dict) and title-cased.
    """
    # Scale image to 800 px wide, preserving aspect ratio.
    img = cv2.imread(img_path)
    height, width = img.shape[:2]
    img = cv2.resize(img, (800, int((height * 800) / width)))
    img_path = os.path.join(SOURCE_PATH, "output.jpg")
    cv2.imwrite(img_path, img)
    # Recognize: ask Vision for both image labels and OCR text (Russian hint).
    client = vision.ImageAnnotatorClient()
    image_context = types.ImageContext(language_hints=["ru"])
    with io.open(img_path, 'rb') as image_file:
        content = image_file.read()
    image = vision.types.Image(content=content)
    response = client.label_detection(image=image)
    labels = response.label_annotations
    response = client.text_detection(image=image, image_context=image_context)
    texts = response.text_annotations
    food_res_list = []
    is_find_img = False
    # Confident image labels (score > 0.7) that are known foods.
    for label in labels:
        desc = label.description.lower()
        score = round(label.score, 5)
        if desc in list_foods and score > 0.7:
            food_res_list.append(desc)
            is_find_img = True
    # OCR hits: allow one arbitrary character on either side of the name.
    for text in texts:
        for line in list_foods:
            if re.fullmatch(r'.?' + line + r'.?', text.description.lower()):
                food_res_list.append(line)
    if is_find_img:
        # Translate English label hits to Russian ("eng:rus" per line).
        # The with-block closes the dictionary file (the original leaked it).
        with codecs.open(os.path.join(DICTIONARIES_PATH, "rec_eng_rus.dict"),
                         'r', 'utf_8_sig') as eng_rus_dict:
            lines = eng_rus_dict.readlines()
        for line in lines:
            pair = line.split(':')
            for i in range(len(food_res_list)):
                if food_res_list[i] == pair[0]:
                    food_res_list[i] = pair[1]
        # NOTE(review): stripping/title-casing only happens when an image
        # label matched — OCR-only results are returned raw, unlike
        # recognize_check(); preserved as-is.
        food_res_list = [line.rstrip() for line in food_res_list]
        for i in range(len(food_res_list)):
            food_res_list[i] = food_res_list[i].title()
    os.remove(img_path)
    return food_res_list
def recognize_check(img_path, list_foods):
    """Extract food names from a receipt photo via Google Vision OCR.

    Args:
        img_path: path to the receipt image; a resized copy is written to
            SOURCE_PATH/output.jpg and removed before returning.
        list_foods: lowercase food names to match against.

    Returns:
        list[str]: title-cased matched food names.
    """
    # Scale image to 800 px wide, preserving aspect ratio.
    img = cv2.imread(img_path)
    height, width = img.shape[:2]
    img = cv2.resize(img, (800, int((height * 800) / width)))
    img_path = os.path.join(SOURCE_PATH, "output.jpg")
    cv2.imwrite(img_path, img)
    # Recognize: OCR only (no label detection), with a Russian language hint.
    client = vision.ImageAnnotatorClient()
    image_context = types.ImageContext(language_hints=["ru"])
    with io.open(img_path, 'rb') as image_file:
        content = image_file.read()
    image = vision.types.Image(content=content)
    response = client.text_detection(image=image, image_context=image_context)
    texts = response.text_annotations
    # OCR hits: allow one arbitrary character on either side of the name.
    food_res_list = []
    for text in texts:
        for line in list_foods:
            if re.fullmatch(r'.?' + line + r'.?', text.description.lower()):
                food_res_list.append(line)
    food_res_list = [line.rstrip() for line in food_res_list]
    for i in range(len(food_res_list)):
        food_res_list[i] = food_res_list[i].title()
    os.remove(img_path)
    return food_res_list
| import codecs
import io
import os
import random
import re
import string
import cv2
from google.cloud import vision_v1p4beta1 as vision
from google.cloud.vision_v1p4beta1 import types
SOURCE_PATH = os.environ['SOURCE_PATH']
DICTIONARIES_PATH = os.environ['DICTIONARIES_PATH']
def generate_random_string(length):
letters = string.ascii_lowercase
rand_string = ''.join(random.choice(letters) for i in range(length))
return rand_string
def load_food_names():
names = [line.rstrip('\n\r') for line in
codecs.open(os.path.join(DICTIONARIES_PATH, "food_recognise.dict"), 'r', 'utf_8_sig')]
return names
def recognize_food(img_path, list_foods):
# Scale image
img = cv2.imread(img_path)
height, width = img.shape[:2]
img = cv2.resize(img, (800, int((height * 800) / width)))
img_path = os.path.join(SOURCE_PATH, "output.jpg")
cv2.imwrite(img_path, img)
# Recognize
client = vision.ImageAnnotatorClient()
image_context = types.ImageContext(language_hints=["ru"])
with io.open(img_path, 'rb') as image_file:
content = image_file.read()
image = vision.types.Image(content=content)
response = client.label_detection(image=image)
labels = response.label_annotations
response = client.text_detection(image=image, image_context=image_context)
texts = response.text_annotations
food_res_list = []
is_find_img = False
for label in labels:
desc = label.description.lower()
score = round(label.score, 5)
if desc in list_foods and score > 0.7:
food_res_list.append(desc)
is_find_img = True
for text in texts:
for line in list_foods:
if re.fullmatch(r'.?' + line + r'.?', text.description.lower()):
food_res_list.append(line)
if is_find_img:
eng_rus_dict = codecs.open(os.path.join(DICTIONARIES_PATH, "rec_eng_rus.dict"), 'r', 'utf_8_sig')
lines = eng_rus_dict.readlines()
for line in lines:
pair = line.split(':')
for i in range(len(food_res_list)):
if food_res_list[i] == pair[0]:
food_res_list[i] = pair[1]
food_res_list = [line.rstrip() for line in food_res_list]
for i in range(len(food_res_list)):
food_res_list[i] = food_res_list[i].title()
os.remove(img_path)
return food_res_list
def recognize_check(img_path, list_foods):
# Scale image
img = cv2.imread(img_path)
height, width = img.shape[:2]
img = cv2.resize(img, (800, int((height * 800) / width)))
img_path = os.path.join(SOURCE_PATH, "output.jpg")
cv2.imwrite(img_path, img)
# Recognize
client = vision.ImageAnnotatorClient()
image_context = types.ImageContext(language_hints=["ru"])
with io.open(img_path, 'rb') as image_file:
content = image_file.read()
image = vision.types.Image(content=content)
response = client.text_detection(image=image, image_context=image_context)
texts = response.text_annotations
food_res_list = []
for text in texts:
for line in list_foods:
if re.fullmatch(r'.?' + line + r'.?', text.description.lower()):
food_res_list.append(line)
food_res_list = [line.rstrip() for line in food_res_list]
for i in range(len(food_res_list)):
food_res_list[i] = food_res_list[i].title()
os.remove(img_path)
return food_res_list
| en | 0.728652 | # Scale image # Recognize # Scale image # Recognize | 2.574511 | 3 |
lapillaga_frappe/lapillaga_frappe/doctype/education/test_education.py | lapillaga/lapillaga_frappe | 0 | 6612808 | # Copyright (c) 2021, <NAME> and Contributors
# See license.txt
# import frappe
import unittest
class TestEducation(unittest.TestCase):
pass
| # Copyright (c) 2021, <NAME> and Contributors
# See license.txt
# import frappe
import unittest
class TestEducation(unittest.TestCase):
pass
| en | 0.613455 | # Copyright (c) 2021, <NAME> and Contributors # See license.txt # import frappe | 1.181452 | 1 |
tests/audio/test_tts_yandex.py | osmr/tgchatbot | 1 | 6612809 | <reponame>osmr/tgchatbot
from tgchatbot.audio.tts_yandex import TtsYandex
import pytest
@pytest.mark.parametrize("lang", ["en", "ru", "tr"])
def test_tts_yandex(lang, pytestconfig):
oauth_token = pytestconfig.getoption("yandex_oauth_token")
folder_id = pytestconfig.getoption("yandex_folder_id")
iam_token = pytestconfig.getoption("yandex_iam_token")
if ((oauth_token is not None) or (iam_token is not None)) and (folder_id is not None):
test_dict = {
"en": "Hello",
"ru": "Привет",
"tr": "Merhaba",
}
text = test_dict[lang]
model = TtsYandex(
lang=lang,
oauth_token=oauth_token,
iam_token=iam_token,
folder_id=folder_id)
audio_data = model(text)
assert (len(audio_data) > 0)
| from tgchatbot.audio.tts_yandex import TtsYandex
import pytest
@pytest.mark.parametrize("lang", ["en", "ru", "tr"])
def test_tts_yandex(lang, pytestconfig):
oauth_token = pytestconfig.getoption("yandex_oauth_token")
folder_id = pytestconfig.getoption("yandex_folder_id")
iam_token = pytestconfig.getoption("yandex_iam_token")
if ((oauth_token is not None) or (iam_token is not None)) and (folder_id is not None):
test_dict = {
"en": "Hello",
"ru": "Привет",
"tr": "Merhaba",
}
text = test_dict[lang]
model = TtsYandex(
lang=lang,
oauth_token=oauth_token,
iam_token=iam_token,
folder_id=folder_id)
audio_data = model(text)
assert (len(audio_data) > 0) | none | 1 | 2.345407 | 2 | |
dwave/cloud/api/constants.py | stjordanis/dwave-cloud-client | 0 | 6612810 | <reponame>stjordanis/dwave-cloud-client
# Copyright 2021 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
# Default SAPI endpoint
DEFAULT_API_ENDPOINT = 'https://cloud.dwavesys.com/sapi/'
class ProblemStatus(str, enum.Enum):
"""Solver API problem status values.
Initially a problem is in the PENDING state. When the D-Wave system starts
to process a problem, its state changes to IN_PROGRESS. After completion,
the problem status changes to either COMPLETED or FAILED (if an error
occurred). COMPLETED, FAILED, and CANCELLED are all terminal states.
After a problem enters a terminal state, its status does not change. Users
can cancel a problem at any time before it reaches its terminal state.
"""
PENDING = "PENDING"
IN_PROGRESS = "IN_PROGRESS"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
CANCELLED = "CANCELLED"
class ProblemEncodingFormat(str, enum.Enum):
QP = "qp"
BQ = "bq" # deprecated for submission
REF = "ref"
class AnswerEncodingFormat(str, enum.Enum):
QP = "qp"
BQ = "bq" # dimod (de-)serialization-based
class ProblemType(str, enum.Enum):
ISING = "ising"
QUBO = "qubo"
BQM = "bqm"
DQM = "dqm"
| # Copyright 2021 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
# Default SAPI endpoint
DEFAULT_API_ENDPOINT = 'https://cloud.dwavesys.com/sapi/'
class ProblemStatus(str, enum.Enum):
"""Solver API problem status values.
Initially a problem is in the PENDING state. When the D-Wave system starts
to process a problem, its state changes to IN_PROGRESS. After completion,
the problem status changes to either COMPLETED or FAILED (if an error
occurred). COMPLETED, FAILED, and CANCELLED are all terminal states.
After a problem enters a terminal state, its status does not change. Users
can cancel a problem at any time before it reaches its terminal state.
"""
PENDING = "PENDING"
IN_PROGRESS = "IN_PROGRESS"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
CANCELLED = "CANCELLED"
class ProblemEncodingFormat(str, enum.Enum):
QP = "qp"
BQ = "bq" # deprecated for submission
REF = "ref"
class AnswerEncodingFormat(str, enum.Enum):
QP = "qp"
BQ = "bq" # dimod (de-)serialization-based
class ProblemType(str, enum.Enum):
ISING = "ising"
QUBO = "qubo"
BQM = "bqm"
DQM = "dqm" | en | 0.858782 | # Copyright 2021 D-Wave Systems Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Default SAPI endpoint Solver API problem status values. Initially a problem is in the PENDING state. When the D-Wave system starts to process a problem, its state changes to IN_PROGRESS. After completion, the problem status changes to either COMPLETED or FAILED (if an error occurred). COMPLETED, FAILED, and CANCELLED are all terminal states. After a problem enters a terminal state, its status does not change. Users can cancel a problem at any time before it reaches its terminal state. # deprecated for submission # dimod (de-)serialization-based | 2.085564 | 2 |
pyansiwrapper/runner.py | stonelake/pyansiwrapper | 0 | 6612811 | <filename>pyansiwrapper/runner.py
from pyansiwrapper.core.task_executor import TaskExecutor
class Task(object):
"""
Describes a task.
"""
def __init__(self, module_name):
self.module_name = module_name
self.module_args = None
self.register_var = None
@classmethod
def module(cls, name):
"""
Creates task with module.
"""
return cls(name)
def args(self, module_args):
"""
Adds module arguments
Can be a dict as well as the psoitional argument (value)
"""
self.module_args = module_args
return self
def register(self, var):
"""
Registers variable.
"""
self.register_var = var
return self
def to_task_dict(self):
"""
Converts task to the dict.
"""
return dict(action=dict(module=self.module_name,
args=self.module_args or ()),
register=self.register_var)
class AdHoc(object):
"""
Run several tasks in a row.
"""
def __init__(self, inventory_file=None):
self.inventory_file = inventory_file
self.verbose = 4
self.hosts_names = None
self._tasks = []
def hosts(self, hosts):
"""
Specifies the hosts pattern to run tasks on.
"""
self.hosts_names = hosts
return self
def task(self, task):
"""
Adds new task to the play
"""
self._tasks.append(task.to_task_dict())
return self
def run(self):
"""
Starts the play.
"""
executor = TaskExecutor(self.hosts_names,
inventory_file=self.inventory_file,
verbose=self.verbose)
executor.add_tasks(self._tasks)
executor.run()
class Playbook(object):
"""
The playbook runner.
"""
pass
| <filename>pyansiwrapper/runner.py
from pyansiwrapper.core.task_executor import TaskExecutor
class Task(object):
"""
Describes a task.
"""
def __init__(self, module_name):
self.module_name = module_name
self.module_args = None
self.register_var = None
@classmethod
def module(cls, name):
"""
Creates task with module.
"""
return cls(name)
def args(self, module_args):
"""
Adds module arguments
Can be a dict as well as the psoitional argument (value)
"""
self.module_args = module_args
return self
def register(self, var):
"""
Registers variable.
"""
self.register_var = var
return self
def to_task_dict(self):
"""
Converts task to the dict.
"""
return dict(action=dict(module=self.module_name,
args=self.module_args or ()),
register=self.register_var)
class AdHoc(object):
"""
Run several tasks in a row.
"""
def __init__(self, inventory_file=None):
self.inventory_file = inventory_file
self.verbose = 4
self.hosts_names = None
self._tasks = []
def hosts(self, hosts):
"""
Specifies the hosts pattern to run tasks on.
"""
self.hosts_names = hosts
return self
def task(self, task):
"""
Adds new task to the play
"""
self._tasks.append(task.to_task_dict())
return self
def run(self):
"""
Starts the play.
"""
executor = TaskExecutor(self.hosts_names,
inventory_file=self.inventory_file,
verbose=self.verbose)
executor.add_tasks(self._tasks)
executor.run()
class Playbook(object):
"""
The playbook runner.
"""
pass
| en | 0.820864 | Describes a task. Creates task with module. Adds module arguments Can be a dict as well as the psoitional argument (value) Registers variable. Converts task to the dict. Run several tasks in a row. Specifies the hosts pattern to run tasks on. Adds new task to the play Starts the play. The playbook runner. | 2.842739 | 3 |
genfigs.py | approbatory/rikura | 1 | 6612812 | from example import *
if False:
figure()
h = 70
rnn, costs = example(hidden=h, examples=1000, epochs=100, eta=1, rnn=None, binary=True, progress=True)
plot(costs)
title('Binary RNN with %d Hidden Units' % h)
xlabel('Epoch #')
ylabel('Discrete Error')
savefig('error_curve.svg')
figure()
hist(ravel(rnn.aux['h']))
title('Histogram of Recurrent Weight Average Values')
savefig('histogram.svg')
if True:
figure()
hids, resids = experiment()
plots(hids, resids[:,-1])
title('Residual Error for BRNN vs. Hidden Layer Size')
xlabel('Hidden layer size')
ylabel('Residual error')
savefig('residual_error.svg')
| from example import *
if False:
figure()
h = 70
rnn, costs = example(hidden=h, examples=1000, epochs=100, eta=1, rnn=None, binary=True, progress=True)
plot(costs)
title('Binary RNN with %d Hidden Units' % h)
xlabel('Epoch #')
ylabel('Discrete Error')
savefig('error_curve.svg')
figure()
hist(ravel(rnn.aux['h']))
title('Histogram of Recurrent Weight Average Values')
savefig('histogram.svg')
if True:
figure()
hids, resids = experiment()
plots(hids, resids[:,-1])
title('Residual Error for BRNN vs. Hidden Layer Size')
xlabel('Hidden layer size')
ylabel('Residual error')
savefig('residual_error.svg')
| none | 1 | 2.839485 | 3 | |
ws_sdk/tests/test_app.py | meramsey/ws-sdk | 0 | 6612813 | <filename>ws_sdk/tests/test_app.py
import json
from datetime import datetime
from unittest import TestCase
from mock import patch
import logging
from ws_sdk import ws_utilities
from ws_sdk.ws_constants import *
from ws_sdk.app import WSApp
from ws_sdk.ws_errors import *
# Logger for the test module itself.
logger = logging.getLogger(__name__)
# Separate handle on the SDK's own logger (named after WSApp's module) so the
# SDK's internal DEBUG output is visible while the tests run.
ws_sdk_web = logging.getLogger(WSApp.__module__)
ws_sdk_web.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
class TestWS(TestCase):
valid_token = "<KEY>"
def setUp(self):
logging.basicConfig(level=logging.DEBUG)
self.ws_app = WSApp(url="app", user_key=self.valid_token,
token=self.valid_token, token_type=ScopeTypes.ORGANIZATION)
def test_ws_constructor_invalid_user_key(self):
with self.assertRaises(WsSdkTokenError):
WSApp(user_key="INCORRECT", token=self.valid_token)
def test_set_token_in_body(self):
ret = self.ws_app.set_token_in_body()
self.assertEqual(ret[0], ScopeTypes.ORGANIZATION)
def test_spdx_lic_dict(self):
ret = self.ws_app.spdx_lic_dict
self.assertEqual(ret, ws_utilities.get_spdx_license_dict())
@patch('ws_sdk.app.WSApp.get_scope_type_by_token')
def test_set_token_in_body_with_token(self, mock_get_scope_type_by_token):
mock_get_scope_type_by_token.return_value = ScopeTypes.ORGANIZATION
ret = self.ws_app.set_token_in_body(token="TOKEN")
self.assertEqual(ret[0], ScopeTypes.ORGANIZATION)
def test_set_token_in_body_with_tuple(self):
ret = self.ws_app.set_token_in_body(token=(self.ws_app.token, ScopeTypes.ORGANIZATION))
self.assertEqual(ret[0], ScopeTypes.ORGANIZATION)
def test_report_metadata_decorator(self):
bin_type_ret = WSApp.get_container_vulnerability(WSApp, ReportsMetaData.REPORT_BIN_TYPE)
scope_type_ret = WSApp.get_container_vulnerability(WSApp, ReportsMetaData.REPORT_SCOPE)
self.assertEqual(bin_type_ret, "xlsx") and self.assertEqual(scope_type_ret, [ScopeTypes.ORGANIZATION])
def test_get_reports_meta_data(self):
ret = WSApp.get_reports_meta_data()
self.assertIsInstance(ret, list) and self.assertGreaterEqual(len(ret), 20)
def test_get_report_types_with_filter(self):
ret = WSApp.get_report_types(scope=ScopeTypes.ORGANIZATION)
self.assertIsInstance(ret, list) and self.assertGreaterEqual(len(ret), 17)
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api(self, mock_post):
mock_post.return_value.status_code = 200
mock_post.return_value.text = '{"key": "val"}'
res = self.ws_app.call_ws_api("api_call")
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.json.loads')
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api__bytes(self, mock_post, mock_json_loads):
mock_post.return_value.status_code = 200
mock_post.return_value.content = bytes()
mock_post.return_value.encoding = None
mock_json_loads.side_effect = json.JSONDecodeError(doc="DOC", pos=1, msg="Error")
res = self.ws_app.call_ws_api("api_call")
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.json.loads')
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api__text(self, mock_post, mock_json_loads):
mock_post.return_value.status_code = 200
mock_post.return_value.encoding = 'UTF-8'
mock_post.return_value.text = "TEXT"
mock_json_loads.side_effect = json.JSONDecodeError(doc="DOC", pos=1, msg="Error")
res = self.ws_app.call_ws_api("api_call")
self.assertIsInstance(res, str)
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api_timeout_exception(self, mock_post):
mock_post.side_effect = TimeoutError()
with self.assertRaises(TimeoutError):
self.ws_app.call_ws_api("api_call")
@patch('ws_sdk.app.WSApp.call_ws_api')
def test__generic_get(self, mock_call_ws_api):
mock_call_ws_api.return_value = []
res = self.ws_app._generic_get(token_type=self.ws_app.token_type, get_type='suffix', kv_dict={})
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_organization_details')
@patch('ws_sdk.app.WSApp.get_products')
@patch('ws_sdk.app.WSApp.get_projects')
def test_get_scopes_as_org(self, mock_get_projects, mock_get_products, mock_get_organization_details):
mock_get_projects.return_value = [{'name': "PROD_NAME", 'token': "TOKEN"}]
mock_get_products.return_value = [{'name': "PROJ_NAME", 'token': "TOKEN"}]
mock_get_organization_details.return_value = {'orgName': "ORG_NAME"}
res = self.ws_app.get_scopes(token="TOKEN")
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_name')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_scopes_as_product(self, mock_generic_get, mock_get_name):
mock_generic_get.return_value = {'projectVitals': [{}]}
mock_get_name.return_value = "PROD_NAME"
self.ws_app.token_type = ScopeTypes.PRODUCT
res = self.ws_app.get_scopes()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_alerts(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_report_on_product(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_alerts(report=True, token="PROD_TOKEN")
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_by_type(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'alerts': {}}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
from_date = datetime.now()
to_date = datetime.now()
res = self.ws_app.get_alerts(alert_type='SECURITY_VULNERABILITY', from_date=from_date, to_date=to_date)
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_alerts_by_false_type(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_alerts(alert_type='FALSE')
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_all(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.return_value = {'alerts': []}
res = self.ws_app.get_alerts()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_ignored(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.return_value = {'alerts': []}
res = self.ws_app.get_alerts(ignored=True)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_ignored_report(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.return_value = {'alerts': []}
res = self.ws_app.get_alerts(ignored=True, report=True)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_alerts')
def test_get_ignored_alerts(self, mock_get_alerts):
mock_get_alerts.return_value = []
res = self.ws_app.get_ignored_alerts()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_resolved_report(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.return_value = bytes()
res = self.ws_app.get_alerts(resolved=True, report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_alerts_just_resolved(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_alerts(resolved=True)
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.get_alerts')
def test_get_resolved_alerts(self, mock_get_alerts):
mock_get_alerts.return_value = bytes()
res = self.ws_app.get_resolved_alerts(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_by_project_tag(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.return_value = {'alerts': []}
res = self.ws_app.get_alerts(tags={"key": "value"})
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_alerts_by_project_tag_product_token(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_alerts(tags={"key": "value"}, token=ScopeTypes.PRODUCT)
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_alerts_by_project_2_tags(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_alerts(tags={'k1': "v2", 'k2': "v2"})
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_products(self, mock_generic_get):
mock_generic_get.return_value = {'productVitals': [{'type': ScopeTypes.PRODUCT}]}
res = self.ws_app.get_products()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_products')
def test_get_product_by_name(self, mock_get_products):
mock_get_products.return_value = {'name': "PROD_NAME", 'token': "TOKEN"}
ret_d = {'name': "PROD_NAME", 'token': "TOKEN"}
res = self.ws_app.get_products(name="PROD_NAME")
self.assertEqual(res, ret_d)
@patch('ws_sdk.app.WSApp.get_products')
def test_get_product_by_token(self, mock_get_products):
mock_get_products.return_value = {'name': "PROD_NAME", 'token': "TOKEN"}
ret_d = {'name': "PROD_NAME", 'token': "TOKEN"}
res = self.ws_app.get_products(token="TOKEN")
self.assertEqual(res, ret_d)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_projects(self, mock_generic_get):
mock_generic_get.return_value = {'productVitals': []}
res = self.ws_app.get_projects()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_projects_no_inc(self, mock_generic_get):
mock_generic_get.return_value = {'projectVitals': []}
res = self.ws_app.get_projects(include_prod_proj_names=False)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_organization_details(self, mock_generic_get):
mock_generic_get.return_value = {"orgName": "ORG_NAME", "orgToken": "ORG_TOKEN"}
res = self.ws_app.get_organization_details()
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.get_organization_details')
def test_get_name_as_org(self, mock_get_organization_details):
mock_get_organization_details.return_value = {'orgName': "ORG_NAME"}
res = self.ws_app.get_name()
self.assertIsInstance(res, str)
@patch('ws_sdk.app.WSApp.get_tags')
def test_get_name_as_prod(self, mock_get_tags):
self.ws_app.token_type = ScopeTypes.PRODUCT
mock_get_tags.return_value = [{"name": "PROD_NAME"}]
res = self.ws_app.get_name()
self.assertIsInstance(res, str)
def test_get_organization_details_not_org(self):
self.ws_app.token_type = ScopeTypes.PRODUCT
res = self.ws_app.get_organization_details()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory__product_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_inventory(token="PRODUCT", report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_project(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': []}
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_inventory(token="PROJECT", include_in_house_data=False)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_with_filter(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': [{'name': "FILTERED_LIB"}]}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory(lib_name="FILTERED_LIB")
self.assertIsInstance(res, list) and self.assertEqual(len(res), 1)
@patch('ws_sdk.app.WSApp.get_inventory')
def test_get_lib(self, mock_get_inventory):
ret_d = {'keyUuid': "LIB_UUID"}
mock_get_inventory.return_value = [ret_d]
ret = self.ws_app.get_lib(name="LIB_UUID")
self.assertEqual(ret, ret_d)
@patch('ws_sdk.app.WSApp.get_inventory')
def test_get_lib_not_found(self, mock_get_inventory):
mock_get_inventory.return_value = []
with self.assertRaises(WsSdkServerInvalidLibName):
self.ws_app.get_lib(name="LIB_UUID")
@patch('ws_sdk.app.WSApp.get_lib')
def test_get_lib_uuid(self, mock_get_lib):
mock_get_lib.return_value = {'keyUuid': "LIB_UUID"}
ret = self.ws_app.get_lib_uuid(name="LIB_UUID")
self.assertEqual(ret, "LIB_UUID")
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_inc_in_house(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory(include_in_house_data=True)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_with_deps(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': [{'dependencies': [{'filename': 'DEP_FILENAME-1.2.3.jar'}],
'filename': 'FILENAME-4.5.6.jar'}]}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory(with_dependencies=True)
self.assertIsInstance(res, list) and self.assertEqual(len(res), 2)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_lib_dependencies(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = []
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_lib_dependencies(token="TOKEN", key_uuid="KEY_UUID")
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_scopes')
def test_get_scopes_from_name(self, mock_get_scopes):
mock_get_scopes.return_value = [{'name': "NAME", 'token': "TOKEN"}]
res = self.ws_app.get_scopes_from_name(name="NAME")
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_scopes')
def test_get_scopes_from_name_not_found(self, mock_get_scopes):
mock_get_scopes.return_value = []
res = self.ws_app.get_scopes_from_name("NAME")
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_scope_by_token')
def test_get_scope_type_by_token(self, mock_get_scope_by_token):
mock_get_scope_by_token.return_value = {'type': "TOKEN"}
res = self.ws_app.get_scope_type_by_token(token="TOKEN")
self.assertEqual(res, "TOKEN")
@patch('ws_sdk.app.WSApp.get_scope_by_token')
def test_get_scope_name_by_token(self, mock_get_scope_by_token):
mock_get_scope_by_token.return_value = {'name': "NAME"}
res = self.ws_app.get_scope_name_by_token(token="TOKEN")
self.assertEqual(res, "NAME")
@patch('ws_sdk.app.WSApp.get_scope_by_token')
def test_get_name_from_token(self, mock_get_scope_by_token):
mock_get_scope_by_token.return_value = {'name': "NAME"}
res = self.ws_app.get_scope_name_by_token(token="TOKEN")
self.assertEqual(res, "NAME")
@patch('ws_sdk.app.WSApp.get_scopes_from_name')
def test_get_tokens_from_name(self, mock_get_scopes_from_name):
mock_get_scopes_from_name.return_value = [{'name': "NAME", 'token': "TOKEN"}]
res = self.ws_app.get_tokens_from_name('NAME')
self.assertIsInstance(res, list) and self.assertDictEqual(res[0], {'name': "NAME", 'token': "TOKEN"})
@patch('ws_sdk.app.WSApp.get_projects')
@patch('ws_sdk.app.WSApp.get_products')
def test_get_scopes_by_token(self, mock_get_products, mock_get_projects):
mock_get_projects.return_value = [{'token': "TOKEN"}]
mock_get_products.return_value = []
res = self.ws_app.get_scope_by_token(token="TOKEN")
self.assertIn('token', res) and self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_products')
def test_get_scopes_by_token_of_product(self, mock_get_products):
mock_get_products.return_value = [{'token': "TOKEN"}]
res = self.ws_app.get_scope_by_token(token="TOKEN", token_type=ScopeTypes.PRODUCT)
self.assertIn('token', res) and self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_projects')
def test_get_scopes_by_token_as_product(self, mock_get_projects):
self.ws_app.token_type = ScopeTypes.PRODUCT
mock_get_projects.return_value = [{'token': "TOKEN"}]
res = self.ws_app.get_scope_by_token(token="TOKEN")
self.assertIn('token', res) and self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_scopes_from_name')
def test_get_token_from_name_not_found(self, mock_get_scopes_from_name):
mock_get_scopes_from_name.return_value = []
res = self.ws_app.get_tokens_from_name('NAME_NOT_FOUND')
self.assertIsInstance(res, list) and self.assertEqual(len(res), 0)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_vulnerability(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'vulnerabilities': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_vulnerability()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_vulnerability_cluster(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'vulnerabilities': []}
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_vulnerability(cluster=True, token=ScopeTypes.PRODUCT)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_vulnerability_cluster_as_org(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'vulnerabilities': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_vulnerability(cluster=True)
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_vulnerability_report_xlsx_of_product(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_vulnerability(token="PRODUCT", report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.get_vulnerability')
def test_get_vulnerabilities_per_lib(self, mock_get_vulnerability):
mock_get_vulnerability.return_value = []
res = self.ws_app.get_vulnerabilities_per_lib()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_change_log(self, mock_generic_get):
mock_generic_get.return_value = {'changes': []}
res = self.ws_app.get_change_log()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_change_log_start_date(self, mock_generic_get):
mock_generic_get.return_value = {'changes': []}
res = self.ws_app.get_change_log(start_date=datetime.now())
self.assertIsInstance(res, list)
@patch('ws_sdk.ws_constants.ENTITY_TYPES')
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_assignments(self, mock_generic_get, mock_set_token_in_body, mock_entity_types):
mock_generic_get.return_value = {}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_entity_types.return_value = {}
res = self.ws_app.get_user_group_assignments(entity_type=USERS,
role_type=RoleTypes.PRODUCT_INTEGRATOR)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_assignments_project(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {}
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_user_group_assignments()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_risk(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_risk()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_risk_project(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_risk()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_due_diligence(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_due_diligence()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_attributes(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_attributes()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_attributes__project(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_attributes()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_licenses(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_licenses(full_spdx=True)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_source_files(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'sourceFiles': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_source_files()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_source_files')
def test_get_source_file_inventory(self, mock_get_source_files):
mock_get_source_files.return_value = bytes()
res = self.ws_app.get_source_file_inventory()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_source_files_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_source_files(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.get_in_house_libraries')
def test_get_in_house(self, mock_get_in_house_libraries):
mock_get_in_house_libraries.return_value = bytes()
res = self.ws_app.get_in_house()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_in_house_libraries(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_in_house_libraries()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_in_house_libraries_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_in_house_libraries(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_library_location(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_library_location()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_library_location_on_project(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraryLocations': []}
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_library_location()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_license_compatibility_org_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_license_compatibility(report=True)
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_license_compatibility(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_license_compatibility()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_license_compatibility_report_prod(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_license_compatibility(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_license_compatibility_org(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_license_compatibility()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_licenses_histogram(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'licenseHistogram': {}}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_licenses(histogram=True)
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_attribution(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = dict()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_attribution(reporting_aggregation_mode="BY_COMPONENT", token="TOKEN")
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_attribution_bin(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_attribution(reporting_aggregation_mode="BY_COMPONENT", token="TOKEN", report=True, export_format="TXT")
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_attribution_on_org(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_attribution(reporting_aggregation_mode="BY_COMPONENT", token="TOKEN")
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_effective_licenses(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_effective_licenses()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_effective_licenses_project(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_effective_licenses()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_bugs(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_bugs()
self.assertIsInstance(res, bytes)
def test_get_bugs_not_report(self):
res = self.ws_app.get_bugs(report=False)
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_request_history(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_request_history()
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_request_history_plugin(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_request_history(plugin=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_request_history_plugin_project(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (PROJECT, {})
res = self.ws_app.get_request_history(plugin=True)
self.assertIs(res, None)
def test_get_request_history_not_report(self):
res = self.ws_app.get_request_history(report=False)
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.get_projects')
def test_get_project(self, mock_get_projects):
mock_get_projects.return_value = [{'token': "TOKEN"}]
res = self.ws_app.get_project(token="TOKEN")
self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_projects')
def test_get_project_not_found(self, mock_get_projects):
mock_get_projects.return_value = [{'token': "TOKEN"}]
with self.assertRaises(WsSdkServerMissingTokenError):
res = self.ws_app.get_project(token="NOT_FOUND")
@patch('ws_sdk.app.WSApp.get_scope_by_token')
def test_get_product_of_project(self, mock_get_scope_by_token):
mock_get_scope_by_token.return_value = {'token': "TOKEN",
'productToken': "PRODUCTTOKEN",
'type': PROJECT}
res = self.ws_app.get_product_of_project(token="TOKEN")
self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_scope_name_by_token')
@patch('ws_sdk.app.WSApp.call_ws_api')
@patch('ws_sdk.app.WSApp.get_project')
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_delete_scope(self, mock_set_token_in_body, mock_get_project, mock_call_ws_api,
mock_get_scope_name_by_token):
mock_set_token_in_body.return_value = (PROJECT, {})
mock_get_project.return_value = {'token': "TOKEN", 'productToken': "PROD_TOKEN"}
mock_call_ws_api.return_value = {}
mock_get_scope_name_by_token.return_value = "PROJECT_NAME"
res = self.ws_app.delete_scope(token="TOKEN")
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_users(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'users': []}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_users()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_users_as_product(self, mock_set_token_in_body):
self.ws_app.token_type = ScopeTypes.PRODUCT
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_users()
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_get_libraries(self, mock_call_ws_api):
mock_call_ws_api.return_value = {'libraries': []}
res = self.ws_app.get_libraries(search_value="LIB_NAME", version="VERSION", search_only_name=True)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_inventory')
def test_get_libraries_not_global(self, mock_get_inventory):
mock_get_inventory.return_value = []
res = self.ws_app.get_libraries(search_value="LIB_NAME", global_search=False)
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_library_detailed(self, mock_generic_get):
mock_generic_get.return_value = {"librariesInformation": []}
res = self.ws_app.get_library_details(name="NAME", lib_type="Source Library", version="VERSION", languages=["java"])
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_tags_as_org(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.side_effect = [{'productTags': []}, {'projectTags': []}]
res = self.ws_app.get_tags()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_tags_as_prod(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'projectTags': []}
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_tags()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_set_alerts_status(self, mock_call_ws_api, mock_set_token_in_body):
mock_call_ws_api.return_value = {}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.set_alerts_status(alert_uuids="UUID", status=AlertStatus.AL_STATUS_IGNORED)
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_set_alerts_status_no_uuids(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
with self.assertLogs(level='INFO') as cm:
self.ws_app.set_alerts_status(alert_uuids=[], status=AlertStatus.AL_STATUS_ACTIVE)
self.assertEqual(cm.output, ["ERROR:ws_sdk.app:At least 1 alert uuid must be provided"])
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_set_alerts_status_invalid_status(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
with self.assertLogs(level='INFO') as cm:
self.ws_app.set_alerts_status(alert_uuids=["UUID"], status="INVALID")
self.assertEqual(cm.output, ['ERROR:ws_sdk.app:INVALID status is invalid. Must be \"Ignored\" or \"Active\"'])
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_lib_notice(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = "TEXT"
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_lib_notice(as_text=True)
self.assertEqual(res, "TEXT")
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_lib_notice_not_product(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
with self.assertRaises(WsSdkServerTokenTypeError):
self.ws_app.get_lib_notice()
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_set_lib_notice(self, mock_call_ws_api):
mock_call_ws_api.return_value = []
res = self.ws_app.set_lib_notice(lib_uuid='LIB_UUID', text=[{"k1": "v1", "k2": "v2"}, {"k1": "v1", "k2": "v2"}],
reference='REFERENCE')
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_policies(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'policies': [{'policyContext': 'DOMAIN'}]}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_policies()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.call_ws_api')
@patch('ws_sdk.app.WSApp.get_users')
def test_create_user(self, mock_get_users, mock_call_ws_api):
mock_get_users.return_value = []
mock_call_ws_api.return_value = {}
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.create_user(name="NAME", email="<EMAIL>", inviter_email="<EMAIL>")
self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Token: 'None' is a organization",
"DEBUG:ws_sdk.app:Creating User: NAME email : <EMAIL> with Inviter email: <EMAIL>"])
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_create_product(self, mock_call_ws_api):
mock_call_ws_api.return_value = {}
ret = self.ws_app.create_product(name="NEW_PRODUCT")
self.assertEqual(ret, {})
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_create_project(self, mock_call_ws_api, mock_set_token_in_body):
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
mock_call_ws_api.return_value = {}
ret = self.ws_app.create_project(name="NEW_PROJECT", product_token="PROD_TOKEN")
self.assertEqual(ret, {})
def test_create_project_prod_token_and_name(self):
with self.assertLogs(level='DEBUG') as cm:
ret = self.ws_app.create_project(name="NEW_PROJECT", product_token="TOKEN", product_name="NAME")
self.assertEqual(cm.output, ["ERROR:ws_sdk.app:Unable to create project: 'NEW_PROJECT'. Only project token or project name is allowed"])
def test_create_project_no_prod_token(self):
with self.assertLogs(level='DEBUG') as cm:
ret = self.ws_app.create_project(name="NEW_PROJECT")
self.assertEqual(cm.output, ["ERROR:ws_sdk.app:Unable to create project: 'NEW_PROJECT'. Missing product value"])
@patch('ws_sdk.app.WSApp.call_ws_api')
@patch('ws_sdk.app.WSApp.get_users')
def test_delete_user(self, mock_get_users, mock_call_ws_api):
mock_get_users.return_value = [{"name": "USERNAME"}]
mock_call_ws_api.return_value = {}
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.delete_user(email="<EMAIL>")
self.assertEqual(cm.output, [
f"DEBUG:ws_sdk.app:Deleting user email: <EMAIL> from Organization Token: {self.ws_app.token}"])
@patch('ws_sdk.app.WSApp.call_ws_api')
@patch('ws_sdk.app.WSApp.get_groups')
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_create_group(self, mock_set_token_in_body, mock_get_groups, mock_call_ws_api):
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
mock_get_groups.return_value = []
mock_call_ws_api.return_value = {}
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.create_group(name="GRP_NAME")
self.assertEqual(cm.output, [f"DEBUG:ws_sdk.app:Creating Group: GRP_NAME"])
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp.get_users')
@patch('ws_sdk.app.WSApp.get_groups')
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_assign_user_to_group(self, mock_call_ws_api, mock_get_groups, mock_get_users, mock_set_token_in_body):
mock_call_ws_api.return_value = []
mock_get_groups.side_effect = [[{"name": "GRP_NAME"}], []]
mock_get_users.return_value = [{"name": "USERNAME"}]
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.assign_user_to_group(user_email="EMAIL", group_name="GRP_NAME")
self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Assigning user's Email: EMAIL to Group: GRP_NAME"])
@patch('ws_sdk.app.WSApp.get_groups')
@patch('ws_sdk.app.WSApp._generic_set')
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_assign_to_scope(self, mock_set_token_in_body, mock_generic_set, mock_get_groups):
mock_generic_set.return_value = []
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
mock_get_groups.side_effect = [[{"name": "GRP_NAME"}], []]
group_name = "GRP_NAME"
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.assign_to_scope(role_type=RoleTypes.P_INTEGRATORS, group=group_name)
self.assertEqual(cm.output, [
f"DEBUG:ws_sdk.app:Assigning User(s): None Group(s): {group_name} to Role: {RoleTypes.P_INTEGRATORS}"])
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_invite_user_to_web_advisor(self, mock_call_ws_api):
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.invite_user_to_web_advisor(user_email="<EMAIL>")
self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Token: 'None' is a organization",
"DEBUG:ws_sdk.app:Inviting email: '<EMAIL>' to Web Advisor"])
@patch('ws_sdk.app.WSApp.call_ws_api')
def test_regenerate_service_user_key(self, mock_call_ws_api):
mock_call_ws_api.return_value = {'userToken': self.valid_token}
res = self.ws_app.regenerate_service_user_key(service_user_key=self.valid_token)
self.assertEqual(res, self.valid_token)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_integration_token(self, mock_generic_get):
mock_generic_get.return_value = self.valid_token
ret = self.ws_app.get_integration_token(integration_type=IntegrationTypes.INT_1)
self.assertEqual(ret, self.valid_token)
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_last_scan_process_status(self, mock_generic_get):
mock_generic_get.return_value = {'requestState': "FINISHED"}
ret = self.ws_app.get_last_scan_process_status(request_token="<KEY>")
self.assertEqual(ret, "FINISHED")
@patch('ws_sdk.app.WSApp.call_ws_api')
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_change_origin_of_source_lib(self, mock_set_token_in_body, mock_call_api):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
with self.assertLogs(level='DEBUG') as cm:
res = self.ws_app.change_origin_of_source_lib(lib_uuid="LIB_UUID", source_files_sha1=["SHA1_1", "SHA1_2"])
self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Changing original of source library: 'LIB_UUID'"])
def test_generate_whitesource_url(self):
id = 42
type = 'project'
ret = self.ws_app.generate_whitesource_url(id, type)
ret_d = f"{self.ws_app.url}/Wss/WSS.html/Wss/WSS.html#!{type};id={id}"
self.assertEqual(ret, ret_d)
if __name__ == '__main__':
    # BUG FIX: the original called `TestCase.unittest.main()`. `TestCase` has no
    # `unittest` attribute (only `from unittest import TestCase` is imported),
    # so running this file directly raised AttributeError instead of the tests.
    import unittest
    unittest.main()
# filename: ws_sdk/tests/test_app.py
import json
from datetime import datetime
from unittest import TestCase
from mock import patch
import logging
from ws_sdk import ws_utilities
from ws_sdk.ws_constants import *
from ws_sdk.app import WSApp
from ws_sdk.ws_errors import *
logger = logging.getLogger(__name__)
ws_sdk_web = logging.getLogger(WSApp.__module__)
ws_sdk_web.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
class TestWS(TestCase):
valid_token = "<KEY>"
    def setUp(self):
        """Create a fresh organization-scoped WSApp client before each test."""
        # DEBUG-level logging so assertLogs-based tests can capture SDK output.
        logging.basicConfig(level=logging.DEBUG)
        # `valid_token` doubles as user key and org token; url="app" is the
        # short environment alias form accepted by the WSApp constructor.
        self.ws_app = WSApp(url="app", user_key=self.valid_token,
                            token=self.valid_token, token_type=ScopeTypes.ORGANIZATION)
def test_ws_constructor_invalid_user_key(self):
with self.assertRaises(WsSdkTokenError):
WSApp(user_key="INCORRECT", token=self.valid_token)
def test_set_token_in_body(self):
ret = self.ws_app.set_token_in_body()
self.assertEqual(ret[0], ScopeTypes.ORGANIZATION)
def test_spdx_lic_dict(self):
ret = self.ws_app.spdx_lic_dict
self.assertEqual(ret, ws_utilities.get_spdx_license_dict())
@patch('ws_sdk.app.WSApp.get_scope_type_by_token')
def test_set_token_in_body_with_token(self, mock_get_scope_type_by_token):
mock_get_scope_type_by_token.return_value = ScopeTypes.ORGANIZATION
ret = self.ws_app.set_token_in_body(token="TOKEN")
self.assertEqual(ret[0], ScopeTypes.ORGANIZATION)
def test_set_token_in_body_with_tuple(self):
ret = self.ws_app.set_token_in_body(token=(self.ws_app.token, ScopeTypes.ORGANIZATION))
self.assertEqual(ret[0], ScopeTypes.ORGANIZATION)
def test_report_metadata_decorator(self):
bin_type_ret = WSApp.get_container_vulnerability(WSApp, ReportsMetaData.REPORT_BIN_TYPE)
scope_type_ret = WSApp.get_container_vulnerability(WSApp, ReportsMetaData.REPORT_SCOPE)
self.assertEqual(bin_type_ret, "xlsx") and self.assertEqual(scope_type_ret, [ScopeTypes.ORGANIZATION])
def test_get_reports_meta_data(self):
ret = WSApp.get_reports_meta_data()
self.assertIsInstance(ret, list) and self.assertGreaterEqual(len(ret), 20)
def test_get_report_types_with_filter(self):
ret = WSApp.get_report_types(scope=ScopeTypes.ORGANIZATION)
self.assertIsInstance(ret, list) and self.assertGreaterEqual(len(ret), 17)
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api(self, mock_post):
mock_post.return_value.status_code = 200
mock_post.return_value.text = '{"key": "val"}'
res = self.ws_app.call_ws_api("api_call")
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.json.loads')
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api__bytes(self, mock_post, mock_json_loads):
mock_post.return_value.status_code = 200
mock_post.return_value.content = bytes()
mock_post.return_value.encoding = None
mock_json_loads.side_effect = json.JSONDecodeError(doc="DOC", pos=1, msg="Error")
res = self.ws_app.call_ws_api("api_call")
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.json.loads')
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api__text(self, mock_post, mock_json_loads):
mock_post.return_value.status_code = 200
mock_post.return_value.encoding = 'UTF-8'
mock_post.return_value.text = "TEXT"
mock_json_loads.side_effect = json.JSONDecodeError(doc="DOC", pos=1, msg="Error")
res = self.ws_app.call_ws_api("api_call")
self.assertIsInstance(res, str)
@patch('ws_sdk.app.requests.Session.post')
def test__call_ws_api_timeout_exception(self, mock_post):
mock_post.side_effect = TimeoutError()
with self.assertRaises(TimeoutError):
self.ws_app.call_ws_api("api_call")
@patch('ws_sdk.app.WSApp.call_ws_api')
def test__generic_get(self, mock_call_ws_api):
mock_call_ws_api.return_value = []
res = self.ws_app._generic_get(token_type=self.ws_app.token_type, get_type='suffix', kv_dict={})
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_organization_details')
@patch('ws_sdk.app.WSApp.get_products')
@patch('ws_sdk.app.WSApp.get_projects')
def test_get_scopes_as_org(self, mock_get_projects, mock_get_products, mock_get_organization_details):
mock_get_projects.return_value = [{'name': "PROD_NAME", 'token': "TOKEN"}]
mock_get_products.return_value = [{'name': "PROJ_NAME", 'token': "TOKEN"}]
mock_get_organization_details.return_value = {'orgName': "ORG_NAME"}
res = self.ws_app.get_scopes(token="TOKEN")
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.get_name')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_scopes_as_product(self, mock_generic_get, mock_get_name):
mock_generic_get.return_value = {'projectVitals': [{}]}
mock_get_name.return_value = "PROD_NAME"
self.ws_app.token_type = ScopeTypes.PRODUCT
res = self.ws_app.get_scopes()
self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_report(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_alerts(report=True)
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_report_on_product(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = bytes()
mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
res = self.ws_app.get_alerts(report=True, token="PROD_TOKEN")
self.assertIsInstance(res, bytes)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_by_type(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'alerts': {}}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
from_date = datetime.now()
to_date = datetime.now()
res = self.ws_app.get_alerts(alert_type='SECURITY_VULNERABILITY', from_date=from_date, to_date=to_date)
self.assertIsInstance(res, dict)
@patch('ws_sdk.app.WSApp.set_token_in_body')
def test_get_alerts_by_false_type(self, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_alerts(alert_type='FALSE')
self.assertIs(res, None)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_alerts_all(self, mock_generic_get, mock_set_token_in_body):
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
mock_generic_get.return_value = {'alerts': []}
res = self.ws_app.get_alerts()
self.assertIsInstance(res, list)
    # --- get_alerts() variants ---------------------------------------------
    # Each test stubs set_token_in_body / _generic_get and checks only the
    # return *type* (or None for unsupported argument combinations), not the
    # payload contents.
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_alerts_ignored(self, mock_generic_get, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        mock_generic_get.return_value = {'alerts': []}
        res = self.ws_app.get_alerts(ignored=True)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_alerts_ignored_report(self, mock_generic_get, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        mock_generic_get.return_value = {'alerts': []}
        res = self.ws_app.get_alerts(ignored=True, report=True)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.get_alerts')
    def test_get_ignored_alerts(self, mock_get_alerts):
        mock_get_alerts.return_value = []
        res = self.ws_app.get_ignored_alerts()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_alerts_resolved_report(self, mock_generic_get, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        mock_generic_get.return_value = bytes()
        res = self.ws_app.get_alerts(resolved=True, report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_alerts_just_resolved(self, mock_set_token_in_body):
        # The test expects resolved=True without report=True to return None.
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_alerts(resolved=True)
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.get_alerts')
    def test_get_resolved_alerts(self, mock_get_alerts):
        mock_get_alerts.return_value = bytes()
        res = self.ws_app.get_resolved_alerts(report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_alerts_by_project_tag(self, mock_generic_get, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        mock_generic_get.return_value = {'alerts': []}
        res = self.ws_app.get_alerts(tags={"key": "value"})
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_alerts_by_project_tag_product_token(self, mock_set_token_in_body):
        # The test expects tag filtering on a product-scope token to yield None.
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_alerts(tags={"key": "value"}, token=ScopeTypes.PRODUCT)
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_alerts_by_project_2_tags(self, mock_set_token_in_body):
        # The test expects more than one tag to yield None.
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_alerts(tags={'k1': "v2", 'k2': "v2"})
        self.assertIs(res, None)
    # --- products / projects / organization lookups -------------------------
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_products(self, mock_generic_get):
        mock_generic_get.return_value = {'productVitals': [{'type': ScopeTypes.PRODUCT}]}
        res = self.ws_app.get_products()
        self.assertIsInstance(res, list)
    # NOTE(review): the next two tests patch WSApp.get_products and then call
    # self.ws_app.get_products — they assert the mock's own return value, so
    # the SDK's name/token filtering logic is not actually exercised. Confirm
    # this is intentional.
    @patch('ws_sdk.app.WSApp.get_products')
    def test_get_product_by_name(self, mock_get_products):
        mock_get_products.return_value = {'name': "PROD_NAME", 'token': "TOKEN"}
        ret_d = {'name': "PROD_NAME", 'token': "TOKEN"}
        res = self.ws_app.get_products(name="PROD_NAME")
        self.assertEqual(res, ret_d)
    @patch('ws_sdk.app.WSApp.get_products')
    def test_get_product_by_token(self, mock_get_products):
        mock_get_products.return_value = {'name': "PROD_NAME", 'token': "TOKEN"}
        ret_d = {'name': "PROD_NAME", 'token': "TOKEN"}
        res = self.ws_app.get_products(token="TOKEN")
        self.assertEqual(res, ret_d)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_projects(self, mock_generic_get):
        mock_generic_get.return_value = {'productVitals': []}
        res = self.ws_app.get_projects()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_projects_no_inc(self, mock_generic_get):
        mock_generic_get.return_value = {'projectVitals': []}
        res = self.ws_app.get_projects(include_prod_proj_names=False)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_organization_details(self, mock_generic_get):
        mock_generic_get.return_value = {"orgName": "ORG_NAME", "orgToken": "ORG_TOKEN"}
        res = self.ws_app.get_organization_details()
        self.assertIsInstance(res, dict)
    @patch('ws_sdk.app.WSApp.get_organization_details')
    def test_get_name_as_org(self, mock_get_organization_details):
        mock_get_organization_details.return_value = {'orgName': "ORG_NAME"}
        res = self.ws_app.get_name()
        self.assertIsInstance(res, str)
    @patch('ws_sdk.app.WSApp.get_tags')
    def test_get_name_as_prod(self, mock_get_tags):
        # Mutates the shared fixture's token_type; assumes setUp() rebuilds
        # self.ws_app per test — TODO confirm.
        self.ws_app.token_type = ScopeTypes.PRODUCT
        mock_get_tags.return_value = [{"name": "PROD_NAME"}]
        res = self.ws_app.get_name()
        self.assertIsInstance(res, str)
    def test_get_organization_details_not_org(self):
        self.ws_app.token_type = ScopeTypes.PRODUCT
        res = self.ws_app.get_organization_details()
        self.assertIs(res, None)
    # --- get_inventory() report / scope variants ----------------------------
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_inventory_report(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_inventory(report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_inventory__product_report(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_inventory(token="PRODUCT", report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_inventory_project(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'libraries': []}
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_inventory(token="PROJECT", include_in_house_data=False)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_inventory(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'libraries': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_inventory()
        self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_with_filter(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': [{'name': "FILTERED_LIB"}]}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory(lib_name="FILTERED_LIB")
self.assertIsInstance(res, list) and self.assertEqual(len(res), 1)
    # --- get_lib() / get_lib_uuid() -----------------------------------------
    @patch('ws_sdk.app.WSApp.get_inventory')
    def test_get_lib(self, mock_get_inventory):
        ret_d = {'keyUuid': "LIB_UUID"}
        mock_get_inventory.return_value = [ret_d]
        ret = self.ws_app.get_lib(name="LIB_UUID")
        self.assertEqual(ret, ret_d)
    @patch('ws_sdk.app.WSApp.get_inventory')
    def test_get_lib_not_found(self, mock_get_inventory):
        # An empty inventory must make get_lib raise rather than return None.
        mock_get_inventory.return_value = []
        with self.assertRaises(WsSdkServerInvalidLibName):
            self.ws_app.get_lib(name="LIB_UUID")
    @patch('ws_sdk.app.WSApp.get_lib')
    def test_get_lib_uuid(self, mock_get_lib):
        mock_get_lib.return_value = {'keyUuid': "LIB_UUID"}
        ret = self.ws_app.get_lib_uuid(name="LIB_UUID")
        self.assertEqual(ret, "LIB_UUID")
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_inventory_inc_in_house(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'libraries': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_inventory(include_in_house_data=True)
        self.assertIsInstance(res, list)
@patch('ws_sdk.app.WSApp.set_token_in_body')
@patch('ws_sdk.app.WSApp._generic_get')
def test_get_inventory_with_deps(self, mock_generic_get, mock_set_token_in_body):
mock_generic_get.return_value = {'libraries': [{'dependencies': [{'filename': 'DEP_FILENAME-1.2.3.jar'}],
'filename': 'FILENAME-4.5.6.jar'}]}
mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
res = self.ws_app.get_inventory(with_dependencies=True)
self.assertIsInstance(res, list) and self.assertEqual(len(res), 2)
    # --- scope lookups by name / token --------------------------------------
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_lib_dependencies(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = []
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_lib_dependencies(token="TOKEN", key_uuid="KEY_UUID")
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.get_scopes')
    def test_get_scopes_from_name(self, mock_get_scopes):
        mock_get_scopes.return_value = [{'name': "NAME", 'token': "TOKEN"}]
        res = self.ws_app.get_scopes_from_name(name="NAME")
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.get_scopes')
    def test_get_scopes_from_name_not_found(self, mock_get_scopes):
        mock_get_scopes.return_value = []
        res = self.ws_app.get_scopes_from_name("NAME")
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.get_scope_by_token')
    def test_get_scope_type_by_token(self, mock_get_scope_by_token):
        mock_get_scope_by_token.return_value = {'type': "TOKEN"}
        res = self.ws_app.get_scope_type_by_token(token="TOKEN")
        self.assertEqual(res, "TOKEN")
    @patch('ws_sdk.app.WSApp.get_scope_by_token')
    def test_get_scope_name_by_token(self, mock_get_scope_by_token):
        mock_get_scope_by_token.return_value = {'name': "NAME"}
        res = self.ws_app.get_scope_name_by_token(token="TOKEN")
        self.assertEqual(res, "NAME")
    # NOTE(review): this test is an exact duplicate of
    # test_get_scope_name_by_token above — consider removing one.
    @patch('ws_sdk.app.WSApp.get_scope_by_token')
    def test_get_name_from_token(self, mock_get_scope_by_token):
        mock_get_scope_by_token.return_value = {'name': "NAME"}
        res = self.ws_app.get_scope_name_by_token(token="TOKEN")
        self.assertEqual(res, "NAME")
@patch('ws_sdk.app.WSApp.get_scopes_from_name')
def test_get_tokens_from_name(self, mock_get_scopes_from_name):
mock_get_scopes_from_name.return_value = [{'name': "NAME", 'token': "TOKEN"}]
res = self.ws_app.get_tokens_from_name('NAME')
self.assertIsInstance(res, list) and self.assertDictEqual(res[0], {'name': "NAME", 'token': "TOKEN"})
@patch('ws_sdk.app.WSApp.get_projects')
@patch('ws_sdk.app.WSApp.get_products')
def test_get_scopes_by_token(self, mock_get_products, mock_get_projects):
mock_get_projects.return_value = [{'token': "TOKEN"}]
mock_get_products.return_value = []
res = self.ws_app.get_scope_by_token(token="TOKEN")
self.assertIn('token', res) and self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_products')
def test_get_scopes_by_token_of_product(self, mock_get_products):
mock_get_products.return_value = [{'token': "TOKEN"}]
res = self.ws_app.get_scope_by_token(token="TOKEN", token_type=ScopeTypes.PRODUCT)
self.assertIn('token', res) and self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_projects')
def test_get_scopes_by_token_as_product(self, mock_get_projects):
self.ws_app.token_type = ScopeTypes.PRODUCT
mock_get_projects.return_value = [{'token': "TOKEN"}]
res = self.ws_app.get_scope_by_token(token="TOKEN")
self.assertIn('token', res) and self.assertEqual(res['token'], "TOKEN")
@patch('ws_sdk.app.WSApp.get_scopes_from_name')
def test_get_token_from_name_not_found(self, mock_get_scopes_from_name):
mock_get_scopes_from_name.return_value = []
res = self.ws_app.get_tokens_from_name('NAME_NOT_FOUND')
self.assertIsInstance(res, list) and self.assertEqual(len(res), 0)
    # --- vulnerabilities / change log / user-group assignments --------------
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_vulnerability(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'vulnerabilities': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_vulnerability()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_vulnerability_cluster(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'vulnerabilities': []}
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_vulnerability(cluster=True, token=ScopeTypes.PRODUCT)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_vulnerability_cluster_as_org(self, mock_generic_get, mock_set_token_in_body):
        # The test expects cluster=True on an org scope to return None.
        mock_generic_get.return_value = {'vulnerabilities': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_vulnerability(cluster=True)
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_vulnerability_report_xlsx_of_product(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_vulnerability(token="PRODUCT", report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.get_vulnerability')
    def test_get_vulnerabilities_per_lib(self, mock_get_vulnerability):
        mock_get_vulnerability.return_value = []
        res = self.ws_app.get_vulnerabilities_per_lib()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_change_log(self, mock_generic_get):
        mock_generic_get.return_value = {'changes': []}
        res = self.ws_app.get_change_log()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_change_log_start_date(self, mock_generic_get):
        mock_generic_get.return_value = {'changes': []}
        res = self.ws_app.get_change_log(start_date=datetime.now())
        self.assertIsInstance(res, list)
    @patch('ws_sdk.ws_constants.ENTITY_TYPES')
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_assignments(self, mock_generic_get, mock_set_token_in_body, mock_entity_types):
        mock_generic_get.return_value = {}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        mock_entity_types.return_value = {}
        res = self.ws_app.get_user_group_assignments(entity_type=USERS,
                                                     role_type=RoleTypes.PRODUCT_INTEGRATOR)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_assignments_project(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {}
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_user_group_assignments()
        self.assertIsInstance(res, list)
    # --- risk / due diligence / attributes / licenses / source files --------
    # Report endpoints return raw bytes; non-report endpoints return lists.
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_risk(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_risk()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_risk_project(self, mock_set_token_in_body):
        # The test expects the risk report to be unavailable at project scope.
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_risk()
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_due_diligence(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_due_diligence()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_attributes(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_attributes()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_attributes__project(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_attributes()
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_licenses(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'libraries': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_licenses(full_spdx=True)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_source_files(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'sourceFiles': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_source_files()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.get_source_files')
    def test_get_source_file_inventory(self, mock_get_source_files):
        mock_get_source_files.return_value = bytes()
        res = self.ws_app.get_source_file_inventory()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_source_files_report(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_source_files(report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.get_in_house_libraries')
    def test_get_in_house(self, mock_get_in_house_libraries):
        mock_get_in_house_libraries.return_value = bytes()
        res = self.ws_app.get_in_house()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_in_house_libraries(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'libraries': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_in_house_libraries()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_in_house_libraries_report(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_in_house_libraries(report=True)
        self.assertIsInstance(res, bytes)
    # --- library location / license compatibility / attribution -------------
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_library_location(self, mock_generic_get, mock_set_token_in_body):
        # The test expects library location to be unsupported at org scope.
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_library_location()
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_library_location_on_project(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'libraryLocations': []}
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_library_location()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_license_compatibility_org_report(self, mock_generic_get, mock_set_token_in_body):
        # License compatibility report is expected to be unavailable at org scope.
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_license_compatibility(report=True)
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_license_compatibility(self, mock_generic_get, mock_set_token_in_body):
        # Without report=True the call is expected to return None even on a project.
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_license_compatibility()
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_license_compatibility_report_prod(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_license_compatibility(report=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_license_compatibility_org(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_license_compatibility()
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_licenses_histogram(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'licenseHistogram': {}}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_licenses(histogram=True)
        self.assertIsInstance(res, dict)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_attribution(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = dict()
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_attribution(reporting_aggregation_mode="BY_COMPONENT", token="TOKEN")
        self.assertIsInstance(res, dict)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_attribution_bin(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_attribution(reporting_aggregation_mode="BY_COMPONENT", token="TOKEN", report=True, export_format="TXT")
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_attribution_on_org(self, mock_set_token_in_body):
        # Attribution is expected to be unavailable at org scope.
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_attribution(reporting_aggregation_mode="BY_COMPONENT", token="TOKEN")
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_effective_licenses(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_effective_licenses()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_effective_licenses_project(self, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_effective_licenses()
        self.assertIs(res, None)
    # --- bugs / request history / project lookups / scope deletion / users --
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_bugs(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_bugs()
        self.assertIsInstance(res, bytes)
    def test_get_bugs_not_report(self):
        # The bugs endpoint is report-only; report=False is expected to yield None.
        res = self.ws_app.get_bugs(report=False)
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_request_history(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_request_history()
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_request_history_plugin(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_request_history(plugin=True)
        self.assertIsInstance(res, bytes)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_request_history_plugin_project(self, mock_generic_get, mock_set_token_in_body):
        # Plugin request history is expected to be unsupported at project scope.
        mock_generic_get.return_value = bytes()
        mock_set_token_in_body.return_value = (PROJECT, {})
        res = self.ws_app.get_request_history(plugin=True)
        self.assertIs(res, None)
    def test_get_request_history_not_report(self):
        res = self.ws_app.get_request_history(report=False)
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.get_projects')
    def test_get_project(self, mock_get_projects):
        mock_get_projects.return_value = [{'token': "TOKEN"}]
        res = self.ws_app.get_project(token="TOKEN")
        self.assertEqual(res['token'], "TOKEN")
    @patch('ws_sdk.app.WSApp.get_projects')
    def test_get_project_not_found(self, mock_get_projects):
        mock_get_projects.return_value = [{'token': "TOKEN"}]
        with self.assertRaises(WsSdkServerMissingTokenError):
            # NOTE(review): `res` is never used — the assignment could be dropped.
            res = self.ws_app.get_project(token="NOT_FOUND")
    @patch('ws_sdk.app.WSApp.get_scope_by_token')
    def test_get_product_of_project(self, mock_get_scope_by_token):
        mock_get_scope_by_token.return_value = {'token': "TOKEN",
                                                'productToken': "PRODUCTTOKEN",
                                                'type': PROJECT}
        res = self.ws_app.get_product_of_project(token="TOKEN")
        self.assertEqual(res['token'], "TOKEN")
    @patch('ws_sdk.app.WSApp.get_scope_name_by_token')
    @patch('ws_sdk.app.WSApp.call_ws_api')
    @patch('ws_sdk.app.WSApp.get_project')
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_delete_scope(self, mock_set_token_in_body, mock_get_project, mock_call_ws_api,
                          mock_get_scope_name_by_token):
        mock_set_token_in_body.return_value = (PROJECT, {})
        mock_get_project.return_value = {'token': "TOKEN", 'productToken': "PROD_TOKEN"}
        mock_call_ws_api.return_value = {}
        mock_get_scope_name_by_token.return_value = "PROJECT_NAME"
        res = self.ws_app.delete_scope(token="TOKEN")
        self.assertIsInstance(res, dict)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_users(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'users': []}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_users()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_users_as_product(self, mock_set_token_in_body):
        # Listing users is expected to be unsupported at product scope.
        self.ws_app.token_type = ScopeTypes.PRODUCT
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_users()
        self.assertIs(res, None)
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_get_libraries(self, mock_call_ws_api):
        mock_call_ws_api.return_value = {'libraries': []}
        res = self.ws_app.get_libraries(search_value="LIB_NAME", version="VERSION", search_only_name=True)
        self.assertIsInstance(res, list)
    # --- library search / tags / alert status / lib notices / policies ------
    @patch('ws_sdk.app.WSApp.get_inventory')
    def test_get_libraries_not_global(self, mock_get_inventory):
        mock_get_inventory.return_value = []
        res = self.ws_app.get_libraries(search_value="LIB_NAME", global_search=False)
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_library_detailed(self, mock_generic_get):
        mock_generic_get.return_value = {"librariesInformation": []}
        res = self.ws_app.get_library_details(name="NAME", lib_type="Source Library", version="VERSION", languages=["java"])
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_tags_as_org(self, mock_generic_get, mock_set_token_in_body):
        # At org scope get_tags issues two calls (product tags then project
        # tags), hence the side_effect list of two responses.
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        mock_generic_get.side_effect = [{'productTags': []}, {'projectTags': []}]
        res = self.ws_app.get_tags()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_tags_as_prod(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'projectTags': []}
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_tags()
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_set_alerts_status(self, mock_call_ws_api, mock_set_token_in_body):
        mock_call_ws_api.return_value = {}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.set_alerts_status(alert_uuids="UUID", status=AlertStatus.AL_STATUS_IGNORED)
        self.assertIsInstance(res, dict)
    # NOTE: assertLogs(level='INFO') captures ERROR records too (ERROR >= INFO),
    # so the following log assertions are valid.
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_set_alerts_status_no_uuids(self, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        with self.assertLogs(level='INFO') as cm:
            self.ws_app.set_alerts_status(alert_uuids=[], status=AlertStatus.AL_STATUS_ACTIVE)
            self.assertEqual(cm.output, ["ERROR:ws_sdk.app:At least 1 alert uuid must be provided"])
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_set_alerts_status_invalid_status(self, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        with self.assertLogs(level='INFO') as cm:
            self.ws_app.set_alerts_status(alert_uuids=["UUID"], status="INVALID")
            self.assertEqual(cm.output, ['ERROR:ws_sdk.app:INVALID status is invalid. Must be \"Ignored\" or \"Active\"'])
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_lib_notice(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = "TEXT"
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        res = self.ws_app.get_lib_notice(as_text=True)
        self.assertEqual(res, "TEXT")
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_get_lib_notice_not_product(self, mock_set_token_in_body):
        # Lib notices are product-scoped; other scopes are expected to raise.
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        with self.assertRaises(WsSdkServerTokenTypeError):
            self.ws_app.get_lib_notice()
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_set_lib_notice(self, mock_call_ws_api):
        mock_call_ws_api.return_value = []
        res = self.ws_app.set_lib_notice(lib_uuid='LIB_UUID', text=[{"k1": "v1", "k2": "v2"}, {"k1": "v1", "k2": "v2"}],
                                         reference='REFERENCE')
        self.assertIsInstance(res, list)
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_policies(self, mock_generic_get, mock_set_token_in_body):
        mock_generic_get.return_value = {'policies': [{'policyContext': 'DOMAIN'}]}
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        res = self.ws_app.get_policies()
        self.assertIsInstance(res, list)
    # --- admin operations: users, products, projects, groups, misc ----------
    # Several of these verify behavior via exact DEBUG/ERROR log output
    # (assertLogs) rather than return values.
    @patch('ws_sdk.app.WSApp.call_ws_api')
    @patch('ws_sdk.app.WSApp.get_users')
    def test_create_user(self, mock_get_users, mock_call_ws_api):
        mock_get_users.return_value = []
        mock_call_ws_api.return_value = {}
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.create_user(name="NAME", email="<EMAIL>", inviter_email="<EMAIL>")
            self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Token: 'None' is a organization",
                                         "DEBUG:ws_sdk.app:Creating User: NAME email : <EMAIL> with Inviter email: <EMAIL>"])
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_create_product(self, mock_call_ws_api):
        mock_call_ws_api.return_value = {}
        ret = self.ws_app.create_product(name="NEW_PRODUCT")
        self.assertEqual(ret, {})
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_create_project(self, mock_call_ws_api, mock_set_token_in_body):
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        mock_call_ws_api.return_value = {}
        ret = self.ws_app.create_project(name="NEW_PROJECT", product_token="PROD_TOKEN")
        self.assertEqual(ret, {})
    def test_create_project_prod_token_and_name(self):
        # Passing both a product token and a product name is invalid.
        with self.assertLogs(level='DEBUG') as cm:
            ret = self.ws_app.create_project(name="NEW_PROJECT", product_token="TOKEN", product_name="NAME")
            self.assertEqual(cm.output, ["ERROR:ws_sdk.app:Unable to create project: 'NEW_PROJECT'. Only project token or project name is allowed"])
    def test_create_project_no_prod_token(self):
        # Omitting both product token and product name is also invalid.
        with self.assertLogs(level='DEBUG') as cm:
            ret = self.ws_app.create_project(name="NEW_PROJECT")
            self.assertEqual(cm.output, ["ERROR:ws_sdk.app:Unable to create project: 'NEW_PROJECT'. Missing product value"])
    @patch('ws_sdk.app.WSApp.call_ws_api')
    @patch('ws_sdk.app.WSApp.get_users')
    def test_delete_user(self, mock_get_users, mock_call_ws_api):
        mock_get_users.return_value = [{"name": "USERNAME"}]
        mock_call_ws_api.return_value = {}
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.delete_user(email="<EMAIL>")
            self.assertEqual(cm.output, [
                f"DEBUG:ws_sdk.app:Deleting user email: <EMAIL> from Organization Token: {self.ws_app.token}"])
    @patch('ws_sdk.app.WSApp.call_ws_api')
    @patch('ws_sdk.app.WSApp.get_groups')
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_create_group(self, mock_set_token_in_body, mock_get_groups, mock_call_ws_api):
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        mock_get_groups.return_value = []
        mock_call_ws_api.return_value = {}
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.create_group(name="GRP_NAME")
            # NOTE(review): the f-string below has no placeholders — the `f`
            # prefix is redundant.
            self.assertEqual(cm.output, [f"DEBUG:ws_sdk.app:Creating Group: GRP_NAME"])
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    @patch('ws_sdk.app.WSApp.get_users')
    @patch('ws_sdk.app.WSApp.get_groups')
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_assign_user_to_group(self, mock_call_ws_api, mock_get_groups, mock_get_users, mock_set_token_in_body):
        mock_call_ws_api.return_value = []
        # side_effect: first call finds the group, second returns no members.
        mock_get_groups.side_effect = [[{"name": "GRP_NAME"}], []]
        mock_get_users.return_value = [{"name": "USERNAME"}]
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.assign_user_to_group(user_email="EMAIL", group_name="GRP_NAME")
            self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Assigning user's Email: EMAIL to Group: GRP_NAME"])
    @patch('ws_sdk.app.WSApp.get_groups')
    @patch('ws_sdk.app.WSApp._generic_set')
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_assign_to_scope(self, mock_set_token_in_body, mock_generic_set, mock_get_groups):
        mock_generic_set.return_value = []
        mock_set_token_in_body.return_value = (ScopeTypes.PRODUCT, {})
        mock_get_groups.side_effect = [[{"name": "GRP_NAME"}], []]
        group_name = "GRP_NAME"
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.assign_to_scope(role_type=RoleTypes.P_INTEGRATORS, group=group_name)
            self.assertEqual(cm.output, [
                f"DEBUG:ws_sdk.app:Assigning User(s): None Group(s): {group_name} to Role: {RoleTypes.P_INTEGRATORS}"])
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_invite_user_to_web_advisor(self, mock_call_ws_api):
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.invite_user_to_web_advisor(user_email="<EMAIL>")
            self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Token: 'None' is a organization",
                                         "DEBUG:ws_sdk.app:Inviting email: '<EMAIL>' to Web Advisor"])
    @patch('ws_sdk.app.WSApp.call_ws_api')
    def test_regenerate_service_user_key(self, mock_call_ws_api):
        mock_call_ws_api.return_value = {'userToken': self.valid_token}
        res = self.ws_app.regenerate_service_user_key(service_user_key=self.valid_token)
        self.assertEqual(res, self.valid_token)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_integration_token(self, mock_generic_get):
        mock_generic_get.return_value = self.valid_token
        ret = self.ws_app.get_integration_token(integration_type=IntegrationTypes.INT_1)
        self.assertEqual(ret, self.valid_token)
    @patch('ws_sdk.app.WSApp._generic_get')
    def test_get_last_scan_process_status(self, mock_generic_get):
        mock_generic_get.return_value = {'requestState': "FINISHED"}
        ret = self.ws_app.get_last_scan_process_status(request_token="<KEY>")
        self.assertEqual(ret, "FINISHED")
    @patch('ws_sdk.app.WSApp.call_ws_api')
    @patch('ws_sdk.app.WSApp.set_token_in_body')
    def test_change_origin_of_source_lib(self, mock_set_token_in_body, mock_call_api):
        mock_set_token_in_body.return_value = (self.ws_app.token_type, {})
        with self.assertLogs(level='DEBUG') as cm:
            res = self.ws_app.change_origin_of_source_lib(lib_uuid="LIB_UUID", source_files_sha1=["SHA1_1", "SHA1_2"])
            self.assertEqual(cm.output, ["DEBUG:ws_sdk.app:Changing original of source library: 'LIB_UUID'"])
def test_generate_whitesource_url(self):
id = 42
type = 'project'
ret = self.ws_app.generate_whitesource_url(id, type)
ret_d = f"{self.ws_app.url}/Wss/WSS.html/Wss/WSS.html#!{type};id={id}"
self.assertEqual(ret, ret_d)
if __name__ == '__main__':
TestCase.unittest.main()
| kn | 0.076552 | #!{type};id={id}" | 2.31988 | 2 |
notification_extension/apps.py | rockyfire/Django-China-API | 0 | 6612814 | <filename>notification_extension/apps.py
from django.apps import AppConfig
class NotificationExtensionConfig(AppConfig):
name = 'notification_extension'
| <filename>notification_extension/apps.py
from django.apps import AppConfig
class NotificationExtensionConfig(AppConfig):
name = 'notification_extension'
| none | 1 | 1.18679 | 1 | |
src/binary_response/scripts/profile.py | david-zwicker/sensing-normalized-results | 0 | 6612815 | #!/usr/bin/env python
'''
Created on Apr 16, 2015
@author: <NAME> <<EMAIL>>
'''
from __future__ import division
import sys
import os.path
# append base path to sys.path
script_path = os.path.split(os.path.realpath(__file__))[0]
sys.path.append(os.path.join(script_path, '..', '..'))
import cProfile
import pstats
import tempfile
from binary_response import LibraryBinaryNumeric
# parameters
Ns, Nr, m = 16, 8, 4
model = LibraryBinaryNumeric(Ns, Nr)
model.set_commonness('random_uniform', m)
model.mutual_information() #< make sure numba jiting has happened
# run the profiler and save result to temporary file
cmd = "model.optimize_library('mutual_information', method='anneal', steps=1000)"
with tempfile.NamedTemporaryFile() as tmpfile:
cProfile.run(cmd, tmpfile.name)
stats = pstats.Stats(tmpfile.name)
# display the results
stats.strip_dirs()
stats.sort_stats('cumulative')
stats.print_stats(30)
| #!/usr/bin/env python
'''
Created on Apr 16, 2015
@author: <NAME> <<EMAIL>>
'''
from __future__ import division
import sys
import os.path
# append base path to sys.path
script_path = os.path.split(os.path.realpath(__file__))[0]
sys.path.append(os.path.join(script_path, '..', '..'))
import cProfile
import pstats
import tempfile
from binary_response import LibraryBinaryNumeric
# parameters
Ns, Nr, m = 16, 8, 4
model = LibraryBinaryNumeric(Ns, Nr)
model.set_commonness('random_uniform', m)
model.mutual_information() #< make sure numba jiting has happened
# run the profiler and save result to temporary file
cmd = "model.optimize_library('mutual_information', method='anneal', steps=1000)"
with tempfile.NamedTemporaryFile() as tmpfile:
cProfile.run(cmd, tmpfile.name)
stats = pstats.Stats(tmpfile.name)
# display the results
stats.strip_dirs()
stats.sort_stats('cumulative')
stats.print_stats(30)
| en | 0.689127 | #!/usr/bin/env python Created on Apr 16, 2015 @author: <NAME> <<EMAIL>> # append base path to sys.path # parameters #< make sure numba jiting has happened # run the profiler and save result to temporary file # display the results | 1.996336 | 2 |
anp.py | z8432k/feodorov-math-lab1-py | 0 | 6612816 | <gh_stars>0
import numpy as np
def geo_mean(iterable):
a = np.array(iterable)
return a.prod()**(1.0/len(a))
def eigenVec(m, dst=None):
rows, cols = m.shape
rowsGeom = np.ones([cols])
for i in range(rows):
row = m[i]
geom = geo_mean(row)
rowsGeom[i] = geom
geomSum = np.sum(rowsGeom)
for i in range(rows):
rowsGeom[i] = rowsGeom[i] / geomSum
# lambda
colsSum = np.zeros([rows])
for i in range(rows):
for k in range(cols):
colsSum[i] += m[k, i]
colsSum[i] *= rowsGeom[i]
lmbda = np.sum(colsSum)
if (lmbda > rows):
raise "Wrong lambda"
if (not dst is None):
np.copyto(dst, rowsGeom)
return rowsGeom
def eigVecCalc(srcVec, dstVec):
srcSiz = len(srcVec)
result = np.ones([srcSiz, srcSiz])
for i in range(srcSiz):
for k in range(srcSiz):
result[i, k] = srcVec[i] / srcVec[k]
eigenVec(result, dstVec)
def AHPSolve(cw, normVectors, solveVector):
# Нормированный собственный вектор оценки критериев
eigVecCalc(cw, eigNormVectors[0])
awRows, awCols = np.shape(aw)
# Заполняем нормирпованные собственные вектора оценок альтернатив
for i in range(awRows):
eigVecCalc(aw[i], normVectors[i + 1])
for i in range(awRows):
solveVector[i] = 0
for j in range(awCols):
solveVector[i] += normVectors[0, j] * normVectors[j+1, i]
minimum = np.amin(solveVector)
return np.where(solveVector == minimum)
def buildNormPairMatrix(normVectors, normPairMatrix):
normRows, normCols = np.shape(normVectors)
for i in range(normRows):
if i == 0:
continue
for j in range(normRows - 1):
for k in range(normRows - 1):
normPairMatrix[i - 1, k, j*2] = normVectors[i, k] / \
(normVectors[i, k] + normVectors[i, j])
normPairMatrix[i - 1, k, (j*2+1)] = normVectors[i, j] / \
(normVectors[i, j] + normVectors[i, k])
def buildAHPPlusSolveMatrix(pairMatrix, normVectors, solveMatrix):
for i in range(cwSiz):
for j in range(awCols * 2):
for k in range(cwSiz): # позиция в столбце и счётчик матриц
solveMatrix[i, j] += normVectors[0, k] * \
pairMatrix[k, i, j]
def ahpPlusSolve(solveMatrix, solveVector):
sRows, sCols = np.shape(solveMatrix)
for i in range(sRows):
for j in range(0, sCols, 2):
solveVector[i] += solveMatrix[i, j]
solveSum = np.sum(solveVector)
for i in range(sRows):
solveVector[i] = solveVector[i] / solveSum
minimum = np.amin(solveVector)
result = np.where(solveVector == minimum)
return result
# Решение задачи
cw = np.array([ 3, 7, 5, 1, 9 ])
cwSiz = len(cw);
# Оценки по критериям уложены по строкам
aw = np.array([
[3, 1, 8, 4, 5],
[1, 2, 5, 8, 9],
[5, 6, 9, 7, 2],
[4, 9, 7, 8, 5],
[9, 1, 6, 5, 3]
])
awRows, awCols = np.shape(aw)
eigNormVectors = np.zeros([awCols + 1, cwSiz])
ahpSolveVector = np.zeros([awCols])
result1 = AHPSolve(cw, eigNormVectors, ahpSolveVector)
# Матрица нормализации попарных сравнений альтернатив
normPairMatrix = np.zeros([cwSiz, cwSiz, awCols * 2])
buildNormPairMatrix(eigNormVectors, normPairMatrix)
ahpPlusSolveMatrix = np.zeros([cwSiz, awCols * 2])
buildAHPPlusSolveMatrix(normPairMatrix, eigNormVectors, ahpPlusSolveMatrix)
anpPlusSolveVector = np.zeros([cwSiz])
result2 = ahpPlusSolve(ahpPlusSolveMatrix, anpPlusSolveVector)
print(result1)
print(result2)
| import numpy as np
def geo_mean(iterable):
a = np.array(iterable)
return a.prod()**(1.0/len(a))
def eigenVec(m, dst=None):
rows, cols = m.shape
rowsGeom = np.ones([cols])
for i in range(rows):
row = m[i]
geom = geo_mean(row)
rowsGeom[i] = geom
geomSum = np.sum(rowsGeom)
for i in range(rows):
rowsGeom[i] = rowsGeom[i] / geomSum
# lambda
colsSum = np.zeros([rows])
for i in range(rows):
for k in range(cols):
colsSum[i] += m[k, i]
colsSum[i] *= rowsGeom[i]
lmbda = np.sum(colsSum)
if (lmbda > rows):
raise "Wrong lambda"
if (not dst is None):
np.copyto(dst, rowsGeom)
return rowsGeom
def eigVecCalc(srcVec, dstVec):
srcSiz = len(srcVec)
result = np.ones([srcSiz, srcSiz])
for i in range(srcSiz):
for k in range(srcSiz):
result[i, k] = srcVec[i] / srcVec[k]
eigenVec(result, dstVec)
def AHPSolve(cw, normVectors, solveVector):
# Нормированный собственный вектор оценки критериев
eigVecCalc(cw, eigNormVectors[0])
awRows, awCols = np.shape(aw)
# Заполняем нормирпованные собственные вектора оценок альтернатив
for i in range(awRows):
eigVecCalc(aw[i], normVectors[i + 1])
for i in range(awRows):
solveVector[i] = 0
for j in range(awCols):
solveVector[i] += normVectors[0, j] * normVectors[j+1, i]
minimum = np.amin(solveVector)
return np.where(solveVector == minimum)
def buildNormPairMatrix(normVectors, normPairMatrix):
normRows, normCols = np.shape(normVectors)
for i in range(normRows):
if i == 0:
continue
for j in range(normRows - 1):
for k in range(normRows - 1):
normPairMatrix[i - 1, k, j*2] = normVectors[i, k] / \
(normVectors[i, k] + normVectors[i, j])
normPairMatrix[i - 1, k, (j*2+1)] = normVectors[i, j] / \
(normVectors[i, j] + normVectors[i, k])
def buildAHPPlusSolveMatrix(pairMatrix, normVectors, solveMatrix):
for i in range(cwSiz):
for j in range(awCols * 2):
for k in range(cwSiz): # позиция в столбце и счётчик матриц
solveMatrix[i, j] += normVectors[0, k] * \
pairMatrix[k, i, j]
def ahpPlusSolve(solveMatrix, solveVector):
sRows, sCols = np.shape(solveMatrix)
for i in range(sRows):
for j in range(0, sCols, 2):
solveVector[i] += solveMatrix[i, j]
solveSum = np.sum(solveVector)
for i in range(sRows):
solveVector[i] = solveVector[i] / solveSum
minimum = np.amin(solveVector)
result = np.where(solveVector == minimum)
return result
# Решение задачи
cw = np.array([ 3, 7, 5, 1, 9 ])
cwSiz = len(cw);
# Оценки по критериям уложены по строкам
aw = np.array([
[3, 1, 8, 4, 5],
[1, 2, 5, 8, 9],
[5, 6, 9, 7, 2],
[4, 9, 7, 8, 5],
[9, 1, 6, 5, 3]
])
awRows, awCols = np.shape(aw)
eigNormVectors = np.zeros([awCols + 1, cwSiz])
ahpSolveVector = np.zeros([awCols])
result1 = AHPSolve(cw, eigNormVectors, ahpSolveVector)
# Матрица нормализации попарных сравнений альтернатив
normPairMatrix = np.zeros([cwSiz, cwSiz, awCols * 2])
buildNormPairMatrix(eigNormVectors, normPairMatrix)
ahpPlusSolveMatrix = np.zeros([cwSiz, awCols * 2])
buildAHPPlusSolveMatrix(normPairMatrix, eigNormVectors, ahpPlusSolveMatrix)
anpPlusSolveVector = np.zeros([cwSiz])
result2 = ahpPlusSolve(ahpPlusSolveMatrix, anpPlusSolveVector)
print(result1)
print(result2) | ru | 0.987464 | # lambda # Нормированный собственный вектор оценки критериев # Заполняем нормирпованные собственные вектора оценок альтернатив # позиция в столбце и счётчик матриц # Решение задачи # Оценки по критериям уложены по строкам # Матрица нормализации попарных сравнений альтернатив | 2.518724 | 3 |
test/test_data/__init__.py | tomplex/reshape | 1 | 6612817 | <reponame>tomplex/reshape
from pathlib import Path
test_data_dir = Path(__file__).parent
basic_wkt = test_data_dir / 'basic.wkt'
basic_noheader_wkt = test_data_dir / 'basic_noheader.wkt'
| from pathlib import Path
test_data_dir = Path(__file__).parent
basic_wkt = test_data_dir / 'basic.wkt'
basic_noheader_wkt = test_data_dir / 'basic_noheader.wkt' | none | 1 | 1.819276 | 2 | |
focus_sibling.py | KJoke70/i3-tools | 26 | 6612818 | #!/usr/bin/env python3
import i3ipc
import argparse
parser = argparse.ArgumentParser(description='focus on sibling in' \
'next/prev top-level container')
parser.add_argument('direction', help='left=-1, right=1', \
type=int)
args = parser.parse_args()
direction = args.direction
i3 = i3ipc.Connection()
con = i3.get_tree().find_focused()
# find outermost parent
counter = 0 # how often to ascend to outermost parent
while con.parent.type != "workspace":
con = con.parent
counter += 1
nodes = con.parent.nodes
index = 0
# find left/right sibling of outermost parent
for l in nodes:
if l.id == con.id:
break
index += 1
index += direction
if index >= len(nodes) or index < 0:
exit()
# descent to level 'counter' or a lowest leaf
con2 = nodes[index]
for i in range(counter, 0, -1):
n = con2.nodes
if len(n) == 0:
break #exit()?
else:
con2 = n[direction if direction < 0 else 0]
con2.command('focus')
| #!/usr/bin/env python3
import i3ipc
import argparse
parser = argparse.ArgumentParser(description='focus on sibling in' \
'next/prev top-level container')
parser.add_argument('direction', help='left=-1, right=1', \
type=int)
args = parser.parse_args()
direction = args.direction
i3 = i3ipc.Connection()
con = i3.get_tree().find_focused()
# find outermost parent
counter = 0 # how often to ascend to outermost parent
while con.parent.type != "workspace":
con = con.parent
counter += 1
nodes = con.parent.nodes
index = 0
# find left/right sibling of outermost parent
for l in nodes:
if l.id == con.id:
break
index += 1
index += direction
if index >= len(nodes) or index < 0:
exit()
# descent to level 'counter' or a lowest leaf
con2 = nodes[index]
for i in range(counter, 0, -1):
n = con2.nodes
if len(n) == 0:
break #exit()?
else:
con2 = n[direction if direction < 0 else 0]
con2.command('focus')
| en | 0.765822 | #!/usr/bin/env python3 # find outermost parent # how often to ascend to outermost parent # find left/right sibling of outermost parent # descent to level 'counter' or a lowest leaf #exit()? | 2.994172 | 3 |
dequeue.py | Xavier-cvpr/python-Data-Structures-and-Algorithms-master | 2 | 6612819 | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 11 18:25:22 2018
@author: Xavier
"""
class Deque(object):
#双端队列
def __init__(self):
#构造函数胡
self.__list=[]
def add_front(self,item):
#头部添加一个item元素
self.__list.insert(0,item)
#self.__list.insert(0,item)
def add_rear(self,item):
#从队列尾部添加一个元素
self.__list.append(item)
def pop_front(self):
#从队列头部取
return self.__list.pop(0)
def pop_rear(self):
#从队列尾部取
return self.__list.pop()
def is_empty(self):
#判断一个队列是否为空
return self.__list==[]
def size(self):
#返回队列的大小
return len(self.__list)
if __name__=="__main__":
s=Queue()
s.enqueue(1)
s.enqueue(2)
s.enqueue(3)
s.enqueue(4)
print(s.dequeue())
print(s.dequeue())
print(s.dequeue())
print(s.dequeue())
| # -*- coding: utf-8 -*-
"""
Created on Tue Dec 11 18:25:22 2018
@author: Xavier
"""
class Deque(object):
#双端队列
def __init__(self):
#构造函数胡
self.__list=[]
def add_front(self,item):
#头部添加一个item元素
self.__list.insert(0,item)
#self.__list.insert(0,item)
def add_rear(self,item):
#从队列尾部添加一个元素
self.__list.append(item)
def pop_front(self):
#从队列头部取
return self.__list.pop(0)
def pop_rear(self):
#从队列尾部取
return self.__list.pop()
def is_empty(self):
#判断一个队列是否为空
return self.__list==[]
def size(self):
#返回队列的大小
return len(self.__list)
if __name__=="__main__":
s=Queue()
s.enqueue(1)
s.enqueue(2)
s.enqueue(3)
s.enqueue(4)
print(s.dequeue())
print(s.dequeue())
print(s.dequeue())
print(s.dequeue())
| zh | 0.680267 | # -*- coding: utf-8 -*- Created on Tue Dec 11 18:25:22 2018
@author: Xavier #双端队列 #构造函数胡 #头部添加一个item元素 #self.__list.insert(0,item) #从队列尾部添加一个元素 #从队列头部取 #从队列尾部取 #判断一个队列是否为空 #返回队列的大小 | 4.197134 | 4 |
oehlert_x442.3_Assignment9.py | soehlert/python_homework | 0 | 6612820 | <gh_stars>0
#!/usr/bin/env python3
import os
import urllib.request
import re
############
# Q1
############
def zero_length(directory):
""" Using os.walk, write a script that will print the filenames of zero
length files. It should also print the count of zero length files. """
os.chdir(directory)
zero_length_files = 0
for dirname, subdirname, fn in os.walk(directory):
for f in fn:
if os.path.getsize(f) == 0:
zero_length_files += 1
print(os.path.abspath(f))
print("number of zero length files: {}".format(zero_length_files))
zero_length("/tmp/zero")
############
# Q2
############
def html_images(url):
""" List and count all of the images in a given HTML web page/file.
You can assume that:
Each image file is enclosed with the tag <img and ends with >
The HTML page/file is syntactically correct """
images = 0
with urllib.request.urlopen(url) as response:
html = response.read()
image_list = re.findall(r"<img.*?>", str(html))
for i in image_list:
print(i)
if i:
print(i)
images += 1
print(images)
html_images("https://www.cnn.com/")
| #!/usr/bin/env python3
import os
import urllib.request
import re
############
# Q1
############
def zero_length(directory):
""" Using os.walk, write a script that will print the filenames of zero
length files. It should also print the count of zero length files. """
os.chdir(directory)
zero_length_files = 0
for dirname, subdirname, fn in os.walk(directory):
for f in fn:
if os.path.getsize(f) == 0:
zero_length_files += 1
print(os.path.abspath(f))
print("number of zero length files: {}".format(zero_length_files))
zero_length("/tmp/zero")
############
# Q2
############
def html_images(url):
""" List and count all of the images in a given HTML web page/file.
You can assume that:
Each image file is enclosed with the tag <img and ends with >
The HTML page/file is syntactically correct """
images = 0
with urllib.request.urlopen(url) as response:
html = response.read()
image_list = re.findall(r"<img.*?>", str(html))
for i in image_list:
print(i)
if i:
print(i)
images += 1
print(images)
html_images("https://www.cnn.com/") | en | 0.638476 | #!/usr/bin/env python3 ############ # Q1 ############ Using os.walk, write a script that will print the filenames of zero length files. It should also print the count of zero length files. ############ # Q2 ############ List and count all of the images in a given HTML web page/file. You can assume that: Each image file is enclosed with the tag <img and ends with > The HTML page/file is syntactically correct | 3.915076 | 4 |
fence/oidc/grants/refresh_token_grant.py | giangbui/fence | 2 | 6612821 | <reponame>giangbui/fence<filename>fence/oidc/grants/refresh_token_grant.py
import bcrypt
from authlib.specs.rfc6749.errors import (
InvalidClientError,
InvalidRequestError,
InvalidScopeError,
UnauthorizedClientError,
)
from authlib.specs.rfc6749.grants import RefreshTokenGrant as AuthlibRefreshTokenGrant
from authlib.specs.rfc6749.util import scope_to_list
import flask
from fence.jwt.blacklist import is_token_blacklisted
from fence.jwt.errors import JWTError
from fence.jwt.validate import validate_jwt
from fence.models import ClientAuthType, User
class RefreshTokenGrant(AuthlibRefreshTokenGrant):
"""
Implement the refresh token grant which the OIDC provider will use.
This class both implements some methods required by authlib, and overrides
others to change the default behavior from authlib; see method docstrings
for details.
NOTE: ``self._authenticated_token`` is the refresh token claims as a
dictionary; ``self.params['refresh_token']`` is the actual string.
"""
TOKEN_ENDPOINT_AUTH_METHODS = [auth_type.value for auth_type in ClientAuthType]
def authenticate_refresh_token(self, refresh_token):
"""
Validate a refresh token.
Required to implement this method for authlib.
Args:
refresh_token (str): refresh token as from a request
Return:
dict: the claims from the validated token
"""
try:
if is_token_blacklisted(refresh_token):
return
except JWTError:
return
return validate_jwt(refresh_token, purpose="refresh")
def create_access_token(self, token, client, authenticated_token):
"""
Authlib requires the implementation of this method to save the token.
However, fence does not save the access tokens to a database, so just
return the original token again.
"""
return token
@staticmethod
def authenticate_user(claims):
"""
Return user from the claims (decoded from JWT). Required for authlib.
"""
user_id = claims.get("sub")
if not user_id:
return None
with flask.current_app.db.session as session:
return session.query(User).filter_by(id=user_id).first()
def validate_token_request(self):
"""
Override over authlib to allow public clients to use refresh tokens.
"""
client = self.authenticate_token_endpoint_client()
if not client.check_grant_type(self.GRANT_TYPE):
raise UnauthorizedClientError("invalid grant type")
self.request.client = client
self.authenticate_token_endpoint_client()
token = self._validate_request_token()
self._validate_token_scope(token)
self.request.credential = token
def validate_access_token_request(self):
"""
Override the parent method from authlib to not fail immediately for
public clients.
"""
client = self.authenticate_token_endpoint_client()
if not client.check_grant_type(self.GRANT_TYPE):
raise UnauthorizedClientError(uri=self.uri)
self._authenticated_client = client
refresh_token = self.params.get("refresh_token")
if refresh_token is None:
raise InvalidRequestError(
'Missing "refresh_token" in request.', uri=self.uri
)
refresh_claims = self.authenticate_refresh_token(refresh_token)
if not refresh_claims:
raise InvalidRequestError(
'Invalid "refresh_token" in request.', uri=self.uri
)
scope = self.params.get("scope")
if scope:
original_scope = refresh_claims["scope"]
if not original_scope:
raise InvalidScopeError(uri=self.uri)
original_scope = set(scope_to_list(original_scope))
if not original_scope.issuperset(set(scope_to_list(scope))):
raise InvalidScopeError(uri=self.uri)
self._authenticated_token = refresh_claims
def create_token_response(self):
"""
OVERRIDES method from authlib.
Docs from authlib:
If valid and authorized, the authorization server issues an access
token as described in Section 5.1. If the request failed
verification or is invalid, the authorization server returns an
error response as described in Section 5.2.
"""
credential = self.request.credential
user = self.authenticate_user(credential)
if not user:
raise InvalidRequestError('There is no "user" for this token.')
scope = self.request.scope
if not scope:
scope = credential["aud"]
client = self.request.client
expires_in = credential["exp"]
token = self.generate_token(
client, self.GRANT_TYPE, user=user, expires_in=expires_in, scope=scope
)
# TODO
flask.current_app.logger.info("")
self.request.user = user
self.server.save_token(token, self.request)
token = self.process_token(token, self.request)
return 200, token, self.TOKEN_RESPONSE_HEADER
| import bcrypt
from authlib.specs.rfc6749.errors import (
InvalidClientError,
InvalidRequestError,
InvalidScopeError,
UnauthorizedClientError,
)
from authlib.specs.rfc6749.grants import RefreshTokenGrant as AuthlibRefreshTokenGrant
from authlib.specs.rfc6749.util import scope_to_list
import flask
from fence.jwt.blacklist import is_token_blacklisted
from fence.jwt.errors import JWTError
from fence.jwt.validate import validate_jwt
from fence.models import ClientAuthType, User
class RefreshTokenGrant(AuthlibRefreshTokenGrant):
"""
Implement the refresh token grant which the OIDC provider will use.
This class both implements some methods required by authlib, and overrides
others to change the default behavior from authlib; see method docstrings
for details.
NOTE: ``self._authenticated_token`` is the refresh token claims as a
dictionary; ``self.params['refresh_token']`` is the actual string.
"""
TOKEN_ENDPOINT_AUTH_METHODS = [auth_type.value for auth_type in ClientAuthType]
def authenticate_refresh_token(self, refresh_token):
"""
Validate a refresh token.
Required to implement this method for authlib.
Args:
refresh_token (str): refresh token as from a request
Return:
dict: the claims from the validated token
"""
try:
if is_token_blacklisted(refresh_token):
return
except JWTError:
return
return validate_jwt(refresh_token, purpose="refresh")
def create_access_token(self, token, client, authenticated_token):
"""
Authlib requires the implementation of this method to save the token.
However, fence does not save the access tokens to a database, so just
return the original token again.
"""
return token
@staticmethod
def authenticate_user(claims):
"""
Return user from the claims (decoded from JWT). Required for authlib.
"""
user_id = claims.get("sub")
if not user_id:
return None
with flask.current_app.db.session as session:
return session.query(User).filter_by(id=user_id).first()
def validate_token_request(self):
"""
Override over authlib to allow public clients to use refresh tokens.
"""
client = self.authenticate_token_endpoint_client()
if not client.check_grant_type(self.GRANT_TYPE):
raise UnauthorizedClientError("invalid grant type")
self.request.client = client
self.authenticate_token_endpoint_client()
token = self._validate_request_token()
self._validate_token_scope(token)
self.request.credential = token
def validate_access_token_request(self):
"""
Override the parent method from authlib to not fail immediately for
public clients.
"""
client = self.authenticate_token_endpoint_client()
if not client.check_grant_type(self.GRANT_TYPE):
raise UnauthorizedClientError(uri=self.uri)
self._authenticated_client = client
refresh_token = self.params.get("refresh_token")
if refresh_token is None:
raise InvalidRequestError(
'Missing "refresh_token" in request.', uri=self.uri
)
refresh_claims = self.authenticate_refresh_token(refresh_token)
if not refresh_claims:
raise InvalidRequestError(
'Invalid "refresh_token" in request.', uri=self.uri
)
scope = self.params.get("scope")
if scope:
original_scope = refresh_claims["scope"]
if not original_scope:
raise InvalidScopeError(uri=self.uri)
original_scope = set(scope_to_list(original_scope))
if not original_scope.issuperset(set(scope_to_list(scope))):
raise InvalidScopeError(uri=self.uri)
self._authenticated_token = refresh_claims
def create_token_response(self):
"""
OVERRIDES method from authlib.
Docs from authlib:
If valid and authorized, the authorization server issues an access
token as described in Section 5.1. If the request failed
verification or is invalid, the authorization server returns an
error response as described in Section 5.2.
"""
credential = self.request.credential
user = self.authenticate_user(credential)
if not user:
raise InvalidRequestError('There is no "user" for this token.')
scope = self.request.scope
if not scope:
scope = credential["aud"]
client = self.request.client
expires_in = credential["exp"]
token = self.generate_token(
client, self.GRANT_TYPE, user=user, expires_in=expires_in, scope=scope
)
# TODO
flask.current_app.logger.info("")
self.request.user = user
self.server.save_token(token, self.request)
token = self.process_token(token, self.request)
return 200, token, self.TOKEN_RESPONSE_HEADER | en | 0.83001 | Implement the refresh token grant which the OIDC provider will use. This class both implements some methods required by authlib, and overrides others to change the default behavior from authlib; see method docstrings for details. NOTE: ``self._authenticated_token`` is the refresh token claims as a dictionary; ``self.params['refresh_token']`` is the actual string. Validate a refresh token. Required to implement this method for authlib. Args: refresh_token (str): refresh token as from a request Return: dict: the claims from the validated token Authlib requires the implementation of this method to save the token. However, fence does not save the access tokens to a database, so just return the original token again. Return user from the claims (decoded from JWT). Required for authlib. Override over authlib to allow public clients to use refresh tokens. Override the parent method from authlib to not fail immediately for public clients. OVERRIDES method from authlib. Docs from authlib: If valid and authorized, the authorization server issues an access token as described in Section 5.1. If the request failed verification or is invalid, the authorization server returns an error response as described in Section 5.2. # TODO | 2.387684 | 2 |
example/6.json_request.py | ToteBrick/Tornado | 0 | 6612822 | <reponame>ToteBrick/Tornado<gh_stars>0
# -*- coding:utf-8 -*-
import json
from tornado.web import Application, RequestHandler
from tornado.ioloop import IOLoop
class IndexHandler(RequestHandler):
def get(self):
print(self.request)
json_str = {"username": "admin", "password": "<PASSWORD>"}
# self.write(json.dumps(json_str))
self.write(json_str)
if __name__ == "__main__":
app = Application([(r"/", IndexHandler)])
app.listen(8001)
IOLoop.current().start()
| # -*- coding:utf-8 -*-
import json
from tornado.web import Application, RequestHandler
from tornado.ioloop import IOLoop
class IndexHandler(RequestHandler):
def get(self):
print(self.request)
json_str = {"username": "admin", "password": "<PASSWORD>"}
# self.write(json.dumps(json_str))
self.write(json_str)
if __name__ == "__main__":
app = Application([(r"/", IndexHandler)])
app.listen(8001)
IOLoop.current().start() | en | 0.360706 | # -*- coding:utf-8 -*- # self.write(json.dumps(json_str)) | 2.30385 | 2 |
exer601.py | profnssorg/valmorMantelli1 | 0 | 6612823 | ###Titulo: Lista
###Função: Este programa modifica a listagem 6.6 para ler 7 notas ao invés de 5
###Autor: <NAME>.
###Data: 27/12/2018
###Versão: 0.0.2
### Declaração de variáve
notas = [0, 0, 0, 0, 0, 0, 0, 0]
soma = 0
x = 0
### Atribuição de valor e processamento
while x < 7:
notas [x] = float(input ("Nota %d: " % x))
soma = soma + notas [x]
x += 1
x = 0
while x < 7:
### Saída
print("Nota %d: %6.2f" % (x, notas [x]))
x +=1
print("Média: %6.2f" % (soma / x))
| ###Titulo: Lista
###Função: Este programa modifica a listagem 6.6 para ler 7 notas ao invés de 5
###Autor: <NAME>.
###Data: 27/12/2018
###Versão: 0.0.2
### Declaração de variáve
notas = [0, 0, 0, 0, 0, 0, 0, 0]
soma = 0
x = 0
### Atribuição de valor e processamento
while x < 7:
notas [x] = float(input ("Nota %d: " % x))
soma = soma + notas [x]
x += 1
x = 0
while x < 7:
### Saída
print("Nota %d: %6.2f" % (x, notas [x]))
x +=1
print("Média: %6.2f" % (soma / x))
| pt | 0.979182 | ###Titulo: Lista ###Função: Este programa modifica a listagem 6.6 para ler 7 notas ao invés de 5 ###Autor: <NAME>. ###Data: 27/12/2018 ###Versão: 0.0.2 ### Declaração de variáve ### Atribuição de valor e processamento ### Saída | 3.828294 | 4 |
sysdfiles/service_file.py | ghuband/sysdfiles | 0 | 6612824 | from .unit_file import UnitFile
# =============================================================================
# ServiceFile
# =============================================================================
class ServiceFile(UnitFile):
def __init__(self, file_name=''):
UnitFile.__init__(self, file_name)
self.add_properties('service',
[['bus_name'],
['exec_reload', 'l', '', 1],
['exec_start', 'l', '', 1],
['exec_start_post', 'l', '', 1],
['exec_start_pre', 'l', '', 1],
['exec_stop', 'l', '', 1],
['exec_stop_post', 'l', '', 1],
['file_descriptor_store_max', 'i'],
['guess_main_pid', 'b'],
['non_blocking', 'b'],
['notify_access'],
['permissions_start_only', 'b'],
['pid_file'],
['remain_after_exit', 'b'],
['restart'],
['restart_force_exit_status', 'l'],
['restart_prevent_exit_status', 'l'],
['restart_sec', 'ns'],
['root_directory_start_only', 'b'],
['runtime_max_sec', 'ns'],
['sockets', 'l', ' ', 3],
['success_exit_status', 'l'],
['timeout_sec', 'ns'],
['timeout_start_sec', 'ns'],
['timeout_stop_sec', 'ns'],
['type'],
['usb_function_descriptors'],
['usb_function_strings'],
['watchdog_sec', 'ns']])
self.add_exec_properties()
self.add_kill_properties()
self.add_resource_control_properties()
| from .unit_file import UnitFile
# =============================================================================
# ServiceFile
# =============================================================================
class ServiceFile(UnitFile):
def __init__(self, file_name=''):
UnitFile.__init__(self, file_name)
self.add_properties('service',
[['bus_name'],
['exec_reload', 'l', '', 1],
['exec_start', 'l', '', 1],
['exec_start_post', 'l', '', 1],
['exec_start_pre', 'l', '', 1],
['exec_stop', 'l', '', 1],
['exec_stop_post', 'l', '', 1],
['file_descriptor_store_max', 'i'],
['guess_main_pid', 'b'],
['non_blocking', 'b'],
['notify_access'],
['permissions_start_only', 'b'],
['pid_file'],
['remain_after_exit', 'b'],
['restart'],
['restart_force_exit_status', 'l'],
['restart_prevent_exit_status', 'l'],
['restart_sec', 'ns'],
['root_directory_start_only', 'b'],
['runtime_max_sec', 'ns'],
['sockets', 'l', ' ', 3],
['success_exit_status', 'l'],
['timeout_sec', 'ns'],
['timeout_start_sec', 'ns'],
['timeout_stop_sec', 'ns'],
['type'],
['usb_function_descriptors'],
['usb_function_strings'],
['watchdog_sec', 'ns']])
self.add_exec_properties()
self.add_kill_properties()
self.add_resource_control_properties()
| en | 0.343589 | # ============================================================================= # ServiceFile # ============================================================================= | 2.177864 | 2 |
fugue_dask/registry.py | kvnkho/fugue | 547 | 6612825 | <reponame>kvnkho/fugue
import inspect
from typing import Any, Optional
import dask.dataframe as dd
from fugue import DataFrame, register_execution_engine
from fugue._utils.interfaceless import (
DataFrameParam,
ExecutionEngineParam,
SimpleAnnotationConverter,
register_annotation_converter,
)
from fugue.workflow import register_raw_df_type
from fugue_dask.execution_engine import DaskExecutionEngine
def register() -> None:
"""Register Dask Execution Engine
.. note::
This function is automatically called when you do
>>> import fugue_dask
"""
_register_raw_dataframes()
_register_engines()
_register_annotation_converters()
def _register_raw_dataframes() -> None:
register_raw_df_type(dd.DataFrame)
def _register_engines() -> None:
register_execution_engine(
"dask",
lambda conf, **kwargs: DaskExecutionEngine(conf=conf),
on_dup="ignore",
)
def _register_annotation_converters() -> None:
register_annotation_converter(
0.8,
SimpleAnnotationConverter(
DaskExecutionEngine,
lambda param: _DaskExecutionEngineParam(param),
),
)
register_annotation_converter(
0.8,
SimpleAnnotationConverter(
dd.DataFrame, lambda param: _DaskDataFrameParam(param)
),
)
class _DaskExecutionEngineParam(ExecutionEngineParam):
def __init__(
self,
param: Optional[inspect.Parameter],
):
super().__init__(
param, annotation="DaskExecutionEngine", engine_type=DaskExecutionEngine
)
class _DaskDataFrameParam(DataFrameParam):
def __init__(self, param: Optional[inspect.Parameter]):
super().__init__(param, annotation="dask.dataframe.DataFrame")
def to_input_data(self, df: DataFrame, ctx: Any) -> Any:
assert isinstance(ctx, DaskExecutionEngine)
return ctx.to_df(df).native
def to_output_df(self, output: Any, schema: Any, ctx: Any) -> DataFrame:
assert isinstance(output, dd.DataFrame)
assert isinstance(ctx, DaskExecutionEngine)
return ctx.to_df(output, schema=schema)
def count(self, df: DataFrame) -> int: # pragma: no cover
raise NotImplementedError("not allowed")
| import inspect
from typing import Any, Optional
import dask.dataframe as dd
from fugue import DataFrame, register_execution_engine
from fugue._utils.interfaceless import (
DataFrameParam,
ExecutionEngineParam,
SimpleAnnotationConverter,
register_annotation_converter,
)
from fugue.workflow import register_raw_df_type
from fugue_dask.execution_engine import DaskExecutionEngine
def register() -> None:
"""Register Dask Execution Engine
.. note::
This function is automatically called when you do
>>> import fugue_dask
"""
_register_raw_dataframes()
_register_engines()
_register_annotation_converters()
def _register_raw_dataframes() -> None:
register_raw_df_type(dd.DataFrame)
def _register_engines() -> None:
register_execution_engine(
"dask",
lambda conf, **kwargs: DaskExecutionEngine(conf=conf),
on_dup="ignore",
)
def _register_annotation_converters() -> None:
register_annotation_converter(
0.8,
SimpleAnnotationConverter(
DaskExecutionEngine,
lambda param: _DaskExecutionEngineParam(param),
),
)
register_annotation_converter(
0.8,
SimpleAnnotationConverter(
dd.DataFrame, lambda param: _DaskDataFrameParam(param)
),
)
class _DaskExecutionEngineParam(ExecutionEngineParam):
def __init__(
self,
param: Optional[inspect.Parameter],
):
super().__init__(
param, annotation="DaskExecutionEngine", engine_type=DaskExecutionEngine
)
class _DaskDataFrameParam(DataFrameParam):
def __init__(self, param: Optional[inspect.Parameter]):
super().__init__(param, annotation="dask.dataframe.DataFrame")
def to_input_data(self, df: DataFrame, ctx: Any) -> Any:
assert isinstance(ctx, DaskExecutionEngine)
return ctx.to_df(df).native
def to_output_df(self, output: Any, schema: Any, ctx: Any) -> DataFrame:
assert isinstance(output, dd.DataFrame)
assert isinstance(ctx, DaskExecutionEngine)
return ctx.to_df(output, schema=schema)
def count(self, df: DataFrame) -> int: # pragma: no cover
raise NotImplementedError("not allowed") | en | 0.709797 | Register Dask Execution Engine .. note:: This function is automatically called when you do >>> import fugue_dask # pragma: no cover | 2.422721 | 2 |
app/__init__.py | im-so-sorry/barad-dur | 0 | 6612826 | from apps.core.celery import *
| from apps.core.celery import *
| none | 1 | 1.014471 | 1 | |
language/bert/sequene_parallel/model/layers/__init__.py | huxin711/ColossalAI-Examples | 39 | 6612827 | from .embedding import VocabEmbedding, Embedding
from .bert_layer import BertLayer
from .head import BertDualHead
from .preprocess import PreProcessor
| from .embedding import VocabEmbedding, Embedding
from .bert_layer import BertLayer
from .head import BertDualHead
from .preprocess import PreProcessor
| none | 1 | 1.203255 | 1 | |
src/commands/wishlist.py | SCProjectsLKOrg/TorrentSeedr | 0 | 6612828 | <filename>src/commands/wishlist.py
from src.objs import *
from src.functions.floodControl import floodControl
from src.functions.exceptions import exceptions, noAccount
#: View account profile, add new accounts and remove existing accounts
@bot.message_handler(commands=['wishlist'])
def wishlist(message, userLanguage=None):
userId = message.from_user.id
if floodControl(message, userLanguage):
userLanguage = userLanguage or dbSql.getSetting(userId, 'language')
ac = dbSql.getDefaultAc(userId)
#! If user has an account
if ac:
account = Seedr(
token=ac['token'],
callbackFunc=lambda token: dbSql.updateAccount(
token, userId, ac['accountId']
)
)
response = account.getSettings()
#! On success
if 'error' not in response:
text = ''
# Torrent Hunt wishlist
wishlistItems = dbSql.getWishLists(userId)
if wishlistItems:
for item in wishlistItems:
text += f"🌟 <b>{item['title']}</b>\n\nAdd: /addTorrent_1{item['wishlistId']}\nRemove: /removeWL_1{item['wishlistId']}\n\n"
# Seedr Wishlist
if response['result'] is True:
if response['account']['wishlist']:
for wish in response['account']['wishlist']:
text += f"⭐ <b>{wish['title']}</b>\n\nAdd: /addTorrent_0{wish['id']}\nRemove: /removeWL_0{wish['id']}\n\n"
if text:
bot.send_message(message.chat.id, text[:4000])
else:
bot.send_message(message.chat.id, language["noWishlist"][userLanguage])
else:
exceptions(message, response, ac, userLanguage)
#! If no accounts
else:
noAccount(message, userLanguage)
| <filename>src/commands/wishlist.py
from src.objs import *
from src.functions.floodControl import floodControl
from src.functions.exceptions import exceptions, noAccount
#: View account profile, add new accounts and remove existing accounts
@bot.message_handler(commands=['wishlist'])
def wishlist(message, userLanguage=None):
userId = message.from_user.id
if floodControl(message, userLanguage):
userLanguage = userLanguage or dbSql.getSetting(userId, 'language')
ac = dbSql.getDefaultAc(userId)
#! If user has an account
if ac:
account = Seedr(
token=ac['token'],
callbackFunc=lambda token: dbSql.updateAccount(
token, userId, ac['accountId']
)
)
response = account.getSettings()
#! On success
if 'error' not in response:
text = ''
# Torrent Hunt wishlist
wishlistItems = dbSql.getWishLists(userId)
if wishlistItems:
for item in wishlistItems:
text += f"🌟 <b>{item['title']}</b>\n\nAdd: /addTorrent_1{item['wishlistId']}\nRemove: /removeWL_1{item['wishlistId']}\n\n"
# Seedr Wishlist
if response['result'] is True:
if response['account']['wishlist']:
for wish in response['account']['wishlist']:
text += f"⭐ <b>{wish['title']}</b>\n\nAdd: /addTorrent_0{wish['id']}\nRemove: /removeWL_0{wish['id']}\n\n"
if text:
bot.send_message(message.chat.id, text[:4000])
else:
bot.send_message(message.chat.id, language["noWishlist"][userLanguage])
else:
exceptions(message, response, ac, userLanguage)
#! If no accounts
else:
noAccount(message, userLanguage)
| en | 0.430351 | #: View account profile, add new accounts and remove existing accounts #! If user has an account #! On success # Torrent Hunt wishlist # Seedr Wishlist #! If no accounts | 2.460992 | 2 |
aidants_connect_web/migrations/0001_202101_stable_schema.py | betagouv/Aidants_Connect | 16 | 6612829 | <filename>aidants_connect_web/migrations/0001_202101_stable_schema.py
# Generated by Django 3.1.1 on 2021-01-11 16:43
import aidants_connect_web.models
from django.conf import settings
import django.contrib.auth.validators
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
("auth", "0012_alter_user_first_name_max_length"),
]
operations = [
migrations.CreateModel(
name="Aidant",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(
blank=True, null=True, verbose_name="last login"
),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
(
"username",
models.CharField(
error_messages={
"unique": "A user with that username already exists."
},
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[
django.contrib.auth.validators.UnicodeUsernameValidator()
],
verbose_name="username",
),
),
(
"first_name",
models.CharField(
blank=True, max_length=150, verbose_name="first name"
),
),
(
"last_name",
models.CharField(
blank=True, max_length=150, verbose_name="last name"
),
),
(
"email",
models.EmailField(
blank=True, max_length=254, verbose_name="email address"
),
),
(
"is_staff",
models.BooleanField(
default=False,
help_text="Designates whether the user can log into this admin site.",
verbose_name="staff status",
),
),
(
"is_active",
models.BooleanField(
default=True,
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),
(
"date_joined",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="date joined"
),
),
("profession", models.TextField()),
(
"groups",
models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
],
options={
"verbose_name": "aidant",
},
managers=[
("objects", aidants_connect_web.models.AidantManager()),
],
),
migrations.CreateModel(
name="Autorisation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"demarche",
models.CharField(
choices=[
("papiers", "Papiers - Citoyenneté"),
("famille", "Famille"),
("social", "Social - Santé"),
("travail", "Travail"),
("logement", "Logement"),
("transports", "Transports"),
("argent", "Argent"),
("justice", "Justice"),
("etranger", "Étranger"),
("loisirs", "Loisirs"),
],
max_length=16,
),
),
(
"revocation_date",
models.DateTimeField(
blank=True, null=True, verbose_name="Date de révocation"
),
),
("last_renewal_token", models.TextField(default="No token provided")),
],
),
migrations.CreateModel(
name="Organisation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"name",
models.TextField(default="No name provided", verbose_name="Nom"),
),
(
"siret",
models.PositiveIntegerField(default=1, verbose_name="N° SIRET"),
),
(
"address",
models.TextField(
default="No address provided", verbose_name="Adresse"
),
),
],
),
migrations.CreateModel(
name="Usager",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("given_name", models.CharField(max_length=255, verbose_name="Prénom")),
("family_name", models.CharField(max_length=255, verbose_name="Nom")),
("preferred_username", models.CharField(blank=True, max_length=255)),
(
"gender",
models.CharField(
choices=[("female", "Femme"), ("male", "Homme")],
default="female",
max_length=6,
verbose_name="Genre",
),
),
("birthdate", models.DateField(verbose_name="Date de naissance")),
(
"birthplace",
models.CharField(
blank=True,
max_length=5,
null=True,
verbose_name="Lieu de naissance",
),
),
(
"birthcountry",
models.CharField(
default="99100", max_length=5, verbose_name="Pays de naissance"
),
),
("sub", models.TextField(unique=True)),
(
"email",
models.EmailField(
default="<EMAIL>",
max_length=254,
),
),
(
"creation_date",
models.DateTimeField(
default=django.utils.timezone.now,
verbose_name="Date de création",
),
),
],
options={
"ordering": ["family_name", "given_name"],
},
),
migrations.CreateModel(
name="Mandat",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"creation_date",
models.DateTimeField(
default=django.utils.timezone.now,
verbose_name="Date de création",
),
),
(
"expiration_date",
models.DateTimeField(
default=django.utils.timezone.now,
verbose_name="Date d'expiration",
),
),
(
"duree_keyword",
models.CharField(
choices=[
("SHORT", "pour une durée de 1 jour"),
("LONG", "pour une durée de 1 an"),
(
"EUS_03_20",
"jusqu’à la fin de l’état d’urgence sanitaire ",
),
],
max_length=16,
null=True,
verbose_name="Durée",
),
),
(
"is_remote",
models.BooleanField(
default=False, verbose_name="Signé à distance ?"
),
),
(
"organisation",
models.ForeignKey(
default=aidants_connect_web.models.get_staff_organisation_name_id,
on_delete=django.db.models.deletion.PROTECT,
related_name="mandats",
to="aidants_connect_web.organisation",
),
),
(
"usager",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="mandats",
to="aidants_connect_web.usager",
),
),
],
),
migrations.CreateModel(
name="Journal",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"action",
models.CharField(
choices=[
("connect_aidant", "Connexion d'un aidant"),
(
"activity_check_aidant",
"Reprise de connexion d'un aidant",
),
("franceconnect_usager", "FranceConnexion d'un usager"),
(
"update_email_usager",
"L'email de l'usager a été modifié",
),
("create_attestation", "Création d'une attestation"),
("create_autorisation", "Création d'une autorisation"),
("use_autorisation", "Utilisation d'une autorisation"),
("cancel_autorisation", "Révocation d'une autorisation"),
],
max_length=30,
),
),
("creation_date", models.DateTimeField(auto_now_add=True)),
("demarche", models.CharField(blank=True, max_length=100, null=True)),
("duree", models.IntegerField(blank=True, null=True)),
("access_token", models.TextField(blank=True, null=True)),
("autorisation", models.IntegerField(blank=True, null=True)),
(
"attestation_hash",
models.CharField(blank=True, max_length=100, null=True),
),
("additional_information", models.TextField(blank=True, null=True)),
("is_remote_mandat", models.BooleanField(default=False)),
(
"aidant",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="journal_entries",
to=settings.AUTH_USER_MODEL,
),
),
(
"usager",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="journal_entries",
to="aidants_connect_web.usager",
),
),
],
options={
"verbose_name": "entrée de journal",
"verbose_name_plural": "entrées de journal",
},
),
migrations.CreateModel(
name="Connection",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("state", models.TextField()),
("nonce", models.TextField(default="No Nonce Provided")),
(
"connection_type",
models.CharField(
choices=[("FS", "FC as FS"), ("FI", "FC as FI")],
default="FI",
max_length=2,
),
),
(
"demarches",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(default="No démarche"),
null=True,
size=None,
),
),
(
"duree_keyword",
models.CharField(
choices=[
("SHORT", "pour une durée de 1 jour"),
("LONG", "pour une durée de 1 an"),
(
"EUS_03_20",
"jusqu’à la fin de l’état d’urgence sanitaire ",
),
],
max_length=16,
null=True,
),
),
("mandat_is_remote", models.BooleanField(default=False)),
(
"expires_on",
models.DateTimeField(
default=aidants_connect_web.models.default_connection_expiration_date
),
),
("access_token", models.TextField(default="No token provided")),
("code", models.TextField()),
("demarche", models.TextField(default="No demarche provided")),
("complete", models.BooleanField(default=False)),
(
"aidant",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="connections",
to=settings.AUTH_USER_MODEL,
),
),
(
"autorisation",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="connections",
to="aidants_connect_web.autorisation",
),
),
(
"usager",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="connections",
to="aidants_connect_web.usager",
),
),
],
options={
"verbose_name": "connexion",
},
),
migrations.AddField(
model_name="autorisation",
name="mandat",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="autorisations",
to="aidants_connect_web.mandat",
),
),
migrations.AddField(
model_name="aidant",
name="organisation",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="aidants",
to="aidants_connect_web.organisation",
),
),
migrations.AddField(
model_name="aidant",
name="user_permissions",
field=models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
]
| <filename>aidants_connect_web/migrations/0001_202101_stable_schema.py
# Generated by Django 3.1.1 on 2021-01-11 16:43
import aidants_connect_web.models
from django.conf import settings
import django.contrib.auth.validators
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
("auth", "0012_alter_user_first_name_max_length"),
]
operations = [
migrations.CreateModel(
name="Aidant",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(
blank=True, null=True, verbose_name="last login"
),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
(
"username",
models.CharField(
error_messages={
"unique": "A user with that username already exists."
},
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[
django.contrib.auth.validators.UnicodeUsernameValidator()
],
verbose_name="username",
),
),
(
"first_name",
models.CharField(
blank=True, max_length=150, verbose_name="first name"
),
),
(
"last_name",
models.CharField(
blank=True, max_length=150, verbose_name="last name"
),
),
(
"email",
models.EmailField(
blank=True, max_length=254, verbose_name="email address"
),
),
(
"is_staff",
models.BooleanField(
default=False,
help_text="Designates whether the user can log into this admin site.",
verbose_name="staff status",
),
),
(
"is_active",
models.BooleanField(
default=True,
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),
(
"date_joined",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="date joined"
),
),
("profession", models.TextField()),
(
"groups",
models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
],
options={
"verbose_name": "aidant",
},
managers=[
("objects", aidants_connect_web.models.AidantManager()),
],
),
migrations.CreateModel(
name="Autorisation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"demarche",
models.CharField(
choices=[
("papiers", "Papiers - Citoyenneté"),
("famille", "Famille"),
("social", "Social - Santé"),
("travail", "Travail"),
("logement", "Logement"),
("transports", "Transports"),
("argent", "Argent"),
("justice", "Justice"),
("etranger", "Étranger"),
("loisirs", "Loisirs"),
],
max_length=16,
),
),
(
"revocation_date",
models.DateTimeField(
blank=True, null=True, verbose_name="Date de révocation"
),
),
("last_renewal_token", models.TextField(default="No token provided")),
],
),
migrations.CreateModel(
name="Organisation",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"name",
models.TextField(default="No name provided", verbose_name="Nom"),
),
(
"siret",
models.PositiveIntegerField(default=1, verbose_name="N° SIRET"),
),
(
"address",
models.TextField(
default="No address provided", verbose_name="Adresse"
),
),
],
),
migrations.CreateModel(
name="Usager",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("given_name", models.CharField(max_length=255, verbose_name="Prénom")),
("family_name", models.CharField(max_length=255, verbose_name="Nom")),
("preferred_username", models.CharField(blank=True, max_length=255)),
(
"gender",
models.CharField(
choices=[("female", "Femme"), ("male", "Homme")],
default="female",
max_length=6,
verbose_name="Genre",
),
),
("birthdate", models.DateField(verbose_name="Date de naissance")),
(
"birthplace",
models.CharField(
blank=True,
max_length=5,
null=True,
verbose_name="Lieu de naissance",
),
),
(
"birthcountry",
models.CharField(
default="99100", max_length=5, verbose_name="Pays de naissance"
),
),
("sub", models.TextField(unique=True)),
(
"email",
models.EmailField(
default="<EMAIL>",
max_length=254,
),
),
(
"creation_date",
models.DateTimeField(
default=django.utils.timezone.now,
verbose_name="Date de création",
),
),
],
options={
"ordering": ["family_name", "given_name"],
},
),
migrations.CreateModel(
name="Mandat",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"creation_date",
models.DateTimeField(
default=django.utils.timezone.now,
verbose_name="Date de création",
),
),
(
"expiration_date",
models.DateTimeField(
default=django.utils.timezone.now,
verbose_name="Date d'expiration",
),
),
(
"duree_keyword",
models.CharField(
choices=[
("SHORT", "pour une durée de 1 jour"),
("LONG", "pour une durée de 1 an"),
(
"EUS_03_20",
"jusqu’à la fin de l’état d’urgence sanitaire ",
),
],
max_length=16,
null=True,
verbose_name="Durée",
),
),
(
"is_remote",
models.BooleanField(
default=False, verbose_name="Signé à distance ?"
),
),
(
"organisation",
models.ForeignKey(
default=aidants_connect_web.models.get_staff_organisation_name_id,
on_delete=django.db.models.deletion.PROTECT,
related_name="mandats",
to="aidants_connect_web.organisation",
),
),
(
"usager",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="mandats",
to="aidants_connect_web.usager",
),
),
],
),
migrations.CreateModel(
name="Journal",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"action",
models.CharField(
choices=[
("connect_aidant", "Connexion d'un aidant"),
(
"activity_check_aidant",
"Reprise de connexion d'un aidant",
),
("franceconnect_usager", "FranceConnexion d'un usager"),
(
"update_email_usager",
"L'email de l'usager a été modifié",
),
("create_attestation", "Création d'une attestation"),
("create_autorisation", "Création d'une autorisation"),
("use_autorisation", "Utilisation d'une autorisation"),
("cancel_autorisation", "Révocation d'une autorisation"),
],
max_length=30,
),
),
("creation_date", models.DateTimeField(auto_now_add=True)),
("demarche", models.CharField(blank=True, max_length=100, null=True)),
("duree", models.IntegerField(blank=True, null=True)),
("access_token", models.TextField(blank=True, null=True)),
("autorisation", models.IntegerField(blank=True, null=True)),
(
"attestation_hash",
models.CharField(blank=True, max_length=100, null=True),
),
("additional_information", models.TextField(blank=True, null=True)),
("is_remote_mandat", models.BooleanField(default=False)),
(
"aidant",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="journal_entries",
to=settings.AUTH_USER_MODEL,
),
),
(
"usager",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="journal_entries",
to="aidants_connect_web.usager",
),
),
],
options={
"verbose_name": "entrée de journal",
"verbose_name_plural": "entrées de journal",
},
),
migrations.CreateModel(
name="Connection",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("state", models.TextField()),
("nonce", models.TextField(default="No Nonce Provided")),
(
"connection_type",
models.CharField(
choices=[("FS", "FC as FS"), ("FI", "FC as FI")],
default="FI",
max_length=2,
),
),
(
"demarches",
django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(default="No démarche"),
null=True,
size=None,
),
),
(
"duree_keyword",
models.CharField(
choices=[
("SHORT", "pour une durée de 1 jour"),
("LONG", "pour une durée de 1 an"),
(
"EUS_03_20",
"jusqu’à la fin de l’état d’urgence sanitaire ",
),
],
max_length=16,
null=True,
),
),
("mandat_is_remote", models.BooleanField(default=False)),
(
"expires_on",
models.DateTimeField(
default=aidants_connect_web.models.default_connection_expiration_date
),
),
("access_token", models.TextField(default="No token provided")),
("code", models.TextField()),
("demarche", models.TextField(default="No demarche provided")),
("complete", models.BooleanField(default=False)),
(
"aidant",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="connections",
to=settings.AUTH_USER_MODEL,
),
),
(
"autorisation",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="connections",
to="aidants_connect_web.autorisation",
),
),
(
"usager",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="connections",
to="aidants_connect_web.usager",
),
),
],
options={
"verbose_name": "connexion",
},
),
migrations.AddField(
model_name="autorisation",
name="mandat",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="autorisations",
to="aidants_connect_web.mandat",
),
),
migrations.AddField(
model_name="aidant",
name="organisation",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="aidants",
to="aidants_connect_web.organisation",
),
),
migrations.AddField(
model_name="aidant",
name="user_permissions",
field=models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
]
| en | 0.813184 | # Generated by Django 3.1.1 on 2021-01-11 16:43 | 2.099229 | 2 |
code1/car.py | ManthanKeim/code-attempt1 | 0 | 6612830 | <reponame>ManthanKeim/code-attempt1
class car:
slogan = None
population=0
def __init__(self, name="Default"):
self.name = name
car.population+=1
def start(self,sound):
print(sound)
def set_slogan(self, data):
car.slogan= data
print(car.slogan)
mycar = car("HELLO")
print(mycar.name)
mycar.start("BLOB BLOLB")
mycar.slogan
bhs=car("\n")
print(bhs.name)
mycar.set_slogan("DSADAAS")
car.slogan="dafds"
print(car.slogan)
print(car.population)
| class car:
slogan = None
population=0
def __init__(self, name="Default"):
self.name = name
car.population+=1
def start(self,sound):
print(sound)
def set_slogan(self, data):
car.slogan= data
print(car.slogan)
mycar = car("HELLO")
print(mycar.name)
mycar.start("BLOB BLOLB")
mycar.slogan
bhs=car("\n")
print(bhs.name)
mycar.set_slogan("DSADAAS")
car.slogan="dafds"
print(car.slogan)
print(car.population) | none | 1 | 3.505239 | 4 | |
pants/ingredients/tests/test_ingredient.py | osagga/PriceAndNutritionTrackingSystem | 72 | 6612831 |
from decimal import Decimal
from django.test import TestCase
from ingredients.models import Ingredient
class IngredientTestCase(TestCase):
def setUp(self):
Ingredient.objects.create(
name="milk",
kilojoules=10000,
protein=40,
fibre=0,
carbohydrate=50,
sugar=40,
fat=20,
serving=250,
)
def test_nutrition_data(self):
"""Test nutrition data returned correctly, including ratios, per serve etc"""
milk = Ingredient.objects.get(name='milk')
ndata = milk.nutrition_data
# basic
self.assertEqual(ndata['kilojoules'], 10000)
self.assertEqual(ndata['protein'], 40)
self.assertEqual(ndata['fibre'], 0)
self.assertEqual(ndata['carbohydrate'], 50)
self.assertEqual(ndata['sugar'], 40)
self.assertEqual(ndata['fat'], 20)
# also required
self.assertEqual(ndata['cost'], None)
self.assertEqual(ndata['grams'], 1000) # per-KG
# ratios / calculated
self.assertEqual(ndata['protein_per_j'], 4)
self.assertEqual(ndata['fibre_per_j'], 0)
self.assertEqual(ndata['kilocalories'], Decimal('2390.06'))
|
from decimal import Decimal
from django.test import TestCase
from ingredients.models import Ingredient
class IngredientTestCase(TestCase):
def setUp(self):
Ingredient.objects.create(
name="milk",
kilojoules=10000,
protein=40,
fibre=0,
carbohydrate=50,
sugar=40,
fat=20,
serving=250,
)
def test_nutrition_data(self):
"""Test nutrition data returned correctly, including ratios, per serve etc"""
milk = Ingredient.objects.get(name='milk')
ndata = milk.nutrition_data
# basic
self.assertEqual(ndata['kilojoules'], 10000)
self.assertEqual(ndata['protein'], 40)
self.assertEqual(ndata['fibre'], 0)
self.assertEqual(ndata['carbohydrate'], 50)
self.assertEqual(ndata['sugar'], 40)
self.assertEqual(ndata['fat'], 20)
# also required
self.assertEqual(ndata['cost'], None)
self.assertEqual(ndata['grams'], 1000) # per-KG
# ratios / calculated
self.assertEqual(ndata['protein_per_j'], 4)
self.assertEqual(ndata['fibre_per_j'], 0)
self.assertEqual(ndata['kilocalories'], Decimal('2390.06'))
| en | 0.889592 | Test nutrition data returned correctly, including ratios, per serve etc # basic # also required # per-KG # ratios / calculated | 2.760818 | 3 |
src/mainsite/admin.py | Nyrio/personal-site | 0 | 6612832 | from django.contrib import admin
from mainsite.models import User, BlogPost, BlogComment, Category
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
pass
@admin.register(BlogPost)
class BlogPostAdmin(admin.ModelAdmin):
pass
@admin.register(BlogComment)
class BlogCommentAdmin(admin.ModelAdmin):
pass
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
pass
| from django.contrib import admin
from mainsite.models import User, BlogPost, BlogComment, Category
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
pass
@admin.register(BlogPost)
class BlogPostAdmin(admin.ModelAdmin):
pass
@admin.register(BlogComment)
class BlogCommentAdmin(admin.ModelAdmin):
pass
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
pass
| none | 1 | 1.702736 | 2 | |
src/test/test_launches.py | EmonCodingBackEndf/alshapton8 | 7 | 6612833 |
import sys
sys.path.append('../')
import pytest
import spacexpython
from spacexpython.exceptions import *
from spacexpython.utils import *
from .tutils import *
def test_launches():
launches_data = ''
launches_result = keyOrder(alphaOrder(
readJSONFile('launches/all.json')), 'flight_number')
try:
launches_data = keyOrder(alphaOrder(
spacexpython.launches.alllaunches('', 1)), 'flight_number')
except spacexpython.utils.SpaceXReadTimeOut:
pytest.xfail("Space/X API Read Timed Out")
print("Failure on launches.all")
assert launches_data == launches_result
|
import sys
sys.path.append('../')
import pytest
import spacexpython
from spacexpython.exceptions import *
from spacexpython.utils import *
from .tutils import *
def test_launches():
launches_data = ''
launches_result = keyOrder(alphaOrder(
readJSONFile('launches/all.json')), 'flight_number')
try:
launches_data = keyOrder(alphaOrder(
spacexpython.launches.alllaunches('', 1)), 'flight_number')
except spacexpython.utils.SpaceXReadTimeOut:
pytest.xfail("Space/X API Read Timed Out")
print("Failure on launches.all")
assert launches_data == launches_result
| none | 1 | 2.130125 | 2 | |
admit/test/test_Flow1N.py | astroumd/admit | 4 | 6612834 | <gh_stars>1-10
#! /usr/bin/env python
#
#
# similar to test_FM, but in the official ADMIT environment
# these are meant to be able to run without CASA, ie. in a
# vanilla python environment
#
# you might need to run
# rm ../at/__init__.py ../at/__init__.pyc ; touch ../at/__init__.py
# before, and reset this file using
# dtdGenerator
# if CASA sits in the way
# performance (on nemo2): time test_Flow_5.py > /dev/null
# touch=False touch=True
# 100 ...
# 1000 0.582u 0.096s 0:00.68 98.5% 0.794u 2.267s 0:03.19 95.6%
# 10000 4.004u 0.522s 0:04.57 98.9% 5.401u 22.515s 0:28.56 97.7%
#
#
# (10000,True) is the default bench, thus nemo2 goes in 1:21
# inferno goes in 1:53 (yipes, and /dev/shm didn't help)
# subaru 1:52
#
import sys, os
from admit.AT import AT
import admit.Admit as admit
from admit.at.File_AT import File_AT
from admit.at.Flow1N_AT import Flow1N_AT
if __name__ == '__main__':
n = 3
touch = True
subdir = False
subdir = True
# pick where admit will do its work, any cmdline argument will be the dirname
if len(sys.argv) > 1:
a = admit.Admit(sys.argv[1])
else: # or else the current directory
a = admit.Admit()
print 'Flow11: new admit?',a.new
a1 = File_AT()
i1 = a.addtask(a1)
a1.setkey('file','Flow1N.dat')
a1.setkey('touch',touch)
a2 = Flow1N_AT()
i2 = a.addtask(a2, [(i1,0)])
a2.setkey('n',n)
a2.setkey('touch',touch)
a2.setkey('subdir',subdir)
#
if True:
# continue with a Flow11 for each BDP created by Flow1N
from admit.at.Flow11_AT import Flow11_AT
a.run() # need to run the flow, otherwise #BDP's unknown
n1 = len(a2) # of course n1 = n, but we don't know this
a3 = range(n1) # a list of AT's
i3 = range(n1) # a list of ATID's
for i in range(n1):
a3[i] = Flow11_AT()
i3[i] = a.addtask(a3[i], [(i2,i)])
a3[i].setkey('touch',touch)
#
a.run()
#
a.write()
| #! /usr/bin/env python
#
#
# similar to test_FM, but in the official ADMIT environment
# these are meant to be able to run without CASA, ie. in a
# vanilla python environment
#
# you might need to run
# rm ../at/__init__.py ../at/__init__.pyc ; touch ../at/__init__.py
# before, and reset this file using
# dtdGenerator
# if CASA sits in the way
# performance (on nemo2): time test_Flow_5.py > /dev/null
# touch=False touch=True
# 100 ...
# 1000 0.582u 0.096s 0:00.68 98.5% 0.794u 2.267s 0:03.19 95.6%
# 10000 4.004u 0.522s 0:04.57 98.9% 5.401u 22.515s 0:28.56 97.7%
#
#
# (10000,True) is the default bench, thus nemo2 goes in 1:21
# inferno goes in 1:53 (yipes, and /dev/shm didn't help)
# subaru 1:52
#
import sys, os
from admit.AT import AT
import admit.Admit as admit
from admit.at.File_AT import File_AT
from admit.at.Flow1N_AT import Flow1N_AT
if __name__ == '__main__':
n = 3
touch = True
subdir = False
subdir = True
# pick where admit will do its work, any cmdline argument will be the dirname
if len(sys.argv) > 1:
a = admit.Admit(sys.argv[1])
else: # or else the current directory
a = admit.Admit()
print 'Flow11: new admit?',a.new
a1 = File_AT()
i1 = a.addtask(a1)
a1.setkey('file','Flow1N.dat')
a1.setkey('touch',touch)
a2 = Flow1N_AT()
i2 = a.addtask(a2, [(i1,0)])
a2.setkey('n',n)
a2.setkey('touch',touch)
a2.setkey('subdir',subdir)
#
if True:
# continue with a Flow11 for each BDP created by Flow1N
from admit.at.Flow11_AT import Flow11_AT
a.run() # need to run the flow, otherwise #BDP's unknown
n1 = len(a2) # of course n1 = n, but we don't know this
a3 = range(n1) # a list of AT's
i3 = range(n1) # a list of ATID's
for i in range(n1):
a3[i] = Flow11_AT()
i3[i] = a.addtask(a3[i], [(i2,i)])
a3[i].setkey('touch',touch)
#
a.run()
#
a.write() | en | 0.747566 | #! /usr/bin/env python # # # similar to test_FM, but in the official ADMIT environment # these are meant to be able to run without CASA, ie. in a # vanilla python environment # # you might need to run # rm ../at/__init__.py ../at/__init__.pyc ; touch ../at/__init__.py # before, and reset this file using # dtdGenerator # if CASA sits in the way # performance (on nemo2): time test_Flow_5.py > /dev/null # touch=False touch=True # 100 ... # 1000 0.582u 0.096s 0:00.68 98.5% 0.794u 2.267s 0:03.19 95.6% # 10000 4.004u 0.522s 0:04.57 98.9% 5.401u 22.515s 0:28.56 97.7% # # # (10000,True) is the default bench, thus nemo2 goes in 1:21 # inferno goes in 1:53 (yipes, and /dev/shm didn't help) # subaru 1:52 # # pick where admit will do its work, any cmdline argument will be the dirname # or else the current directory # # continue with a Flow11 for each BDP created by Flow1N # need to run the flow, otherwise #BDP's unknown # of course n1 = n, but we don't know this # a list of AT's # a list of ATID's # # | 2.237532 | 2 |
dibs/lsp.py | hadro/dibs | 17 | 6612835 | <gh_stars>10-100
'''
lsp.py: DIBS interface to LSPs.
Copyright
---------
Copyright (c) 2021 by the California Institute of Technology. This code
is open-source software released under a 3-clause BSD license. Please see the
file "LICENSE" for more information.
'''
from abc import ABC, abstractmethod
from coif import cover_image
from commonpy.network_utils import net
from dataclasses import dataclass
import os
from os.path import realpath, dirname, join, exists, isabs
import pokapi
from pokapi import Folio
import re
from sidetrack import log
from textwrap import wrap
from topi import Tind
from .settings import config, resolved_path
# Classes implementing interface to specific LSPs.
# .............................................................................
@dataclass
class LSPRecord():
'''Common abstraction for records returned by different LSP's.'''
id : str
url : str
title : str
author : str
publisher : str
edition : str
year : str
isbn_issn : str
class LSPInterface(ABC):
'''Abstract interface class for getting a record from an LSP.
All concrete implementations of this class are assumed to have at least
a URL for the API of the server, and may have additional parameters on
a per-class basis.
'''
def __init__(self, url = None):
'''Create an interface for the server at "url".'''
self.url = url
def __repr__(self):
'''Return a string representing this interface object.'''
return "<{} for {}>".format(self.__class__.__name__, self.url)
@abstractmethod
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".'''
pass
class TindInterface(LSPInterface):
'''Interface layer for TIND hosted LSP servers.'''
def __init__(self, url = None, thumbnails_dir = None):
'''Create an interface for the server at "url".'''
self.url = url
self._thumbnails_dir = thumbnails_dir
self._tind = Tind(url)
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".'''
try:
rec = self._tind.item(barcode = barcode).parent
title = rec.title
if rec.subtitle:
title += ': ' + rec.subtitle
log(f'record for {barcode} has id {rec.tind_id} in {self.url}')
thumbnail_file = join(self._thumbnails_dir, barcode + '.jpg')
# Don't overwrite existing images.
if not exists(thumbnail_file):
if rec.thumbnail_url:
save_thumbnail(thumbnail_file, url = rec.thumbnail_url)
elif rec.isbn_issn:
save_thumbnail(thumbnail_file, isbn = rec.isbn_issn)
else:
log(f"{barcode} lacks ISBN & thumbnail URL => no thumbnail")
else:
log(f'thumbnail image already exists in {thumbnail_file}')
return LSPRecord(id = rec.tind_id,
url = rec.tind_url,
title = truncated_title(rec.title),
author = rec.author,
publisher = rec.publisher,
year = rec.year,
edition = rec.edition,
isbn_issn = rec.isbn_issn)
except:
log(f'could not find {barcode} in TIND')
raise ValueError('No such barcode {barcode} in {self.url}')
class FolioInterface(LSPInterface):
'''Interface layer for FOLIO hosted LSP servers.'''
def __init__(self, url = None, token = None, tenant_id = None,
an_prefix = None, page_template = None, thumbnails_dir = None):
'''Create an interface for the server at "url".'''
self.url = url
self._token = token
self._tenant_id = tenant_id
self._an_prefix = an_prefix
self._page_tmpl = page_template
self._thumbnails_dir = thumbnails_dir
self._folio = Folio(okapi_url = url,
okapi_token = token,
tenant_id = tenant_id,
an_prefix = an_prefix)
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".
This will return None if no such entry can be found in FOLIO.
It will raise a ValueError exception if an entry is found but lacks
the 3 most basic metadata fields of title, author and year.
'''
try:
rec = self._folio.record(barcode = barcode)
except pokapi.exceptions.NotFound:
log(f'could not find {barcode} in FOLIO')
return None
log(f'record for {barcode} has id {rec.id}')
if not all([rec.title, rec.author, rec.year]):
log(f'record for {barcode} in FOLIO lacks minimum metadata')
raise ValueError('Got incomplete record for {barcode} in {self.url}')
thumbnail_file = join(self._thumbnails_dir, barcode + '.jpg')
try:
if not exists(thumbnail_file):
if rec.isbn_issn:
save_thumbnail(thumbnail_file, isbn = rec.isbn_issn)
else:
log(f"{rec.id} has no ISBN/ISSN => can't get a thumbnail")
else:
log(f'thumbnail image already exists in {thumbnail_file}')
except Exception as ex:
# Log it but otherwise we won't fail just because of this.
log(f'failed to save thumbnail for {barcode}: ' + str(ex))
url = self._page_tmpl.format(accession_number = rec.accession_number)
return LSPRecord(id = rec.id,
url = url,
title = truncated_title(rec.title),
author = rec.author,
year = rec.year,
publisher = rec.publisher or '',
edition = rec.edition or '',
isbn_issn = rec.isbn_issn or '')
class UnconfiguredInterface(LSPInterface):
'''Dummy interface, for when no LSP is chosen.'''
def __repr__(self):
'''Return a string representing this interface object.'''
return "<{}>".format(self.__class__.__name__)
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".'''
return LSPRecord(id = 'LSP not configured',
url = '',
title = 'LSP not configured',
author = 'LSP not configured',
publisher = 'LSP not configured',
year = 'LSP not configured',
edition = 'LSP not configured',
isbn_issn = '')
# Primary exported class.
# .............................................................................
class LSP(LSPInterface):
'''LSP abstraction class.'''
def __new__(cls, *args, **kwds):
# This implements a Singleton pattern by storing the object we create
# and returning the same one if the class constructor is called again.
lsp = cls.__dict__.get("__lsp_interface__")
if lsp is not None:
log(f'Using previously-created LSP object {str(cls)}')
return lsp
# Read common configuration variables.
thumbnails_dir = resolved_path(config('THUMBNAILS_DIR', section = 'dibs'))
log(f'assuming thumbnails dir is {thumbnails_dir}')
# Select the appropriate interface type and create the object.
lsp_type = config('LSP_TYPE').lower()
if lsp_type == 'folio':
url = config('FOLIO_OKAPI_URL', section = 'folio')
token = config('FOLIO_OKAPI_TOKEN', section = 'folio')
tenant_id = config('FOLIO_OKAPI_TENANT_ID', section = 'folio')
an_prefix = config('FOLIO_ACCESSION_PREFIX', section = 'folio')
page_template = config('EDS_PAGE_TEMPLATE', section = 'folio')
log(f'Using FOLIO URL {url} with tenant id {tenant_id}')
lsp = FolioInterface(url = url,
token = token,
tenant_id = tenant_id,
an_prefix = an_prefix,
page_template = page_template,
thumbnails_dir = thumbnails_dir)
elif lsp_type == 'tind':
url = config('TIND_SERVER_URL', section = 'tind')
log(f'Using TIND URL {url}')
lsp = TindInterface(url, thumbnails_dir = thumbnails_dir)
else:
lsp = UnconfiguredInterface()
# Store the interface object (to implement the Singleton pattern).
cls.__lsp_interface__ = lsp
return lsp
# Internal utilities.
# .............................................................................
def save_thumbnail(dest_file, url = None, isbn = None):
image = None
cc_user = config('CC_USER', section = 'contentcafe', default = None)
cc_password = config('<PASSWORD>', section = 'contentcafe', default = None)
cc_login = (cc_user, cc_password) if (cc_user and cc_password) else None
if isbn:
url, image = cover_image(isbn, size = 'L', cc_login = cc_login)
log(f'cover_image returned image at {url}')
# We were either given a url in the call, or we found one using the isbn.
elif url:
(response, error) = net('get', url)
if not error and response.status_code == 200:
log(f'got image from {url}')
image = response.content
if image:
log(f'will save cover image in {dest_file}')
with open(dest_file, 'wb') as file:
file.write(image)
else:
log(f'no cover image found for {url}')
def probable_issn(value):
return len(value) < 10 and '-' in value
def truncated_title(title):
modified_title = re.split(':|;|\.', title)[0].strip()
if len(modified_title) > 60:
return wrap(modified_title, 60)[0] + ' ...'
else:
return modified_title
| '''
lsp.py: DIBS interface to LSPs.
Copyright
---------
Copyright (c) 2021 by the California Institute of Technology. This code
is open-source software released under a 3-clause BSD license. Please see the
file "LICENSE" for more information.
'''
from abc import ABC, abstractmethod
from coif import cover_image
from commonpy.network_utils import net
from dataclasses import dataclass
import os
from os.path import realpath, dirname, join, exists, isabs
import pokapi
from pokapi import Folio
import re
from sidetrack import log
from textwrap import wrap
from topi import Tind
from .settings import config, resolved_path
# Classes implementing interface to specific LSPs.
# .............................................................................
@dataclass
class LSPRecord():
'''Common abstraction for records returned by different LSP's.'''
id : str
url : str
title : str
author : str
publisher : str
edition : str
year : str
isbn_issn : str
class LSPInterface(ABC):
'''Abstract interface class for getting a record from an LSP.
All concrete implementations of this class are assumed to have at least
a URL for the API of the server, and may have additional parameters on
a per-class basis.
'''
def __init__(self, url = None):
'''Create an interface for the server at "url".'''
self.url = url
def __repr__(self):
'''Return a string representing this interface object.'''
return "<{} for {}>".format(self.__class__.__name__, self.url)
@abstractmethod
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".'''
pass
class TindInterface(LSPInterface):
'''Interface layer for TIND hosted LSP servers.'''
def __init__(self, url = None, thumbnails_dir = None):
'''Create an interface for the server at "url".'''
self.url = url
self._thumbnails_dir = thumbnails_dir
self._tind = Tind(url)
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".'''
try:
rec = self._tind.item(barcode = barcode).parent
title = rec.title
if rec.subtitle:
title += ': ' + rec.subtitle
log(f'record for {barcode} has id {rec.tind_id} in {self.url}')
thumbnail_file = join(self._thumbnails_dir, barcode + '.jpg')
# Don't overwrite existing images.
if not exists(thumbnail_file):
if rec.thumbnail_url:
save_thumbnail(thumbnail_file, url = rec.thumbnail_url)
elif rec.isbn_issn:
save_thumbnail(thumbnail_file, isbn = rec.isbn_issn)
else:
log(f"{barcode} lacks ISBN & thumbnail URL => no thumbnail")
else:
log(f'thumbnail image already exists in {thumbnail_file}')
return LSPRecord(id = rec.tind_id,
url = rec.tind_url,
title = truncated_title(rec.title),
author = rec.author,
publisher = rec.publisher,
year = rec.year,
edition = rec.edition,
isbn_issn = rec.isbn_issn)
except:
log(f'could not find {barcode} in TIND')
raise ValueError('No such barcode {barcode} in {self.url}')
class FolioInterface(LSPInterface):
'''Interface layer for FOLIO hosted LSP servers.'''
def __init__(self, url = None, token = None, tenant_id = None,
an_prefix = None, page_template = None, thumbnails_dir = None):
'''Create an interface for the server at "url".'''
self.url = url
self._token = token
self._tenant_id = tenant_id
self._an_prefix = an_prefix
self._page_tmpl = page_template
self._thumbnails_dir = thumbnails_dir
self._folio = Folio(okapi_url = url,
okapi_token = token,
tenant_id = tenant_id,
an_prefix = an_prefix)
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".
This will return None if no such entry can be found in FOLIO.
It will raise a ValueError exception if an entry is found but lacks
the 3 most basic metadata fields of title, author and year.
'''
try:
rec = self._folio.record(barcode = barcode)
except pokapi.exceptions.NotFound:
log(f'could not find {barcode} in FOLIO')
return None
log(f'record for {barcode} has id {rec.id}')
if not all([rec.title, rec.author, rec.year]):
log(f'record for {barcode} in FOLIO lacks minimum metadata')
raise ValueError('Got incomplete record for {barcode} in {self.url}')
thumbnail_file = join(self._thumbnails_dir, barcode + '.jpg')
try:
if not exists(thumbnail_file):
if rec.isbn_issn:
save_thumbnail(thumbnail_file, isbn = rec.isbn_issn)
else:
log(f"{rec.id} has no ISBN/ISSN => can't get a thumbnail")
else:
log(f'thumbnail image already exists in {thumbnail_file}')
except Exception as ex:
# Log it but otherwise we won't fail just because of this.
log(f'failed to save thumbnail for {barcode}: ' + str(ex))
url = self._page_tmpl.format(accession_number = rec.accession_number)
return LSPRecord(id = rec.id,
url = url,
title = truncated_title(rec.title),
author = rec.author,
year = rec.year,
publisher = rec.publisher or '',
edition = rec.edition or '',
isbn_issn = rec.isbn_issn or '')
class UnconfiguredInterface(LSPInterface):
'''Dummy interface, for when no LSP is chosen.'''
def __repr__(self):
'''Return a string representing this interface object.'''
return "<{}>".format(self.__class__.__name__)
def record(self, barcode = None):
'''Return a record for the item identified by the "barcode".'''
return LSPRecord(id = 'LSP not configured',
url = '',
title = 'LSP not configured',
author = 'LSP not configured',
publisher = 'LSP not configured',
year = 'LSP not configured',
edition = 'LSP not configured',
isbn_issn = '')
# Primary exported class.
# .............................................................................
class LSP(LSPInterface):
'''LSP abstraction class.'''
def __new__(cls, *args, **kwds):
# This implements a Singleton pattern by storing the object we create
# and returning the same one if the class constructor is called again.
lsp = cls.__dict__.get("__lsp_interface__")
if lsp is not None:
log(f'Using previously-created LSP object {str(cls)}')
return lsp
# Read common configuration variables.
thumbnails_dir = resolved_path(config('THUMBNAILS_DIR', section = 'dibs'))
log(f'assuming thumbnails dir is {thumbnails_dir}')
# Select the appropriate interface type and create the object.
lsp_type = config('LSP_TYPE').lower()
if lsp_type == 'folio':
url = config('FOLIO_OKAPI_URL', section = 'folio')
token = config('FOLIO_OKAPI_TOKEN', section = 'folio')
tenant_id = config('FOLIO_OKAPI_TENANT_ID', section = 'folio')
an_prefix = config('FOLIO_ACCESSION_PREFIX', section = 'folio')
page_template = config('EDS_PAGE_TEMPLATE', section = 'folio')
log(f'Using FOLIO URL {url} with tenant id {tenant_id}')
lsp = FolioInterface(url = url,
token = token,
tenant_id = tenant_id,
an_prefix = an_prefix,
page_template = page_template,
thumbnails_dir = thumbnails_dir)
elif lsp_type == 'tind':
url = config('TIND_SERVER_URL', section = 'tind')
log(f'Using TIND URL {url}')
lsp = TindInterface(url, thumbnails_dir = thumbnails_dir)
else:
lsp = UnconfiguredInterface()
# Store the interface object (to implement the Singleton pattern).
cls.__lsp_interface__ = lsp
return lsp
# Internal utilities.
# .............................................................................
def save_thumbnail(dest_file, url = None, isbn = None):
image = None
cc_user = config('CC_USER', section = 'contentcafe', default = None)
cc_password = config('<PASSWORD>', section = 'contentcafe', default = None)
cc_login = (cc_user, cc_password) if (cc_user and cc_password) else None
if isbn:
url, image = cover_image(isbn, size = 'L', cc_login = cc_login)
log(f'cover_image returned image at {url}')
# We were either given a url in the call, or we found one using the isbn.
elif url:
(response, error) = net('get', url)
if not error and response.status_code == 200:
log(f'got image from {url}')
image = response.content
if image:
log(f'will save cover image in {dest_file}')
with open(dest_file, 'wb') as file:
file.write(image)
else:
log(f'no cover image found for {url}')
def probable_issn(value):
return len(value) < 10 and '-' in value
def truncated_title(title):
modified_title = re.split(':|;|\.', title)[0].strip()
if len(modified_title) > 60:
return wrap(modified_title, 60)[0] + ' ...'
else:
return modified_title | en | 0.79067 | lsp.py: DIBS interface to LSPs. Copyright --------- Copyright (c) 2021 by the California Institute of Technology. This code is open-source software released under a 3-clause BSD license. Please see the file "LICENSE" for more information. # Classes implementing interface to specific LSPs. # ............................................................................. Common abstraction for records returned by different LSP's. Abstract interface class for getting a record from an LSP. All concrete implementations of this class are assumed to have at least a URL for the API of the server, and may have additional parameters on a per-class basis. Create an interface for the server at "url". Return a string representing this interface object. Return a record for the item identified by the "barcode". Interface layer for TIND hosted LSP servers. Create an interface for the server at "url". Return a record for the item identified by the "barcode". # Don't overwrite existing images. Interface layer for FOLIO hosted LSP servers. Create an interface for the server at "url". Return a record for the item identified by the "barcode". This will return None if no such entry can be found in FOLIO. It will raise a ValueError exception if an entry is found but lacks the 3 most basic metadata fields of title, author and year. # Log it but otherwise we won't fail just because of this. Dummy interface, for when no LSP is chosen. Return a string representing this interface object. Return a record for the item identified by the "barcode". # Primary exported class. # ............................................................................. LSP abstraction class. # This implements a Singleton pattern by storing the object we create # and returning the same one if the class constructor is called again. # Read common configuration variables. # Select the appropriate interface type and create the object. 
# Store the interface object (to implement the Singleton pattern). # Internal utilities. # ............................................................................. # We were either given a url in the call, or we found one using the isbn. | 2.381185 | 2 |
snippet_ranger/tests/models.py | isabella232/snippet-ranger | 11 | 6612836 | import os
LIB_NAME = "test_lib"
DATA_DIR = os.path.join(os.path.dirname(__file__), "data/")
TEST_LIB = os.path.join(DATA_DIR, "test_lib.asdf")
TEST_REPO = os.path.join(DATA_DIR, "source_test_repo.asdf")
SNIPPET = os.path.join(DATA_DIR, "snippet_test_repo.asdf")
| import os
LIB_NAME = "test_lib"
DATA_DIR = os.path.join(os.path.dirname(__file__), "data/")
TEST_LIB = os.path.join(DATA_DIR, "test_lib.asdf")
TEST_REPO = os.path.join(DATA_DIR, "source_test_repo.asdf")
SNIPPET = os.path.join(DATA_DIR, "snippet_test_repo.asdf")
| none | 1 | 1.785636 | 2 | |
ReglesDuJeu.py | TornierCPE/ProjetPythonFirstYear | 0 | 6612837 | """
ProjetPythonFirstYear - Petit jeu de labyrinthe python/turtle
Auteur: <NAME>.
Date: 18/05/2021
Rôle : ReglesDuJeu est le sous programme qui s'occupe de tout ce qui concerne les règles du jeu, les évenements comme les objets et les
portes, ainsi que la victoire.
Entrée: Import du sous programme s'occupant du dessin du labyrinthe
Variables utilisées (déclarées en fin de fichier) : MatricePlan, Pas, TotalObjet , DictObjets, DictQuestions, coordInventaire
"""
#=================================Import des fichiers et bibliothèques=================================================#
from CONFIGS import * # Toutes les variables imposées par l'énoncé
from turtle import * # Import de la bibliothèque Turtle
from enum import Enum # Import de la bibliothèque Enum
from DessinChateau import *
#======================================================================================================================#
#=========================================Déclaration des fonctions====================================================#
def creer_dictionnaire(fichier_des_objets):
"""
Fonction servant à lire des fichiers et récupérer les données qu'ils contiennent
fichier_des_objets: type = String / Fichier que l'on souhaite lire afin d'en récupérer les données
Returns: type = Dictionnaire / Dictionnaire (objet ou questions) en fonction de l'appel de la fonction en fin de page,
format = case:objet/question (tuple:string)
"""
dico = {} # Création du dictionnaire
with open(fichier_des_objets, mode="r", encoding="utf-8") as file:
donnes = file.readlines()
for i in donnes:
case, objet = eval(i)
dico[case] = objet
return dico
def casePossible(case):
"""
Fonction permettant en premier lieu de savoir si une case est atteignable ou non
Dans un second temps en fonction du type de la case, on execute une action particulière
case: type = Tuple / Case concernée
Return: type = Boolean / Indique si le joueur peut se déplacer sur la case
"""
try:
typeOfCase = MatricePlan[case[0]][case[1]] # Integer which will have the number of the type of cell
except: # Gère le cas improbable ou le joueur essayerait de sortir du plan
return False
if (typeOfCase == Case.LAMBDA.value): # 0 = Case lambda
return True
elif (typeOfCase == Case.VICTOIRE.value): # 2 = Case correspondant à la sortie du labyrinthe
messageVictoire()
return True
elif (typeOfCase == Case.PORTE.value): # 3 = Case correspondant à une porte
if (poser_question(case)):
return True
else:
return False
elif (typeOfCase == Case.OBJET.value): # 4 = Case contenant un objet
ramasser_objet(case)
return True
else: # Autre cas 1 = Mur infranchissable
return False
def ramasser_objet(case):
"""
Fonction servant à gérer les cases contenant un objet, affichant l'objet ramasser et en l'affichant dans l'inventaire
case: type = Tuple / Case concernée
Il n'y a pas de "return" mais la fonction a bien permit de ramasser l'objet et a remplacer la couleur objet par une couleur de case normale
permettant de savoir qu'il n'y a plus d'objet à ramasser à cet endroit
"""
global TotalObjet # Variable stockant l'information du nombre d'objets trouvé dans le labyrinthe, global afin de pouvoir la modifier
# à d'autre endroits du code
inventaire.up()
inventaire.goto(coordInventaire[0], coordInventaire[1] - 15 * TotalObjet) # -15 simulant un saut de ligne
inventaire.down()
inventaire.write("N°" + str(TotalObjet + 1) + ": " + DicoObjets[case], font=("Arial", 10, "normal"))
evenement.clear()
evenement.write("Bien vu l'aveugle, vous avez trouvé: " + DicoObjets[case], font=("Arial", 12, "bold")) # Affiche l'annonce de découverte de l'objet
TotalObjet += 1
tracer_case(case, COULEUR_CASES, Pas)
MatricePlan[case[0]][case[1]] = 0 # Passe la case de case objet à case lambda
def poser_question(case):
"""
Cette fonction sert à poser la question correspondante à la case au joueur
case: type = Tuple / Case concernée
Returns: type = Boolean / Renvoie si la réponse donnée par le joueur est la bonne ou non
"""
evenement.clear()
evenement.write("Cette porte est fermée, il vous faut répondre à la question.", font=("Arial", 12, "bold"))
reponse = textinput("Porte", DicoQuestions[case][0]) # Récupère la réponse du joueur
listen() # Attends la réponse du joueur
if (reponse == DicoQuestions[case][1]): # Si la réponse est bonne
tracer_case(case, COULEUR_CASES, Pas)
MatricePlan[case[0]][case[1]] = 0 # Il n'y a plus de porte la case devient donc lambda
evenement.clear()
evenement.write("B<NAME> sherlock, *la porte s'ouvre*", font=("Arial", 12, "bold"))
return True
else: # Si la réponse est mauvaise
evenement.clear()
evenement.write("Aïe Aïe Aïe, heuresement vous pouvez réessayer, *une voix ricane au loin*", font=("Arial", 12, "bold"))
return False
def messageVictoire():
"""
Fonction servant à afficher le message de victoire du jeu au joueur, pas de return.
"""
evenement.clear()
evenement.write("Eh bah quand même ! En plus y'a même pas de minautore, bien joué !", font=("Arial", 12, "bold"))
def victoire(case):
"""
Fonction detectant si on se trouve sur la case de victoire
case: type = Tuple / Case correspondante
Returns: type = Boolean / Permet de savoir si le joueur a gagné ou non.
"""
if (MatricePlan[case[0]][case[1]] == Case.VICTOIRE.value):
return True
else:
return False
class Case(Enum):
"""
Enumeration des cases, permet de limitée les erreurs lors de l'utilisation des types de cases. On ne compare
pas des chaines de caractères où les erreurs d'orthographes sont vites arrivées et plante le programme
on compare des nombres type = Int
"""
LAMBDA = 0
MUR = 1
VICTOIRE = 2
PORTE = 3
OBJET = 4
#======================================================================================================================#
#=======================================Déclaration des variables======================================================#
MatricePlan = lireFichier(fichier_plan)
Pas = calculer_pas(MatricePlan)
TotalObjet = 0
DicoObjets = creer_dictionnaire(fichier_objets) # Dictionnaire contenant les objets, format = case/objet (Tuple/String)
DicoQuestions = creer_dictionnaire(fichier_questions) # Dictionnaire contenant les questions, format = case/réponse (Tuple/String)
coordInventaire = (POINT_AFFICHAGE_INVENTAIRE[0], POINT_AFFICHAGE_INVENTAIRE[1] - 15) # Coordonnées turtle de la première ligne de l'inventaire
#======================================================================================================================#
#====================================Définition des robots turtle======================================================#
inventaire = Turtle() # Définition du turtle qui va s'occuper de l'inventaire
inventaire.ht()
inventaire.up()
inventaire.goto(POINT_AFFICHAGE_INVENTAIRE[0], POINT_AFFICHAGE_INVENTAIRE[1])
inventaire.down()
inventaire.write("Inventaire", font=("Arial", 10, "bold"))
evenement = Turtle() # Définition du turtle s'occupant des annonce au joueur
evenement.ht()
evenement.up()
evenement.goto(POINT_AFFICHAGE_ANNONCES[0], POINT_AFFICHAGE_ANNONCES[1])
evenement.down()
#======================================================================================================================#
#==================================Lancement des fonctions utiles======================================================#
afficher_plan(MatricePlan)
#======================================================================================================================# | """
ProjetPythonFirstYear - Petit jeu de labyrinthe python/turtle
Auteur: <NAME>.
Date: 18/05/2021
Rôle : ReglesDuJeu est le sous programme qui s'occupe de tout ce qui concerne les règles du jeu, les évenements comme les objets et les
portes, ainsi que la victoire.
Entrée: Import du sous programme s'occupant du dessin du labyrinthe
Variables utilisées (déclarées en fin de fichier) : MatricePlan, Pas, TotalObjet , DictObjets, DictQuestions, coordInventaire
"""
#=================================Import des fichiers et bibliothèques=================================================#
from CONFIGS import * # Toutes les variables imposées par l'énoncé
from turtle import * # Import de la bibliothèque Turtle
from enum import Enum # Import de la bibliothèque Enum
from DessinChateau import *
#======================================================================================================================#
#=========================================Déclaration des fonctions====================================================#
def creer_dictionnaire(fichier_des_objets):
    """Read a data file and build a lookup dictionary from it.

    Each line of the file must be a Python literal of the form
    ``((row, col), payload)`` where the first element is the square's
    coordinates and the second is the object name or question data.

    fichier_des_objets: str -- path of the file to parse (objects or questions).
    Returns: dict mapping case (tuple) -> object/question payload.
    """
    from ast import literal_eval  # local import: safe literal parser

    dico = {}
    with open(fichier_des_objets, mode="r", encoding="utf-8") as file:
        for ligne in file:
            # literal_eval only accepts Python literals, unlike eval(),
            # which would execute arbitrary code found in the file.
            case, objet = literal_eval(ligne)
            dico[case] = objet
    return dico
def casePossible(case):
    """Return True when the player may move onto *case*.

    Side effects depend on the square type: reaching the exit shows the
    victory message, a door asks its question, an object gets picked up.

    case: tuple -- (row, column) of the targeted square.
    """
    try:
        type_case = MatricePlan[case[0]][case[1]]
    except:  # index outside the plan: the move is refused
        return False
    if type_case == Case.LAMBDA.value:
        # Ordinary square, nothing special happens.
        return True
    if type_case == Case.VICTOIRE.value:
        # Maze exit: announce the win; the move itself is allowed.
        messageVictoire()
        return True
    if type_case == Case.PORTE.value:
        # Door: passable only when the question is answered correctly.
        return poser_question(case)
    if type_case == Case.OBJET.value:
        # Object square: pick the object up, then walk onto it.
        ramasser_objet(case)
        return True
    # Any other value (Case.MUR == 1): impassable wall.
    return False
def ramasser_objet(case):
    """Handle a square holding an object: announce it and add it to the inventory.

    case: tuple -- (row, column) of the square being entered.
    No return value; the square is repainted as a normal one and the plan is
    updated so the object cannot be picked up twice.
    """
    # Running count of found objects; global because it is also used as the
    # inventory line index and is read elsewhere in the module.
    global TotalObjet
    inventaire.up()
    inventaire.goto(coordInventaire[0], coordInventaire[1] - 15 * TotalObjet)  # -15 simulates a line break
    inventaire.down()
    inventaire.write("N°" + str(TotalObjet + 1) + ": " + DicoObjets[case], font=("Arial", 10, "normal"))
    evenement.clear()
    evenement.write("Bien vu l'aveugle, vous avez trouvé: " + DicoObjets[case], font=("Arial", 12, "bold"))  # announce the find
    TotalObjet += 1
    tracer_case(case, COULEUR_CASES, Pas)
    MatricePlan[case[0]][case[1]] = 0  # object square becomes an ordinary (lambda) square
def poser_question(case):
    """Ask the player the question attached to a door square.

    case: tuple -- (row, column) of the door.
    Returns: bool -- True when the player's answer is correct (the door opens
    and the square becomes an ordinary one), False otherwise.
    """
    evenement.clear()
    evenement.write("Cette porte est fermée, il vous faut répondre à la question.", font=("Arial", 12, "bold"))
    reponse = textinput("Porte", DicoQuestions[case][0])  # modal dialog: blocks until the player answers
    listen()  # turtle.listen(): give key focus back to the screen
    if (reponse == DicoQuestions[case][1]):  # correct answer
        tracer_case(case, COULEUR_CASES, Pas)
        MatricePlan[case[0]][case[1]] = 0  # the door is gone: square becomes ordinary
        evenement.clear()
        evenement.write("B<NAME> sherlock, *la porte s'ouvre*", font=("Arial", 12, "bold"))
        return True
    else:  # wrong answer: the player may retry
        evenement.clear()
        evenement.write("Aïe Aïe Aïe, heuresement vous pouvez réessayer, *une voix ricane au loin*", font=("Arial", 12, "bold"))
        return False
def messageVictoire():
    """Display the end-of-game victory message to the player. No return value."""
    texte = "Eh bah quand même ! En plus y'a même pas de minautore, bien joué !"
    evenement.clear()
    evenement.write(texte, font=("Arial", 12, "bold"))
def victoire(case):
    """Tell whether *case* is the exit (victory) square.

    case: tuple -- (row, column) index into MatricePlan.
    Returns: bool -- True when the square is the maze exit.
    """
    # Direct comparison replaces the redundant `if cond: return True
    # else: return False` construct; behavior is identical.
    return MatricePlan[case[0]][case[1]] == Case.VICTOIRE.value
class Case(Enum):
    """
    Enumeration of the square types found in the maze plan.

    Comparing integer enum members instead of raw strings avoids the
    typo-prone string comparisons that would crash the program.
    """
    LAMBDA = 0    # ordinary walkable square
    MUR = 1       # impassable wall
    VICTOIRE = 2  # maze exit (victory)
    PORTE = 3     # locked door guarded by a question
    OBJET = 4     # square holding a collectible object
#======================================================================================================================#
#=======================================Déclaration des variables======================================================#
MatricePlan = lireFichier(fichier_plan)  # 2-D list of cell codes (see the Case enum)
Pas = calculer_pas(MatricePlan)  # drawing step passed to tracer_case — presumably px per square, TODO confirm
TotalObjet = 0  # number of objects found so far
DicoObjets = creer_dictionnaire(fichier_objets)  # objects, format case:object (tuple:str)
DicoQuestions = creer_dictionnaire(fichier_questions)  # questions, format case:(question, answer)
coordInventaire = (POINT_AFFICHAGE_INVENTAIRE[0], POINT_AFFICHAGE_INVENTAIRE[1] - 15)  # turtle coords of the first inventory line
#======================================================================================================================#
#====================================Turtle pen definitions============================================================#
inventaire = Turtle()  # pen dedicated to the inventory panel
inventaire.ht()
inventaire.up()
inventaire.goto(POINT_AFFICHAGE_INVENTAIRE[0], POINT_AFFICHAGE_INVENTAIRE[1])
inventaire.down()
inventaire.write("Inventaire", font=("Arial", 10, "bold"))
evenement = Turtle()  # pen dedicated to announcements shown to the player
evenement.ht()
evenement.up()
evenement.goto(POINT_AFFICHAGE_ANNONCES[0], POINT_AFFICHAGE_ANNONCES[1])
evenement.down()
#======================================================================================================================#
#==================================Startup calls=======================================================================#
afficher_plan(MatricePlan)
#======================================================================================================================# | fr | 0.946626 | ProjetPythonFirstYear - Petit jeu de labyrinthe python/turtle
Auteur: <NAME>.
Date: 18/05/2021
Rôle : ReglesDuJeu est le sous programme qui s'occupe de tout ce qui concerne les règles du jeu, les évenements comme les objets et les
portes, ainsi que la victoire.
Entrée: Import du sous programme s'occupant du dessin du labyrinthe
Variables utilisées (déclarées en fin de fichier) : MatricePlan, Pas, TotalObjet , DictObjets, DictQuestions, coordInventaire #=================================Import des fichiers et bibliothèques=================================================# # Toutes les variables imposées par l'énoncé # Import de la bibliothèque Turtle # Import de la bibliothèque Enum #======================================================================================================================# #=========================================Déclaration des fonctions====================================================# Fonction servant à lire des fichiers et récupérer les données qu'ils contiennent
fichier_des_objets: type = String / Fichier que l'on souhaite lire afin d'en récupérer les données
Returns: type = Dictionnaire / Dictionnaire (objet ou questions) en fonction de l'appel de la fonction en fin de page,
format = case:objet/question (tuple:string) # Création du dictionnaire Fonction permettant en premier lieu de savoir si une case est atteignable ou non
Dans un second temps en fonction du type de la case, on execute une action particulière
case: type = Tuple / Case concernée
Return: type = Boolean / Indique si le joueur peut se déplacer sur la case # Integer which will have the number of the type of cell # Gère le cas improbable ou le joueur essayerait de sortir du plan # 0 = Case lambda # 2 = Case correspondant à la sortie du labyrinthe # 3 = Case correspondant à une porte # 4 = Case contenant un objet # Autre cas 1 = Mur infranchissable Fonction servant à gérer les cases contenant un objet, affichant l'objet ramasser et en l'affichant dans l'inventaire
case: type = Tuple / Case concernée
Il n'y a pas de "return" mais la fonction a bien permit de ramasser l'objet et a remplacer la couleur objet par une couleur de case normale
permettant de savoir qu'il n'y a plus d'objet à ramasser à cet endroit # Variable stockant l'information du nombre d'objets trouvé dans le labyrinthe, global afin de pouvoir la modifier # à d'autre endroits du code # -15 simulant un saut de ligne # Affiche l'annonce de découverte de l'objet # Passe la case de case objet à case lambda Cette fonction sert à poser la question correspondante à la case au joueur
case: type = Tuple / Case concernée
Returns: type = Boolean / Renvoie si la réponse donnée par le joueur est la bonne ou non # Récupère la réponse du joueur # Attends la réponse du joueur # Si la réponse est bonne # Il n'y a plus de porte la case devient donc lambda # Si la réponse est mauvaise Fonction servant à afficher le message de victoire du jeu au joueur, pas de return. Fonction detectant si on se trouve sur la case de victoire
case: type = Tuple / Case correspondante
Returns: type = Boolean / Permet de savoir si le joueur a gagné ou non. Enumeration des cases, permet de limitée les erreurs lors de l'utilisation des types de cases. On ne compare
pas des chaines de caractères où les erreurs d'orthographes sont vites arrivées et plante le programme
on compare des nombres type = Int #======================================================================================================================# #=======================================Déclaration des variables======================================================# # Dictionnaire contenant les objets, format = case/objet (Tuple/String) # Dictionnaire contenant les questions, format = case/réponse (Tuple/String) # Coordonnées turtle de la première ligne de l'inventaire #======================================================================================================================# #====================================Définition des robots turtle======================================================# # Définition du turtle qui va s'occuper de l'inventaire # Définition du turtle s'occupant des annonce au joueur #======================================================================================================================# #==================================Lancement des fonctions utiles======================================================# #======================================================================================================================# | 3.27092 | 3 |
examples/conversation.py | 4masaka/AsyncLine | 1 | 6612838 | from AsyncLine import *
import asyncio
cl = Client('ios')
cl.login(name="mybot", qr=True)  # QR-code login — presumably blocks until scanned, TODO confirm

# Conversation state shared between the callbacks below.
data = {
    "name": None,
    "old": None,
}

# type=26 is the "receive message" event; react only to "/start" in private chats.
@cl.hooks(type=26, filters=Filters.command("start") & Filters.private)
async def start_conversation(client, msg):
    """Entry point of the conversation: greet the user and ask for their name."""
    await client.talk.sendMessage(msg.from_, "Hello stranger, what your name?")
    """
    This method will be trigger conversation.
    Note: type must be 26 (Receive Message) and use this in private chat
    using Filters.private
    <func>:
        cl.poll.conversation(....
    args func:
        msg = (Message, require), Message from this comversation
        callback = (callable, require), function for next conversation
        done = (bool, optional), pass True if want this conversation ended
    """
    # Register callback_name to receive the user's next message.
    client.poll.conversation(msg, callback_name)

async def callback_name(msg):
    """Second step: store the name, then ask for the age."""
    data["name"] = msg.text
    await asyncio.sleep(1.3)
    await cl.talk.sendMessage(msg.from_, "Okay, now how old are you?")
    # done=True: once the user sends their age, the conversation ends.
    cl.poll.conversation(msg, callback_old, done=True)

async def callback_old(msg):
    """Final step: store the age and send the summary message."""
    data["old"] = msg.text
    await cl.talk.sendMessage(msg.from_,
        "Nice too meet you, {} now i know your name and old {}".format(
            data["name"], data["old"]))
cl.poll.streams() | from AsyncLine import *
import asyncio
cl = Client('ios')
cl.login(name="mybot", qr=True)
data = {
"name": None,
"old": None,
}
@cl.hooks(type=26, filters=Filters.command("start") & Filters.private)
async def start_conversation(client, msg):
await client.talk.sendMessage(msg.from_, "Hello stranger, what your name?")
"""
This method will be trigger conversation.
Note: type must be 26 (Receive Message) and use this in private chat
using Filters.private
<func>:
cl.poll.conversation(....
args func:
msg = (Message, require), Message from this comversation
callback = (callable, require), function for next conversation
done = (bool, optional), pass True if want this conversation ended
"""
client.poll.conversation(msg, callback_name)
async def callback_name(msg):
data["name"] = msg.text
await asyncio.sleep(1.3)
await cl.talk.sendMessage(msg.from_, "Okay, now how old are you?")
#done == True, after user send old this conversation will be ended
cl.poll.conversation(msg, callback_old, done=True)
async def callback_old(msg):
data["old"] = msg.text
await cl.talk.sendMessage(msg.from_,
"Nice too meet you, {} now i know your name and old {}".format(
data["name"], data["old"]))
cl.poll.streams() | en | 0.752511 | This method will be trigger conversation. Note: type must be 26 (Receive Message) and use this in private chat using Filters.private <func>: cl.poll.conversation(.... args func: msg = (Message, require), Message from this comversation callback = (callable, require), function for next conversation done = (bool, optional), pass True if want this conversation ended #done == True, after user send old this conversation will be ended | 3.167963 | 3 |
nn/network/base.py | seuqaj114/paig | 33 | 6612839 | import os
import sys
import shutil
import logging
import numpy as np
import tensorflow as tf
from nn.utils.misc import log_metrics, zipdir
logger = logging.getLogger("tf")
root_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..")
OPTIMIZERS = {
"adam": tf.train.AdamOptimizer,
"rmsprop": tf.train.RMSPropOptimizer,
"momentum": lambda x: tf.train.MomentumOptimizer(x, 0.9),
"sgd": tf.train.GradientDescentOptimizer
}
class BaseNet:
    """Abstract base class for TF1 graph-mode models.

    Subclasses implement :meth:`feedforward`, :meth:`compute_loss` and
    :meth:`build_graph`, and are expected to define ``self.loss``,
    ``self.input``, ``self.target``, ``self.output`` and to populate
    ``self.train_metrics`` / ``self.eval_metrics`` while building the graph.

    Data iterators (see :meth:`get_data`) must expose ``next_batch``,
    ``epochs_completed``, ``reset_epoch``, ``get_epoch`` and an ``X`` array
    — that is the interface this class calls on them.
    """

    def __init__(self):
        # Metric tensors to fetch during training / evaluation,
        # filled in by the subclass when it builds its graph.
        self.train_metrics = {}
        self.eval_metrics = {}
        # Extra functions to be ran at train/valid/test time
        # that can be defined by the children
        # Should have the format:
        # self.extra_valid_fns = [
        #     (valid_fn1, args, kwargs),
        #     ...
        # ]
        self.extra_train_fns = []
        self.extra_valid_fns = []
        self.extra_test_fns = []
        self.sess = tf.Session()

    def run_extra_fns(self, type):
        """Run the hook functions registered for the given phase.

        type: 'train', 'valid' or anything else (treated as 'test').
        NOTE(review): the parameter name shadows the builtin ``type``;
        kept for interface compatibility.
        """
        if type == "train":
            extra_fns = self.extra_train_fns
        elif type == "valid":
            extra_fns = self.extra_valid_fns
        else:
            extra_fns = self.extra_test_fns
        for fn, args, kwargs in extra_fns:
            fn(*args, **kwargs)

    def feedforward(self):
        """Build the forward pass. Must be overridden by subclasses."""
        raise NotImplementedError

    def compute_loss(self):
        """Build the loss tensor. Must be overridden by subclasses."""
        raise NotImplementedError

    def build_graph(self):
        """Assemble the full computation graph. Must be overridden by subclasses."""
        raise NotImplementedError

    def get_data(self, data_iterators):
        """Store the (train, valid, test) iterator triple on the instance."""
        self.train_iterator, self.valid_iterator, self.test_iterator = data_iterators

    def get_iterator(self, type):
        """Return the iterator for the phase named by *type*.

        NOTE(review): an unrecognised *type* raises UnboundLocalError
        on the return — consider an explicit error for unknown phases.
        """
        if type == "train":
            eval_iterator = self.train_iterator
        elif type == "valid":
            eval_iterator = self.valid_iterator
        elif type == "test":
            eval_iterator = self.test_iterator
        return eval_iterator

    def initialize_graph(self,
                         save_dir,
                         use_ckpt,
                         ckpt_dir=""):
        """Prepare *save_dir* and either restore a checkpoint or init variables.

        save_dir: run directory; deleted and recreated when it exists and
            use_ckpt is False.
        use_ckpt: restore from a checkpoint instead of fresh initialization.
        ckpt_dir: directory to restore from; defaults to save_dir when the
            latter already exists.
        """
        self.save_dir = save_dir
        self.saver = tf.train.Saver()
        if os.path.exists(save_dir):
            if use_ckpt:
                restore = True
                if ckpt_dir:
                    restore_dir = ckpt_dir
                else:
                    restore_dir = save_dir
            else:
                logger.info("Folder exists, deleting...")
                shutil.rmtree(save_dir)
                os.makedirs(save_dir)
                restore = False
        else:
            os.makedirs(save_dir)
            if use_ckpt:
                restore = True
                restore_dir = ckpt_dir
            else:
                restore = False
        if restore:
            self.saver.restore(self.sess, os.path.join(restore_dir, "model.ckpt"))
            # Reset the learning rate to its base value after restoring.
            self.sess.run(self.lr.assign(self.base_lr))
        else:
            self.sess.run(tf.global_variables_initializer())

    def build_optimizer(self, base_lr, optimizer="adam", anneal_lr=True):
        """Create the optimizer and the train op minimizing ``self.loss``.

        optimizer: key into the module-level OPTIMIZERS table.
        anneal_lr: when True, train() divides the LR by 5 at 75% of epochs.
        """
        self.base_lr = base_lr
        self.anneal_lr = anneal_lr
        self.lr = tf.Variable(base_lr, trainable=False, name="base_lr")
        self.optimizer = OPTIMIZERS[optimizer](self.lr)
        self.train_op = self.optimizer.minimize(self.loss)

    def get_batch(self, batch_size, iterator):
        """Fetch one batch and build the feed dict for it.

        Returns (feed_dict, (batch_x, batch_y)); the target placeholder is
        only fed when the iterator yields labels.
        """
        batch_x, batch_y = iterator.next_batch(batch_size)
        if batch_y is None:
            feed_dict = {self.input: batch_x}
        else:
            feed_dict = {self.input: batch_x, self.target: batch_y}
        return feed_dict, (batch_x, batch_y)

    def add_train_logger(self):
        """Attach a file handler writing log.txt inside the run directory."""
        log_path = os.path.join(self.save_dir, "log.txt")
        fh = logging.FileHandler(log_path)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
        fh.setFormatter(formatter)
        logger.addHandler(fh)

    def train(self,
              epochs,
              batch_size,
              save_every_n_epochs,
              eval_every_n_epochs,
              print_interval,
              debug=False):
        """Main training loop: train *epochs* epochs, periodically eval/save.

        debug: when True, skips the initial validation pass.
        Ends with one evaluation on the test split.
        """
        self.add_train_logger()
        # Snapshot the source tree next to the run for reproducibility.
        zipdir(root_path, self.save_dir)
        logger.info("\n".join(sys.argv))
        step = 0
        # Run validation once before starting training
        if not debug and epochs > 0:
            valid_metrics_results = self.eval(batch_size, type='valid')
            log_metrics(logger, "valid - epoch=%s" % 0, valid_metrics_results)
        for ep in range(1, epochs + 1):
            if self.anneal_lr:
                # One-time LR drop (divide by 5) at 75% of training.
                if ep == int(0.75 * epochs):
                    self.sess.run(tf.assign(self.lr, self.lr / 5))
            while self.train_iterator.epochs_completed < ep:
                feed_dict, _ = self.get_batch(batch_size, self.train_iterator)
                results, _ = self.sess.run(
                    [self.train_metrics, self.train_op], feed_dict=feed_dict)
                self.run_extra_fns("train")
                if step % print_interval == 0:
                    log_metrics(logger, "train - iter=%s" % step, results)
                step += 1
            if ep % eval_every_n_epochs == 0:
                valid_metrics_results = self.eval(batch_size, type='valid')
                log_metrics(logger, "valid - epoch=%s" % ep, valid_metrics_results)
            if ep % save_every_n_epochs == 0:
                self.saver.save(self.sess, os.path.join(self.save_dir, "model.ckpt"))
        test_metrics_results = self.eval(batch_size, type='test')
        log_metrics(logger, "test - epoch=%s" % epochs, test_metrics_results)

    def eval(self,
             batch_size,
             type='valid'):
        """Evaluate one full epoch of the given split.

        Averages each metric over batches, saves all inputs/outputs to
        outputs.npz in the run directory, and returns the metric averages.
        """
        eval_metrics_results = {k: [] for k in self.eval_metrics.keys()}
        eval_outputs = {"input": [], "output": []}
        eval_iterator = self.get_iterator(type)
        eval_iterator.reset_epoch()
        while eval_iterator.get_epoch() < 1:
            # Shrink the batch when the split is tiny — presumably to avoid
            # requesting more samples than exist; TODO confirm.
            if eval_iterator.X.shape[0] < 100:
                batch_size = eval_iterator.X.shape[0]
            feed_dict, _ = self.get_batch(batch_size, eval_iterator)
            fetches = {k: v for k, v in self.eval_metrics.items()}
            fetches["output"] = self.output
            fetches["input"] = self.input
            results = self.sess.run(fetches, feed_dict=feed_dict)
            for k in self.eval_metrics.keys():
                eval_metrics_results[k].append(results[k])
            eval_outputs["input"].append(results["input"])
            eval_outputs["output"].append(results["output"])
        eval_metrics_results = {k: np.mean(v, axis=0) for k, v in eval_metrics_results.items()}
        np.savez_compressed(os.path.join(self.save_dir, "outputs.npz"),
                            input=np.concatenate(eval_outputs["input"], axis=0),
                            output=np.concatenate(eval_outputs["output"], axis=0))
        self.run_extra_fns(type)
        return eval_metrics_results
| import os
import sys
import shutil
import logging
import numpy as np
import tensorflow as tf
from nn.utils.misc import log_metrics, zipdir
logger = logging.getLogger("tf")
root_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..")
OPTIMIZERS = {
"adam": tf.train.AdamOptimizer,
"rmsprop": tf.train.RMSPropOptimizer,
"momentum": lambda x: tf.train.MomentumOptimizer(x, 0.9),
"sgd": tf.train.GradientDescentOptimizer
}
class BaseNet:
def __init__(self):
self.train_metrics = {}
self.eval_metrics = {}
# Extra functions to be ran at train/valid/test time
# that can be defined by the children
# Should have the format:
# self.extra_valid_fns = [
# (valid_fn1, args, kwargs),
# ...
# ]
self.extra_train_fns = []
self.extra_valid_fns = []
self.extra_test_fns = []
self.sess = tf.Session()
def run_extra_fns(self, type):
if type == "train":
extra_fns = self.extra_train_fns
elif type == "valid":
extra_fns = self.extra_valid_fns
else:
extra_fns = self.extra_test_fns
for fn, args, kwargs in extra_fns:
fn(*args, **kwargs)
def feedforward(self):
raise NotImplementedError
def compute_loss(self):
raise NotImplementedError
def build_graph(self):
raise NotImplementedError
def get_data(self, data_iterators):
self.train_iterator, self.valid_iterator, self.test_iterator = data_iterators
def get_iterator(self, type):
if type == "train":
eval_iterator = self.train_iterator
elif type == "valid":
eval_iterator = self.valid_iterator
elif type == "test":
eval_iterator = self.test_iterator
return eval_iterator
def initialize_graph(self,
save_dir,
use_ckpt,
ckpt_dir=""):
self.save_dir = save_dir
self.saver = tf.train.Saver()
if os.path.exists(save_dir):
if use_ckpt:
restore = True
if ckpt_dir:
restore_dir = ckpt_dir
else:
restore_dir = save_dir
else:
logger.info("Folder exists, deleting...")
shutil.rmtree(save_dir)
os.makedirs(save_dir)
restore = False
else:
os.makedirs(save_dir)
if use_ckpt:
restore = True
restore_dir = ckpt_dir
else:
restore = False
if restore:
self.saver.restore(self.sess, os.path.join(restore_dir, "model.ckpt"))
self.sess.run(self.lr.assign(self.base_lr))
else:
self.sess.run(tf.global_variables_initializer())
def build_optimizer(self, base_lr, optimizer="adam", anneal_lr=True):
self.base_lr = base_lr
self.anneal_lr = anneal_lr
self.lr = tf.Variable(base_lr, trainable=False, name="base_lr")
self.optimizer = OPTIMIZERS[optimizer](self.lr)
self.train_op = self.optimizer.minimize(self.loss)
def get_batch(self, batch_size, iterator):
batch_x, batch_y = iterator.next_batch(batch_size)
if batch_y is None:
feed_dict = {self.input:batch_x}
else:
feed_dict = {self.input:batch_x, self.target:batch_y}
return feed_dict, (batch_x, batch_y)
def add_train_logger(self):
log_path = os.path.join(self.save_dir, "log.txt")
fh = logging.FileHandler(log_path)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)
def train(self,
epochs,
batch_size,
save_every_n_epochs,
eval_every_n_epochs,
print_interval,
debug=False):
self.add_train_logger()
zipdir(root_path, self.save_dir)
logger.info("\n".join(sys.argv))
step = 0
# Run validation once before starting training
if not debug and epochs > 0:
valid_metrics_results = self.eval(batch_size, type='valid')
log_metrics(logger, "valid - epoch=%s"%0, valid_metrics_results)
for ep in range(1, epochs+1):
if self.anneal_lr:
if ep == int(0.75*epochs):
self.sess.run(tf.assign(self.lr, self.lr/5))
while self.train_iterator.epochs_completed < ep:
feed_dict, _ = self.get_batch(batch_size, self.train_iterator)
results, _ = self.sess.run(
[self.train_metrics, self.train_op], feed_dict=feed_dict)
self.run_extra_fns("train")
if step % print_interval == 0:
log_metrics(logger, "train - iter=%s"%step, results)
step += 1
if ep % eval_every_n_epochs == 0:
valid_metrics_results = self.eval(batch_size, type='valid')
log_metrics(logger, "valid - epoch=%s"%ep, valid_metrics_results)
if ep % save_every_n_epochs == 0:
self.saver.save(self.sess, os.path.join(self.save_dir, "model.ckpt"))
test_metrics_results = self.eval(batch_size, type='test')
log_metrics(logger, "test - epoch=%s"%epochs, test_metrics_results)
def eval(self,
batch_size,
type='valid'):
eval_metrics_results = {k:[] for k in self.eval_metrics.keys()}
eval_outputs = {"input":[], "output":[]}
eval_iterator = self.get_iterator(type)
eval_iterator.reset_epoch()
while eval_iterator.get_epoch() < 1:
if eval_iterator.X.shape[0] < 100:
batch_size = eval_iterator.X.shape[0]
feed_dict, _ = self.get_batch(batch_size, eval_iterator)
fetches = {k:v for k, v in self.eval_metrics.items()}
fetches["output"] = self.output
fetches["input"] = self.input
results = self.sess.run(fetches, feed_dict=feed_dict)
for k in self.eval_metrics.keys():
eval_metrics_results[k].append(results[k])
eval_outputs["input"].append(results["input"])
eval_outputs["output"].append(results["output"])
eval_metrics_results = {k:np.mean(v, axis=0) for k,v in eval_metrics_results.items()}
np.savez_compressed(os.path.join(self.save_dir, "outputs.npz"),
input=np.concatenate(eval_outputs["input"], axis=0),
output=np.concatenate(eval_outputs["output"], axis=0))
self.run_extra_fns(type)
return eval_metrics_results
| en | 0.777378 | # Extra functions to be ran at train/valid/test time # that can be defined by the children # Should have the format: # self.extra_valid_fns = [ # (valid_fn1, args, kwargs), # ... # ] # Run validation once before starting training | 2.237911 | 2 |
tests/commands/test_base.py | simonbru/taxi | 17 | 6612840 | import os
def test_run_without_config_file_creates_config_file(cli, config):
    """Running a command with no config file triggers the wizard and writes one."""
    os.remove(config.path)
    wizard_answers = "y\ndummy\njanedoe\npassword\ntimesheets.example.com\nvim\n"
    cli('alias', input=wizard_answers)
    assert os.path.exists(config.path)
def test_wizard_constructs_correct_url_with_token(cli, config):
    """The wizard builds a backend URL that embeds the token credential."""
    os.remove(config.path)
    wizard_answers = "y\ndummy\ntoken\n\ntimesheets.example.com\nvim\n"
    cli('alias', input=wizard_answers)
    with open(config.path, 'r') as f:
        written_config = f.read()
    assert 'dummy://token<EMAIL>' in written_config
| import os
def test_run_without_config_file_creates_config_file(cli, config):
os.remove(config.path)
cli('alias', input=''.join([
'y\n', 'dummy\n', 'janedoe\n', 'password\n',
'timesheets.example.com\n', 'vim\n'
]))
assert os.path.exists(config.path)
def test_wizard_constructs_correct_url_with_token(cli, config):
os.remove(config.path)
cli('alias', input=''.join([
'y\n', 'dummy\n', 'token\n', '\n', 'timesheets.example.com\n',
'vim\n'
]))
with open(config.path, 'r') as f:
config = f.read()
assert 'dummy://token<EMAIL>' in config
| none | 1 | 2.579009 | 3 | |
tests/sample_middleware.py | daveoconnor/tartiflette-middleware | 0 | 6612841 | <filename>tests/sample_middleware.py
from tartiflette_middleware import BaseMiddleware
class ConcreteMiddlewareNoLabel(BaseMiddleware):
    """Minimal concrete middleware WITHOUT a ``label`` attribute.

    Test fixture — presumably used to exercise how BaseMiddleware handles
    subclasses missing a label; confirm against the test module.
    """
    async def __aenter__(self):
        # No setup behavior: fixture only needs to satisfy the interface.
        pass

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        pass
class ConcreteMiddleware(BaseMiddleware):
    """Concrete middleware fixture with a label and no-op enter/exit."""
    label = 'CAExample'

    async def __aenter__(self):
        pass

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        pass
class ConcreteWorkingMiddleware(BaseMiddleware):
    """Middleware fixture that actually stores data on request entry."""
    label = 'CAWorkingExample'

    async def __aenter__(self):
        # Stores a fixed payload via the BaseMiddleware-provided helper.
        await self.store_request_data('foo')

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        pass
| <filename>tests/sample_middleware.py
from tartiflette_middleware import BaseMiddleware
class ConcreteMiddlewareNoLabel(BaseMiddleware):
async def __aenter__(self):
pass
async def __aexit__(self, exc_type, exc_val, exc_tb):
pass
class ConcreteMiddleware(BaseMiddleware):
label = 'CAExample'
async def __aenter__(self):
pass
async def __aexit__(self, exc_type, exc_val, exc_tb):
pass
class ConcreteWorkingMiddleware(BaseMiddleware):
label = 'CAWorkingExample'
async def __aenter__(self):
await self.store_request_data('foo')
async def __aexit__(self, exc_type, exc_val, exc_tb):
pass
| none | 1 | 2.231497 | 2 | |
server/tradecore/__init__.py | githubalvin/pandorabox | 0 | 6612842 | <reponame>githubalvin/pandorabox
from .control import TradeController | from .control import TradeController | none | 1 | 1.076794 | 1 | |
dst/nn/__init__.py | gooppe/deep-summarization-toolkit | 7 | 6612843 | from .modules import (Transformer,
TransformerEncoderLayer,
TransformerDecoderLayer,
PBATransformerEncoderLayer,
PBATransformerDecoderLayer,
ScaledDotProductAttention,
MultiHeadAttention,
MultiHeadPhrasalAttentionBase,
MultiHeadHomogeneousAttention,
MultiHeadHeterogeneousAttention,
MultiHeadInterleavedAttention,
PositionalEmbedding,
PositionWise,
RNNEncoder,
RNNDecoder,
NLLvMF)
| from .modules import (Transformer,
TransformerEncoderLayer,
TransformerDecoderLayer,
PBATransformerEncoderLayer,
PBATransformerDecoderLayer,
ScaledDotProductAttention,
MultiHeadAttention,
MultiHeadPhrasalAttentionBase,
MultiHeadHomogeneousAttention,
MultiHeadHeterogeneousAttention,
MultiHeadInterleavedAttention,
PositionalEmbedding,
PositionWise,
RNNEncoder,
RNNDecoder,
NLLvMF)
| none | 1 | 0.944611 | 1 | |
tester/test_model/test_usage.py | seaioheroes/TorCMS-master | 0 | 6612844 | # -*- coding:utf-8 -*-
from torcms.model.usage_model import MUsage
def Test():
    """Smoke test: constructing MUsage() must yield a truthy instance."""
    usage_model = MUsage()
    assert usage_model
| # -*- coding:utf-8 -*-
from torcms.model.usage_model import MUsage
def Test():
assert MUsage()
| en | 0.736017 | # -*- coding:utf-8 -*- | 1.349273 | 1 |
tests/test_underscore_hyphens.py | itajaja/hologram | 6 | 6612845 | <reponame>itajaja/hologram
import pytest
from dataclasses import dataclass, field
from hologram import JsonSchemaMixin, ValidationError
from hologram.helpers import HyphenatedJsonSchemaMixin
@dataclass
class HasUnderscoreConverts(HyphenatedJsonSchemaMixin):
a_thing: str = field(metadata={"preserve_underscore": True})
other_thing: str
@dataclass
class ContainsHasUnderscoreConverts(JsonSchemaMixin):
things: HasUnderscoreConverts
@pytest.fixture
def underscore():
return HasUnderscoreConverts(a_thing="foo", other_thing="bar")
@pytest.fixture
def underscore_dict():
return {"a_thing": "foo", "other-thing": "bar"}
@pytest.fixture
def contains(underscore):
return ContainsHasUnderscoreConverts(things=underscore)
@pytest.fixture
def contains_dict(underscore_dict):
return {"things": underscore_dict}
@pytest.fixture
def bad_dict():
return {"a_thing": "foo", "other_thing": "bar"}
def test_base(underscore, underscore_dict, bad_dict):
assert HasUnderscoreConverts.from_dict(underscore_dict) == underscore
assert underscore.to_dict() == underscore_dict
with pytest.raises(ValidationError):
HasUnderscoreConverts.from_dict(bad_dict)
def test_nested(contains, contains_dict, bad_dict):
assert ContainsHasUnderscoreConverts.from_dict(contains_dict) == contains
assert contains.to_dict() == contains_dict
with pytest.raises(ValidationError):
ContainsHasUnderscoreConverts.from_dict({"things": bad_dict})
| import pytest
from dataclasses import dataclass, field
from hologram import JsonSchemaMixin, ValidationError
from hologram.helpers import HyphenatedJsonSchemaMixin
@dataclass
class HasUnderscoreConverts(HyphenatedJsonSchemaMixin):
a_thing: str = field(metadata={"preserve_underscore": True})
other_thing: str
@dataclass
class ContainsHasUnderscoreConverts(JsonSchemaMixin):
things: HasUnderscoreConverts
@pytest.fixture
def underscore():
return HasUnderscoreConverts(a_thing="foo", other_thing="bar")
@pytest.fixture
def underscore_dict():
return {"a_thing": "foo", "other-thing": "bar"}
@pytest.fixture
def contains(underscore):
return ContainsHasUnderscoreConverts(things=underscore)
@pytest.fixture
def contains_dict(underscore_dict):
return {"things": underscore_dict}
@pytest.fixture
def bad_dict():
return {"a_thing": "foo", "other_thing": "bar"}
def test_base(underscore, underscore_dict, bad_dict):
assert HasUnderscoreConverts.from_dict(underscore_dict) == underscore
assert underscore.to_dict() == underscore_dict
with pytest.raises(ValidationError):
HasUnderscoreConverts.from_dict(bad_dict)
def test_nested(contains, contains_dict, bad_dict):
assert ContainsHasUnderscoreConverts.from_dict(contains_dict) == contains
assert contains.to_dict() == contains_dict
with pytest.raises(ValidationError):
ContainsHasUnderscoreConverts.from_dict({"things": bad_dict}) | none | 1 | 2.441966 | 2 | |
database.py | SoyBison/BandcampVAE | 0 | 6612846 | from sqlalchemy import create_engine, Column, String, DateTime
import configparser
import datetime
from sqlalchemy.orm import declarative_base, sessionmaker
# Read database credentials from the [DATABASE] section of config.ini.
config = configparser.ConfigParser()
config.read('config.ini')
dbconf = config["DATABASE"]
uname = dbconf['UserName']
pword = dbconf['Password']
addrs = dbconf['Address']
dname = dbconf['Database']

# MySQL via the PyMySQL driver, with the utf8mb4 charset in the DSN.
connstring = f'mysql+pymysql://{uname}:{pword}@{addrs}/{dname}?charset=utf8mb4'
ENGINE = create_engine(connstring)

Base = declarative_base()            # shared declarative base for the models below
Session = sessionmaker(bind=ENGINE)  # session factory bound to the engine
class Album(Base):
    """A scraped Bandcamp album record."""

    __tablename__ = 'albums'

    id = Column(String(255), primary_key=True, unique=True)
    artist = Column(String(2048))
    title = Column(String(2048))
    tags = Column(String(1024))      # NOTE(review): presumably a delimited tag list -- confirm with the scraper
    url_title = Column(String(255))  # NOTE(review): looks like the URL slug for the album -- confirm
    store = Column(String(255))
    url = Column(String(255))

    def __repr__(self):
        return f"<Album(title={self.title}, artist={self.artist})>"
class Store(Base):
    """A Bandcamp store page, with the time it was first recorded."""

    __tablename__ = 'stores'

    store_name = Column(String(255), primary_key=True, unique=True)
    # Pass the callable itself (no parentheses): SQLAlchemy then evaluates it
    # per INSERT. The original called utcnow() here, which froze a single
    # import-time timestamp into every row's default.
    created_date = Column(DateTime, default=datetime.datetime.utcnow)
# Create the tables on startup if they do not already exist (checkfirst makes
# this a no-op on subsequent runs).
Album.__table__.create(bind=ENGINE, checkfirst=True)
Store.__table__.create(bind=ENGINE, checkfirst=True)
| from sqlalchemy import create_engine, Column, String, DateTime
import configparser
import datetime
from sqlalchemy.orm import declarative_base, sessionmaker
# Read database credentials from the [DATABASE] section of config.ini.
config = configparser.ConfigParser()
config.read('config.ini')
dbconf = config["DATABASE"]
uname = dbconf['UserName']
pword = dbconf['Password']
addrs = dbconf['Address']
dname = dbconf['Database']

# MySQL via the PyMySQL driver, with the utf8mb4 charset in the DSN.
connstring = f'mysql+pymysql://{uname}:{pword}@{addrs}/{dname}?charset=utf8mb4'
ENGINE = create_engine(connstring)

Base = declarative_base()            # shared declarative base for the models below
Session = sessionmaker(bind=ENGINE)  # session factory bound to the engine
class Album(Base):
    """A scraped Bandcamp album record."""

    __tablename__ = 'albums'

    id = Column(String(255), primary_key=True, unique=True)
    artist = Column(String(2048))
    title = Column(String(2048))
    tags = Column(String(1024))      # NOTE(review): presumably a delimited tag list -- confirm with the scraper
    url_title = Column(String(255))  # NOTE(review): looks like the URL slug for the album -- confirm
    store = Column(String(255))
    url = Column(String(255))

    def __repr__(self):
        return f"<Album(title={self.title}, artist={self.artist})>"
class Store(Base):
    """A Bandcamp store page, with the time it was first recorded."""

    __tablename__ = 'stores'

    store_name = Column(String(255), primary_key=True, unique=True)
    # Pass the callable itself (no parentheses): SQLAlchemy then evaluates it
    # per INSERT. The original called utcnow() here, which froze a single
    # import-time timestamp into every row's default.
    created_date = Column(DateTime, default=datetime.datetime.utcnow)
# Create the tables on startup if they do not already exist (checkfirst makes
# this a no-op on subsequent runs).
Album.__table__.create(bind=ENGINE, checkfirst=True)
Store.__table__.create(bind=ENGINE, checkfirst=True)
| none | 1 | 2.69854 | 3 | |
mediagoblin/tests/test_staticdirect.py | saksham1115/mediagoblin | 60 | 6612847 | <filename>mediagoblin/tests/test_staticdirect.py
from mediagoblin.tools import staticdirect
def test_staticdirect():
    """StaticDirect resolves paths against the default and named prefixes."""
    sdirect = staticdirect.StaticDirect(
        {None: "/static/",
         "theme": "http://example.org/themestatic"})
    default_url = sdirect("css/monkeys.css")
    assert default_url == "/static/css/monkeys.css"
    theme_url = sdirect("images/lollerskate.png", "theme")
    assert theme_url == "http://example.org/themestatic/images/lollerskate.png"
| <filename>mediagoblin/tests/test_staticdirect.py
from mediagoblin.tools import staticdirect
def test_staticdirect():
    """StaticDirect resolves paths against the default and named prefixes."""
    sdirect = staticdirect.StaticDirect(
        {None: "/static/",
         "theme": "http://example.org/themestatic"})
    # The None key is the default prefix, used when no name is given.
    assert sdirect("css/monkeys.css") == "/static/css/monkeys.css"
    # Named prefixes map to their own base URL.
    assert sdirect("images/lollerskate.png", "theme") == \
        "http://example.org/themestatic/images/lollerskate.png"
| none | 1 | 2.16816 | 2 | |
random_python_stuff/fixxbuzz.py | brookeaddison/cssi_stuffs | 0 | 6612848 | """ My implementation of fizzbuzz. """
def fizzbuzz(number):
    """Print and return the fizzbuzz word for *number*.

    Multiples of 3 give 'fizz', multiples of 5 give 'buzz', multiples of
    both give 'fizzbuzz', and anything else gives the number as a string.
    The original body was not valid Python ('is 3 %== 0', a stray colon
    after print), so this is a rewrite from the documented intent. The
    word is returned as well as printed; the original implicitly
    returned None, so callers ignoring the return are unaffected.
    """
    if number % 15 == 0:
        word = 'fizzbuzz'
    elif number % 3 == 0:
        word = 'fizz'
    elif number % 5 == 0:
        word = 'buzz'
    else:
        word = str(number)
    print(word)
    return word
| """ My implementation of fizzbuzz. """
def fizzbuzz(number):
    """Print and return the fizzbuzz word for *number*.

    Multiples of 3 give 'fizz', multiples of 5 give 'buzz', multiples of
    both give 'fizzbuzz', and anything else gives the number as a string.
    The original body was not valid Python ('is 3 %== 0', a stray colon
    after print), so this is a rewrite from the documented intent. The
    word is returned as well as printed; the original implicitly
    returned None, so callers ignoring the return are unaffected.
    """
    if number % 15 == 0:
        word = 'fizzbuzz'
    elif number % 3 == 0:
        word = 'fizz'
    elif number % 5 == 0:
        word = 'buzz'
    else:
        word = str(number)
    print(word)
    return word
| en | 0.546022 | My implementation of fizzbuzz. | 4.054217 | 4 |
setup.py | nmheim/ConvLSTM_pytorch | 0 | 6612849 | from setuptools import setup, find_packages
# Package build configuration for the standalone ConvLSTM module.
setup(
    name='convlstm',
    description='Implementation of Convolutional LSTM in PyTorch',
    author='ndrplz/ConvLSTM_pytorch',
    packages=find_packages(),
    # setuptools requires the version to be a string; the original passed
    # the float 0.1, which modern setuptools rejects/deprecates.
    version='0.1',
    install_requires=["torch"],
)
| from setuptools import setup, find_packages
# Package build configuration for the standalone ConvLSTM module.
setup(
    name='convlstm',
    description='Implementation of Convolutional LSTM in PyTorch',
    author='ndrplz/ConvLSTM_pytorch',
    packages=find_packages(),
    # setuptools requires the version to be a string; the original passed
    # the float 0.1, which modern setuptools rejects/deprecates.
    version='0.1',
    install_requires=["torch"],
)
| none | 1 | 1.247825 | 1 | |
user_credential_test.py | alex-muliande/password-locker | 1 | 6612850 | from user import User
from user import Credential
import unittest
import pyperclip
class TestUser(unittest.TestCase):
    """Tests for creating, saving and deleting User accounts."""

    def setUp(self):
        # Fresh user before every test.
        # NOTE(review): '<EMAIL>' looks like a dataset redaction placeholder;
        # it is at least used consistently below.
        self.new_user = User(
            'alex', 'nad', '<EMAIL>', '0727719206', '1234')

    def test__init__(self):
        """The constructor stores every field on the instance."""
        self.assertEqual(self.new_user.first_name, 'alex')
        self.assertEqual(self.new_user.last_name, 'nad')
        self.assertEqual(self.new_user.email, '<EMAIL>')
        self.assertEqual(self.new_user.phone_number, '0727719206')
        # Fixed: must match the password passed to setUp's constructor;
        # the original asserted the redaction placeholder '<PASSWORD>'.
        self.assertEqual(self.new_user.password, '1234')

    def tearDown(self):
        # Reset the shared class-level lists so tests stay independent.
        Credential.credential_list = []
        User.users_list = []

    def test_save_user(self):
        """save_user appends the user to User.users_list."""
        self.new_user.save_user()
        test_user = User('alex', 'nad', '<EMAIL>',
                         '0727719206', '1234')
        test_user.save_user()
        self.assertEqual(len(User.users_list), 2)

    def test_delete_user(self):
        """delete_user removes the user from User.users_list."""
        self.new_user.save_user()
        test_user = User('alex', 'nad', '<EMAIL>',
                         '0727719206', '1234')
        test_user.save_user()
        test_user.delete_user()
        self.assertEqual(len(User.users_list), 1)
class TestCredential(unittest.TestCase):
    """Tests for saving, deleting, finding and copying Credentials."""

    def setUp(self):
        # Fresh credential before every test.
        self.new_credential = Credential(
            'alex', 'twitter', 'alex-muliande', '12345')

    def test__init__(self):
        """The constructor stores every field on the instance."""
        self.assertEqual(self.new_credential.user_name, 'alex')
        self.assertEqual(self.new_credential.site_name, 'twitter')
        self.assertEqual(self.new_credential.account_name, 'alex-muliande')
        # Fixed: must match the password passed to setUp's constructor;
        # the original asserted the redaction placeholder '<PASSWORD>'.
        self.assertEqual(self.new_credential.password, '12345')

    def tearDown(self):
        # Reset the shared class-level lists so tests stay independent.
        Credential.credential_list = []
        User.users_list = []

    def test_save_credentials(self):
        """save_credentials appends to Credential.credential_list."""
        self.new_credential.save_credentials()
        twitter = Credential('alex', 'twitter', 'alex-muliande', '12345')
        twitter.save_credentials()
        self.assertEqual(len(Credential.credential_list), 2)

    def test_delete_credentials(self):
        """delete_credentials removes the entry from the list."""
        self.new_credential.save_credentials()
        twitter = Credential('alex', 'twitter', 'alex-muliande', '12345')
        twitter.save_credentials()
        twitter.delete_credentials()
        self.assertEqual(len(Credential.credential_list), 1)

    def test_find_by_site_name(self):
        """find_by_site_name returns the credential for the given site."""
        self.new_credential.save_credentials()
        twitter = Credential('alex', 'twitter', 'alex-muliande', '12345')
        twitter.save_credentials()
        credential_found = Credential.find_by_site_name('twitter')
        self.assertEqual(credential_found, twitter)

    def test_copy_credentials(self):
        """copy_credentials puts the matching password on the clipboard.

        Fixed: the original returned from inside a loop before ever calling
        Credential.copy_credentials, so its assertion was unreachable and
        the test asserted nothing.
        """
        self.new_credential.save_credentials()
        Credential.copy_credentials(self.new_credential.site_name)
        self.assertEqual('12345', pyperclip.paste())
# Allow running the suite directly: python user_credential_test.py
if __name__ == '__main__':
    unittest.main()
| from user import User
from user import Credential
import unittest
import pyperclip
class TestUser(unittest.TestCase):
    """Tests for creating, saving and deleting User accounts."""

    def setUp(self):
        # Fresh user before every test.
        # NOTE(review): '<EMAIL>' looks like a dataset redaction placeholder;
        # it is at least used consistently below.
        self.new_user = User(
            'alex', 'nad', '<EMAIL>', '0727719206', '1234')

    def test__init__(self):
        """The constructor stores every field on the instance."""
        self.assertEqual(self.new_user.first_name, 'alex')
        self.assertEqual(self.new_user.last_name, 'nad')
        self.assertEqual(self.new_user.email, '<EMAIL>')
        self.assertEqual(self.new_user.phone_number, '0727719206')
        # Fixed: must match the password passed to setUp's constructor;
        # the original asserted the redaction placeholder '<PASSWORD>'.
        self.assertEqual(self.new_user.password, '1234')

    def tearDown(self):
        # Reset the shared class-level lists so tests stay independent.
        Credential.credential_list = []
        User.users_list = []

    def test_save_user(self):
        """save_user appends the user to User.users_list."""
        self.new_user.save_user()
        test_user = User('alex', 'nad', '<EMAIL>',
                         '0727719206', '1234')
        test_user.save_user()
        self.assertEqual(len(User.users_list), 2)

    def test_delete_user(self):
        """delete_user removes the user from User.users_list."""
        self.new_user.save_user()
        test_user = User('alex', 'nad', '<EMAIL>',
                         '0727719206', '1234')
        test_user.save_user()
        test_user.delete_user()
        self.assertEqual(len(User.users_list), 1)
class TestCredential(unittest.TestCase):
    """Tests for saving, deleting, finding and copying Credentials."""

    def setUp(self):
        # Fresh credential before every test.
        self.new_credential = Credential(
            'alex', 'twitter', 'alex-muliande', '12345')

    def test__init__(self):
        """The constructor stores every field on the instance."""
        self.assertEqual(self.new_credential.user_name, 'alex')
        self.assertEqual(self.new_credential.site_name, 'twitter')
        self.assertEqual(self.new_credential.account_name, 'alex-muliande')
        # Fixed: must match the password passed to setUp's constructor;
        # the original asserted the redaction placeholder '<PASSWORD>'.
        self.assertEqual(self.new_credential.password, '12345')

    def tearDown(self):
        # Reset the shared class-level lists so tests stay independent.
        Credential.credential_list = []
        User.users_list = []

    def test_save_credentials(self):
        """save_credentials appends to Credential.credential_list."""
        self.new_credential.save_credentials()
        twitter = Credential('alex', 'twitter', 'alex-muliande', '12345')
        twitter.save_credentials()
        self.assertEqual(len(Credential.credential_list), 2)

    def test_delete_credentials(self):
        """delete_credentials removes the entry from the list."""
        self.new_credential.save_credentials()
        twitter = Credential('alex', 'twitter', 'alex-muliande', '12345')
        twitter.save_credentials()
        twitter.delete_credentials()
        self.assertEqual(len(Credential.credential_list), 1)

    def test_find_by_site_name(self):
        """find_by_site_name returns the credential for the given site."""
        self.new_credential.save_credentials()
        twitter = Credential('alex', 'twitter', 'alex-muliande', '12345')
        twitter.save_credentials()
        credential_found = Credential.find_by_site_name('twitter')
        self.assertEqual(credential_found, twitter)

    def test_copy_credentials(self):
        """copy_credentials puts the matching password on the clipboard.

        Fixed: the original returned from inside a loop before ever calling
        Credential.copy_credentials, so its assertion was unreachable and
        the test asserted nothing.
        """
        self.new_credential.save_credentials()
        Credential.copy_credentials(self.new_credential.site_name)
        self.assertEqual('12345', pyperclip.paste())
# Allow running the suite directly: python user_credential_test.py
if __name__ == '__main__':
    unittest.main()
| en | 0.725392 | Method to test if we can save the user details Method to test if we can delete a user # Testing credentials Test to check if the find_by_account_type method returns the correct credential A funtcion to test to check if the copy a credential method copies the correct credential | 3.210648 | 3 |